/// <summary>
/// Capture-thread worker: polls the DirectSound capture buffer, drains newly
/// captured PCM into <c>mPCMOutoutStream</c> (sic — field name typo predates this
/// method), forwards it to <c>handleRecordedData</c>, and fires time/level
/// updates. Runs until <c>mHasRecordingStopped</c> is set (final drain) or the
/// thread is aborted.
/// </summary>
private void RecordWorker()
{
    try
    {
        byte[] buf = new byte[mBufferLength];
        // memStream wraps buf, so CaptureBuffer.Read writes directly into buf.
        MemoryStream memStream = new MemoryStream(buf);
        // Drain threshold: read once a quarter of the ring buffer is available.
        int readLength = mBufferLength / 4;
        int latestReadCursor = 0;
        int latestReadPosition = 0;
        mBuffer.Start(true);
        setState(AudioDeviceState.Recording);
        double[] maxDbs;
        while (true)
        {
            int captureCursor, readCursor;
            mBuffer.GetCurrentPosition(out captureCursor, out readCursor);
            // The read cursor wrapped around the ring buffer: count the cycle so
            // currentReadPosition below stays a monotonically increasing absolute offset.
            if (readCursor < latestReadCursor)
            {
                mCycleCount++;
            }
            int currentReadPosition = readCursor + (mCycleCount * mBufferLength);
            //Do read if there is more than readLength bytes to read or if recording has stopped
            if (mHasRecordingStopped)
            {
                // Final drain: flush everything captured since the last read, then exit.
                memStream.Position = 0;
                mBuffer.Read(latestReadPosition % mBufferLength, memStream, currentReadPosition - latestReadPosition, LockFlag.None);
                mPCMOutoutStream.Write(buf, 0, currentReadPosition - latestReadPosition);
                handleRecordedData(buf, 0, currentReadPosition - latestReadPosition, out maxDbs);
                FireTime(getTimeEquivalent(readCursor + (mCycleCount * mBufferLength)), maxDbs);
                break;
            }
            else if (latestReadPosition + readLength < currentReadPosition)
            {
                // Enough new data accumulated: same drain as above, but keep looping.
                memStream.Position = 0;
                mBuffer.Read(latestReadPosition % mBufferLength, memStream, currentReadPosition - latestReadPosition, LockFlag.None);
                mPCMOutoutStream.Write(buf, 0, currentReadPosition - latestReadPosition);
                handleRecordedData(buf, 0, currentReadPosition - latestReadPosition, out maxDbs);
                latestReadPosition = currentReadPosition;
                FireTime(getTimeEquivalent(readCursor + (mCycleCount * mBufferLength)), maxDbs);
            }
            latestReadCursor = readCursor;
            Thread.Sleep(WAIT_TIME_MS);
        }
        mBuffer.Dispose();
    }
    catch (ThreadAbortException)
    {
        // Thread was aborted externally: make sure the capture buffer is released.
        if (mBuffer != null)
        {
            if (!mBuffer.Disposed)
            {
                mBuffer.Dispose();
            }
        }
    }
}
/// <summary>
/// Starts recording: creates the output sound file, creates the capture
/// buffer, registers the buffer-full notification positions, then starts
/// looped capture. The steps are order-dependent.
/// </summary>
public void RecStart()
{
    // Create the recording output file.
    CreateSoundFile();
    // Create a capture buffer and prepare it for recording.
    CreateCaptureBuffer();
    // Register notifications fired when the buffer fills, handled elsewhere.
    InitNotifications();
    // true = loop capture continuously until explicitly stopped.
    mRecBuffer.Start(true);
}
/// <summary>
/// (Re)starts looped capture on the device at <paramref name="deviceIndex"/>
/// using 16-bit, 8 kHz, mono PCM. Any previous capture buffer is stopped and
/// disposed first. An out-of-range or negative index is a silent no-op.
/// </summary>
/// <param name="deviceIndex">Index into the enumerated capture devices; -1 means "none".</param>
public void StartRecording(int deviceIndex)
{
    // Tear down any capture session left over from a previous call.
    if (mCaptureBuffer != null)
    {
        if (mCaptureBuffer.Capturing)
        {
            mCaptureBuffer.Stop();
        }
        mCaptureBuffer.Dispose();
        mCaptureBuffer = null;
    }
    CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();
    // FIX: was "deviceIndex != -1 && deviceIndex < audioDevices.Count - 1",
    // which (a) rejected the last enumerated device (off-by-one) and
    // (b) let indices below -1 fall through to an out-of-range access.
    if (deviceIndex >= 0 && deviceIndex < audioDevices.Count)
    {
        // initialize the capture buffer and start the animation thread
        Capture capture = new Capture(audioDevices[deviceIndex].DriverGuid);
        CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
        WaveFormat waveFormat = new WaveFormat();
        waveFormat.BitsPerSample = 16;
        waveFormat.SamplesPerSecond = 8000;
        waveFormat.Channels = 1;
        // Bytes per sample frame = channels * bytes per sample.
        waveFormat.BlockAlign = (short)(waveFormat.Channels * waveFormat.BitsPerSample / 8);
        waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;
        waveFormat.FormatTag = WaveFormatTag.Pcm;
        captureBufferDescription.Format = waveFormat;
        // 8000 * 120 BYTES, i.e. ~60 s at 2 bytes/sample — TODO confirm whether
        // 120 s of audio was intended (that would need AverageBytesPerSecond * 120).
        captureBufferDescription.BufferBytes = waveFormat.SamplesPerSecond * 120;
        mCaptureBuffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(captureBufferDescription, capture);
        mCaptureBuffer.Start(true);
    }
}
//-----------------------------------------------------------------------------
// Name: StartOrStopRecord()
// Desc: Starts or stops the capture buffer from recording. On stop, drains
//       the remaining captured data and patches the WAV (RIFF) header with
//       the final lengths before closing the file.
//-----------------------------------------------------------------------------
void StartOrStopRecord(bool StartRecording)
{
    if (StartRecording)
    {
        // Create a capture buffer, and tell the capture
        // buffer to start recording
        CreateCaptureBuffer();
        applicationBuffer.Start(true);
    }
    else
    {
        // Stop the buffer, and read any data that was not
        // caught by a notification
        applicationBuffer.Stop();
        RecordCapturedData();
        // Offsets 4 and 40 are the RIFF chunk size and data chunk size fields
        // of a canonical 44-byte WAV header.
        Writer.Seek(4, SeekOrigin.Begin); // Seek to the length descriptor of the RIFF file.
        Writer.Write((int)(SampleCount + 36)); // Write the file length, minus first 8 bytes of RIFF description.
        Writer.Seek(40, SeekOrigin.Begin); // Seek to the data length descriptor of the RIFF file.
        Writer.Write(SampleCount); // Write the length of the sample data in bytes.
        Writer.Close(); // Close the file now.
        Writer = null; // Set the writer to null.
        WaveFile = null; // Set the FileStream to null.
    }
}
/// <summary>
/// Captures audio into a double-buffered (ping-pong) capture buffer and raises
/// <c>OnBufferFulfill</c> with each half, G.711 A-law encoded, until
/// <c>StopLoop</c> is set by the talker. Blocks the calling thread.
/// </summary>
public void StartCapturing()
{
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        // Register the notification events fired when each buffer half fills.
        SetBufferEvents();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // start looped capture
        bool readFirstBufferPart = true; // which half of the buffer to read next
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        // FIX: was "while (true)" — the loop never terminated even after the
        // talker set StopLoop, leaking the capture thread. The original comment
        // ("Looping until Stoploop=true") documents the intended condition.
        while (!StopLoop)
        {
            // WaitOne() blocks until a buffer-half-full notification signals.
            autoResetEvent.WaitOne();
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Flip to the other half for the next read.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            // G.711 A-law encoding compresses 16-bit PCM to 8 bits per sample.
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            // Re-check StopLoop in case it was set during the read above.
            if (!StopLoop)
            {
                OnBufferFulfill(dataToWrite, null);
            }
        }
    }
    catch
    {
        // NOTE(review): all failures are deliberately swallowed (best-effort
        // capture); consider at least logging the exception.
    }
}
/// <summary>
/// Opens a capture buffer on <c>deviceGuid</c> with the configured PCM format
/// and polls the first captured sample while <c>start</c> is true.
/// NOTE(review): the <paramref name="response"/> values written inside the loop
/// are overwritten by the final <c>response = 0.0f</c> before the method
/// returns, so callers always observe 0 — presumably the per-sample value was
/// meant to be published via a field or event instead; TODO confirm intent.
/// Also note the loop busy-spins with no sleep while <c>start</c> is true.
/// </summary>
public float GetSignFromDevice(out float response)
{
    cap = new Capture(deviceGuid);
    desc = new CaptureBufferDescription();
    WaveFormat wf = new WaveFormat();
    wf.BitsPerSample = bitsPerSample;
    wf.SamplesPerSecond = sampleRate;
    wf.Channels = channels;
    // Bytes per sample frame = channels * bytes per sample.
    wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);
    wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
    wf.FormatTag = WaveFormatTag.Pcm;
    desc.Format = wf;
    desc.BufferBytes = SAMPLES * wf.BlockAlign;
    buffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(desc, cap);
    buffer.Start(true);
    while (start)
    {
        // Reads 16-bit samples starting at the write cursor; shape is dictated
        // by SAMPLE_FORMAT_ARRAY (declared elsewhere).
        Array samples = buffer.Read(0, typeof(Int16), LockFlag.FromWriteCursor, SAMPLE_FORMAT_ARRAY);
        response = ((float)samples.GetValue(0, 0, 0)) / 100;
    }
    response = 0.0f;
    return(0.0f);
}
/// <summary>
/// Static variant of the ping-pong capture loop: reads each filled buffer half
/// when its notification fires, A-law encodes it, and raises
/// <c>OnBufferFulfill</c> until <c>StopLoop</c> is set. Blocks the caller.
/// </summary>
public static void StartCapturing()
{
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        SetBufferEvents(); // register half/full buffer notification events
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looped capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        // FIX: was "while (true)" — setting StopLoop only suppressed the event
        // but never ended the loop, leaking the capture thread forever.
        while (!StopLoop)
        {
            autoResetEvent.WaitOne(); // block until a buffer half is full
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate between the two buffer halves.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            // Re-check in case StopLoop flipped during the read.
            if (!StopLoop)
            {
                OnBufferFulfill(dataToWrite, null);
            }
        }
    }
    catch (Exception)
    {
        // FIX: dropped the unused "e" variable (compiler warning).
        // NOTE(review): failures are swallowed; consider logging.
    }
}
/// <summary>
/// Capture thread body: waits on either the terminate event (index 0) or the
/// buffer-position event (index > 0), reads newly captured 16-bit samples out
/// of the ring buffer, and hands them to <c>ProcessData</c>. The buffer is
/// always stopped on exit.
/// </summary>
private void ThreadLoop()
{
    buffer.Start(true);
    try
    {
        int nextCapturePosition = 0;
        // handles[0] = terminate signal; WaitAny returning 0 ends the loop.
        WaitHandle[] handles = new WaitHandle[] { terminated, positionEvent };
        while (WaitHandle.WaitAny(handles) > 0)
        {
            int capturePosition, readPosition;
            buffer.GetCurrentPosition(out capturePosition, out readPosition);
            int lockSize = readPosition - nextCapturePosition;
            // Negative means the read position wrapped past the end of the ring.
            if (lockSize < 0)
            {
                lockSize += bufferLength;
            }
            // Round down to a whole number of 16-bit samples.
            if ((lockSize & 1) != 0)
            {
                lockSize--;
            }
            int itemsCount = lockSize >> 1; // bytes -> 16-bit sample count
            short[] data = (short[])buffer.Read(nextCapturePosition, typeof(short), LockFlag.None, itemsCount);
            ProcessData(data);
            // Advance modulo the ring size for the next read.
            nextCapturePosition = (nextCapturePosition + lockSize) % bufferLength;
        }
    }
    finally
    {
        buffer.Stop();
    }
}
/// <summary>
/// Ping-pong capture loop: waits for each buffer-half notification, reads the
/// filled half, A-law encodes it, and raises <c>OnBufferFulfill</c> until
/// <c>_StopLoop</c> is set. Blocks the calling thread.
/// </summary>
public void StartCapturing()
{
    try
    {
        _captureBuffer = new CaptureBuffer(_capBufDescr, _capture);
        SetBufferEvents(); // register the half/full notification positions
        int halfBuffer = _bufferSize / 2;
        _captureBuffer.Start(true); // looped capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        // FIX: was "while (true)" — setting _StopLoop only suppressed the
        // callback but never terminated the loop, leaking this thread.
        while (!_StopLoop)
        {
            _eventToReset.WaitOne(); // block until a buffer half is full
            memStream.Seek(0, SeekOrigin.Begin);
            _captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate halves for the next read.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            // Re-check in case _StopLoop flipped during the read.
            if (!_StopLoop)
            {
                OnBufferFulfill(dataToWrite, null);
            }
        }
    }
    catch
    {
        // NOTE(review): failures deliberately swallowed; consider logging.
    }
}
/// <summary>
/// Creates and starts a looped 44.1 kHz / 16-bit capture buffer on the SECOND
/// enumerated capture device. Any failure is reported to the console.
/// NOTE(review): audioDevices[1] hard-codes device index 1 — this throws (and
/// is swallowed below) on machines with fewer than two capture devices;
/// presumably index 0 is the primary mapper being skipped — TODO confirm.
/// </summary>
static void InicialiceCaptureBuffer()
{
    try
    {
        CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();
        // initialize the capture buffer and start the animation thread
        Capture cap = new Capture(audioDevices[1].DriverGuid);
        CaptureBufferDescription desc = new CaptureBufferDescription();
        WaveFormat wf = new WaveFormat();
        wf.BitsPerSample = 16;
        wf.SamplesPerSecond = 44100;
        // Channel count taken from the device capabilities.
        wf.Channels = (short)cap.Caps.Channels;
        // Bytes per sample frame = channels * bytes per sample.
        wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8);
        wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond;
        wf.FormatTag = WaveFormatTag.Pcm;
        desc.Format = wf;
        desc.BufferBytes = SAMPLES * wf.BlockAlign;
        buffer = new CaptureBuffer(desc, cap);
        buffer.Start(true);
    }
    catch
    {
        // "Error starting the sound capturer" (message shown in Spanish).
        Console.WriteLine("Error al iniciar el capturador de sonido");
    }
}
/// <summary>
/// Capture-and-send loop (decompiled from VB.NET — note the ProjectData calls
/// and checked/unchecked arithmetic): reads alternating halves of the capture
/// buffer as their notifications fire and sends the raw bytes over UDP to
/// <c>send_Com</c> while <c>flagSrarting</c> (sic) is true. Always stops and
/// disposes the buffer and closes the UDP client on exit.
/// </summary>
private void Send()
{
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, cap);
        CreateNotifyPositions();
        // Half the buffer size, computed via VB-style rounding from the decompiler.
        int num = checked ((int)Math.Round(unchecked ((double)bufferSize / 2.0)));
        captureBuffer.Start(true);
        bool flag = true; // which half of the buffer to read next
        int bufferStartingLocation = 0;
        MemoryStream memoryStream = new MemoryStream(num);
        while (flagSrarting)
        {
            autoResetEvent.WaitOne(); // block until a buffer half is full
            memoryStream.Seek(0L, SeekOrigin.Begin);
            captureBuffer.Read(bufferStartingLocation, memoryStream, num, LockFlag.None);
            // Alternate between the two halves.
            flag = !flag;
            bufferStartingLocation = ((!flag) ? num : 0);
            byte[] buffer = memoryStream.GetBuffer();
            udpSend.Send(buffer, buffer.Length, send_Com);
        }
    }
    catch (Exception ex)
    {
        // VB.NET error-handling shim emitted by the decompiler; effectively
        // swallows the exception.
        ProjectData.SetProjectError(ex);
        Exception ex2 = ex;
        ProjectData.ClearProjectError();
    }
    finally
    {
        captureBuffer.Stop();
        captureBuffer.Dispose();
        udpSend.Close();
    }
}
/// <summary>
/// Starts recording to the given path: creates the WAV file, the capture
/// device, the capture buffer, and the notification positions (order matters),
/// then starts looped capture.
/// </summary>
/// <param name="str">Destination WAV file path.</param>
public void StartRecord(string str)
{
    CreateWaveFile(str);
    CreateCaptureDevice();
    CreateCaptureBuffer();
    CreateNotification();
    // true = loop capture continuously until stopped.
    capturebuffer.Start(true);
}
/// <summary>
/// Streams live capture audio to the connected socket as a WAV stream: sends
/// an HTTP-style 200 response and the WAV header, then captures into a ring of
/// 4 x 144 KiB chunks, forwarding each chunk as its notification fires. Runs
/// until the socket throws (client disconnect), then stops the capture.
/// </summary>
private void StreamSource(ISource aSource)
{
    SendResponse("200 OK");
    iSocket.Send(iWavFileHeader);
    const int kAudioChunkBytes = 144 * 1024;
    const int kAudioChunks = 4;
    CaptureBuffer capture = CreateCaptureBuffer(aSource, kAudioChunks * kAudioChunkBytes);
    int offset = 0;
    NotificationPosition[] notifications = new NotificationPosition[kAudioChunks];
    WaitHandle[] handles = new WaitHandle[kAudioChunks];
    // One notification at the START of each chunk.
    for (uint i = 0; i < kAudioChunks; i++)
    {
        NotificationPosition notification = new NotificationPosition();
        notification.Offset = offset;
        notification.Event = new ManualResetEvent(false);
        handles[i] = notification.Event;
        notifications[i] = notification;
        offset += kAudioChunkBytes;
    }
    capture.SetNotificationPositions(notifications);
    // Rotate notifications
    // Shifting the handles by one chunk means handle[i] signals once chunk i
    // has been COMPLETELY written (the write cursor reached the next chunk).
    for (uint i = 0; i < kAudioChunks - 1; i++)
    {
        WaitHandle a = handles[i];
        handles[i] = handles[i + 1];
        handles[i + 1] = a;
    }
    byte[] audio = new byte[kAudioChunkBytes];
    capture.Start(true);
    try
    {
        while (true)
        {
            // Wait for any chunk to finish filling, then forward it.
            int x = WaitHandle.WaitAny(handles);
            ManualResetEvent manual = handles[x] as ManualResetEvent;
            manual.Reset();
            capture.Read<byte>(audio, 0, kAudioChunkBytes, notifications[x].Offset, false);
            iSocket.Send(audio);
        }
    }
    catch (SocketException)
    {
        // Client disconnected — fall through to stop the capture.
    }
    capture.Stop();
}
/// <summary>
/// Starts looped audio capture.
/// </summary>
/// <exception cref="InvalidOperationException">
/// Thrown when <c>onProcessAudio</c> has not been assigned — captured audio
/// would otherwise have nowhere to go.
/// </exception>
public void Start()
{
    if (onProcessAudio == null)
    {
        // FIX: throw the specific InvalidOperationException (object is in an
        // invalid state for this call) instead of the bare Exception base type.
        // Backward compatible: callers catching Exception still catch this.
        throw new InvalidOperationException("onProcessAudio must be assigned before starting conversion");
    }
    captureBuffer.Start(true);
}
/// <summary>
/// Starts recording: initializes the capture buffer and notification
/// positions, then starts looped capture. Writing the WAV file header
/// (CreateWaveFile) is currently disabled.
/// </summary>
public void Start()
{
    // Write the file header — currently disabled.
    //CreateWaveFile();
    // Set up and initialize the capture buffer.
    CreateCaptureBuffer();
    // Register the buffer-position notifications.
    CreateNotification();
    // true = loop capture continuously until stopped.
    capturebuffer.Start(true);
}
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 */
/// <summary>
/// Capture-and-send loop: reads alternating halves of the capture buffer as
/// their notifications fire and sends the RAW (uncompressed) PCM over UDP to
/// the hard-coded peer 169.254.133.4:1550 until <c>bStop</c> is set.
/// NOTE(review): the peer address is hard-coded — consider making it
/// configurable like the other overloads in this project.
/// </summary>
private void Send()
{
    try
    {
        //The following lines get audio from microphone and then send them
        //across network.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looped capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne(); // block until a buffer half is full
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate halves for the next read.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            byte[] dataToWrite = memStream.GetBuffer();
            udpClient.Send(dataToWrite, dataToWrite.Length, "169.254.133.4", 1550);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        captureBuffer.Stop();
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        while (nUdpClientFlag != 2)
        {
            // FIX: was an empty spin loop pegging a CPU core while waiting for
            // the Receive loop to finish; yield instead.
            System.Threading.Thread.Sleep(10);
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}
/*
 * UDP_Send synchronously sends data captured from microphone across the network.
 * NOTE(review): despite the original comment mentioning port 1550, the code
 * actually sends A-law data to the other party on port 6068.
 */
private void UDP_Send()
{
    try
    {
        //The following lines get audio from microphone and then send them
        //across network.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        UDP_CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looped capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne(); // block until a buffer half is full
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate halves for the next read.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            //TODO: Fix this ugly way of initializing differently.
            // G.711 A-law compresses 16-bit PCM to 8 bits per sample, then the
            // data goes to the other party on port 6068.
            byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 6068);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        captureBuffer.Stop();
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        while (nUdpClientFlag != 2)
        {
            // FIX: was an empty spin loop pegging a CPU core while waiting for
            // the Receive loop to finish; yield instead.
            System.Threading.Thread.Sleep(10);
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}
/// <summary>
/// Starts continuous recording: launches the recording worker thread and
/// starts looped capture. Any failure is written to the error log.
/// </summary>
public void Start()
{
    try
    {
        recordingThread.Start();
        // true = loop capture continuously until stopped.
        applicationBuffer.Start(true);
    }
    catch (Exception e)
    {
        SoundControl.WriteErrorLog(e.ToString());
    }
}
/// <summary>
/// Activates capture for the microphone at <paramref name="deviceNumber"/>.
/// Capture is only (re)initialized when the FFT module reports no device
/// registered yet; the combo-box slot is always registered afterwards.
/// </summary>
/// <param name="deviceNumber">Index of the capture device to select.</param>
/// <param name="comboboxNumber">UI slot to register with the FFT module.</param>
public void MicCall(int deviceNumber, int comboboxNumber)
{
    fft.selectNumber = deviceNumber;
    // First caller wins: only set up capture when no device is active yet.
    if (fft.numberEmpty())
    {
        mWavFormat = SetWaveFormat();
        CreateSelectCaputerDevice(deviceNumber);
        CreateCaptureBuffer();
        CreateNotification();
        // true = loop capture continuously until stopped.
        capturebuffer.Start(true);
    }
    fft.numberAdd((byte)comboboxNumber);
}
/// <summary>
/// Starts recording with progress logging: creates the output file, the
/// capture buffer, and the buffer-full notifications (order matters), then
/// starts looped capture. Log strings are user-facing and left as-is.
/// </summary>
public void RecStart()
{
    // Create the recording output file.
    AddLogInfo(DateTime.Now + "写入数据:创建录音文件");
    CreateSoundFile();
    // Create the capture buffer and begin preparing to record.
    AddLogInfo(DateTime.Now + "写入数据:创建一个录音缓冲区,并开始录音");
    CreateCaptureBuffer();
    // Register the notification handling run when the buffer fills.
    AddLogInfo(DateTime.Now + "写入数据:建立通知消息,当缓冲区满的时候处理方法 ");
    InitNotifications();
    // true = loop capture continuously until stopped.
    mRecBuffer.Start(true);
}
//it will start actual recording, append if there is data
//in the wave file through the RecordCaptureData()
/// <summary>
/// Starts (SRecording = true) or finalizes (SRecording = false) a recording
/// session. On start: fires a state-change event and begins looped capture.
/// On stop: drains remaining data, patches the WAV header length fields
/// (byte offsets 4-7 = RIFF size, 40-43 = data size), and merges the new
/// clip into the previous asset when one exists.
/// </summary>
/// <exception cref="Exception">No device set, or buffer/input formats differ.</exception>
public void InitRecording(bool SRecording)
{
    //if no device is set then it is informed then no device is set
    if (null == m_cApplicationDevice)
    {
        throw new Exception("no device is set for recording");
    }
    //format of the capture buffer and the input format is compared
    //if not same then it is informed that formats do not match
    if (dsc.Format.ToString() != InputFormat.ToString())
    {
        throw new Exception("formats do not match");
    }
    if (SRecording)
    {
        // Snapshot the old state, switch to Recording, and notify listeners.
        StateChanged mStateChanged = new StateChanged(mState);
        mState = AudioRecorderState.Recording;
        FireEvent(mStateChanged);
        CreateCaptureBuffer(Index);
        applicationBuffer.Start(true); //it will set the looping till the stop is used
    }
    else
    {
        applicationBuffer.Stop();
        RecordCapturedData();
        Writer = new BinaryWriter(File.OpenWrite(m_sFileName));
        // Total file length = sample data + 44-byte canonical WAV header.
        long Audiolength = (long)(SampleCount + 44);
        CalculationFunctions cf = new CalculationFunctions();
        // Patch the RIFF chunk size field (bytes 4-7), little-endian byte by byte.
        for (int i = 0; i < 4; i++)
        {
            Writer.BaseStream.Position = i + 4;
            Writer.Write(Convert.ToByte(cf.ConvertFromDecimal(Audiolength)[i]));
        }
        // Patch the data chunk size field (bytes 40-43).
        for (int i = 0; i < 4; i++)
        {
            Writer.BaseStream.Position = i + 40;
            Writer.Write(Convert.ToByte(cf.ConvertFromDecimal(SampleCount)[i]));
        }
        Writer.Close(); // Close the file now.
        Writer = null; // Set the writer to null.
        //m_AudioMediaAsset = new AudioMediaAsset(ProjectDirectory+"\\"+m_sFileName);
        m_AudioMediaAsset = new AudioMediaAsset(m_sFileName);
        // > 44 bytes means the old asset contains audio beyond the bare header.
        if (OldAsset.SizeInBytes > 44)
        {
            OldAsset.MergeWith(m_AudioMediaAsset);
            //m_AudioMediaAsset = OldAsset;
        }
    }
}
/// <summary>
/// Starts recording: creates the output sound file, the capture buffer, and
/// the buffer-full notifications (order matters), then starts looped capture.
/// If buffer creation failed (mRecBuffer is null) the start is skipped, which
/// preserves the original behavior of swallowing that condition.
/// </summary>
public void RecStart()
{
    // Create the recording output file.
    CreateSoundFile();
    // Create the capture buffer.
    CreateCaptureBuffer();
    // Register notifications handled when the buffer fills.
    InitNotifications();
    // FIX: the original wrapped Start in try/catch (NullReferenceException) —
    // using an exception for an expected condition. An explicit null check
    // expresses the same "skip if buffer creation failed" behavior directly.
    if (mRecBuffer != null)
    {
        mRecBuffer.Start(true); // true = loop capture until stopped
    }
}
/// <summary>
/// SharpDX polling loop: while <c>isWorking</c>, copies newly captured bytes
/// from the capture buffer into <c>_Buffer</c> (feeding the FFT) and tracks
/// elapsed capture time; (re)starts the buffer whenever it is not capturing.
/// NOTE(review): the magic constants 192000 and 48000 presumably derive from
/// the configured sample rate/byte rate — TODO confirm against the buffer
/// format set up elsewhere.
/// </summary>
static void Listen()
{
    while (isWorking)
    {
        if (captureBuffer.Capturing)
        {
            // Skip while consumers hold the shared buffers, or when paused.
            if ((!BufferAverage_Calculating && !BufferAmp_Calculating && !LastNValues_Busy) && !stopFlag)
            {
                int pos = captureBuffer.CurrentRealPosition;
                // Window of up to BufferSize bytes ending at the current position,
                // clamped to the start of the buffer and to its total size.
                int startIndex = (pos > BufferSize) ? pos - BufferSize : 0;
                int count = (pos > BufferSize) ? ((pos + BufferSize > captureBufferDescription.BufferBytes) ? (captureBufferDescription.BufferBytes - pos) : BufferSize) : pos;
                if (count == 0)
                {
                    continue;
                }
                // Lock the region, copy it out, and feed the FFT with the most
                // recent samples. "kek"/"kek2" are the two DataStream segments
                // returned by SharpDX's Lock (second covers any wraparound).
                kek = captureBuffer.Lock(startIndex, count, LockFlags.None, out kek2);
                byte[] _tmpBuf = new byte[count];
                kek.Read(_tmpBuf, 0, count);
                Buffer.BlockCopy(_tmpBuf, 0, _Buffer, (int)(BufferCursor % BufferSize), count);
                BufferCursor += count / 2; // advance in 16-bit samples
                FourierTransformer.SetValues(LastNValues(ref _Buffer, 48000), 1.0f);
                captureBuffer.Unlock(kek, kek2);
                //stopFlag = true;
                //captureBuffer.Read(_Buffer, 0, count, startIndex, LockFlags.None);
            }
            // Track bytes captured since last poll, handling ring wraparound.
            dBytes = captureBuffer.CurrentCapturePosition - pCurrentCapturePosition;
            if (dBytes < 0)
            {
                dBytes = dBytes + 192000;
            }
            pCurrentCapturePosition = captureBuffer.CurrentCapturePosition;
            SecondsCaptured += 2 * (float)dBytes / captureBufferDescription.BufferBytes;
        }
        else
        {
            // Not capturing yet (or stopped): start looped capture.
            captureBuffer.Start(new RawBool());
        }
        Thread.Sleep(5);
    }
}
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 */
/// <summary>
/// Capture-and-send loop: streams raw PCM halves of the capture buffer over
/// UDP to the server on port 1550. On any failure the capture is stopped and
/// the whole session is restarted.
/// </summary>
private void Send()
{
    // FIX: the original retried by calling Send() recursively from the catch
    // block, growing the call stack on every consecutive failure (eventual
    // StackOverflowException if the network stays down). Rewritten as a retry
    // loop with identical behavior.
    while (true)
    {
        try
        {
            udpAudioSending = new UdpClient();
            //The following lines get audio from microphone and then send them
            //across network.
            captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
            CreateNotifyPositions();
            int halfBuffer = bufferSize / 2;
            captureBuffer.Start(true); // looped capture
            bool readFirstBufferPart = true;
            int offset = 0;
            MemoryStream memStream = new MemoryStream(halfBuffer);
            audioSend = true;
            while (true)
            {
                autoResetEvent.WaitOne(); // block until a buffer half is full
                memStream.Seek(0, SeekOrigin.Begin);
                captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
                // Alternate halves for the next read.
                readFirstBufferPart = !readFirstBufferPart;
                offset = readFirstBufferPart ? 0 : halfBuffer;
                byte[] dataToWrite = memStream.GetBuffer();
                udpAudioSending.Send(dataToWrite, dataToWrite.Length, serverIPAddress, 1550);
            }
        }
        catch (Exception)
        {
            if (captureBuffer.Capturing)
            {
                captureBuffer.Stop();
            }
            // Fall through: the outer loop restarts the capture/send session.
            //MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
        }
    }
}
/// <summary>
/// Starts (SRecording = true) or finalizes (SRecording = false) a recording
/// session. On stop: drains remaining captured data, patches the WAV header
/// length fields (byte offsets 4-7 = RIFF size, 40-43 = data size), resets the
/// session counters, and appends the new clip to the current asset.
/// </summary>
/// <exception cref="Exception">No device set, or buffer/input formats differ.</exception>
public void InitRecording(bool SRecording)
{
    //if no device is set then it is informed then no device is set
    if (null == m_cApplicationDevice)
    {
        throw new Exception("no device is set for recording");
    }
    //format of the capture buffer and the input format is compared
    //if not same then it is informed that formats do not match
    if (applicationBuffer.Format.ToString() != InputFormat.ToString())
    {
        throw new Exception("formats do not match");
    }
    if (SRecording)
    {
        CreateCaptureBuffer();
        applicationBuffer.Start(true); //it will set the looping till the stop is used
    }
    else
    {
        applicationBuffer.Stop();
        RecordCapturedData();
        Writer = new BinaryWriter(File.OpenWrite(m_sFileName));
        // Total file length = sample data + 44-byte canonical WAV header.
        long Audiolength = (long)(SampleCount + 44);
        CalculationFunctions cf = new CalculationFunctions();
        // Patch the RIFF chunk size field (bytes 4-7), byte by byte.
        for (int i = 0; i < 4; i++)
        {
            Writer.BaseStream.Position = i + 4;
            Writer.Write(Convert.ToByte(cf.ConvertFromDecimal(Audiolength)[i]));
        }
        // Patch the data chunk size field (bytes 40-43).
        for (int i = 0; i < 4; i++)
        {
            Writer.BaseStream.Position = i + 40;
            Writer.Write(Convert.ToByte(cf.ConvertFromDecimal(SampleCount)[i]));
        }
        Writer.Close(); // Close the file now.
        //Set the writer to null.
        Writer = null;
        // Reset session counters for the next recording.
        SampleCount = 0;
        Audiolength = 0;
        AudioClip NewRecordedClip = new AudioClip(m_sFileName);
        mAsset.AddClip(NewRecordedClip);
        //NotifyThread = null;
    }
}
/// <summary>
/// Starts recording to <paramref name="filename"/>: builds the wave format,
/// initializes the capture buffer, notifications, and the output WAV file
/// (order matters), launches the data-pump worker on the thread pool, then
/// starts looped capture.
/// </summary>
/// <param name="filename">Destination WAV file path.</param>
/// <returns>false when no capture device is available; otherwise true.</returns>
public bool Start(string filename)
{
    // No capture device — nothing to record from.
    if (capture_ == null)
    {
        return(false);
    }
    waveFormat_ = CreateWaveFormat();
    InitCaptureBuffer();
    InitNotifications();
    InitWaveFile(filename);
    captureExit_ = false;
    // Worker drains the capture buffer and writes the file until captureExit_.
    ThreadPool.QueueUserWorkItem(new WaitCallback(ThreadCaptureData));
    // true = loop capture continuously until stopped.
    captureBuffer_.Start(true);
    return(true);
}
/// <summary>
/// Initializes the UDP intercom: binds port 7000, starts voice capture and the
/// receive thread. If no microphone/speaker is present the user is prompted
/// and the whole setup is retried until it succeeds.
/// </summary>
public void Init()
{
    // FIX: the original retried by calling Init() recursively from the catch
    // block — each failed attempt left a frame on the stack (and the comment
    // admitted it keeps recursing while no device is present). Rewritten as a
    // retry loop; the socket is recreated each attempt, matching the original
    // recursion (which also re-ran the socket setup from the top).
    while (true)
    {
        server = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
        server.Bind(new IPEndPoint(IPAddress.Any, 7000)); // bind port and IP
        //server.Blocking = false;
        server.ReceiveTimeout = 100;
        iNotifyNum = 100;
        try
        {
            InitVoice();
            capturebuffer.Start(true); // looped capture
            th = new Thread(ReciveMsg);
            th.Start(); // start the message-receiving thread
            return; // setup succeeded
        }
        catch
        {
            server.Close();
            // Prompt (Chinese): "please plug in a microphone and speaker, then
            // click OK" — string is user-facing and left unchanged.
            MessageBox.Show("提醒:\n 请先插入麦克风与扬声器,在点击确定", "提醒");
            // Loop retries the full setup.
        }
    }
}
/// <summary>
/// Captures audio and streams it over RTP as G.711 A-law until the loop
/// condition flips. NOTE(review): the loop reads button7.Enabled from what is
/// presumably a background thread — WinForms controls are not thread-safe;
/// reading Enabled cross-thread usually works but should be confirmed/replaced
/// with a plain volatile flag.
/// </summary>
private void SendAudio()
{
    try
    {
        // Capture the audio and send it over the network.
        int halfbuffer = buffersize / 2;
        captureBuffer = new CaptureBuffer(captureBuffDesc, capture);
        CreateNotifyPositions();
        captureBuffer.Start(true);
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfbuffer);
        // Loop while the stop button (button7) is disabled, i.e. while sending.
        while (!button7.Enabled)
        {
            // Wait for a buffer-half-full notification.
            autoResetEvent.WaitOne();
            // Rewind the memory stream to its start.
            memStream.Seek(0, SeekOrigin.Begin);
            // Read the filled half of the capture buffer into the stream.
            captureBuffer.Read(offset, memStream, halfbuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfbuffer;
            // Prepare the outgoing data stream.
            //byte[] data = memStream.GetBuffer(); // uncompressed alternative
            byte[] data = ALawEncoder.ALawEncode(memStream.GetBuffer());
            // Send to the peer via RTP.
            audio.sendALaw(data);
            num_audio++;
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): "ex" is unused — the dialog gives no failure detail.
        MessageBox.Show("Error sending audio.");
    }
}
/// <summary>
/// Begins a fresh capture session at the given sample rate: resets all
/// recording state, configures a 16-bit mono PCM capture buffer on the given
/// (or default) device, and spins up the capture worker threads.
/// </summary>
/// <param name="sampleRate">Samples per second for the capture format.</param>
/// <param name="captureDevice">Capture device to use; null selects the default device.</param>
public void StartCapture(int sampleRate, Capture captureDevice)
{
    // Tear down any capture already in progress and clear pending requests.
    StopCapture();
    EmptyRequest();

    this.sampleRate = sampleRate;

    // Reset all per-session recording state.
    readPos = 0;
    IsRecording = false;
    record = null;
    recordTime = 0;
    noRecordTime = 0;
    lastSample = null;
    lastSize = 0;

    // Fall back to the default capture device when none was supplied.
    capture = captureDevice ?? new Capture();

    // 16-bit mono PCM at the requested rate (2 bytes per sample frame).
    WaveFormat fmt = new WaveFormat();
    fmt.FormatTag = WaveFormatTag.Pcm;
    fmt.Channels = 1;
    fmt.BitsPerSample = 16;
    fmt.BlockAlign = 2;
    fmt.SamplesPerSecond = sampleRate;
    fmt.AverageBytesPerSecond = sampleRate * 2;

    CaptureBufferDescription desc = new CaptureBufferDescription();
    desc.BufferBytes = bufferSize;
    desc.Format = fmt;

    captureBuffer = new CaptureBuffer(desc, capture);
    captureBuffer.Start(true); // looped capture

    // Pump captured data on a dedicated thread; a second thread drains requests.
    captureThread = new Thread(captureLoop);
    captureThread.Start();
    new Thread(EmptyRequest).Start();
}
/// <summary>
/// Form load: configures a mono 16-bit 44.1 kHz PCM capture format with a
/// 0.1-second buffer on the first capture device, creates the capture buffer
/// (closing the form if the device/format is unavailable), starts looped
/// capture, and enables the polling timer.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    waveFormat.Channels = 1; // mono
    waveFormat.FormatTag = WaveFormatTag.Pcm; // PCM format
    waveFormat.BitsPerSample = 16; // 16-bit samples
    waveFormat.SamplesPerSecond = 44100; // 44.1 kHz
    waveFormat.BlockAlign = (short)(waveFormat.Channels * (waveFormat.BitsPerSample / (short)8)); // bytes per sample frame
    waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond; // bytes per second
    // Allocate the buffer: 0.1 seconds of audio.
    captureBufferDescription.BufferBytes = waveFormat.AverageBytesPerSecond / 10;
    captureBufferDescription.Format = waveFormat;
    // Prepare the recording device (first enumerated capture device).
    DeviceInformation deviceInfo = captureDevice[0];
    capture = new Capture(deviceInfo.DriverGuid);
    // Create the capture buffer.
    try
    {
        // Obtain the buffer for the given format and device.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
    }
    catch
    {
        // Message (Japanese): "No recording device, or the recording format is
        // not supported." — user-facing string left unchanged.
        MessageBox.Show("録音デバイスが無いか、録音フォーマットをサポートしていません。");
        Close();
        return;
    }
    if (null == capture)
        throw new NullReferenceException();
    captureBuffer.Start(true); // looped capture
    timer1.Enabled = true; // begin polling the buffer
}
/// <summary>
/// Capture worker: waits for buffer-portion notifications and forwards each
/// filled portion to SampleDataReady until <c>_Running</c> is cleared.
/// </summary>
private void _DoCapture()
{
    // sizeof(byte) == 1, so this equals _BufferPortionSize; kept explicit so
    // the divisor tracks the sample type if the format ever changes.
    int bufferPortionSamples = _BufferPortionSize / sizeof(byte);

    // Buffer type must match this.waveFormat.FormatTag and this.waveFormat.BitsPerSample
    var bufferPortion = new byte[bufferPortionSamples];

    _CaptureBuffer.Start(true);

    while (_Running)
    {
        int bufferPortionIndex = WaitHandle.WaitAny(_WaitHandles);

        // Read the PREVIOUS portion relative to the one that just signaled:
        // (index - 1) mod count, with Math.Abs guarding the index-0 wrap.
        // NOTE(review): this assumes each notification fires when its portion
        // has finished filling — TODO confirm against the notification offsets
        // configured elsewhere.
        _CaptureBuffer.Read(
            bufferPortion,
            0,
            bufferPortionSamples,
            _BufferPortionSize * Math.Abs((bufferPortionIndex - 1) % _BufferPortionCount));

        SampleDataReady(this, new CSampleDataEventArgs(bufferPortion, _Guid));
    }

    _CaptureBuffer.Stop();
}
/// <summary>
/// Worker thread: captures audio from the selected device using a
/// double-buffered SharpDX capture buffer and raises OnNewFrame with each
/// half-buffer of samples (float or 16-bit, per sampleFormat) until stopEvent
/// is signaled. All DirectSound resources and notification events are released
/// in the finally block.
/// </summary>
///
private void WorkerThread()
{
    // Get the selected capture device
    DirectSoundCapture captureDevice = new DirectSoundCapture(device);

    // Set the capture format
    WaveFormat format = new WaveFormat();
    format.Channels = 1;
    format.SamplesPerSecond = sampleRate;
    format.FormatTag = sampleFormat.ToWaveFormat();
    format.BitsPerSample = (short)Signal.GetSampleSize(sampleFormat);
    format.BlockAlignment = (short)(format.BitsPerSample / 8); // bytes per sample frame (mono)
    format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlignment;

    // Setup the capture buffer: two halves of desiredCaptureSize samples each.
    CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
    captureBufferDescription.Format = format;
    captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlignment;
    captureBufferDescription.WaveMapped = true;
    captureBufferDescription.ControlEffects = false;

    CaptureBuffer captureBuffer = null;
    NotificationPosition[] notifications = new NotificationPosition[2];

    try
    {
        captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription);

        // Setup the notification positions: each fires at the LAST byte of its
        // half, i.e. when that half has finished filling.
        int bufferPortionSize = captureBuffer.SizeInBytes / 2;
        notifications[0] = new NotificationPosition();
        notifications[0].Offset = bufferPortionSize - 1;
        notifications[0].Event = new AutoResetEvent(false);
        notifications[1] = new NotificationPosition();
        notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize;
        notifications[1].Event = new AutoResetEvent(false);
        captureBuffer.SetNotificationPositions(notifications);

        // Make a copy of the wait handles
        WaitHandle[] waitHandles = new WaitHandle[notifications.Length];
        for (int i = 0; i < notifications.Length; i++)
            waitHandles[i] = notifications[i].Event;

        // Start capturing
        captureBuffer.Start(true);

        // Two structurally identical pump loops, one per supported sample type.
        if (sampleFormat == SampleFormat.Format32BitIeeeFloat)
        {
            float[] currentSample = new float[desiredCaptureSize];

            while (!stopEvent.WaitOne(0, true))
            {
                // Wait for whichever half fills next, then read that half.
                int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                OnNewFrame(currentSample);
            }
        }
        else if (sampleFormat == SampleFormat.Format16Bit)
        {
            short[] currentSample = new short[desiredCaptureSize];

            while (!stopEvent.WaitOne(0, true))
            {
                int bufferPortionIndex = WaitHandle.WaitAny(waitHandles);
                captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex);
                OnNewFrame(currentSample);
            }
        }
    }
    catch (Exception ex)
    {
        // Report through the error event when subscribed; otherwise rethrow.
        if (AudioSourceError != null)
            AudioSourceError(this, new AudioSourceErrorEventArgs(ex.Message));
        else
            throw;
    }
    finally
    {
        if (captureBuffer != null)
        {
            captureBuffer.Stop();
            captureBuffer.Dispose();
        }

        if (captureDevice != null)
            captureDevice.Dispose();

        for (int i = 0; i < notifications.Length; i++)
            if (notifications[i].Event != null)
                notifications[i].Event.Close();
    }
}
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 */
/// <summary>
/// Capture-and-send loop: reads alternating halves of the capture buffer,
/// encodes per the configured vocoder (A-law, mu-law, or raw PCM), and sends
/// the result to the other party on port 1550 until <c>bStop</c> is set.
/// </summary>
private void Send()
{
    try
    {
        //The following lines get audio from microphone and then send them
        //across network.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true); // looped capture
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            autoResetEvent.WaitOne(); // block until a buffer half is full
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            // Alternate halves for the next read.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;
            //TODO: Fix this ugly way of initializing differently.
            //Choose the vocoder. And then send the data to other party at port 1550.
            if (vocoder == Vocoder.ALaw)
            {
                byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
                udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString (), 1550);
            }
            else if (vocoder == Vocoder.uLaw)
            {
                byte[] dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer());
                udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550);
            }
            else
            {
                // No compression: raw PCM.
                byte[] dataToWrite = memStream.GetBuffer();
                udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550);
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        captureBuffer.Stop();
        //Increment flag by one.
        nUdpClientFlag += 1;
        //When flag is two then it means we have got out of loops in Send and Receive.
        while (nUdpClientFlag != 2)
        {
            // FIX: was an empty spin loop pegging a CPU core while waiting for
            // the Receive loop to finish; yield instead.
            System.Threading.Thread.Sleep(10);
        }
        //Clear the flag.
        nUdpClientFlag = 0;
        //Close the socket.
        udpClient.Close();
    }
}
/// <summary>
/// Begins a fresh capture session at the given sample rate: resets all
/// recording state, configures a 16-bit mono PCM capture buffer on the given
/// (or default) device, and spins up the capture worker threads.
/// </summary>
/// <param name="sampleRate">Samples per second for the capture format.</param>
/// <param name="captureDevice">Capture device to use; null selects the default device.</param>
public void StartCapture(int sampleRate, Capture captureDevice)
{
    // Stop any session already in progress and drain pending requests.
    StopCapture();
    EmptyRequest();

    this.sampleRate = sampleRate;

    // Reset all per-session recording state.
    readPos = 0;
    IsRecording = false;
    record = null;
    recordTime = 0;
    noRecordTime = 0;
    lastSample = null;
    lastSize = 0;

    // Use the supplied device, or fall back to the system default.
    capture = captureDevice ?? new Capture();

    // 16-bit mono PCM at the requested rate (2 bytes per sample frame).
    WaveFormat pcmFormat = new WaveFormat();
    pcmFormat.FormatTag = WaveFormatTag.Pcm;
    pcmFormat.Channels = 1;
    pcmFormat.BitsPerSample = 16;
    pcmFormat.BlockAlign = 2;
    pcmFormat.SamplesPerSecond = sampleRate;
    pcmFormat.AverageBytesPerSecond = sampleRate * 2;

    CaptureBufferDescription bufferDesc = new CaptureBufferDescription();
    bufferDesc.BufferBytes = bufferSize;
    bufferDesc.Format = pcmFormat;

    captureBuffer = new CaptureBuffer(bufferDesc, capture);
    captureBuffer.Start(true); // looped capture

    // Dedicated thread pumps captured data; a second thread drains requests.
    captureThread = new Thread(captureLoop);
    captureThread.Start();
    new Thread(EmptyRequest).Start();
}
/// <summary>
/// Records sound data from the given audio input.
/// </summary>
///
/// <remarks>
/// Note that this method will block forever. Threading will be required
/// to get the data back. Capture runs at 96 kHz / 16-bit / mono; incoming
/// bytes are packed little-endian (low byte first) into Int16 values and
/// appended to <c>Data</c> until <c>Recording</c> is cleared.
/// </remarks>
///
/// <param name="capture">The input to record from.</param>
/// <returns>The audio data recorded from the input.</returns>
public void Record(Capture capture)
{
    if (Recording)
    {
        throw new Exception("Already recording.");
    }

    // 96 kHz, 16-bit, mono PCM.
    WaveFormat format =
        (WaveFormat) GetAmibiguousType(typeof(WaveFormat));
    format.SamplesPerSecond = 96000;
    format.BitsPerSample = 16;
    format.Channels = 1;
    format.FormatTag = WaveFormatTag.Pcm;
    format.BlockAlign =
        (Int16) (format.Channels * (format.BitsPerSample / 8));
    format.AverageBytesPerSecond =
        format.SamplesPerSecond * format.BlockAlign;

    // Notify roughly 16 times per second, aligned to whole sample frames.
    int notifySize = Math.Max(4096, format.AverageBytesPerSecond / 16);
    notifySize -= notifySize % format.BlockAlign;

    // This is a fairly arbitrary choice.
    int inputSize = notifySize * 16;

    // Output is half of input, as every two bytes is a piece of sound data.
    int outputSize = inputSize / 2;

    CaptureBufferDescription description =
        (CaptureBufferDescription)
            GetAmibiguousType(typeof(CaptureBufferDescription));
    description.Format = format;
    description.BufferBytes = inputSize;

    CaptureBuffer buffer;

    try
    {
        buffer = new CaptureBuffer(description, capture);
    }
    catch
    {
        throw new IOException(
            "An error occurred attempting to set up a read buffer.");
    }

    // Both notification positions (half and end of buffer) share ONE event:
    // each signal means "another half of the buffer just finished filling".
    AutoResetEvent reset = new AutoResetEvent(false);
    Notify notify = new Notify(buffer);

    BufferPositionNotify bpn1 =
        (BufferPositionNotify)
            GetAmibiguousType(typeof(BufferPositionNotify));
    bpn1.Offset = buffer.Caps.BufferBytes / 2 - 1;
    bpn1.EventNotifyHandle = reset.SafeWaitHandle.DangerousGetHandle();

    BufferPositionNotify bpn2 =
        (BufferPositionNotify)
            GetAmibiguousType(typeof(BufferPositionNotify));
    bpn2.Offset = buffer.Caps.BufferBytes - 1;
    bpn2.EventNotifyHandle = reset.SafeWaitHandle.DangerousGetHandle();

    notify.SetNotificationPositions(new BufferPositionNotify[] { bpn1, bpn2 });

    int offset = 0;

    Data = new List<Int16>();
    Recording = true;

    new Thread((ThreadStart) delegate
    {
        buffer.Start(true);

        while (Recording)
        {
            // Let the buffer fill up from the last read.
            reset.WaitOne();

            byte[] read;

            try
            {
                read = (byte[]) buffer.Read(offset, typeof(byte), LockFlag.None, outputSize);
            }
            catch
            {
                throw new IOException(
                    "An error occurred attempting to read the input data.");
            }

            // Advance to the other half of the ring for the next read.
            offset = (offset + outputSize) % inputSize;

            // Pack byte pairs little-endian into Int16 samples:
            // first byte = low bits, second byte = high bits.
            bool written = false;
            Int16 old = 0;

            foreach (byte b in read)
            {
                if (!written)
                {
                    old = (Int16) b;
                }
                else
                {
                    old = (Int16) (old | (((Int16) (b << 8))));
                    Data.Add(old);
                }

                written = !written;
            }
        }

        buffer.Stop();
    }).Start();
}
/// <summary>
/// Starts the sound-card codec: selects the configured playback and capture
/// devices, creates the looped capture buffer and begins capture.
/// </summary>
/// <param name="strFault">Receives a description of the failure, if any.</param>
/// <returns>True if the codec started successfully.</returns>
public bool StartCodec(ref string strFault)
{
    bool functionReturnValue = false;
    // This delay is necessary for reliable startup following a StopCodec.
    Thread.Sleep(100);
    lock (objCodecLock)
    {
        dttLastSoundCardSample = DateTime.Now;
        bool blnSpectrumSave = MCB.DisplaySpectrum;
        bool blnWaterfallSave = MCB.DisplayWaterfall;
        DateTime dttStartWait = DateTime.Now;
        MCB.DisplayWaterfall = false;
        MCB.DisplaySpectrum = false;
        string[] strCaptureDevices = EnumerateCaptureDevices();
        string[] strPlaybackDevices = EnumeratePlaybackDevices();
        int intPtr = 0;

        // Playback device selection.
        try
        {
            cllPlaybackDevices = new Microsoft.DirectX.DirectSound.DevicesCollection();
            if (devSelectedPlaybackDevice != null)
            {
                devSelectedPlaybackDevice.Dispose();
                devSelectedPlaybackDevice = null;
            }
            // NOTE: indexing uses [] — the original carried over VB-style ()
            // calls, which do not compile in C#. It also re-declared objDI as
            // both a local and the foreach variable.
            foreach (DeviceInformation objDI in cllPlaybackDevices)
            {
                DeviceDescription objDD = new DeviceDescription(objDI);
                if (strPlaybackDevices[intPtr] == MCB.PlaybackDevice)
                {
                    if (MCB.DebugLog)
                        Logs.WriteDebug("[Main.StartCodec] Setting SelectedPlaybackDevice = " + MCB.PlaybackDevice);
                    devSelectedPlaybackDevice = new Device(objDD.info.DriverGuid);
                    functionReturnValue = true;
                    break;
                }
                intPtr += 1;
            }
            if (!functionReturnValue)
            {
                strFault = "Playback Device setup, Device " + MCB.PlaybackDevice + " not found in Windows enumerated Playback Devices";
            }
        }
        catch (Exception ex)
        {
            // The VB "Err" object does not exist in C#; report the exception.
            strFault = ex.Message;
            Logs.Exception("[StartCodec], Playback Device setup] Err: " + ex.ToString());
            functionReturnValue = false;
        }

        if (functionReturnValue)
        {
            // Capture device selection and buffer creation.
            CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
            try
            {
                functionReturnValue = false;
                cllCaptureDevices = new CaptureDevicesCollection();
                for (int i = 0; i <= cllCaptureDevices.Count - 1; i++)
                {
                    if (MCB.CaptureDevice == strCaptureDevices[i])
                    {
                        objCaptureDeviceGuid = cllCaptureDevices[i].DriverGuid;
                        devCaptureDevice = new Capture(objCaptureDeviceGuid);
                        // 12000 Hz, 16-bit, mono PCM.
                        stcSCFormat.SamplesPerSecond = 12000;
                        stcSCFormat.Channels = 1;
                        stcSCFormat.BitsPerSample = 16;
                        stcSCFormat.BlockAlign = 2;
                        stcSCFormat.AverageBytesPerSecond = 2 * 12000;
                        stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                        objApplicationNotify = null;
                        objCapture = null;
                        // Set the buffer sizes.
                        intCaptureBufferSize = intNotifySize * intNumberRecordNotifications;
                        dscheckboxd.BufferBytes = intCaptureBufferSize;
                        dscheckboxd.Format = stcSCFormat; // set the format during creation
                        intNextCaptureOffset = 0;
                        WriteTextToSpectrum("CODEC Start OK", Brushes.LightGreen);
                        while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3)
                        {
                            Application.DoEvents();
                            Thread.Sleep(100);
                        }
                        objCapture = new CaptureBuffer(dscheckboxd, devCaptureDevice);
                        InititializeNotifications();
                        objCapture.Start(true); // start with looping
                        InititializeSpectrum(Color.Black);
                        functionReturnValue = true;
                        break; // device found and started; stop scanning
                    }
                }
                if (!functionReturnValue)
                {
                    // The original split this literal across physical lines,
                    // which does not compile; rejoined into one string.
                    strFault = "Could not find DirectSound capture device " + MCB.CaptureDevice.ToUpper();
                }
            }
            catch (Exception ex)
            {
                strFault = ex.Message;
                Logs.Exception("[Main.StartCodec] Err: " + ex.ToString());
                functionReturnValue = false;
            }
        }

        if (functionReturnValue)
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StartCodec] Successful start of codec");
            objProtocol.ARDOPProtocolState = ProtocolState.DISC;
        }
        else
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StartCodec] CODEC Start Failed");
            WriteTextToSpectrum("CODEC Start Failed", Brushes.Red);
            objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
            while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3)
            {
                Application.DoEvents();
                Thread.Sleep(100);
            }
            // Retry the codec start in 5 seconds.
            tmrStartCODEC.Interval = 5000;
            tmrStartCODEC.Start();
        }
        InititializeSpectrum(Color.Black);
        MCB.DisplayWaterfall = blnWaterfallSave;
        MCB.DisplaySpectrum = blnSpectrumSave;
    }
    return functionReturnValue;
}
private void StartRecordAndSend() { try { Capture capture = null; CaptureDevicesCollection captureDeviceCollection = new CaptureDevicesCollection(); try { capture = new Capture(captureDeviceCollection[ConfSingleton.Instance.CaptureDeviceIndex].DriverGuid); } catch { capture = new Capture(captureDeviceCollection[0].DriverGuid); } captureBuffer = new CaptureBuffer(captureBufferDescription, capture); SetBufferEvents(); int halfBuffer = bufferSize / 2; captureBuffer.Start(true); bool readFirstBufferPart = true; int offset = 0; MemoryStream memStream = new MemoryStream(halfBuffer); bStop = false; while (!bStop) { autoResetEvent.WaitOne(); memStream.Seek(0, SeekOrigin.Begin); captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None); readFirstBufferPart = !readFirstBufferPart; offset = readFirstBufferPart ? 0 : halfBuffer; rtpSender.Send(ConfSingleton.Instance.Compression ? ALawEncoder.ALawEncode(memStream.GetBuffer()) : memStream.GetBuffer()); } } catch (ThreadAbortException) { /* This is OK. It's raised when the record thread is stopped. */ } /* Catch DirectSound's uninformative exceptions and attempt to expand on them... */ catch (Exception ex) { if (OnCaptureError != null) { AudioCaptureException captureException = new AudioCaptureException("There was a problem in the audio capture process. This is often due to no working capture device being available.", ex); OnCaptureError(this, new AudioCaptureExceptionEventArgs() { Exception = captureException }); } } finally { try { if (captureBuffer != null) captureBuffer.Stop(); bStop = true; } catch { } } }
static void InicialiceCaptureBuffer() { try { CaptureDevicesCollection audioDevices = new CaptureDevicesCollection(); // initialize the capture buffer and start the animation thread Capture cap = new Capture(audioDevices[1].DriverGuid); CaptureBufferDescription desc = new CaptureBufferDescription(); WaveFormat wf = new WaveFormat(); wf.BitsPerSample = 16; wf.SamplesPerSecond = 44100; wf.Channels = (short)cap.Caps.Channels; wf.BlockAlign = (short)(wf.Channels * wf.BitsPerSample / 8); wf.AverageBytesPerSecond = wf.BlockAlign * wf.SamplesPerSecond; wf.FormatTag = WaveFormatTag.Pcm; desc.Format = wf; desc.BufferBytes = SAMPLES * wf.BlockAlign; buffer = new CaptureBuffer(desc, cap); buffer.Start(true); } catch { Console.WriteLine("Error al iniciar el capturador de sonido"); } }
/// <summary>
/// Records sound data from the given audio input into <c>SData</c>.
/// </summary>
///
/// <remarks>
/// Capture runs on a worker thread; this method returns once it has been
/// started. Set <c>recording</c> to false to stop capturing.
/// </remarks>
///
/// <param name="cap">The capture device to record from; may be null.</param>
/// <returns>True when setup succeeded and the capture thread was started;
/// false when already recording or no capture device was supplied.</returns>
public bool Record(Capture cap)
{
    if (recording)
    {
        return false;
    }
    if (cap == null)
    {
        return false; // no matching sound card/capture device
    }

    // Choose a wave format: 96 kHz, 16-bit, mono PCM. BlockAlign and
    // AverageBytesPerSecond are totals across all channels.
    // (Direct construction; the original invoked the parameterless
    // constructor through reflection for no benefit.)
    format = new WaveFormat();
    format.SamplesPerSecond = 96000;
    format.BitsPerSample = 16;
    format.Channels = 1;
    format.FormatTag = WaveFormatTag.Pcm;
    format.BlockAlign = (short)(format.Channels * (format.BitsPerSample / 8));
    format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlign;

    SData = new List<Int16>();

    // _dwNotifySize is roughly 1/16th of a second of audio, rounded down to
    // a whole sample (a multiple of BlockAlign).
    int NUM_BUFFERS = 8;
    int _dwNotifySize = Math.Max(4096, format.AverageBytesPerSecond / (8 * 2));
    _dwNotifySize -= _dwNotifySize % format.BlockAlign;

    // The looped capture buffer is twice the read size so capture can keep
    // filling one half while the other half is being read out.
    int _dwCaptureBufferSize = NUM_BUFFERS * _dwNotifySize * 2;
    int _dwOutputBufferSize = NUM_BUFFERS * _dwNotifySize;

    CaptureBufferDescription capDesc = new CaptureBufferDescription();
    capDesc.Format = format;
    capDesc.BufferBytes = _dwCaptureBufferSize;
    var _dwCapBuffer = new CaptureBuffer(capDesc, cap);

    // Fire _resetEvent when the capture buffer is half full and fully full.
    var _resetEvent = new AutoResetEvent(false);
    var _notify = new Notify(_dwCapBuffer);
    var bpn1 = new BufferPositionNotify();
    bpn1.Offset = _dwCapBuffer.Caps.BufferBytes / 2 - 1;
    bpn1.EventNotifyHandle = _resetEvent.SafeWaitHandle.DangerousGetHandle();
    var bpn2 = new BufferPositionNotify();
    bpn2.Offset = _dwCapBuffer.Caps.BufferBytes - 1;
    bpn2.EventNotifyHandle = _resetEvent.SafeWaitHandle.DangerousGetHandle();
    _notify.SetNotificationPositions(new BufferPositionNotify[] { bpn1, bpn2 });

    recording = true; // ready to capture sound

    int offset = 0;
    Thread _dwCaptureThread = new Thread((ThreadStart)delegate
    {
        _dwCapBuffer.Start(true); // start looped capture
        while (recording)
        {
            _resetEvent.WaitOne(); // blocks until the buffer is half/totally full
            byte[] read = (byte[])_dwCapBuffer.Read(offset, typeof(byte), LockFlag.None, _dwOutputBufferSize);
            // BUG FIX: the format is 16-bit PCM, so each sample is a
            // little-endian byte PAIR. The original added every raw byte as
            // its own Int16 (via Int16.Parse on the byte's string), producing
            // garbage sample values at twice the expected rate.
            for (int i = 0; i + 1 < read.Length; i += 2)
            {
                SData.Add((Int16)(read[i] | (read[i + 1] << 8)));
            }
            offset = (offset + _dwOutputBufferSize) % _dwCaptureBufferSize;
        }
        _dwCapBuffer.Stop(); // stop capture
    });
    _dwCaptureThread.Start();
    return true;
}
/*
 * Send synchronously sends data captured from the microphone across the
 * network on port 1550 to every other user currently in the chat room.
 * Runs until bStop becomes true.
 */
private void Send()
{
    try
    {
        // Get audio from the microphone and stream it out over UDP.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;
        captureBuffer.Start(true);
        bool readFirstBufferPart = true;
        int offset = 0;
        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        while (!bStop)
        {
            // Wait for half the capture buffer to fill, then read that half.
            autoResetEvent.WaitOne();
            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;

            // Send the captured block to every user in the chat room.
            List<ChatUser> chatUserlist = LoginRoler.chatUserlist;
            if (chatUserlist != null && chatUserlist.Count > 0)
            {
                // PERF FIX: run the vocoder ONCE per captured block instead of
                // once per recipient (the original re-encoded the same buffer
                // inside the user loop).
                byte[] dataToWrite;
                if (vocoder == Vocoder.ALaw)
                {
                    dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
                }
                else if (vocoder == Vocoder.uLaw)
                {
                    dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer());
                }
                else
                {
                    dataToWrite = memStream.GetBuffer();
                }

                for (int a = 0; a < chatUserlist.Count; a++)
                {
                    // The list is already List<ChatUser>; no cast needed.
                    string ip = chatUserlist[a].ChatIp.ToString();
                    if (ip.Equals(LoginRoler.ip))
                        continue; // don't echo our own audio back to ourselves
                    udpClient.Send(dataToWrite, dataToWrite.Length, ip, 1550);
                }
            }
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        captureBuffer.Stop();
        // Increment flag by one; when it reaches two, both the Send and
        // Receive loops have exited.
        nUdpClientFlag += 1;
        while (nUdpClientFlag != 2)
        {
            // BUG FIX: the original spun here at 100% CPU; yield instead.
            Thread.Sleep(10);
        }
        // Clear the flag.
        nUdpClientFlag = 0;
        // Close the socket.
        //udpClient.Close();
    }
}
/* * Send synchronously sends data captured from microphone across the network on port 1550. */ private void Send() { try { IsThreadSendEnd = false; //The following lines get audio from microphone and then send them //across network. int users_count = 0; captureBuffer = new CaptureBuffer(captureBufferDescription, capture); CreateNotifyPositions(); int halfBuffer = bufferSize / 2; captureBuffer.Start(true); bool readFirstBufferPart = true; int offset = 0; MemoryStream memStream = new MemoryStream(halfBuffer); bStop = false; LogAppend("Sending Started"); while (!bStop) { lock (otherPartyIPs) { users_count = otherPartyIPs.Count; if (users_count > 0) { autoResetEvent.WaitOne(); memStream.Seek(0, SeekOrigin.Begin); captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None); readFirstBufferPart = !readFirstBufferPart; offset = readFirstBufferPart ? 0 : halfBuffer; //TODO: Fix this ugly way of initializing differently. //Choose the vocoder. And then send the data to other party at port 1550. //if (vocoder == Vocoder.ALaw) //{ //byte[] dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer()); //MULAW //byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer()); //ALAW (RECOMENdADO) byte[] dataToWrite = memStream.GetBuffer(); //NORMAL if (bStop) return; for (int i = 0; i < users_count; i++) udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIPs[i].Address.ToString(), 1550); } } } IsThreadSendEnd = true; LogAppend("Sending Ended"); } catch (Exception ex) { // MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error); LogAppend("VoiceChat-Send >> " + ex.Message); } finally { captureBuffer.Stop(); //Increment flag by one. nUdpClientFlag += 1; //When flag is two then it means we have got out of loops in Send and Receive. while (nUdpClientFlag != 2) { } //Clear the flag. nUdpClientFlag = 0; //Close the socket. udpClient.Close(); } }