private void RecordWorker() { try { byte[] buf = new byte[mBufferLength]; MemoryStream memStream = new MemoryStream(buf); int readLength = mBufferLength / 4; int latestReadCursor = 0; int latestReadPosition = 0; mBuffer.Start(true); setState(AudioDeviceState.Recording); double[] maxDbs; while (true) { int captureCursor, readCursor; mBuffer.GetCurrentPosition(out captureCursor, out readCursor); if (readCursor < latestReadCursor) { mCycleCount++; } int currentReadPosition = readCursor + (mCycleCount * mBufferLength); //Do read if there is more than readLength bytes to read or if recording has stopped if (mHasRecordingStopped) { memStream.Position = 0; mBuffer.Read(latestReadPosition % mBufferLength, memStream, currentReadPosition - latestReadPosition, LockFlag.None); mPCMOutoutStream.Write(buf, 0, currentReadPosition - latestReadPosition); handleRecordedData(buf, 0, currentReadPosition - latestReadPosition, out maxDbs); FireTime(getTimeEquivalent(readCursor + (mCycleCount * mBufferLength)), maxDbs); break; } else if (latestReadPosition + readLength < currentReadPosition) { memStream.Position = 0; mBuffer.Read(latestReadPosition % mBufferLength, memStream, currentReadPosition - latestReadPosition, LockFlag.None); mPCMOutoutStream.Write(buf, 0, currentReadPosition - latestReadPosition); handleRecordedData(buf, 0, currentReadPosition - latestReadPosition, out maxDbs); latestReadPosition = currentReadPosition; FireTime(getTimeEquivalent(readCursor + (mCycleCount * mBufferLength)), maxDbs); } latestReadCursor = readCursor; Thread.Sleep(WAIT_TIME_MS); } mBuffer.Dispose(); } catch (ThreadAbortException) { if (mBuffer != null) { if (!mBuffer.Disposed) { mBuffer.Dispose(); } } } }
private void CreateCaptureBuffer()
{
    // Buffer description object
    CaptureBufferDescription bufferdescription = new CaptureBufferDescription();

    if (null != mNotify)
    {
        mNotify.Dispose();
        mNotify = null;
    }
    if (null != mRecBuffer)
    {
        mRecBuffer.Dispose();
        mRecBuffer = null;
    }

    // Set the notification size: at least 1024 bytes, otherwise about 1/8 second of audio
    mNotifySize = (1024 > mWavFormat.AverageBytesPerSecond / 8) ? 1024 : (mWavFormat.AverageBytesPerSecond / 8);
    mNotifySize -= mNotifySize % mWavFormat.BlockAlign;

    // Set the capture buffer size
    mBufferSize = mNotifySize * cNotifyNum;

    // Fill in the buffer description
    bufferdescription.BufferBytes = mBufferSize;
    bufferdescription.Format = mWavFormat;   // recording format

    // Create the capture buffer
    mRecBuffer = new CaptureBuffer(bufferdescription, mCapDev);
    mNextCaptureOffset = 0;
}
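For concreteness, the notify-size arithmetic above works out as follows for 44.1 kHz / 16-bit / stereo PCM; the numbers are illustrative only and not taken from any of the projects quoted here.

// Illustrative sizing, assuming 44.1 kHz, 16-bit, stereo PCM (BlockAlign = 4):
//   AverageBytesPerSecond = 44100 * 4         = 176400
//   mNotifySize           = 176400 / 8        = 22050   (about 1/8 second of audio)
//   aligned to BlockAlign = 22050 - 22050 % 4 = 22048   (whole sample frames only)
//   mBufferSize           = 22048 * cNotifyNum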
WaveFormat TryNewFormat(int hz, short bits, short channels) { WaveFormat format = new WaveFormat(); format.FormatTag = WaveFormatTag.Pcm; format.SamplesPerSecond = hz; format.BitsPerSample = bits; format.Channels = channels; format.BlockAlign = (short)(format.Channels * (format.BitsPerSample / 8)); format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond; CaptureBufferDescription dscheckboxd = new CaptureBufferDescription(); CaptureBuffer pDSCaptureBuffer = null; dscheckboxd.BufferBytes = format.AverageBytesPerSecond; dscheckboxd.Format = format; try { pDSCaptureBuffer = new CaptureBuffer(dscheckboxd, device); pDSCaptureBuffer.Dispose(); return(format); } catch { // Can't return null, because WaveFormat is a value type. throw; } }
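A possible call pattern for TryNewFormat, shown only as a sketch (not from the original source): probe a few common PCM formats and keep the first one the capture device accepts. The candidate sample rates and the 16-bit stereo choice are assumptions.

int[] candidateRates = { 44100, 22050, 11025, 8000 };   // assumed candidates
WaveFormat chosenFormat = new WaveFormat();
bool formatFound = false;
foreach (int hz in candidateRates)
{
    try
    {
        // TryNewFormat throws if a CaptureBuffer cannot be created with this format.
        chosenFormat = TryNewFormat(hz, 16, 2);
        formatFound = true;
        break;
    }
    catch
    {
        // Format rejected by the device; try the next candidate.
    }
}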
public void Stop() { _Running = false; if (_CaptureThread != null) { _CaptureThread.Join(); _CaptureThread = null; } if (_CaptureBuffer != null) { _CaptureBuffer.Dispose(); _CaptureBuffer = null; } if (_Notifications != null) { foreach (NotificationPosition notification in _Notifications) { notification.Event.Close(); } _Notifications.Clear(); _Notifications = null; } }
// Helper method to test a specific WaveFormat instance. private void VerifyFormat(WaveFormat newFormat) { if (this._captureDevice == null) { throw new InvalidOperationException("Capture device is null."); } CaptureBufferDescription capBuffDesc = new CaptureBufferDescription(); capBuffDesc.BufferBytes = newFormat.AverageBytesPerSecond; capBuffDesc.Format = newFormat; CaptureBuffer capBuff = null; try { capBuff = new CaptureBuffer(capBuffDesc, this._captureDevice); } catch (Exception ex) { string errMsg = string.Format("Sound format not supported: {0} samples/sec, {1} bits/sample, {2} channels.", newFormat.SamplesPerSecond, newFormat.BitsPerSample, newFormat.Channels); throw new Exception(errMsg, ex); } if (capBuff != null) { capBuff.Dispose(); capBuff = null; } }
public void StartRecording(int deviceIndex)
{
    if (mCaptureBuffer != null)
    {
        if (mCaptureBuffer.Capturing)
        {
            mCaptureBuffer.Stop();
        }
        mCaptureBuffer.Dispose();
        mCaptureBuffer = null;
    }

    CaptureDevicesCollection audioDevices = new CaptureDevicesCollection();
    if (deviceIndex != -1 && deviceIndex < audioDevices.Count)   // valid indices are 0 .. Count - 1
    {
        // initialize the capture buffer and start the animation thread
        Capture capture = new Capture(audioDevices[deviceIndex].DriverGuid);
        CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription();
        WaveFormat waveFormat = new WaveFormat();
        waveFormat.BitsPerSample = 16;
        waveFormat.SamplesPerSecond = 8000;
        waveFormat.Channels = 1;
        waveFormat.BlockAlign = (short)(waveFormat.Channels * waveFormat.BitsPerSample / 8);
        waveFormat.AverageBytesPerSecond = waveFormat.BlockAlign * waveFormat.SamplesPerSecond;
        waveFormat.FormatTag = WaveFormatTag.Pcm;
        captureBufferDescription.Format = waveFormat;
        captureBufferDescription.BufferBytes = waveFormat.SamplesPerSecond * 120;
        mCaptureBuffer = new Microsoft.DirectX.DirectSound.CaptureBuffer(captureBufferDescription, capture);
        mCaptureBuffer.Start(true);
    }
}
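A hypothetical call site for StartRecording, included only to illustrate how a valid device index might be chosen; the "Microphone" description match is an assumed heuristic.

CaptureDevicesCollection devices = new CaptureDevicesCollection();
for (int i = 0; i < devices.Count; i++)
{
    // Pick the first device whose description looks like a microphone (assumption).
    if (devices[i].Description.IndexOf("Microphone", StringComparison.OrdinalIgnoreCase) >= 0)
    {
        StartRecording(i);   // assumes this runs inside the same class as StartRecording
        break;
    }
}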
private void Send() { try { captureBuffer = new CaptureBuffer(captureBufferDescription, cap); CreateNotifyPositions(); int num = checked ((int)Math.Round(unchecked ((double)bufferSize / 2.0))); captureBuffer.Start(true); bool flag = true; int bufferStartingLocation = 0; MemoryStream memoryStream = new MemoryStream(num); while (flagSrarting) { autoResetEvent.WaitOne(); memoryStream.Seek(0L, SeekOrigin.Begin); captureBuffer.Read(bufferStartingLocation, memoryStream, num, LockFlag.None); flag = !flag; bufferStartingLocation = ((!flag) ? num : 0); byte[] buffer = memoryStream.GetBuffer(); udpSend.Send(buffer, buffer.Length, send_Com); } } catch (Exception ex) { ProjectData.SetProjectError(ex); Exception ex2 = ex; ProjectData.ClearProjectError(); } finally { captureBuffer.Stop(); captureBuffer.Dispose(); udpSend.Close(); } }
/// <summary> /// Creates a capture buffer and sets the format /// </summary> private void CreateCaptureBuffer() { CaptureBufferDescription dscheckboxd = new CaptureBufferDescription(); if (null != applicationNotify) { applicationNotify.Dispose(); applicationNotify = null; } if (null != applicationBuffer) { applicationBuffer.Dispose(); applicationBuffer = null; } if (0 == InputFormat.Channels) { throw new ArgumentException("Audio Channels cannot be zero (use 1 - mono, 2 - stereo, etc.)."); } // Set the notification size notifySize = (1024 > InputFormat.AverageBytesPerSecond / 8) ? 1024 : (InputFormat.AverageBytesPerSecond / 8); notifySize -= notifySize % InputFormat.BlockAlign; // Set the buffer sizes captureBufferSize = notifySize * NumberRecordNotifications; // Create the capture buffer dscheckboxd.BufferBytes = captureBufferSize; dscheckboxd.Format = InputFormat; // Set the format during creation try { applicationBuffer = new CaptureBuffer(dscheckboxd, applicationDevice); } catch (ApplicationException) { //Yeah, I know, D i r e c t X managed... throw new ApplicationException("The sound capturing device is not ready. Is '" + CaptureDevice + "' plugged in?"); } nextCaptureOffset = 0; InitDirectSoundNotifications(); }
static void DisposeCaptureBuffer() { if (buffer != null) { if (buffer.Capturing) { buffer.Stop(); } buffer.Dispose(); buffer = null; } }
/// <summary> /// Stops capture process. /// </summary> public void Stop() { if (isCapturing) { isCapturing = false; terminated.Set(); thread.Join(); notify.Dispose(); buffer.Dispose(); capture.Dispose(); } }
// Creates the buffer for recording
public void CreateCaptureBuffer(int Index)
{
    // Desc: Creates a capture buffer and sets the format
    if (null != applicationNotify)
    {
        applicationNotify.Dispose();
        applicationNotify = null;
    }
    if (null != applicationBuffer)
    {
        applicationBuffer.Dispose();
        applicationBuffer = null;
    }

    InputFormat = GetInputFormat(Index);
    if (0 == InputFormat.Channels)
    {
        return;
    }

    m_iNotifySize = (1024 > InputFormat.AverageBytesPerSecond / 8) ? 1024 : (InputFormat.AverageBytesPerSecond / 8);
    m_iNotifySize -= m_iNotifySize % InputFormat.BlockAlign;

    // Set the buffer sizes
    m_iCaptureBufferSize = m_iNotifySize * NumberRecordNotifications;

    // Calculate the length of the VuMeter update array
    m_UpdateVMArrayLength = m_iCaptureBufferSize / 50;
    CalculationFunctions cf = new CalculationFunctions();
    m_UpdateVMArrayLength = Convert.ToInt32(cf.AdaptToFrame(Convert.ToInt32(m_UpdateVMArrayLength), m_FrameSize));
    arUpdateVM = new byte[m_UpdateVMArrayLength];

    // Create the capture buffer
    dsc.BufferBytes = m_iCaptureBufferSize;
    InputFormat.FormatTag = WaveFormatTag.Pcm;
    dsc.Format = InputFormat;   // Set the format during creation
    m_cApplicationDevice = InitDirectSound();
    applicationBuffer = new CaptureBuffer(dsc, m_cApplicationDevice);
    InitNotifications();
}
private void ScanAvailableInputFormats() { //----------------------------------------------------------------------------- // Name: ScanAvailableInputFormats() // Desc: Tests to see if 20 different standard wave formats are supported by // the capture device //----------------------------------------------------------------------------- WaveFormat format = new WaveFormat(); CaptureBufferDescription dscheckboxd = new CaptureBufferDescription(); CaptureBuffer pDSCaptureBuffer = null; // This might take a second or two, so throw up the hourglass Cursor = Cursors.WaitCursor; format.FormatTag = WaveFormatTag.Pcm; // Try 20 different standard formats to see if they are supported for (int iIndex = 0; iIndex < 20; iIndex++) { GetWaveFormatFromIndex(iIndex, ref format); // To test if a capture format is supported, try to create a // new capture buffer using a specific format. If it works // then the format is supported, otherwise not. dscheckboxd.BufferBytes = format.AverageBytesPerSecond; dscheckboxd.Format = format; try { pDSCaptureBuffer = new CaptureBuffer(dscheckboxd, mf.applicationDevice); InputFormatSupported[iIndex] = true; } catch { InputFormatSupported[iIndex] = false; } if (pDSCaptureBuffer != null) { pDSCaptureBuffer.Dispose(); } } Cursor = Cursors.Default; }
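The helper GetWaveFormatFromIndex is not shown in this listing. A minimal sketch of what it could look like, assuming the 20 formats are 5 common sample rates times 2 bit depths times 2 channel counts; the real project may enumerate them in a different order.

private static void GetWaveFormatFromIndex(int index, ref WaveFormat format)
{
    // 5 sample rates x 2 bit depths x 2 channel counts = 20 standard PCM formats (assumed layout).
    int[] rates = { 8000, 11025, 22050, 44100, 48000 };
    format.SamplesPerSecond = rates[index / 4];
    format.BitsPerSample = (short)((index / 2) % 2 == 0 ? 8 : 16);
    format.Channels = (short)(index % 2 == 0 ? 1 : 2);
    format.BlockAlign = (short)(format.Channels * format.BitsPerSample / 8);
    format.AverageBytesPerSecond = format.BlockAlign * format.SamplesPerSecond;
}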
public void StopCapture() { if (captureThread != null) { captureThread.Abort(); captureThread = null; } if (captureBuffer != null) { //captureBuffer.Stop(); captureBuffer.Dispose(); captureBuffer = null; } if (capture != null) { capture.Dispose(); capture = null; } IsRecording = false; }
void CreateCaptureBuffer()
{
    //-----------------------------------------------------------------------------
    // Name: CreateCaptureBuffer()
    // Desc: Creates a capture buffer and sets the format
    //-----------------------------------------------------------------------------
    CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
    if (null != notify)
    {
        notify.Dispose();
        notify = null;
    }
    if (null != buffer)
    {
        buffer.Dispose();
        buffer = null;
    }
    if (0 == selectedFormat.Channels)
    {
        return;
    }

    // Set the notification size
    notifySize = (1024 > selectedFormat.AverageBytesPerSecond / 8) ? 1024 : (selectedFormat.AverageBytesPerSecond / 8);
    notifySize -= notifySize % selectedFormat.BlockAlign;

    // Set the buffer sizes
    captureBufferSize = notifySize * NumberRecordNotifications;

    // Create the capture buffer
    dscheckboxd.BufferBytes = captureBufferSize;
    selectedFormat.FormatTag = WaveFormatTag.Pcm;
    dscheckboxd.Format = selectedFormat;   // Set the format during creation
    buffer = new CaptureBuffer(dscheckboxd, selectedDevice);
    nextCaptureOffset = 0;
    InitNotifications();
}
void CreateCaptureBuffer()
{
    //-----------------------------------------------------------------------------
    // Name: CreateCaptureBuffer()
    // Desc: Creates a capture buffer and sets the format
    //-----------------------------------------------------------------------------
    CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
    if (null != applicationNotify)
    {
        applicationNotify.Dispose();
        applicationNotify = null;
    }
    if (null != applicationBuffer)
    {
        applicationBuffer.Dispose();
        applicationBuffer = null;
    }
    if (0 == InputFormat.Channels)
    {
        return;
    }

    // Set the notification size
    NotifySize = (1024 > InputFormat.AverageBytesPerSecond / 8) ? 1024 : (InputFormat.AverageBytesPerSecond / 8);
    NotifySize -= NotifySize % InputFormat.BlockAlign;

    // Set the buffer sizes
    CaptureBufferSize = NotifySize * NumberRecordNotifications;

    // Create the capture buffer
    dscheckboxd.BufferBytes = CaptureBufferSize;
    InputFormat.FormatTag = WaveFormatTag.Pcm;
    dscheckboxd.Format = InputFormat;   // Set the format during creation
    applicationBuffer = new CaptureBuffer(dscheckboxd, applicationDevice);
    NextCaptureOffset = 0;
    InitNotifications();
}
private void InitCaptureBuffer() { if (captureBuffer_ != null) { captureBuffer_.Dispose(); captureBuffer_ = null; } notifySize_ = (1024 > waveFormat_.AverageBytesPerSecond / 8) ? 1024 : (waveFormat_.AverageBytesPerSecond / 8); notifySize_ -= notifySize_ % waveFormat_.BlockAlign; captureBufferSize_ = notifySize_ * NOTIFY_NUM; CaptureBufferDescription bufferDescription = new CaptureBufferDescription(); bufferDescription.BufferBytes = captureBufferSize_; bufferDescription.Format = waveFormat_; captureBuffer_ = new CaptureBuffer(bufferDescription, capture_); captureOffset_ = 0; captureDataLength_ = 0; }
/// <summary>
/// Tests whether the Microphone Privacy Settings are too restrictive for microphone access.
/// </summary>
/// <returns>True if the microphone is accessible</returns>
public static bool Allowed()
{
    bool access = false;
    var devices = new CaptureDevicesCollection();
    if (devices.Count <= 0)
    {
        return false;
    }

    var captureDevice = new Capture(devices[0].DriverGuid);
    CaptureBuffer applicationBuffer = null;

    var inputFormat = new WaveFormat();
    inputFormat.AverageBytesPerSecond = 8000;
    inputFormat.BitsPerSample = 8;
    inputFormat.BlockAlign = 1;
    inputFormat.Channels = 1;
    inputFormat.FormatTag = WaveFormatTag.Pcm;
    inputFormat.SamplesPerSecond = 8000;

    CaptureBufferDescription bufferdesc = new CaptureBufferDescription();
    bufferdesc.BufferBytes = 200;
    bufferdesc.Format = inputFormat;

    try
    {
        // Creating a capture buffer fails when the OS privacy settings block microphone access.
        applicationBuffer = new CaptureBuffer(bufferdesc, captureDevice);
        access = true;
    }
    catch (SoundException)
    {
        // Access denied or device unavailable; report the microphone as not accessible.
    }
    finally
    {
        applicationBuffer?.Dispose();
        captureDevice?.Dispose();
    }
    return access;
}
public void CreateCaptureBuffer() { CaptureBufferDescription bufferdescription = new CaptureBufferDescription(); if (null != myNotify) { myNotify.Dispose(); myNotify = null; } if (null != capturebuffer) { capturebuffer.Dispose(); capturebuffer = null; } iNotifySize = (1024 > mWavFormat.AverageBytesPerSecond / 8) ? 1024 : (mWavFormat.AverageBytesPerSecond / 8); iNotifySize -= iNotifySize % mWavFormat.BlockAlign; iBufferSize = iNotifyNum * iNotifySize; bufferdescription.Format = mWavFormat; bufferdescription.BufferBytes = iBufferSize; capturebuffer = new CaptureBuffer(bufferdescription, capture); iBufferOffset = 0; Console.WriteLine("Create a capture buffer successfully..."); }
/// <summary> /// Worker thread. /// </summary> /// private void WorkerThread() { // Get the selected capture device DirectSoundCapture captureDevice = new DirectSoundCapture(device); // Set the capture format WaveFormat format = new WaveFormat(); format.Channels = 1; format.SamplesPerSecond = sampleRate; format.FormatTag = sampleFormat.ToWaveFormat(); format.BitsPerSample = (short)Signal.GetSampleSize(sampleFormat); format.BlockAlignment = (short)(format.BitsPerSample / 8); format.AverageBytesPerSecond = format.SamplesPerSecond * format.BlockAlignment; // Setup the capture buffer CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription(); captureBufferDescription.Format = format; captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlignment; captureBufferDescription.WaveMapped = true; captureBufferDescription.ControlEffects = false; CaptureBuffer captureBuffer = null; NotificationPosition[] notifications = new NotificationPosition[2]; try { captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription); // Setup the notification positions int bufferPortionSize = captureBuffer.SizeInBytes / 2; notifications[0] = new NotificationPosition(); notifications[0].Offset = bufferPortionSize - 1; notifications[0].Event = new AutoResetEvent(false); notifications[1] = new NotificationPosition(); notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize; notifications[1].Event = new AutoResetEvent(false); captureBuffer.SetNotificationPositions(notifications); // Make a copy of the wait handles WaitHandle[] waitHandles = new WaitHandle[notifications.Length]; for (int i = 0; i < notifications.Length; i++) { waitHandles[i] = notifications[i].Event; } // Start capturing captureBuffer.Start(true); if (sampleFormat == SampleFormat.Format32BitIeeeFloat) { float[] currentSample = new float[desiredCaptureSize]; while (!stopEvent.WaitOne(0, true)) { int bufferPortionIndex = WaitHandle.WaitAny(waitHandles); captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex); OnNewFrame(currentSample); } } else if (sampleFormat == SampleFormat.Format16Bit) { short[] currentSample = new short[desiredCaptureSize]; while (!stopEvent.WaitOne(0, true)) { int bufferPortionIndex = WaitHandle.WaitAny(waitHandles); captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex); OnNewFrame(currentSample); } } } catch (Exception ex) { if (AudioSourceError != null) { AudioSourceError(this, new AudioSourceErrorEventArgs(ex.Message)); } else { throw; } } finally { if (captureBuffer != null) { captureBuffer.Stop(); captureBuffer.Dispose(); } if (captureDevice != null) { captureDevice.Dispose(); } for (int i = 0; i < notifications.Length; i++) { if (notifications[i].Event != null) { notifications[i].Event.Close(); } } } }
/* * Send synchronously sends data captured from microphone across the network on port 1550. */ private void Send() { try { //The following lines get audio from microphone and then send them //across network. captureBuffer = new CaptureBuffer(captureBufferDescription, capture); CreateNotifyPositions(); int halfBuffer = bufferSize / 2; captureBuffer.Start(true); bool readFirstBufferPart = true; int offset = 0; MemoryStream memStream = new MemoryStream(halfBuffer); bStop = false; while (!bStop && Process.GetCurrentProcess().MainWindowHandle != IntPtr.Zero) { autoResetEvent.WaitOne(); memStream.Seek(0, SeekOrigin.Begin); captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None); readFirstBufferPart = !readFirstBufferPart; offset = readFirstBufferPart ? 0 : halfBuffer; //TODO: Fix this ugly way of initializing differently. //Choose the vocoder. And then send the data to other party at port 1550. /* if (vocoder == Vocoder.ALaw) * { * byte[] dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer()); * udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550); * } * else if (vocoder == Vocoder.uLaw) * { * byte[] dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer()); * udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550); * } * else*/ { byte[] dataToWrite = memStream.GetBuffer(); udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550); } } } catch (Exception) { //MessageBox.Show("Wystąpił problem podczas wysyłania pakietów!", "Błąd", MessageBoxButtons.OK, MessageBoxIcon.Error); } finally { captureBuffer.Stop(); //Increment flag by one. nUdpClientFlag += 1; //When flag is two then it means we have got out of loops in Send and Receive. while (nUdpClientFlag != 2) { } //Clear the flag. nUdpClientFlag = 0; //Close the socket. udpClient.Close(); captureBuffer.Dispose(); } }
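CreateNotifyPositions is called above but not shown. A minimal sketch under the Managed DirectSound Notify API, assuming the captureBuffer, bufferSize, and autoResetEvent members used in Send; the original implementation may differ.

private Notify notify;   // assumed field, so the Notify object stays alive while capturing

private void CreateNotifyPositions()
{
    notify = new Notify(captureBuffer);

    // Signal autoResetEvent when the capture cursor passes the last byte of each half of the buffer.
    BufferPositionNotify[] positions = new BufferPositionNotify[2];
    positions[0].Offset = bufferSize / 2 - 1;
    positions[0].EventNotifyHandle = autoResetEvent.SafeWaitHandle.DangerousGetHandle();
    positions[1].Offset = bufferSize - 1;
    positions[1].EventNotifyHandle = autoResetEvent.SafeWaitHandle.DangerousGetHandle();

    notify.SetNotificationPositions(positions, positions.Length);
}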
public void StopRecording()
{
    if (CurrentState == State.NotReady)
    {
        return;
    }

    PcmDataBufferAvailableHandler deleg = PcmDataBufferAvailable;
    if (deleg != null)
    {
        for (int i = 0; i < m_PcmDataBuffer.Length; i++)
        {
            m_PcmDataBuffer[i] = 0;
        }
        m_PcmDataBufferAvailableEventArgs.PcmDataBuffer = m_PcmDataBuffer;
        m_PcmDataBufferAvailableEventArgs.PcmDataBufferLength = m_PcmDataBufferLength;
        deleg(this, m_PcmDataBufferAvailableEventArgs);
    }

    if (CurrentState != State.Recording && CurrentState != State.Monitoring)
    {
        return;
    }

    bool wasRecording = CurrentState == State.Recording;

#if FORCE_SINGLE_NOTIFICATION_EVENT
    m_CircularBufferTimer.Stop();
#endif

    Monitor.Enter(LOCK_CIRCULAR_BUFFER);
    try
    {
        m_CircularBuffer.Stop();
    }
    finally
    {
        Monitor.Exit(LOCK_CIRCULAR_BUFFER);
    }

#if FORCE_SINGLE_NOTIFICATION_EVENT
    m_CircularBufferNotificationEvent.Set();
    m_CircularBufferNotificationEvent.Close();
#else
    for (int i = 0; i < NOTIFICATIONS; i++)
    {
        m_CircularBufferNotificationEvents[i].Set();
        m_CircularBufferNotificationEvents[i].Close();
    }
#endif

    //lock (LOCK_THREAD_INSTANCE)
    //{
    //    if (m_CircularBufferRefreshThread != null)
    //    {
    //        m_CircularBufferRefreshThread.Abort();
    //    }
    //}

    int count = 0;
    while (m_CircularBufferRefreshThread != null
        //&& (m_CircularBufferRefreshThread.IsAlive
        //// NO NEED FOR AN EXTRA CHECK, AS THE THREAD POINTER IS RESET TO NULL
        ////|| m_CircularBufferRefreshThreadIsAlive
        //)
        )
    {
        //if (count % 5 == 0)
        //{
        //    Console.WriteLine(@"///// RECORDER m_CircularBufferRefreshThread.Abort(): " + count++);
        //    lock (LOCK_THREAD_INSTANCE)
        //    {
        //        if (m_CircularBufferRefreshThread != null)
        //        {
        //            m_CircularBufferRefreshThread.Abort();
        //        }
        //    }
        //}

        Console.WriteLine(@"///// RECORDER m_CircularBufferRefreshThread != null: " + count++);
        Thread.Sleep(20);

        if (count > 15)
        {
            lock (LOCK_THREAD_INSTANCE)
            {
                if (m_CircularBufferRefreshThread != null)
                {
                    m_CircularBufferRefreshThread.Join(100);
                }
                m_CircularBufferRefreshThread = null;
            }
            break;
        }
    }

    int numberOfBytesThatHaveBeenRead = 0;
    do
    {
        try
        {
            numberOfBytesThatHaveBeenRead = circularBufferTransferData(
#if !FORCE_SINGLE_NOTIFICATION_EVENT
                -1, false,
#endif
                true
                );
        }
        catch (Exception ex)
        {
#if DEBUG
            Debugger.Break();
#endif
            Console.WriteLine(ex.Message);
            Console.WriteLine(ex.StackTrace);
            break;
        }

#if DEBUG
        if (numberOfBytesThatHaveBeenRead > 0)
        {
            Console.WriteLine(string.Format("READ buffer bytes (STOP RECORD): {0}", numberOfBytesThatHaveBeenRead));
        }
#endif
    } while (numberOfBytesThatHaveBeenRead > 0);

    if (m_RecordingFileWriter != null)
    {
        // overwrite the existing RIFF header, this time with correct data length
        long length = 0;
        Stream stream = null;
        try
        {
            stream = m_RecordingFileWriter.BaseStream;
            stream.Position = 0;
        }
        catch (Exception ex)
        {
#if DEBUG
            Debugger.Break();
#endif
            Console.WriteLine(ex.Message);
            Console.WriteLine(ex.StackTrace);
            try
            {
                m_RecordingFileWriter.Close();
                m_RecordingFileWriter = null;
                Thread.Sleep(100);
                //FileInfo fileInfo = new FileInfo(m_RecordedFilePath);
                stream = File.OpenWrite(m_RecordedFilePath);
            }
            catch (Exception ex2)
            {
#if DEBUG
                Debugger.Break();
#endif
                Console.WriteLine("WTF?!");
                Console.WriteLine(ex2.Message);
                Console.WriteLine(ex2.StackTrace);
                stream = null;
            }
        }

        if (stream != null)
        {
            try
            {
                length = stream.Length;
                m_RecordedFileRiffHeaderSize = RecordingPCMFormat.RiffHeaderWrite(
                    stream,
                    (uint)(length - (long)m_RecordedFileRiffHeaderSize));
            }
            catch (Exception ex3)
            {
#if DEBUG
                Debugger.Break();
#endif
                Console.WriteLine("Erm ?!");
                Console.WriteLine(ex3.Message);
                Console.WriteLine(ex3.StackTrace);
            }
            finally
            {
                if (m_RecordingFileWriter != null)
                {
                    m_RecordingFileWriter.Close();
                }
                else if (stream != null)
                {
                    stream.Close();
                }
            }

            if (length <= (long)m_RecordedFileRiffHeaderSize && // no PCM data, just RIFF header
                File.Exists(m_RecordedFilePath))
            {
                File.Delete(m_RecordedFilePath);
                m_RecordedFilePath = null;
            }
        }
        else
        {
            try
            {
                m_RecordingFileWriter.Close();
            }
            catch (Exception ex4)
            {
#if DEBUG
                Debugger.Break();
#endif
                Console.WriteLine("WHAT ?!");
                Console.WriteLine(ex4.Message);
                Console.WriteLine(ex4.StackTrace);
            }
        }
    }

#if !USE_SHARPDX
    m_Notify.Dispose();
    m_Notify = null;
#endif

    m_CircularBuffer.Dispose();
    m_CircularBuffer = null;

    CurrentState = State.Stopped;

    if (wasRecording)
    {
        AudioRecordingFinishHandler del = AudioRecordingFinished;
        if (del != null)
        {
            del(this, new AudioRecordingFinishEventArgs(m_RecordedFilePath));
        }

        //var del = AudioRecordingFinished;
        //if (del != null)
        //del(this, new AudioRecordingFinishEventArgs(m_RecordedFilePath));
    }
}
/* * Send synchronously sends data captured from microphone across the network on port 1550. */ private void Send() { try { captureBuffer = new CaptureBuffer(captureBufferDescription, capture); //The following lines get audio from microphone and then send them //across network CreateNotifyPositions(); int halfBuffer = bufferSize / 2; captureBuffer.Start(true); bool readFirstBufferPart = true; int offset = 0; MemoryStream memStream = new MemoryStream(halfBuffer); bStop = false; while (!bStop) { if (!IsMuted) { autoResetEvent.WaitOne(); memStream.Seek(0, SeekOrigin.Begin); captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None); readFirstBufferPart = !readFirstBufferPart; offset = readFirstBufferPart ? 0 : halfBuffer; //Encode and encrypt data. byte[] dataEncoded = ALawEncoder.ALawEncode(memStream.GetBuffer()); byte[] dataToWrite = AES_Crypto.Encrypt(dataEncoded, CallCurrentPass, CallCurrentSalt); udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550); } } } catch (Exception e) { MessageBox.Show(e.ToString(), "VoiceChat-Send ()", MessageBoxButton.OK, MessageBoxImage.Error, MessageBoxResult.OK); } finally { captureBuffer.Stop(); captureBuffer.Dispose(); //Increment flag by one. nUdpClientFlag += 1; //When flag is two then it means we have got out of loops in Send and Receive. while (nUdpClientFlag != 2) { } //Clear the flag. nUdpClientFlag = 0; //Close the socket. udpClient.Close(); } }
/// <summary> /// Worker thread. /// </summary> /// private void WorkerThread() { needToStop = false; // Get the selected capture device DirectSoundCapture captureDevice = new DirectSoundCapture(device); // Set the capture format var bitsPerSample = Signal.GetSampleSize(sampleFormat); WaveFormat format = WaveFormat.CreateCustomFormat(sampleFormat.ToWaveFormat(), sampleRate, 1, sampleRate * bitsPerSample / 8, bitsPerSample / 8, bitsPerSample); // Setup the capture buffer CaptureBufferDescription captureBufferDescription = new CaptureBufferDescription(); captureBufferDescription.Format = format; captureBufferDescription.BufferBytes = 2 * desiredCaptureSize * format.BlockAlign; captureBufferDescription.Flags |= CaptureBufferCapabilitiesFlags.WaveMapped; captureBufferDescription.Flags &= ~CaptureBufferCapabilitiesFlags.ControlEffects; CaptureBuffer captureBuffer = null; NotificationPosition[] notifications = new NotificationPosition[2]; try { captureBuffer = new CaptureBuffer(captureDevice, captureBufferDescription); // Setup the notification positions int bufferPortionSize = captureBuffer.Capabilities.BufferBytes / 2; notifications[0] = new NotificationPosition(); notifications[0].Offset = bufferPortionSize - 1; notifications[0].WaitHandle = new AutoResetEvent(false); notifications[1] = new NotificationPosition(); notifications[1].Offset = bufferPortionSize - 1 + bufferPortionSize; notifications[1].WaitHandle = new AutoResetEvent(false); captureBuffer.SetNotificationPositions(notifications); // Make a copy of the wait handles WaitHandle[] waitHandles = new WaitHandle[notifications.Length]; for (int i = 0; i < notifications.Length; i++) { waitHandles[i] = notifications[i].WaitHandle; } // Start capturing captureBuffer.Start(true); if (sampleFormat == SampleFormat.Format32BitIeeeFloat) { float[] currentSample = new float[desiredCaptureSize]; Signal signal = Signal.FromArray(currentSample, sampleRate, sampleFormat); while (!needToStop) { int bufferPortionIndex = WaitHandle.WaitAny(waitHandles); captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex, LockFlags.None); OnNewFrame(signal); } } else if (sampleFormat == SampleFormat.Format16Bit) { short[] currentSample = new short[desiredCaptureSize]; Signal signal = Signal.FromArray(currentSample, sampleRate, sampleFormat); while (!needToStop) { int bufferPortionIndex = WaitHandle.WaitAny(waitHandles); captureBuffer.Read(currentSample, 0, currentSample.Length, bufferPortionSize * bufferPortionIndex, LockFlags.None); OnNewFrame(signal); } } } catch (Exception ex) { if (AudioSourceError == null) { throw; } AudioSourceError(this, new AudioSourceErrorEventArgs(ex)); } finally { if (captureBuffer != null) { captureBuffer.Stop(); captureBuffer.Dispose(); } if (captureDevice != null) { captureDevice.Dispose(); } #if !NETSTANDARD1_4 for (int i = 0; i < notifications.Length; i++) { if (notifications[i].WaitHandle != null) { notifications[i].WaitHandle.Close(); } } #endif } }
public bool StartCodec(ref string strFault)
{
    bool functionReturnValue = false;   // Returns true if successful
    Thread.Sleep(100);                  // This delay is necessary for reliable startup following a StopCodec

    lock (objCodecLock)
    {
        dttLastSoundCardSample = DateTime.Now;
        bool blnSpectrumSave = MCB.DisplaySpectrum;
        bool blnWaterfallSave = MCB.DisplayWaterfall;
        System.DateTime dttStartWait = DateTime.Now;
        MCB.DisplayWaterfall = false;
        MCB.DisplaySpectrum = false;
        string[] strCaptureDevices = EnumerateCaptureDevices();
        string[] strPlaybackDevices = EnumeratePlaybackDevices();
        functionReturnValue = false;
        int intPtr = 0;

        // Playback devices
        try
        {
            cllPlaybackDevices = null;
            cllPlaybackDevices = new Microsoft.DirectX.DirectSound.DevicesCollection();
            if (devSelectedPlaybackDevice != null)
            {
                devSelectedPlaybackDevice.Dispose();
                devSelectedPlaybackDevice = null;
            }
            foreach (DeviceInformation objDI in cllPlaybackDevices)
            {
                DeviceDescription objDD = new DeviceDescription(objDI);
                if (strPlaybackDevices[intPtr] == MCB.PlaybackDevice)
                {
                    if (MCB.DebugLog)
                        Logs.WriteDebug("[Main.StartCodec] Setting SelectedPlaybackDevice = " + MCB.PlaybackDevice);
                    devSelectedPlaybackDevice = new Device(objDD.info.DriverGuid);
                    functionReturnValue = true;
                    break; // TODO: might not be correct. Was : Exit For
                }
                intPtr += 1;
            }
            if (!functionReturnValue)
            {
                strFault = "Playback Device setup, Device " + MCB.PlaybackDevice + " not found in Windows enumerated Playback Devices";
            }
        }
        catch (Exception ex)
        {
            strFault = ex.Message;
            Logs.Exception("[StartCodec], Playback Device setup] Err: " + ex.ToString());
            functionReturnValue = false;
        }

        if (functionReturnValue)
        {
            // Capture Device
            CaptureBufferDescription dscheckboxd = new CaptureBufferDescription();
            try
            {
                functionReturnValue = false;
                cllCaptureDevices = null;
                cllCaptureDevices = new CaptureDevicesCollection();
                intPtr = 0;
                for (int i = 0; i <= cllCaptureDevices.Count - 1; i++)
                {
                    if (MCB.CaptureDevice == strCaptureDevices[i])
                    {
                        objCaptureDeviceGuid = cllCaptureDevices[i].DriverGuid;
                        devCaptureDevice = new Capture(objCaptureDeviceGuid);
                        stcSCFormat.SamplesPerSecond = 12000;   // 12000 Hz sample rate
                        stcSCFormat.Channels = 1;
                        stcSCFormat.BitsPerSample = 16;
                        stcSCFormat.BlockAlign = 2;
                        stcSCFormat.AverageBytesPerSecond = 2 * 12000;
                        stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                        objApplicationNotify = null;
                        objCapture = null;

                        // Set the buffer sizes
                        intCaptureBufferSize = intNotifySize * intNumberRecordNotifications;

                        // Create the capture buffer
                        dscheckboxd.BufferBytes = intCaptureBufferSize;
                        stcSCFormat.FormatTag = WaveFormatTag.Pcm;
                        dscheckboxd.Format = stcSCFormat;   // Set the format during creation
                        if (objCapture != null)
                        {
                            objCapture.Dispose();
                            objCapture = null;
                        }
                        //objCapture = New CaptureBuffer(dscheckboxd, devCaptureDevice)
                        intNextCaptureOffset = 0;
                        WriteTextToSpectrum("CODEC Start OK", Brushes.LightGreen);
                        while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3)
                        {
                            Application.DoEvents();
                            Thread.Sleep(100);
                        }
                        objCapture = new CaptureBuffer(dscheckboxd, devCaptureDevice);
                        InititializeNotifications();
                        objCapture.Start(true);   // start with looping
                        InititializeSpectrum(Color.Black);
                        functionReturnValue = true;
                    }
                }
                if (!functionReturnValue)
                {
                    strFault = "Could not find DirectSound capture device " + MCB.CaptureDevice.ToUpper();
                    //Logs.Exception("[Main.StartCodec] Could not find DirectSound capture device " & MCB.CaptureDevice & " in Windows enumerated Capture Devices")
                }
            }
            catch (Exception ex)
            {
                strFault = ex.Message;
                functionReturnValue = false;
                //Logs.Exception("[Main.StartCodec] Err: " & ex.ToString)
            }
        }

        if (functionReturnValue)
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StartCodec] Successful start of codec");
            objProtocol.ARDOPProtocolState = ProtocolState.DISC;
        }
        else
        {
            if (MCB.DebugLog)
                Logs.WriteDebug("[Main.StartCodec] CODEC Start Failed");
            WriteTextToSpectrum("CODEC Start Failed", Brushes.Red);
            objProtocol.ARDOPProtocolState = ProtocolState.OFFLINE;
            while (DateTime.Now.Subtract(dttStartWait).TotalSeconds < 3)
            {
                Application.DoEvents();
                Thread.Sleep(100);
            }
            tmrStartCODEC.Interval = 5000;
            tmrStartCODEC.Start();
        }

        InititializeSpectrum(Color.Black);
        MCB.DisplayWaterfall = blnWaterfallSave;
        MCB.DisplaySpectrum = blnSpectrumSave;
    }
    return functionReturnValue;
}