/// <summary>
/// Handles a sample from the Media Foundation sample grabber. Video samples (mediaTypeID 0)
/// are VP8-encoded via the VPX encoder; other samples are treated as 16-bit PCM audio and
/// converted to 8-bit mu-law. Raises OnMediaSampleReady for each successfully produced sample.
/// </summary>
unsafe private void OnProcessSampleEvent(int mediaTypeID, uint dwSampleFlags, long llSampleTime, long llSampleDuration, uint dwSampleSize, ref byte[] sampleBuffer)
{
    try
    {
        if (mediaTypeID == 0)
        {
            if (_vpxEncoder == null)
            {
                logger.Warn("Video sample cannot be processed as the VPX encoder has not yet received the frame size.");
            }
            else
            {
                byte[] vpxEncodedBuffer = null;

                unsafe
                {
                    fixed (byte* p = sampleBuffer)
                    {
                        int encodeResult = _vpxEncoder.Encode(p, (int)dwSampleSize, 1, ref vpxEncodedBuffer);

                        if (encodeResult != 0)
                        {
                            logger.Warn("VPX encode of video sample failed.");
                        }
                    }
                }

                // BUG FIX: previously the event was raised even when the encode failed,
                // passing a null buffer to subscribers. Only forward successful encodes.
                if (vpxEncodedBuffer != null)
                {
                    OnMediaSampleReady?.Invoke(MediaSampleTypeEnum.VP8, _vp8Timestamp, vpxEncodedBuffer);
                    _vp8Timestamp += VP8_TIMESTAMP_SPACING;
                }
            }
        }
        else
        {
            // 16-bit PCM -> 8-bit mu-law: one output byte per two input bytes.
            uint sampleDuration = (uint)(sampleBuffer.Length / 2);

            byte[] mulawSample = new byte[sampleDuration];
            int sampleIndex = 0;

            // ToDo: Find a way to wire up the Media foundation WAVE_FORMAT_MULAW codec so the encoding below is not necessary.
            for (int index = 0; index < sampleBuffer.Length; index += 2)
            {
                var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                mulawSample[sampleIndex++] = ulawByte;
            }

            OnMediaSampleReady?.Invoke(MediaSampleTypeEnum.Mulaw, _mulawTimestamp, mulawSample);
            _mulawTimestamp += sampleDuration;
        }
    }
    catch (Exception excp)
    {
        logger.Warn("Exception MfSampleGrabber_OnProcessSampleEvent. " + excp.Message);
    }
}
// Encode G.711: converts the currently loaded audio to A-law or mu-law and saves it as a WAV file.
private void button2_Click(object sender, EventArgs e)
{
    try
    {
        if (this.currentAudio == null)
        {
            throw new Exception("Вы не выбрали файл для кодирования.");
        }
        if (codecToEncode.SelectedItem == null)
        {
            throw new Exception("Вы не выбрали кодэк.");
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "Ошибка", MessageBoxButtons.OK, MessageBoxIcon.Error);
        return;
    }

    SaveFileDialog save = new SaveFileDialog();
    save.Filter = "Wave File (*.wav)|*.wav;";
    if (save.ShowDialog() != DialogResult.OK)
    {
        return;
    }

    Codecs codec = (codecToEncode.SelectedIndex == 0) ? Codecs.ALAW : Codecs.MULAW;

    // Encode each 16-bit sample to a single G.711 byte.
    byte[] samples = new byte[this.currentAudio.ShortSamples.Length];
    for (int i = 0; i < this.currentAudio.ShortSamples.Length; i++)
    {
        if (codec == Codecs.ALAW)
        {
            samples[i] = ALawEncoder.LinearToALawSample(this.currentAudio.ShortSamples[i]);
        }
        else if (codec == Codecs.MULAW)
        {
            samples[i] = MuLawEncoder.LinearToMuLawSample(this.currentAudio.ShortSamples[i]);
        }
    }

    WaveFormat format = null;
    if (codec == Codecs.ALAW)
    {
        format = WaveFormat.CreateALawFormat(this.currentAudio.SampleRate, this.currentAudio.Stream.WaveFormat.Channels);
    }
    else if (codec == Codecs.MULAW)
    {
        format = WaveFormat.CreateMuLawFormat(this.currentAudio.SampleRate, this.currentAudio.Stream.WaveFormat.Channels);
    }

    // BUG FIX: the writer was closed manually with no try/finally, leaking the file
    // handle if Write threw. A using block guarantees disposal.
    using (WaveFileWriter writer = new WaveFileWriter(save.FileName, format))
    {
        writer.Write(samples, 0, samples.Length);
    }

    DialogResult dres = MessageBox.Show("Аудиофайл успешно сохранен. Открыть файл?", "Файл сохранен", MessageBoxButtons.YesNo, MessageBoxIcon.Question);
    if (dres == DialogResult.Yes)
    {
        this.decodeG711(save.FileName, codec);
    }
}
/// <summary>
/// Mu-law encodes a block of 16-bit PCM samples: every two input bytes produce one output byte.
/// </summary>
public byte[] Encode(byte[] data, int offset, int length)
{
    int sampleCount = length / 2;
    byte[] result = new byte[sampleCount];

    for (int i = 0; i < sampleCount; i++)
    {
        short pcmSample = BitConverter.ToInt16(data, offset + (i * 2));
        result[i] = MuLawEncoder.LinearToMuLawSample(pcmSample);
    }

    return result;
}
/// <summary>
/// Converts the 16-bit ACM stream to 8-bit MuLaw on read.
/// </summary>
/// <param name="destinationBuffer">The destination buffer to output into.</param>
/// <param name="offset">The offset to store at.</param>
/// <param name="readingCount">The requested size to read.</param>
/// <returns>The number of mu-law bytes written to the destination buffer.</returns>
public int Read(byte[] destinationBuffer, int offset, int readingCount)
{
    // Source buffer has twice as many items as the output array (16-bit PCM -> 8-bit mu-law).
    var sizeOfPcmBuffer = readingCount * 2;

    sourceBuffer = BufferHelpers.Ensure(sourceBuffer, sizeOfPcmBuffer);
    var sourceBytesRead = ieeeToPcm.Read(sourceBuffer, 0, sizeOfPcmBuffer);
    var samplesRead = sourceBytesRead / 2;

    // BUG FIX: 'offset' is the DESTINATION offset, but it was previously added to the
    // source read position (the source was filled starting at 0), and the loop ran over
    // the requested size instead of the bytes actually read — encoding stale buffer
    // contents on a short read. Loop over sourceBytesRead and apply offset to the output.
    var outIndex = 0;
    for (var n = 0; n < sourceBytesRead; n += 2)
    {
        destinationBuffer[offset + outIndex++] = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sourceBuffer, n));
    }

    return samplesRead;
}
/// <summary>
/// Event handler for receiving an audio sample that is ready for encoding, packaging into RTP and sending to the remote end
/// of the VoIP call.
/// </summary>
private void RTPChannelSampleAvailable(object sender, WaveInEventArgs e)
{
    TimeSpan samplePeriod = DateTime.Now.Subtract(_lastInputSampleReceivedAt);
    _lastInputSampleReceivedAt = DateTime.Now;
    _inputSampleCount++;

    _audioLogger.Debug(_inputSampleCount + " sample period " + samplePeriod.TotalMilliseconds + "ms, sample bytes " + e.BytesRecorded + ".");

    // BUG FIX: use BytesRecorded rather than Buffer.Length. The device buffer can be
    // larger than the bytes actually captured for this event, so iterating the full
    // buffer previously encoded and transmitted stale bytes.
    byte[] sample = new byte[e.BytesRecorded / 2];
    int sampleIndex = 0;

    for (int index = 0; index < e.BytesRecorded; index += 2)
    {
        var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(e.Buffer, index));
        sample[sampleIndex++] = ulawByte;
    }

    m_rtpChannel.SendRTPRaw(sample);
}
/// <summary>
/// Sends a sample from a signal generator generated waveform.
/// </summary>
private void SendSignalGeneratorSample(object state)
{
    lock (_audioStreamTimer)
    {
        int inputBufferSize = RTP_TIMESTAMP_RATE / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;
        int outputBufferSize = RTP_TIMESTAMP_RATE / 1000 * AUDIO_SAMPLE_PERIOD_MILLISECONDS;

        // Generate the waveform as floats, then convert each sample straight through
        // signed linear PCM to mu-law in a single pass.
        float[] floatSamples = new float[inputBufferSize];
        _signalGenerator.Read(floatSamples, 0, inputBufferSize);

        byte[] encodedSample = new byte[outputBufferSize];

        for (int i = 0; i < inputBufferSize; i++)
        {
            short pcmSample = (short)(floatSamples[i] * 32767f);
            encodedSample[i] = MuLawEncoder.LinearToMuLawSample(pcmSample);
        }

        _peerConnection.SendAudioFrame((uint)outputBufferSize, _peerConnection.GetSendingFormat(SDPMediaTypesEnum.audio).FormatCodec.GetHashCode(), encodedSample);
    }
}
/// <summary>
/// Mu-law encodes the supplied 16-bit PCM samples. The band mode is not used by this codec.
/// </summary>
public byte[] Encode(short[] data, BandMode mode) => MuLawEncoder.MuLawEncode(data);
/// <summary>
/// Starts the Media Foundation sampling. Loops pulling samples from the media source:
/// video frames are VP8-encoded (re-initialising the encoder on resolution changes) and
/// audio frames are converted from 16-bit PCM to mu-law, each raised via OnMp4MediaSampleReady.
/// </summary>
unsafe private void SampleMp4Media()
{
    try
    {
        logger.LogDebug("Starting mp4 media sampling thread.");

        _isMp4Sampling = true;

        VpxEncoder vpxEncoder = null;
        uint vp8Timestamp = 0;
        uint mulawTimestamp = 0;

        while (!_exit)
        {
            if (OnMp4MediaSampleReady == null)
            {
                logger.LogDebug("No active clients, media sampling paused.");
                break;
            }
            else
            {
                byte[] sampleBuffer = null;
                var sample = _mediaSource.GetSample(ref sampleBuffer);

                if (sample != null && sample.HasVideoSample)
                {
                    // (Re)initialise the encoder if the frame dimensions or stride changed.
                    if (vpxEncoder == null ||
                        (vpxEncoder.GetWidth() != sample.Width || vpxEncoder.GetHeight() != sample.Height || vpxEncoder.GetStride() != sample.Stride))
                    {
                        if (vpxEncoder != null)
                        {
                            vpxEncoder.Dispose();
                        }

                        vpxEncoder = InitialiseVpxEncoder((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                    }

                    byte[] vpxEncodedBuffer = null;

                    unsafe
                    {
                        fixed (byte* p = sampleBuffer)
                        {
                            int encodeResult = vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                            if (encodeResult != 0)
                            {
                                logger.LogWarning("VPX encode of video sample failed.");
                            }
                        }
                    }

                    // BUG FIX: only raise the event when the encode produced a buffer;
                    // previously a failed encode raised the event with a null buffer.
                    if (vpxEncodedBuffer != null)
                    {
                        OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.video, vp8Timestamp, vpxEncodedBuffer);
                        vp8Timestamp += VP8_TIMESTAMP_SPACING;
                    }
                }
                else if (sample != null && sample.HasAudioSample)
                {
                    // 16-bit PCM -> 8-bit mu-law: one output byte per two input bytes.
                    uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                    byte[] mulawSample = new byte[sampleDuration];
                    int sampleIndex = 0;

                    for (int index = 0; index < sampleBuffer.Length; index += 2)
                    {
                        var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                        mulawSample[sampleIndex++] = ulawByte;
                    }

                    OnMp4MediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, mulawTimestamp, mulawSample);
                    mulawTimestamp += sampleDuration;
                }
            }
        }

        // BUG FIX: vpxEncoder is null if no video sample ever arrived; the previous
        // unconditional Dispose() threw a NullReferenceException in that case.
        vpxEncoder?.Dispose();
    }
    catch (Exception excp)
    {
        logger.LogWarning("Exception SampleMp4Media. " + excp.Message);
    }
    finally
    {
        logger.LogDebug("mp4 sampling thread stopped.");
        _isMp4Sampling = false;
    }
}
// Captures audio from the microphone and transmits it as voice chat messages.
// (Original comment: "음성정보를 전달한다." — "Transmits the voice information.")
private void SendVoiceInfo()
{
    try
    {
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;

        captureBuffer.Start(true);

        bool readFirstBufferPart = true;
        int offset = 0;

        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;
        VoiceInfo voiceInfo = new VoiceInfo();

        while (!bStop)
        {
            // Wait until the capture notification signals that half the buffer is full.
            autoResetEvent.WaitOne();

            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);

            // Ping-pong between the two halves of the circular capture buffer.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;

            // Encode per the selected vocoder (the three branches previously duplicated
            // the whole send sequence; only the encoding step differs).
            byte[] dataToWrite;
            if (vocoder == Vocoder.ALaw)
            {
                dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            }
            else if (vocoder == Vocoder.uLaw)
            {
                dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer());
            }
            else
            {
                dataToWrite = memStream.GetBuffer();
            }

            voiceInfo.UserId = Main_Pan._UserInfo.Id;
            voiceInfo.Data = dataToWrite;
            Main_Pan._ClientEngine.Send(NotifyType.Request_VoiceChat, voiceInfo);
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: the exception was previously swallowed by an empty catch block,
        // hiding all capture/send failures. At minimum surface it for diagnostics.
        System.Diagnostics.Debug.WriteLine("SendVoiceInfo exception: " + ex.Message);
    }
    finally
    {
        captureBuffer.Stop();
        nUdpClientFlag = 0;
    }
}
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 */
private void Send()
{
    try
    {
        // The following lines get audio from the microphone and then send it
        // across the network.
        captureBuffer = new CaptureBuffer(captureBufferDescription, capture);
        CreateNotifyPositions();
        int halfBuffer = bufferSize / 2;

        captureBuffer.Start(true);

        bool readFirstBufferPart = true;
        int offset = 0;

        MemoryStream memStream = new MemoryStream(halfBuffer);
        bStop = false;

        while (!bStop)
        {
            // Wait until the capture notification signals that half the buffer is full.
            autoResetEvent.WaitOne();

            memStream.Seek(0, SeekOrigin.Begin);
            captureBuffer.Read(offset, memStream, halfBuffer, LockFlag.None);

            // Ping-pong between the two halves of the circular capture buffer.
            readFirstBufferPart = !readFirstBufferPart;
            offset = readFirstBufferPart ? 0 : halfBuffer;

            // Encode per the selected vocoder, then send to the other party at port 1550.
            // (The three branches previously duplicated the send call.)
            byte[] dataToWrite;
            if (vocoder == Vocoder.ALaw)
            {
                dataToWrite = ALawEncoder.ALawEncode(memStream.GetBuffer());
            }
            else if (vocoder == Vocoder.uLaw)
            {
                dataToWrite = MuLawEncoder.MuLawEncode(memStream.GetBuffer());
            }
            else
            {
                dataToWrite = memStream.GetBuffer();
            }

            udpClient.Send(dataToWrite, dataToWrite.Length, otherPartyIP.Address.ToString(), 1550);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Send ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        captureBuffer.Stop();

        // Increment flag by one.
        nUdpClientFlag += 1;

        // When flag is two then it means we have got out of loops in Send and Receive.
        // BUG FIX: the wait was a tight busy-loop burning a full CPU core; yield briefly
        // between polls.
        while (nUdpClientFlag != 2)
        {
            System.Threading.Thread.Sleep(1);
        }

        // Clear the flag.
        nUdpClientFlag = 0;

        // Close the socket.
        udpClient.Close();
    }
}
// Converts a single 16-bit linear PCM sample to its 8-bit mu-law encoding.
byte EncodeMuLaw(short _sample) => MuLawEncoder.LinearToMuLawSample(_sample);
/*
 * Send synchronously sends data captured from microphone across the network on port 1550.
 * In Server mode the data is fanned out to every connected client; in Client mode it goes
 * to the server only.
 */
public void Send()
{
    try
    {
        int users_count = 0;

        cGlobalVars.AddLogChat("Sending Started");

        lock (_music_data)
        {
            _music_data.Clear();
        }

        while (!bStop)
        {
            lock (clientIPs)
            {
                // Determine how many recipients there are for this iteration.
                if (eMode == Mode.Server)
                {
                    users_count = clientIPs.Count;
                }
                else if (eMode == Mode.Client)
                {
                    users_count = 1;
                }

                if (users_count > 0)
                {
                    byte[] elemento = DispatchData();

                    if (elemento != null)
                    {
                        // Encode according to the selected vocoder.
                        byte[] dataToWrite;
                        if (vocoder == Vocoder.uLaw)
                        {
                            dataToWrite = MuLawEncoder.MuLawEncode(elemento);
                        }
                        else if (vocoder == Vocoder.ALaw)
                        {
                            dataToWrite = ALawEncoder.ALawEncode(elemento);
                        }
                        else
                        {
                            dataToWrite = elemento;
                        }

                        // BUG FIX: the raw 'elemento' buffer was previously sent even though
                        // the vocoder-encoded buffer had been computed, making the encoding
                        // dead code. Send the encoded data instead.
                        if (eMode == Mode.Client)
                        {
                            cGlobalVars.AddLogChat("3Sending Data!");
                            udpClient.Send(dataToWrite, dataToWrite.Length, serverIP.Address.ToString(), 1550);
                        }
                        else if (eMode == Mode.Server)
                        {
                            for (int i = 0; i < users_count; i++)
                            {
                                udpClient.Send(dataToWrite, dataToWrite.Length, clientIPs[i].Address.ToString(), 1550);
                            }
                        }
                    }
                }
            }
        }

        cGlobalVars.AddLogChat("Sending Ended");
    }
    catch (Exception ex)
    {
        cGlobalVars.AddLogChat("VoiceChat-Send >> " + ex.Message);
    }
    finally
    {
        // Increment flag by one.
        nUdpClientFlag += 1;

        // When flag is two then it means we have got out of loops in Send and Receive.
        // BUG FIX: the wait was a tight busy-loop burning a full CPU core; yield briefly
        // between polls.
        while (nUdpClientFlag != 2)
        {
            System.Threading.Thread.Sleep(1);
        }

        // Clear the flag.
        nUdpClientFlag = 0;

        // Close the socket.
        udpClient.Close();
    }
}
// Capture callback: converts a stereo 32-bit float 48KHz capture buffer to mono 8-bit
// mu-law at 8KHz and raises DataAvailable with the encoded bytes.
public void OnDataAvailable(object sender, WaveInEventArgs e)
{
    // Flag that the callback is actively processing this buffer.
    _waitingForData = false;
    var outPos = 0;
    var floatIndex = 0;
    try
    {
        CheckBuffers(e.BytesRecorded);

        // Note: Data ratio of stereo 32-bit 48KHz to mono 8-bit 8KHz is 48:1.
        var bytesReceived = e.BytesRecorded;
        var outputSize = bytesReceived / 48;
        // Round the output length up to an even byte count.
        var outArray = new byte[(int)Math.Ceiling(outputSize / 2d) * 2];

        // #1 Resample to 8KHz
        var waveBuffer = Downsample(e);

        // The float buffer is interleaved stereo: consume two floats per output sample.
        while (floatIndex < waveBuffer.FloatBufferCount)
        {
            // #2 Convert to Mono
            var leftSample = waveBuffer.FloatBuffer[floatIndex++];
            var rightSample = waveBuffer.FloatBuffer[floatIndex++];
            var monoSample = ConvertToMono(leftSample, rightSample);

            // #3 Convert to short and then mu-law
            outArray[outPos++] = MuLawEncoder.LinearToMuLawSample((short)(monoSample * MaxValue));
        }

        if (DataAvailable != null)
        {
            // NOTE(review): DynamicInvoke per subscriber is slow compared with a direct
            // Invoke — presumably each subscriber is called individually on purpose;
            // confirm before changing.
            foreach (var delDelegate in DataAvailable.GetInvocationList())
            {
                delDelegate.DynamicInvoke(this, new DataEventArgs(outArray, outPos));
            }
        }
    }
    catch (Exception ex)
    {
        // Dump diagnostic state (output and input cursors) before rethrowing.
        Console.WriteLine(ex.ToString());
        Console.WriteLine($"{nameof(outPos)}: {outPos}, {nameof(floatIndex)}: {floatIndex}");
        throw;
    }

    // Ready to accept the next capture buffer.
    _waitingForData = true;
}
/// <summary>
/// Starts the Media Foundation sampling. Loops pulling samples from the media source:
/// video frames are VP8-encoded (raising OnVideoResolutionChanged when the frame geometry
/// changes) and audio frames are converted from 16-bit PCM to mu-law, each raised via
/// OnMediaSampleReady. Exits when there are no event subscribers.
/// </summary>
unsafe private static void StartMedia()
{
    try
    {
        logger.LogDebug("Starting media sampling thread.");

        _isSampling = true;

        while (true)
        {
            if (OnMediaSampleReady == null)
            {
                logger.LogDebug("No active clients, media sampling paused.");
                break;
            }
            else
            {
                byte[] sampleBuffer = null;
                var sample = _mediaSource.GetSample(ref sampleBuffer);

                if (sample != null && sample.HasVideoSample)
                {
                    // Re-initialise the encoder if the frame dimensions or stride changed.
                    if (_vpxEncoder == null ||
                        (_vpxEncoder.GetWidth() != sample.Width || _vpxEncoder.GetHeight() != sample.Height || _vpxEncoder.GetStride() != sample.Stride))
                    {
                        OnVideoResolutionChanged((uint)sample.Width, (uint)sample.Height, (uint)sample.Stride);
                    }

                    byte[] vpxEncodedBuffer = null;

                    unsafe
                    {
                        fixed (byte* p = sampleBuffer)
                        {
                            int encodeResult = _vpxEncoder.Encode(p, sampleBuffer.Length, 1, ref vpxEncodedBuffer);

                            if (encodeResult != 0)
                            {
                                logger.LogWarning("VPX encode of video sample failed.");
                            }
                        }
                    }

                    // BUG FIX: only raise the event when the encode produced a buffer;
                    // previously a failed encode raised the event with a null buffer.
                    if (vpxEncodedBuffer != null)
                    {
                        OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.video, _vp8Timestamp, vpxEncodedBuffer);
                        _vp8Timestamp += VP8_TIMESTAMP_SPACING;
                    }
                }
                else if (sample != null && sample.HasAudioSample)
                {
                    // 16-bit PCM -> 8-bit mu-law: one output byte per two input bytes.
                    uint sampleDuration = (uint)(sampleBuffer.Length / 2);

                    byte[] mulawSample = new byte[sampleDuration];
                    int sampleIndex = 0;

                    // ToDo: Find a way to wire up the Media foundation WAVE_FORMAT_MULAW codec so the encoding below is not necessary.
                    for (int index = 0; index < sampleBuffer.Length; index += 2)
                    {
                        var ulawByte = MuLawEncoder.LinearToMuLawSample(BitConverter.ToInt16(sampleBuffer, index));
                        mulawSample[sampleIndex++] = ulawByte;
                    }

                    OnMediaSampleReady?.Invoke(SDPMediaTypesEnum.audio, _mulawTimestamp, mulawSample);
                    _mulawTimestamp += sampleDuration;
                }
            }
        }
    }
    catch (Exception excp)
    {
        // BUG FIX: the log message previously said "OnProcessSampleEvent" (copy-paste
        // from another handler), mislabelling the failing method.
        logger.LogWarning("Exception StartMedia. " + excp.Message);
    }
    finally
    {
        logger.LogDebug("Media sampling thread stopped.");
        _isSampling = false;
    }
}