/// <summary>
/// Event handler for an RTP packet containing a mu-law audio payload.
/// Decodes each payload byte to a 16-bit PCM sample and appends it,
/// little-endian, to the WAV file writer.
/// </summary>
/// <param name="sample">Raw packet bytes (header + mu-law payload); may be null.</param>
/// <param name="headerLength">Number of leading header bytes to skip.</param>
private void SampleReceived(byte[] sample, int headerLength)
{
    // Removed a large block of commented-out NAudio stream-conversion code
    // that duplicated the loop below.
    if (sample == null)
    {
        return;
    }

    for (int index = headerLength; index < sample.Length; index++)
    {
        // Each mu-law byte expands to one 16-bit little-endian PCM sample.
        short pcm = MuLawDecoder.MuLawToLinearSample(sample[index]);
        m_waveFileWriter.WriteByte((byte)(pcm & 0xFF));
        m_waveFileWriter.WriteByte((byte)(pcm >> 8));
    }
}
/// <summary>
/// Decodes <paramref name="length"/> mu-law bytes from <paramref name="buffer"/>
/// into 16-bit little-endian PCM and sends the result to the transcription client.
/// </summary>
/// <param name="buffer">Mu-law encoded audio bytes.</param>
/// <param name="length">Number of bytes of <paramref name="buffer"/> to decode.</param>
public void Transcribe(byte[] buffer, int length)
{
    // Each mu-law byte expands to one 16-bit sample, so the output is 2x the input.
    var output = new byte[length * 2];
    var index = 0;
    for (var b = 0; b < length; b++)
    {
        var sample = MuLawDecoder.MuLawToLinearSample(buffer[b]);
        // BUG FIX: the original never advanced `index`, so every decoded
        // sample overwrote output[0..1] and the rest of the buffer stayed zero.
        output[index++] = (byte)(sample & 0xFF);
        output[index++] = (byte)(sample >> 8);
    }
    client.SendAudio(output, output.Length);
}
/// <summary>
/// Writes a captured packet to the pcap file and, for RTP packets, decodes the
/// mu-law payload to 16-bit PCM and appends it to the call's WAV file.
/// </summary>
/// <param name="raw">The raw captured packet.</param>
/// <param name="type">Classification of the packet (RTP, SIP dialog, etc.).</param>
public void WritePacket(RawCapture raw, PacketType type)
{
    captureFileWriter.Write(raw);

    if (type == PacketType.RTP)
    {
        var packet = PacketDotNet.Packet.ParsePacket(raw.LinkLayerType, raw.Data);
        var udpPacket = PacketDotNet.UdpPacket.GetEncapsulated(packet);

        // Only write out RTP packets to wav, and not RTCP.
        // BUG FIX: the original used ||, which is true for EVERY packet
        // (a single port cannot equal both RTCP ports at once), so RTCP
        // traffic was written into the wav. && excludes both RTCP ports.
        if (udpPacket.SourcePort != this.CalleeRTCPPort && udpPacket.SourcePort != this.CallerRTCPPort)
        {
            if (wavOutput == null)
            {
                // Lazily create the per-call wav file: 8 kHz, 16-bit, mono PCM.
                wavOutput = new WaveFileWriter("Calls\\" + CallID + ".wav", new WaveFormat(8000, 16, 1));
            }

            for (int index = 0; index < udpPacket.PayloadData.Length; index++)
            {
                // assuming this is MuLaw, need to handle other formats like g729, g726 etc
                short pcm = MuLawDecoder.MuLawToLinearSample(udpPacket.PayloadData[index]);
                wavOutput.WriteByte((byte)(pcm & 0xFF));
                wavOutput.WriteByte((byte)(pcm >> 8));
            }
        }
    }
}
/// <summary>
/// Decodes a G.711 (a-law or mu-law) wav file to linear PCM samples, writes
/// them to a temporary wav file, and loads that file into the application.
/// </summary>
/// <param name="filename">Path of the G.711-encoded wav file.</param>
/// <param name="codec">Which G.711 variant (ALAW or MULAW) the file uses.</param>
private void decodeG711(string filename, Codecs codec)
{
    byte[] buffer;
    // FIX: dispose the reader once the raw bytes are in memory — the
    // original leaked the file handle.
    using (WaveFileReader reader = new WaveFileReader(filename))
    {
        buffer = new byte[reader.Length];
        int totalRead = 0;
        // FIX: Read() may return fewer bytes than requested; loop until the
        // whole file is buffered or the stream ends. The original ignored
        // the return value.
        while (totalRead < buffer.Length)
        {
            int read = reader.Read(buffer, totalRead, buffer.Length - totalRead);
            if (read == 0)
            {
                break;
            }
            totalRead += read;
        }
    }

    // Each G.711 byte expands to one 16-bit sample.
    short[] samples = new short[buffer.Length];
    for (int i = 0; i < buffer.Length; i++)
    {
        if (codec == Codecs.ALAW)
        {
            samples[i] = ALawDecoder.ALawToLinearSample(buffer[i]);
        }
        else if (codec == Codecs.MULAW)
        {
            samples[i] = MuLawDecoder.MuLawToLinearSample(buffer[i]);
        }
    }

    // NOTE(review): 44100 Hz stereo does not match an 8 kHz mono G.711
    // source — kept as-is to preserve existing behavior; confirm intent.
    WaveFileWriter writer = new WaveFileWriter("tmp_" + (++this.tmpCount) + ".wav", new WaveFormat(44100, 16, 2));
    writer.WriteSamples(samples, 0, samples.Length);
    writer.Close();

    // tmpReader stays open: WaveFile/initAudio consume the stream afterwards.
    WaveFileReader tmpReader = new WaveFileReader("tmp_" + this.tmpCount + ".wav");
    WaveStream pcm = new WaveChannel32(tmpReader);
    BlockAlignReductionStream stream = new BlockAlignReductionStream(pcm);
    AudioFile file = new WaveFile(tmpReader, stream, filename);
    this.files.Add(file);
    this.addFileToListView(file);
    this.initAudio(file);
}
/// <summary>
/// Decodes a base64-encoded mu-law audio payload to PCM and forwards it to
/// the transcription engine associated with the given socket.
/// </summary>
/// <param name="socketId">Identifier of the socket whose engine should transcribe.</param>
/// <param name="payload">Base64-encoded mu-law audio bytes.</param>
private async Task ProcessAudioForTranscriptionAsync(string socketId, string payload)
{
    var encoded = Convert.FromBase64String(payload);
    byte[] pcm;
    MuLawDecoder.MuLawDecode(encoded, out pcm);
    var engine = GetSocketTranscriptionEngine(socketId);
    await engine.Transcribe(pcm);
}
/// <summary>
/// Event handler for receiving an RTP packet containing an audio payload from
/// the remote end of the VoIP call. Each mu-law byte is expanded to a 16-bit
/// little-endian PCM sample and pushed into the wave provider.
/// </summary>
/// <param name="sample">The audio sample.</param>
/// <param name="offset">The offset in the sample that the audio starts.</param>
public void AudioSampleReceived(byte[] sample, int offset)
{
    if (sample == null)
    {
        return;
    }

    for (int i = offset; i < sample.Length; i++)
    {
        short pcm = MuLawDecoder.MuLawToLinearSample(sample[i]);
        byte low = (byte)(pcm & 0xFF);
        byte high = (byte)(pcm >> 8);
        m_waveProvider.AddSamples(new byte[] { low, high }, 0, 2);
    }
}
/// <summary>
/// Decodes a run of mu-law bytes into 16-bit little-endian PCM.
/// </summary>
/// <param name="data">Buffer containing mu-law encoded audio.</param>
/// <param name="offset">Index of the first byte to decode.</param>
/// <param name="length">Number of bytes to decode.</param>
/// <returns>A new buffer of length*2 bytes holding the PCM samples.</returns>
public byte[] Decode(byte[] data, int offset, int length)
{
    var pcm = new byte[length * 2];
    for (int i = 0; i < length; i++)
    {
        short sample = MuLawDecoder.MuLawToLinearSample(data[offset + i]);
        pcm[2 * i] = (byte)(sample & 0xFF);
        pcm[2 * i + 1] = (byte)(sample >> 8);
    }
    return pcm;
}
/*
 * Receive audio data coming on port 1550 and feed it to the speakers to be played.
 * Loops until bStop is set; each received datagram is decoded with the
 * configured vocoder and played through a DirectSound secondary buffer.
 */
private void Receive()
{
    try
    {
        bStop = false;
        IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);
        while (!bStop)
        {
            //Receive data (blocks until a datagram arrives).
            byte[] byteData = udpClient.Receive(ref remoteEP);

            // G711 compresses the data by 50%; the decoders allocate the
            // double-size output buffer themselves via the out parameter.
            // FIX: removed the original's redundant pre-allocation that was
            // immediately discarded by the out assignment.
            byte[] byteDecodedData;
            if (vocoder == Vocoder.ALaw)
            {
                ALawDecoder.ALawDecode(byteData, out byteDecodedData);
            }
            else if (vocoder == Vocoder.uLaw)
            {
                MuLawDecoder.MuLawDecode(byteData, out byteDecodedData);
            }
            else
            {
                // No vocoder: play the raw bytes as-is.
                byteDecodedData = byteData;
            }

            //Play the data received to the user.
            playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
            playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
            playbackBuffer.Play(0, BufferPlayFlags.Default);
        }
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        nUdpClientFlag += 1;
    }
}
/// <summary>
/// Event handler for receiving an RTP frame from the remote end of the VoIP
/// call. Decodes the mu-law frame payload to 16-bit little-endian PCM and
/// feeds it to the wave provider for playback.
/// </summary>
/// <param name="rtpFrame">The RTP frame received.</param>
private void RTPChannelSampleReceived(RTPFrame rtpFrame)
{
    if (rtpFrame == null)
    {
        return;
    }

    var payload = rtpFrame.GetFramePayload();
    if (payload == null)
    {
        return;
    }

    foreach (byte encoded in payload)
    {
        short pcm = MuLawDecoder.MuLawToLinearSample(encoded);
        m_waveProvider.AddSamples(new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) }, 0, 2);
    }
}
// Receives voice data from the server and plays it through the speakers.
// (Original comment was Korean: "receive voice info from the server".)
private void ReceiveVoiceInfo(byte[] byteData)
{
    try
    {
        //Initialize();
        bStop = false;

        // G711 compresses the data by 50%; the decoders allocate the
        // double-size output buffer themselves via the out parameter.
        byte[] byteDecodedData;
        if (vocoder == Vocoder.ALaw)
        {
            ALawDecoder.ALawDecode(byteData, out byteDecodedData);
        }
        else if (vocoder == Vocoder.uLaw)
        {
            MuLawDecoder.MuLawDecode(byteData, out byteDecodedData);
        }
        else
        {
            // No vocoder: play the raw bytes as-is.
            byteDecodedData = byteData;
        }

        //Play the data received to the user.
        playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
        playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
        playbackBuffer.Play(0, BufferPlayFlags.Default);
    }
    catch (Exception ex)
    {
        // FIX: the original swallowed the exception silently; at least
        // surface it for diagnostics (no logger is visible in this scope).
        System.Diagnostics.Debug.WriteLine("ReceiveVoiceInfo error: " + ex);
    }
    finally
    {
        nUdpClientFlag += 1;
    }
}
/// <summary>
/// The purpose of this method is to decode the RTP payload into raw audio,
/// dispatching on the packet's payload type number.
/// </summary>
private void DecodePayload()
{
    switch (this.payloadType)
    {
        case 0:
            // u-law decoding (PCMU)
            decodedPayload = MuLawDecoder.MuLawDecode(payload);
            break;
        case 1:
            // a-law decoding
            // NOTE(review): standard RTP assigns PCMA to payload type 8, not 1 —
            // confirm this matches the sender's mapping.
            decodedPayload = ALawDecoder.ALawDecode(payload);
            break;
        case 101:
            // Dynamic type 101 carries no decodable audio here.
            decodedPayload = null;
            break;
        default:
            // unknown decoding --- more may need to be added in the future.
            throw new InvalidRTPPacketException();
    }
}
/// <summary>
/// Handles an incoming RTP packet: depacketizes it into a frame, decodes the
/// mu-law frame data to 16-bit little-endian PCM, and queues it for playback.
/// </summary>
/// <param name="packet">The received RTP packet.</param>
private void RtpReceiver_RtpPacketReceived(RtpPacket packet)
{
    if (closing)
    {
        return;
    }

    // Depacketize may buffer internally and return null until a full frame
    // is available.
    var frame = session.Depacketize(packet);
    if (frame == null)
    {
        return;
    }

    var data = frame.Data;
    if (data == null)
    {
        return;
    }

    // Each mu-law byte expands to one 16-bit sample.
    byte[] decoded = new byte[2 * data.Length];
    int j = 0;
    for (int i = 0; i < data.Length; i++)
    {
        short sample = MuLawDecoder.MuLawToLinearSample(data[i]);
        decoded[j++] = (byte)(sample & 0xFF);
        decoded[j++] = (byte)(sample >> 8);
    }

    // FIX: dropped the dead `decoded != null` check (a fresh array is never
    // null), the unused `time` local, and commented-out debug file streams.
    if (decoded.Length > 0)
    {
        waveBuffer.AddSamples(decoded, 0, decoded.Length);
    }
}
/// <summary>
/// Decodes a mu-law byte buffer into 16-bit PCM samples. The band mode does
/// not affect G.711 decoding and is ignored (kept for interface compatibility).
/// </summary>
/// <param name="data">Mu-law encoded audio bytes.</param>
/// <param name="mode">Band mode hint; unused by this codec.</param>
/// <returns>One 16-bit sample per input byte.</returns>
public short[] Decode(byte[] data, BandMode mode)
{
    return MuLawDecoder.MuLawDecode(data);
}
/*
 * Receive audio data coming on port 1550 and feed it to the speakers to be played.
 * Runs on a dedicated thread until bStop is set. In server mode the decoded
 * audio is also relayed to every connected peer before local playback.
 */
private void Receive()
{
    try
    {
        IsThreadReceiveEnd = false;
        byte[] byteData;
        bStop = false;
        IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

        if (eMode == Mode.Server)
        {
            LogAppend("Server Started");
            LogUsersConnected();
        }
        else
        {
            LogAppend("Client Audio Connected");
        }

        while (!bStop)
        {
            //Receive data.
            try
            {
                //bytes_received = udp_socket.ReceiveFrom(data, ref ep);
                try
                {
                    // Blocks until a datagram arrives; closing the UDP client
                    // throws here, which ends the thread via return.
                    byteData = udpClient.Receive(ref remoteEP);
                }
                catch (Exception)
                {
                    return;
                }

                //G711 compresses the data by 50%, so we allocate a buffer of double
                //the size to store the decompressed data.
                byte[] byteDecodedData = new byte[byteData.Length * 2];

                if (vocoder == Vocoder.ALaw)
                {
                    ALawDecoder.ALawDecode(byteData, out byteDecodedData); //Vocoder.ALaw
                }
                else if (vocoder == Vocoder.uLaw)
                {
                    MuLawDecoder.MuLawDecode(byteData, out byteDecodedData); //Vocoder.uLaw
                }
                else
                {
                    // No vocoder: pass the raw bytes through unchanged.
                    byteDecodedData = new byte[byteData.Length];
                    byteDecodedData = byteData;
                }

                if (eMode == Mode.Server)
                {
                    // NOTE(review): the server relays the DECODED buffer, yet
                    // receiving clients run this same decode path on arrival —
                    // that looks like a double decode. Confirm whether the relay
                    // should forward the original byteData instead.
                    lock (otherPartyIPs)
                    {
                        for (int i = 0; i < otherPartyIPs.Count; i++)
                        {
                            udpClient.Send(byteDecodedData, byteDecodedData.Length, otherPartyIPs[i].Address.ToString(), 1550);
                        }
                    }
                }

                //Play the data received to the user.
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                playbackBuffer.Play(0, BufferPlayFlags.Default);
            }
            catch (Exception)
            {
                // Per-packet errors are swallowed so one bad datagram does not
                // kill the receive loop.
            }
        }

        if (eMode == Mode.Server)
        {
            LogAppend("Server Stopped");
            LogUsersConnected();
        }
        else
        {
            LogAppend("Client Audio Disconnected");
        }
    }
    catch (Exception ex)
    {
        LogAppend("Voice Receive > " + ex.Message);
        //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        nUdpClientFlag += 1;
    }

    IsThreadReceiveEnd = true;
}
/*
 * Receive audio data coming on port 1550 and feed it to the speakers to be played.
 * Loops until bStop is set; received datagrams are pushed into bwp_internet.
 */
public void Receive()
{
    //TODO: Receive Sound DATA
    try
    {
        byte[] byteData;
        bStop = false;
        IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

        if (eMode == Mode.Server)
        {
            cGlobalVars.AddLogChat("Server Started");
            LogUsersConnected();
        }
        else
        {
            cGlobalVars.AddLogChat("Client Audio Connected");
        }

        while (!bStop)
        {
            //Receive data.
            try
            {
                //bytes_received = udp_socket.ReceiveFrom(data, ref ep);
                try
                {
                    // Blocks until a datagram arrives; closing the UDP client
                    // throws here, which ends the thread via return.
                    byteData = udpClient.Receive(ref remoteEP);
                }
                catch (Exception)
                {
                    return;
                }

                //G711 compresses the data by 50%, so we allocate a buffer of double
                //the size to store the decompressed data.
                byte[] byteDecodedData = new byte[byteData.Length * 2];

                if (vocoder == Vocoder.ALaw)
                {
                    ALawDecoder.ALawDecode(byteData, out byteDecodedData); //Vocoder.ALaw
                }
                else if (vocoder == Vocoder.uLaw)
                {
                    MuLawDecoder.MuLawDecode(byteData, out byteDecodedData); //Vocoder.uLaw
                }
                else
                {
                    byteDecodedData = new byte[byteData.Length];
                    byteDecodedData = byteData;
                }

                if (eMode == Mode.Server)
                {
                    // Server relay is disabled; only the commented-out send
                    // logic remains inside the lock.
                    lock (clientIPs)
                    {
                        // udpClient.Send(byteData, byteData.Length, clientIPs[i].Address.ToString(), 1550);
                        //Parallel.For(0, clientIPs.Count, delegate(int i)
                        //    {
                        //
                        //    }
                        //    );
                    }
                }

                //LogAppend("2Receibed Data!");
                // NOTE(review): byteDecodedData is computed above but the RAW
                // byteData is what gets queued for playback — the decoded buffer
                // is unused. Either the provider expects mu-law/a-law input or
                // this is a bug; confirm before changing.
                bwp_internet.AddSamples(byteData, 0, byteData.Length);
            }
            catch (Exception)
            {
                // Per-packet errors are swallowed so one bad datagram does not
                // kill the receive loop.
            }
        }

        if (eMode == Mode.Server)
        {
            cGlobalVars.AddLogChat("Server Stopped");
            LogUsersConnected();
        }
        else
        {
            cGlobalVars.AddLogChat("Client Audio Disconnected");
        }
    }
    catch (Exception ex)
    {
        cGlobalVars.AddLogChat("Voice Receive > " + ex.Message);
        //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        nUdpClientFlag += 1;
    }
}
/*
 * Receive audio data coming on port 1550 and feed it to the speakers to be played.
 * Client-only variant: loops until bStop is set and pushes received datagrams
 * into bwp_internet.
 */
public void Receive()
{
    //TODO: Receive Sound DATA
    try
    {
        byte[] byteData;
        bStop = false;
        IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);
        cGlobalVars.AddLogChat("Client Audio Connected");

        while (!bStop)
        {
            //Receive data.
            try
            {
                //bytes_received = udp_socket.ReceiveFrom(data, ref ep);
                try
                {
                    // Blocks until a datagram arrives; closing the UDP client
                    // throws here, which ends the thread via return.
                    byteData = udpClient.Receive(ref remoteEP);
                }
                catch (Exception)
                {
                    return;
                }

                //G711 compresses the data by 50%, so we allocate a buffer of double
                //the size to store the decompressed data.
                byte[] byteDecodedData = new byte[byteData.Length * 2];

                if (vocoder == VoiceCommon.Vocoder.ALaw)
                {
                    ALawDecoder.ALawDecode(byteData, out byteDecodedData); //Vocoder.ALaw
                }
                else if (vocoder == VoiceCommon.Vocoder.uLaw)
                {
                    MuLawDecoder.MuLawDecode(byteData, out byteDecodedData); //Vocoder.uLaw
                }
                else
                {
                    byteDecodedData = new byte[byteData.Length];
                    byteDecodedData = byteData;
                }

                // NOTE(review): byteDecodedData is computed above but the RAW
                // byteData is what gets queued for playback — the decoded buffer
                // is unused. Either the provider expects mu-law/a-law input or
                // this is a bug; confirm before changing.
                bwp_internet.AddSamples(byteData, 0, byteData.Length);
            }
            catch (Exception)
            {
                // Per-packet errors are swallowed so one bad datagram does not
                // kill the receive loop.
            }
        }

        cGlobalVars.AddLogChat("Client Audio Disconnected");
    }
    catch (Exception ex)
    {
        cGlobalVars.AddLogChat("Voice Receive > " + ex.Message);
        //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
    }
    finally
    {
        nUdpClientFlag += 1;
    }
}