public async Task<string> ProcessVoiceToText(Stream stream, int bitRate) {
    var fn = $"a-{Guid.NewGuid()}-{bitRate}.wav";
    stream.Seek(0, SeekOrigin.Begin);
    var wavStream = new RawSourceWaveStream(stream, new WaveFormat(bitRate, 2));
    // Debugging only
    // WaveFileWriter.CreateWaveFile($"{fn}-source.wav", wavStream);
    stream.Seek(0, SeekOrigin.Begin);
    var newFormat = new WaveFormat(InputRate, 1);
    WaveFormatConversionStream cs = new WaveFormatConversionStream(newFormat, wavStream);
    // Debugging only
    // WaveFileWriter.CreateWaveFile(fn, cs);
    cs.Seek(0, SeekOrigin.Begin);
    speechRecognizer.StartRecognition(cs);
    var result = speechRecognizer.GetResult();
    speechRecognizer.StopRecognition();
    cs.Close();
    return result?.GetHypothesis();
}
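A minimal caller sketch for the method above; the voiceProcessor instance, the GetCapturedAudio helper, and the captureRate value are illustrative assumptions, not part of the original example:

// Hypothetical usage: feed captured 16-bit stereo PCM to the recognizer pipeline.
byte[] capturedPcmBytes = GetCapturedAudio();   // assumed helper returning raw PCM bytes
int captureRate = 44100;                        // assumed capture sample rate
using (var audioStream = new MemoryStream(capturedPcmBytes)) {
    string transcript = await voiceProcessor.ProcessVoiceToText(audioStream, captureRate);
    Console.WriteLine(transcript ?? "(no hypothesis)");
}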
private byte[] RealMix(ReceivedRtp item1, ReceivedRtp item2) {
    if (item1 == null || item2 == null) { return null; }
    if (item1.size == 0 || item2.size == 0) { return null; }
    byte[] wavSrc1 = new byte[item1.size - headersize];
    byte[] wavSrc2 = new byte[item2.size - headersize];
    Array.Copy(item1.buff, headersize, wavSrc1, 0, (item1.size - headersize));
    Array.Copy(item2.buff, headersize, wavSrc2, 0, (item2.size - headersize));
    WaveMixerStream32 mixer = new WaveMixerStream32();
    // mixer.AutoStop = true;
    MemoryStream memstrem = new MemoryStream(wavSrc1);
    RawSourceWaveStream rawsrcstream = new RawSourceWaveStream(memstrem, this.codec);
    WaveFormatConversionStream conversionstream = new WaveFormatConversionStream(pcmFormat16, rawsrcstream);
    WaveChannel32 channelstream = new WaveChannel32(conversionstream);
    mixer.AddInputStream(channelstream);
    memstrem = new MemoryStream(wavSrc2);
    rawsrcstream = new RawSourceWaveStream(memstrem, this.codec);
    conversionstream = new WaveFormatConversionStream(pcmFormat16, rawsrcstream);
    channelstream = new WaveChannel32(conversionstream);
    mixer.AddInputStream(channelstream);
    mixer.Position = 0;
    Wave32To16Stream to16 = new Wave32To16Stream(mixer);
    var convStm = new WaveFormatConversionStream(pcmFormat8, to16);
    byte[] mixedbytes = new byte[(int)convStm.Length];
    int chk = convStm.Read(mixedbytes, 0, (int)convStm.Length);
    //Buffer.BlockCopy(tobyte, 0, writingBuffer, 0, tobyte.Length);
    memstrem.Close();
    rawsrcstream.Close();
    conversionstream.Close();
    channelstream.Close();
    convStm.Close();
    convStm.Dispose();
    convStm = null;
    to16.Close();
    to16.Dispose();
    to16 = null;
    mixer.Close();
    mixer.Dispose();
    mixer = null;
    return mixedbytes;
}
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e) {
    try {
        lock (_obj) {
            if (_bTalking && _avstream != null) {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat)) {
                    using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat)) {
                        int j = -1;
                        var bDst = new byte[44100];
                        totBytes = 0;
                        using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws)) {
                            while (j != 0) {
                                j = helpStm.Read(bDst, totBytes, 10000);
                                totBytes += j;
                            }
                            helpStm.Close();
                        }
                        ws.Close();
                        bSrc = bDst;
                    }
                }
                var enc = new byte[totBytes / 2];
                ALawEncoder.ALawEncode(bSrc, totBytes, enc);
                try {
                    _avstream.Write(enc, 0, enc.Length);
                    _avstream.Flush();
                } catch (SocketException) {
                    StopTalk();
                }
            }
        }
    } catch (Exception ex) {
        MainForm.LogExceptionToFile(ex);
        StopTalk();
    }
}
public WavRepairer(string path) {
    if (!UFUtils.IsAssetPath(path)) {
        path = UFUtils.GetAbsoluteUnityPath(path);
    }
    if (!path.Contains(".wav")) {
        Debug.LogError("Cannot apply wav repair to non wav files: " + path);
        return;
    }
    string encodingInfo = "";
    string backUpPath = path.Replace(".wav", "_backup.wav");
    string tempPath = path.Replace(".wav", "_temp.wav");
    File.Copy(path, backUpPath, true);
    try {
        WaveFileReader reader = new WaveFileReader(path);
        WaveFormat inputFormat = reader.WaveFormat;
        encodingInfo += inputFormat.Encoding;
        int sampleRate = inputFormat.SampleRate;
        int bits = 16;
        int channels = inputFormat.Channels;
        WaveFormat outputFormat = new WaveFormat(sampleRate, bits, channels);
        encodingInfo += " -> " + outputFormat;
        WaveStream convertedStream = new WaveFormatConversionStream(outputFormat, reader);
        WaveFileWriter.CreateWaveFile(tempPath, convertedStream);
        convertedStream.Close();
        File.Copy(tempPath, path, true);
    } catch (Exception e) {
        Debug.LogError("Could not repair audio file " + Path.GetFileNameWithoutExtension(path) +
                       " automatically, please repair it manually using an audio editor.\n" +
                       "Additional encoding info: " + encodingInfo + ", and source exception:\n" + e);
        File.Copy(backUpPath, path, true);
    }
    File.Delete(tempPath);
    File.Delete(backUpPath);
    AssetDatabase.Refresh();
}
private static WaveFormat mp3ToWav(string pathToMp3, Stream outputStream, int sampleRate, int bitDepth, int numChannels) {
    using (var reader = new Mp3FileReader(pathToMp3)) {
        var targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, numChannels);
        var pcmStream = new WaveFormatConversionStream(targetFormat, reader);
        var buffer = new byte[pcmStream.Length];
        pcmStream.Read(buffer, 0, (int)pcmStream.Length);
        outputStream.Write(buffer, 0, buffer.Length);
        outputStream.Position = 0;
        pcmStream.Close();
        return targetFormat;
    }
}
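A caller sketch for the MP3 helper above, assuming it is reachable from the call site (it is declared private static here); the file names and the 16 kHz mono target are illustrative only:

// Hypothetical usage: decode an MP3 to 16 kHz, 16-bit mono PCM and save it as a .wav file.
using (var pcm = new MemoryStream()) {
    WaveFormat format = mp3ToWav("input.mp3", pcm, 16000, 16, 1);
    using (var writer = new WaveFileWriter("output.wav", format)) {
        pcm.CopyTo(writer);   // the helper rewinds the stream, so the whole buffer is written
    }
}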
static void RecordAndPlayAudio() {
    Console.WriteLine("Microphone program: press a key to start recording");
    Console.ReadKey();
    Console.WriteLine("Press a key to stop recording");
    mciSendString("open new Type waveaudio Alias recsound", "", 0, 0);
    mciSendString("record recsound", "", 0, 0);
    Console.ReadKey();
    mciSendString("save recsound recordTemp.wav", "", 0, 0); // File is saved in bin/debug
    mciSendString("close recsound ", "", 0, 0);
    Console.WriteLine("Saved");
    Console.ReadKey();
    string FileName = "recordTemp.wav";
    string CommandString = "open " + "\"" + FileName + "\"" + " type waveaudio alias recsound";
    mciSendString(CommandString, null, 0, 0);
    CommandString = "play recsound";
    mciSendString(CommandString, null, 0, 0);
    WaveFileReader reader = new NAudio.Wave.WaveFileReader("recordTemp.wav");
    WaveFormat newFormat = new WaveFormat(16000, 16, 1);
    WaveFormatConversionStream str = new WaveFormatConversionStream(newFormat, reader);
    try {
        WaveFileWriter.CreateWaveFile("record.wav", str);
        Console.WriteLine("Audio converted to 16 kHz");
    } catch (Exception ex) {
        Console.WriteLine(ex);
    } finally {
        str.Close();
    }
    Console.ReadKey();
}
public string ConvertSampleRate(string sourceFile, string destinationDirectory, int destChannels, int destSamplingRate, int destBitDepth) {
    if (!File.Exists(sourceFile)) {
        throw new FileNotFoundException("Invalid source file path");
    }
    if (!Directory.Exists(destinationDirectory)) {
        throw new FileNotFoundException("Invalid destination directory");
    }
    string destinationFilePath = null;
    WaveStream sourceStream = null;
    WaveFormatConversionStream conversionStream = null;
    try {
        WaveFormat destFormat = new WaveFormat(destSamplingRate, destBitDepth, destChannels);
        sourceStream = new WaveFileReader(sourceFile);
        conversionStream = new WaveFormatConversionStream(destFormat, sourceStream);
        destinationFilePath = GenerateOutputFileFullname(sourceFile, destinationDirectory, destChannels, destSamplingRate, destBitDepth);
        WaveFileWriter.CreateWaveFile(destinationFilePath, conversionStream);
    } finally {
        if (conversionStream != null) {
            conversionStream.Close();
        }
        if (sourceStream != null) {
            sourceStream.Close();
        }
    }
    return destinationFilePath;
}
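A short usage sketch for ConvertSampleRate, assuming an instance of the containing class is available as converter (the class itself is not shown in the example); the paths and the 16 kHz mono target are placeholders:

// Hypothetical usage: downsample a recording to 16 kHz, 16-bit mono.
string outputPath = converter.ConvertSampleRate(
    @"C:\audio\call.wav",       // existing source file
    @"C:\audio\converted",      // existing destination directory
    destChannels: 1,
    destSamplingRate: 16000,
    destBitDepth: 16);
Console.WriteLine("Converted file written to " + outputPath);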
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e) {
    try {
        lock (_obj) {
            if (_bTalking && _avstream != null) {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat)) {
                    var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat);
                    var helpStm = new WaveFormatConversionStream(_waveFormat, ws);
                    totBytes = helpStm.Read(bSrc, 0, 25000);
                    ws.Close();
                    ws.Dispose();
                    helpStm.Close();
                    helpStm.Dispose();
                }
                var enc = new byte[totBytes / 2];
                ALawEncoder.ALawEncode(bSrc, totBytes, enc);
                try {
                    _avstream.Write(enc, 0, enc.Length);
                    _avstream.Flush();
                } catch (SocketException) {
                    StopTalk();
                }
            }
        }
    } catch (Exception ex) {
        Log.Error("", ex); //MainForm.LogExceptionToFile(ex);
        StopTalk();
    }
}
/// <summary>
/// Converts a WMA file to a WAV stream
/// </summary>
/// <param name="pathToWma">Path to a .wma file to convert</param>
/// <param name="outputStream">Stream to store the converted wav.</param>
/// <param name="sampleRate">Target sample rate in Hz.</param>
/// <param name="bitDepth">Target bits per sample.</param>
/// <param name="numChannels">Target channel count.</param>
/// <returns>The WaveFormat object of the converted wav</returns>
private static WaveFormat wmaToWav(string pathToWma, Stream outputStream, int sampleRate, int bitDepth, int numChannels) {
    if (!Path.GetExtension(pathToWma).ToLowerInvariant().Contains("wma")) {
        throw new ArgumentException("Must be a .wma file!");
    }
    using (var reader = new WMAFileReader(pathToWma)) {
        var targetFormat = new NAudio.Wave.WaveFormat(sampleRate, bitDepth, numChannels);
        var pcmStream = new WaveFormatConversionStream(targetFormat, reader);
        var buffer = new byte[pcmStream.Length];
        pcmStream.Read(buffer, 0, (int)pcmStream.Length);
        outputStream.Write(buffer, 0, buffer.Length);
        outputStream.Position = 0;
        pcmStream.Close();
        return targetFormat;
    }
}
private void clearToolStripMenuItem_Click(object sender, EventArgs e) {
    if (Wave != null) {
        Wave.Close();
        Wave.Dispose();
        Wave = null;
    }
    if (Recorder != null) {
        Recorder.StopRecording();
        Recorder = null;
    }
    SetupChart();
    processToolStripMenuItem.Enabled = false;
    startToolStripMenuItem.Enabled = true;
    stopToolStripMenuItem.Enabled = false;
    openToolStripMenuItem.Enabled = true;
    clearToolStripMenuItem.Enabled = false;
    saveToolStripMenuItem.Enabled = false;
}
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e) {
    try {
        lock (_obj) {
            if (_bTalking && _avstream != null) {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat)) {
                    var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat);
                    var helpStm = new WaveFormatConversionStream(_waveFormat, ws);
                    totBytes = helpStm.Read(bSrc, 0, 25000);
                    ws.Close();
                    ws.Dispose();
                    helpStm.Close();
                    helpStm.Dispose();
                }
                if (_needsencodeinit) {
                    _enc.EncodeInit(BitConverter.ToInt16(e.RawData, 0), BitConverter.ToInt16(e.RawData, 2));
                    _needsencodeinit = false;
                }
                var buff = new byte[25000];
                int c;
                unsafe {
                    fixed (byte* src = bSrc) {
                        fixed (byte* dst = buff) {
                            c = (int)_enc.EncodeFoscam(src, totBytes, dst);
                        }
                    }
                }
                Buffer.BlockCopy(buff, 0, _talkBuffer, _talkDatalen, c);
                _talkDatalen += c;
                var dtms = (int)(DateTime.Now - _dt).TotalMilliseconds;
                int i = 0;
                int j = 0;
                try {
                    while (j + 160 < _talkDatalen) {
                        // need to write out in 160 byte packets for 40ms
                        byte[] cmd = SInit(TalkData, MoIPAvFlag);
                        cmd = AddNext(cmd, dtms + (i * 40));
                        cmd = AddNext(cmd, _seq);
                        cmd = AddNext(cmd, (int)(DateTime.Now - _dt).TotalSeconds);
                        cmd = AddNext(cmd, (byte)0x0);
                        cmd = AddNext(cmd, 160);
                        var pkt = new byte[160];
                        Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 160);
                        cmd = AddNext(cmd, pkt, 160);
                        Encode(ref cmd);
                        _avstream.Write(cmd, 0, cmd.Length);
                        j += 160;
                        _seq++;
                        i++;
                    }
                    if (j < _talkDatalen) {
                        Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                        _talkDatalen = _talkDatalen - j;
                    }
                } catch (SocketException) {
                    StopTalk(true);
                }
            }
        }
    } catch (Exception ex) {
        Log.Error("", ex); //MainForm.LogExceptionToFile(ex);
        StopTalk(true);
    }
}
private void ProcessMixingFinal(RcvData data, int dataSize) {
    string processingFn = string.Format("e:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);
    List<RecInfos> ls0 = lExtension0.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 0; });
    List<RecInfos> ls1 = lExtension1.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 1; });
    IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
    ls0.Sort(isExtensionCompare);
    ls1.Sort(isExtensionCompare);
    int count = 0;
    int count0 = ls0.Count();
    int count1 = ls1.Count();
    if (count0 - count1 < 0) {
        count = count0;
    } else {
        count = count1;
    }
    for (int i = 0; i < count; i++) {
        if (ls0[i].seq == ls1[i].seq) {
            // Mixing
            byte[] wavSrc0 = new byte[160];
            byte[] wavSrc1 = new byte[160];
            Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
            Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);
            WaveMixerStream32 mixer = new WaveMixerStream32();
            //mixer.AutoStop = true;
            WaveChannel32 channelStm = null;
            for (int j = 0; j < 2; j++) {
                MemoryStream memStm = null;
                BufferedStream bufStm = null;
                RawSourceWaveStream rawSrcStm = null;
                WaveFormatConversionStream conversionStm = null;
                if (j == 0) {
                    memStm = new MemoryStream(wavSrc0);
                } else {
                    memStm = new MemoryStream(wavSrc1);
                }
                bufStm = new BufferedStream(memStm);
                rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
                channelStm = new WaveChannel32(conversionStm);
                mixer.AddInputStream(channelStm);
            }
            mixer.Position = 0;
            if (File.Exists(processingFn)) {
                var wavefilestream = new WaveFileReader(processingFn);
                byte[] wavefilebyte = new byte[(int)wavefilestream.Length];
                int chk0 = wavefilestream.Read(wavefilebyte, 0, wavefilebyte.Length);
                Wave32To16Stream to16 = new Wave32To16Stream(mixer);
                var conversionStm = new WaveFormatConversionStream(pcmFormat, to16);
                byte[] tobyte = new byte[(int)conversionStm.Length];
                int chk1 = conversionStm.Read(tobyte, 0, (int)conversionStm.Length);
                byte[] desByte = new byte[wavefilebyte.Length + tobyte.Length];
                conversionStm.Close();
                wavefilestream.Close();
                Buffer.BlockCopy(wavefilebyte, 0, desByte, 0, wavefilebyte.Length);
                Buffer.BlockCopy(tobyte, 0, desByte, wavefilebyte.Length, tobyte.Length);
                using (MemoryStream memStm = new MemoryStream(desByte))
                using (BufferedStream buffStm = new BufferedStream(memStm))
                using (RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(buffStm, pcmFormat)) {
                    WaveFileWriter.CreateWaveFile(processingFn, rawSrcStm);
                }
            } else {
                var mixedStm = new Wave32To16Stream(mixer);
                var convStm = new WaveFormatConversionStream(pcmFormat, mixedStm);
                WaveFileWriter.CreateWaveFile(processingFn, convStm);
                convStm.Close();
                mixedStm.Close();
            }
            mixer.Close();
            // Remove the processed items
            lExtension0.Remove(ls0[i]);
            lExtension1.Remove(ls1[i]);
        } else if (ls0[i].seq - ls1[i].seq < 0) {
            // Mix ls0 only
            // Append the byte[] to the ls0 original > convert the original byte[] to a wave stream > save as a wave file
            if (File.Exists(processingFn)) {
                //wavefilestream = new WaveFileReader(processingFn);
            } else {
            }
            // Remove the processed item
            lExtension0.Remove(ls0[i]);
            ls1.Insert(i + 1, ls1[i]);
        } else if (ls0[i].seq - ls1[i].seq > 0) {
            // Mix ls1 only
            // Append the byte[] to the ls1 original > convert the original byte[] to a wave stream > save as a wave file
            if (File.Exists(processingFn)) {
                //wavefilestream = new WaveFileReader(processingFn);
            } else {
            }
            // Remove the processed item
            lExtension1.Remove(ls1[i]);
            ls0.Insert(i + 1, ls0[i]);
        }
    }
}
private void ProcessMixing2(RcvData data, int dataSize) {
    string processingFn = string.Format("d:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);
    List<RecInfos> ls0 = lExtension0.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 0; });
    List<RecInfos> ls1 = lExtension1.FindAll(
        delegate(RecInfos list) { return list.rcvData.Equals(data) && list.isExtension == 1; });
    IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
    ls0.Sort(isExtensionCompare);
    ls1.Sort(isExtensionCompare);
    int count = 0;
    int count0 = ls0.Count();
    int count1 = ls1.Count();
    if (count0 - count1 < 0) {
        count = count0;
    } else {
        count = count1;
    }
    byte[] buffWriting = new byte[320 * count];
    for (int i = 0; i < count; i++) {
        if (ls0[i].seq == ls1[i].seq) {
            // Mixing
            // The byte length can differ depending on the codec, so a real implementation must check each case.
            byte[] wavSrc0 = new byte[160];
            byte[] wavSrc1 = new byte[160];
            Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
            Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);
            WaveMixerStream32 mixer = new WaveMixerStream32();
            //mixer.AutoStop = true;
            WaveChannel32 channelStm = null;
            MemoryStream memStm = null;
            BufferedStream bufStm = null;
            RawSourceWaveStream rawSrcStm = null;
            WaveFormatConversionStream conversionStm = null;
            for (int j = 0; j < 2; j++) {
                if (j == 0) {
                    memStm = new MemoryStream(wavSrc0);
                } else {
                    memStm = new MemoryStream(wavSrc1);
                }
                bufStm = new BufferedStream(memStm);
                rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
                channelStm = new WaveChannel32(conversionStm);
                mixer.AddInputStream(channelStm);
            }
            mixer.Position = 0;
            Wave32To16Stream to16 = new Wave32To16Stream(mixer);
            var convStm = new WaveFormatConversionStream(pcmFormat, to16);
            byte[] tobyte = new byte[(int)convStm.Length];
            int chk = convStm.Read(tobyte, 0, (int)convStm.Length);
            Buffer.BlockCopy(tobyte, 0, buffWriting, i * tobyte.Length, tobyte.Length);
            conversionStm.Close();
            rawSrcStm.Close();
            bufStm.Close();
            memStm.Close();
            convStm.Close();
            to16.Close();
            channelStm.Close();
            mixer.Close();
            // Remove the processed items
            lExtension0.Remove(ls0[i]);
            lExtension1.Remove(ls1[i]);
        } else if (ls0[i].seq - ls1[i].seq < 0) {
            // Mix ls0 only
            // Append the byte[] to the ls0 original > convert the original byte[] to a wave stream > save as a wave file
            // Mixing
            // The byte length can differ depending on the codec, so a real implementation must check each case.
            byte[] wavSrc0 = new byte[160];
            byte[] wavSrc1 = new byte[160];
            Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
            Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);
            WaveMixerStream32 mixer = new WaveMixerStream32();
            //mixer.AutoStop = true;
            WaveChannel32 channelStm = null;
            MemoryStream memStm = null;
            BufferedStream bufStm = null;
            RawSourceWaveStream rawSrcStm = null;
            WaveFormatConversionStream conversionStm = null;
            for (int j = 0; j < 2; j++) {
                if (j == 0) {
                    memStm = new MemoryStream(wavSrc0);
                } else {
                    memStm = new MemoryStream(wavSrc1);
                }
                bufStm = new BufferedStream(memStm);
                rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
                channelStm = new WaveChannel32(conversionStm);
                mixer.AddInputStream(channelStm);
            }
            mixer.Position = 0;
            Wave32To16Stream to16 = new Wave32To16Stream(mixer);
            var convStm = new WaveFormatConversionStream(pcmFormat, to16);
            byte[] tobyte = new byte[(int)convStm.Length];
            int chk = convStm.Read(tobyte, 0, (int)convStm.Length);
            Buffer.BlockCopy(tobyte, 0, buffWriting, i * tobyte.Length, tobyte.Length);
            conversionStm.Close();
            rawSrcStm.Close();
            bufStm.Close();
            memStm.Close();
            convStm.Close();
            to16.Close();
            channelStm.Close();
            mixer.Close();
            // Remove the processed item
            lExtension0.Remove(ls0[i]);
            ls1.Insert(i + 1, ls1[i]);
        } else if (ls0[i].seq - ls1[i].seq > 0) {
            // Mix ls1 only
            // Append the byte[] to the ls1 original > convert the original byte[] to a wave stream > save as a wave file
            // Mixing
            // The byte length can differ depending on the codec, so a real implementation must check each case.
            byte[] wavSrc0 = new byte[160];
            byte[] wavSrc1 = new byte[160];
            Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
            Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);
            WaveMixerStream32 mixer = new WaveMixerStream32();
            //mixer.AutoStop = true;
            WaveChannel32 channelStm = null;
            MemoryStream memStm = null;
            BufferedStream bufStm = null;
            RawSourceWaveStream rawSrcStm = null;
            WaveFormatConversionStream conversionStm = null;
            for (int j = 0; j < 2; j++) {
                if (j == 0) {
                    memStm = new MemoryStream(wavSrc0);
                } else {
                    memStm = new MemoryStream(wavSrc1);
                }
                bufStm = new BufferedStream(memStm);
                rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
                conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
                channelStm = new WaveChannel32(conversionStm);
                mixer.AddInputStream(channelStm);
            }
            mixer.Position = 0;
            Wave32To16Stream to16 = new Wave32To16Stream(mixer);
            var convStm = new WaveFormatConversionStream(pcmFormat, to16);
            byte[] tobyte = new byte[(int)convStm.Length];
            int chk = convStm.Read(tobyte, 0, (int)convStm.Length);
            Buffer.BlockCopy(tobyte, 0, buffWriting, i * tobyte.Length, tobyte.Length);
            conversionStm.Close();
            rawSrcStm.Close();
            bufStm.Close();
            memStm.Close();
            convStm.Close();
            to16.Close();
            channelStm.Close();
            mixer.Close();
            // Remove the processed item
            lExtension1.Remove(ls1[i]);
            ls0.Insert(i + 1, ls0[i]);
        }
    }
    // Turn the 10 accumulated buffers into bytes and write them with WaveFileWriting
    WaveFileWriting(buffWriting, processingFn);
}
private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e) {
    try {
        lock (_obj) {
            if (_bTalking && _avstream != null) {
                byte[] bSrc = e.RawData;
                int totBytes = bSrc.Length;
                int j = -1;
                if (!_audioSource.RecordingFormat.Equals(_waveFormat)) {
                    using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat)) {
                        var bDst = new byte[44100];
                        totBytes = 0;
                        using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws)) {
                            while (j != 0) {
                                j = helpStm.Read(bDst, totBytes, 10000);
                                totBytes += j;
                            }
                            helpStm.Close();
                        }
                        ws.Close();
                        bSrc = bDst;
                    }
                }
                var enc = _muLawCodec.Encode(bSrc, 0, totBytes);
                //ALawEncoder.ALawEncode(bSrc, totBytes, enc);
                Buffer.BlockCopy(enc, 0, _talkBuffer, _talkDatalen, enc.Length);
                _talkDatalen += enc.Length;
                j = 0;
                try {
                    while (j + 240 < _talkDatalen) {
                        // need to write out in 240 byte packets
                        var pkt = new byte[240];
                        Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 240);
                        // _avstream.Write(_hdr, 0, _hdr.Length);
                        _avstream.Write(pkt, 0, 240);
                        j += 240;
                    }
                    if (j < _talkDatalen) {
                        Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                        _talkDatalen = _talkDatalen - j;
                    }
                } catch (SocketException) {
                    StopTalk();
                }
            }
        }
    } catch (Exception ex) {
        MainForm.LogExceptionToFile(ex);
        StopTalk();
    }
}