public void CanConvertMuLawToSuggestedPcm()
 {
     using (WaveStream stream = WaveFormatConversionStream.CreatePcmStream(
                new NullWaveStream(WaveFormat.CreateMuLawFormat(8000, 1), 1000)))
     {
     }
 }
Example #2
        // Encode G.711
        private void button2_Click(object sender, EventArgs e)
        {
            try {
                if (this.currentAudio == null)
                {
                    throw new Exception("Вы не выбрали файл для кодирования.");
                }
                if (codecToEncode.SelectedItem == null)
                {
                    throw new Exception("Вы не выбрали кодэк.");
                }
            } catch (Exception ex) {
                MessageBox.Show(ex.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Error);
                return;
            }
            SaveFileDialog save = new SaveFileDialog();

            save.Filter = "Wave File (*.wav)|*.wav";
            if (save.ShowDialog() != DialogResult.OK)
            {
                return;
            }
            Codecs codec = (codecToEncode.SelectedIndex == 0) ? Codecs.ALAW : Codecs.MULAW;

            byte[] samples = new byte[this.currentAudio.ShortSamples.Length];
            for (int i = 0; i < this.currentAudio.ShortSamples.Length; i++)
            {
                if (codec == Codecs.ALAW)
                {
                    samples[i] = ALawEncoder.LinearToALawSample(this.currentAudio.ShortSamples[i]);
                }
                else if (codec == Codecs.MULAW)
                {
                    samples[i] = MuLawEncoder.LinearToMuLawSample(this.currentAudio.ShortSamples[i]);
                }
            }
            WaveFormat format = null;

            if (codec == Codecs.ALAW)
            {
                format = WaveFormat.CreateALawFormat(this.currentAudio.SampleRate, this.currentAudio.Stream.WaveFormat.Channels);
            }
            else if (codec == Codecs.MULAW)
            {
                format = WaveFormat.CreateMuLawFormat(this.currentAudio.SampleRate, this.currentAudio.Stream.WaveFormat.Channels);
            }
            using (WaveFileWriter writer = new WaveFileWriter(save.FileName, format))
            {
                writer.Write(samples, 0, samples.Length);
            }
            DialogResult dres = MessageBox.Show("The audio file was saved successfully. Open the file?", "File saved", MessageBoxButtons.YesNo, MessageBoxIcon.Question);

            if (dres == DialogResult.Yes)
            {
                this.decodeG711(save.FileName, codec);
            }
        }
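The decodeG711 helper called above is project-specific and not shown in this example. For reference, a minimal sketch of what an equivalent decode step could look like with NAudio's built-in G.711 decoders; the method name and shape are illustrative assumptions, not the original code:

        // Hypothetical sketch (not the original decodeG711): reads the encoded bytes from
        // a G.711 wave file and converts each byte back to a 16-bit PCM sample using
        // NAudio.Codecs. Assumes the same Codecs enum as the snippet above.
        private static short[] DecodeG711Samples(string fileName, Codecs codec)
        {
            using (var reader = new WaveFileReader(fileName))
            {
                byte[] encoded = new byte[reader.Length];
                reader.Read(encoded, 0, encoded.Length);

                short[] pcm = new short[encoded.Length];
                for (int i = 0; i < encoded.Length; i++)
                {
                    pcm[i] = (codec == Codecs.ALAW)
                        ? ALawDecoder.ALawToLinearSample(encoded[i])
                        : MuLawDecoder.MuLawToLinearSample(encoded[i]);
                }
                return pcm;
            }
        }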
        public static void GenerateAudioStream(byte[] buffer, MemoryStream memoryStream)
        {
            // define the audio file type
            var waveFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            // use WaveFileWriter to convert the audio file buffer and write it into a memory stream
            using (var waveFileWriter = new WaveFileWriter(new IgnoreDisposeStream(memoryStream), waveFormat))
            {
                waveFileWriter.Write(buffer, 0, buffer.Length);
                waveFileWriter.Flush();
            }
        }
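A minimal usage sketch for the helper above; the input path is an illustrative assumption, and the memory stream is rewound before anything reads the in-memory WAV back:

        // Hypothetical caller: wrap raw mu-law bytes (e.g. an RTP payload) in an in-memory WAV container.
        byte[] muLawBytes = File.ReadAllBytes(@".\TestAudio\sample.ulaw"); // assumed path
        using (var memoryStream = new MemoryStream())
        {
            GenerateAudioStream(muLawBytes, memoryStream);
            memoryStream.Position = 0; // rewind so a reader or player can consume the WAV
        }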
Example #4
        private static void TestMp3()
        {
            string mp3File    = @".\TestAudio\speech_20200512013308591.mp3";
            string outputFile = @".\TestAudio\output-from-mp3.wav";

            using (Mp3FileReader reader = new Mp3FileReader(mp3File))
            {
                using (WaveStream pcmStream = WaveFormatConversionStream.CreatePcmStream(reader))
                {
                    WaveFileWriter.CreateWaveFile(outputFile, pcmStream);
                }
            }
            return; // Early exit for the simple MP3-to-PCM test; the mu-law chunking code below is left in but never executed.

            Dictionary <uint, byte[]> audioBytes = new Dictionary <uint, byte[]>();

            uint timestamp = 0;

            string file = @".\TestAudio\speech_20200512013308591.mp3";
            //var pcmFormat = new WaveFormat(8000, 16, 1);
            var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);



            using (var pcmStm = WaveFormatConversionStream.CreatePcmStream(new Mp3FileReader(file)))
            {
                using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                {
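                    // 160 bytes of 8 kHz mono mu-law (one byte per sample) is 20 ms of audio.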
                    byte[] buffer    = new byte[160];
                    int    bytesRead = ulawStm.Read(buffer, 0, 160);

                    while (bytesRead > 0)
                    {
                        byte[] sample = new byte[bytesRead];
                        Array.Copy(buffer, sample, bytesRead);
                        //m_rtpChannel.AddSample(sample);
                        audioBytes.Add(timestamp, sample);
                        timestamp += 160;

                        bytesRead = ulawStm.Read(buffer, 0, 160);
                    }
                }
            }

            //WaveFileWriter.CreateWaveFile(tempFile, WaveP);
            string fileName = @".\TestAudio\output.wav";

            using (WaveFileWriter writer = new WaveFileWriter(fileName, ulawFormat))
            {
                var testSequence = audioBytes.SelectMany(p => p.Value).ToArray();
                writer.Write(testSequence, 0, testSequence.Length);
            }
        }
Example #5
        public void Start(int sessionId)
        {
            this.sessionId = sessionId;

            var location  = Assembly.GetEntryAssembly().Location;
            var directory = Path.GetDirectoryName(location);

            filename  = string.Format("Recordings/{0}.wav", sessionId);
            path      = string.Format(@"{0}/{1}", directory, filename);
            format    = WaveFormat.CreateMuLawFormat(8000, 1);
            writer    = new WaveFileWriter(path, format); // use the full path assembled above, not the bare relative filename
            recording = true;
        }
        public MuLawStream(bool debug = false)
        {
            _debug = debug;
            if (_debug)
            {
                _outputStream = new BufferedWaveProvider(WaveFormat.CreateMuLawFormat(8000, 1))
                {
                    // There is data loss if you change the buffer duration from the default.
                    // _sourceStream.BufferDuration = TimeSpan.FromMilliseconds(200),

                    DiscardOnBufferOverflow = true,

                    // If we don't set this to false we'll simply fill up disk space with zeros.
                    ReadFully = false
                };
            }
        }
Example #7
        private static void TestWav()
        {
            Dictionary <uint, byte[]> audioBytes = new Dictionary <uint, byte[]>();

            uint timestamp = 0;

            string file       = @".\TestAudio\output-from-mp3.wav";
            var    pcmFormat  = new WaveFormat(8000, 16, 1);
            var    ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new WaveFileReader(file)))
            {
                using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                {
                    byte[] buffer    = new byte[160];
                    int    bytesRead = ulawStm.Read(buffer, 0, 160);

                    while (bytesRead > 0)
                    {
                        byte[] sample = new byte[bytesRead];
                        Array.Copy(buffer, sample, bytesRead);
                        //m_rtpChannel.AddSample(sample);
                        audioBytes.Add(timestamp, sample);
                        timestamp += 160;

                        bytesRead = ulawStm.Read(buffer, 0, 160);
                    }
                }
            }

            string fileName = @".\TestAudio\output-from-wav.wav";

            using (WaveFileWriter writer = new WaveFileWriter(fileName, ulawFormat))
            {
                var testSequence = audioBytes.SelectMany(p => p.Value).ToArray();
                writer.Write(testSequence, 0, testSequence.Length);
            }
        }
Example #8
        private void GetAudioSamples()
        {
            ////var pcmStream = WaveFormatConversionStream.CreatePcmStream(new Mp3FileReader("whitelight.mp3"));
            //var pcmStream = new WaveFileReader("whitelight-ulaw.wav");
            //byte[] sampleBuffer = new byte[160];
            //int bytesRead = pcmStream.Read(sampleBuffer, 0, 160);
            ////int bytesRead = m_rawRTPPayloadReader.BaseStream.Read(sampleBuffer, 0, 160);
            //while (bytesRead > 0)
            //{
            //    m_rtpChannel.AddSample(sampleBuffer);
            //    bytesRead = pcmStream.Read(sampleBuffer, 0, 160);
            //    //bytesRead = m_rawRTPPayloadReader.BaseStream.Read(sampleBuffer, 0, 160);
            //}

            var pcmFormat  = new WaveFormat(8000, 16, 1);
            var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new Mp3FileReader("whitelight.mp3")))
            {
                using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                {
                    byte[] buffer    = new byte[160];
                    int    bytesRead = ulawStm.Read(buffer, 0, 160);

                    while (bytesRead > 0)
                    {
                        byte[] sample = new byte[bytesRead];
                        Array.Copy(buffer, sample, bytesRead);
                        m_rtpChannel.Send(sample, 20);

                        bytesRead = ulawStm.Read(buffer, 0, 160);
                    }
                }
            }

            logger.Debug("Finished adding audio samples.");
        }
Example #9
        private void btnStopRecord1_Click(object sender, EventArgs e)
        {
            timerClock.Stop();
            audioCapture a = new audioCapture();

            a.OnRecordingStopped(sender, null);

            // This creates the waveform view that is shown on the screen.
            // The stream is deliberately not wrapped in a using block: disposing it here
            // would leave the wave viewer holding a closed stream.
            var waveFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            var sampleStream = new RawSourceWaveStream(audioStream1, waveFormat);
            waveViewerSample1.WaveStream = sampleStream;

            //Enable/Disable the buttons as required

            setValidStream(true);

            // Show the length of the sample for the user to see
            double sampleLength = (audioCapture.recordingEnded - audioCapture.recordingStarted).TotalSeconds;

            lblSample1Length.Text = Math.Round(sampleLength, 2).ToString();
        }
Example #10
        private void button1_Click(object sender, EventArgs e)
        {
            if (string.IsNullOrWhiteSpace(inFile) || string.IsNullOrWhiteSpace(textBox2.Text))
            {
                MessageBox.Show("select an input/output first!"); return;
            }

            #region get error "NoDriver calling acmFormatSuggest": http://stackoverflow.com/questions/5652388/naudio-error-nodriver-calling-acmformatsuggest
            //using (var reader = new WaveFileReader(inFile))
            //using (var converter = WaveFormatConversionStream.CreatePcmStream(reader))
            //{
            //    WaveFileWriter.CreateWaveFile(textBox2.Text, converter);
            //}
            #endregion

            #region use command line converter : search for "Converting Audio Without ACM or MFT" : https://www.codeproject.com/Articles/501521/How-to-convert-between-most-audio-formats-in-NET
            ////var lamepath = @"C:\Users\Mark\Apps\lame.exe";
            ////var lamepath = @"C:\Users\323122960\Documents\Projects\UoT - audio files\Codec\extract msi\PlaybackInstallation\Verint\Playback\CommandLineConvertor.exe";
            //var lamepath = @"C:\Users\323122960\Documents\Projects\UoT - audio files\Codec\exe from - se442028-Logs-Yufen_SHS Log-VerintPlayback\CommandLineConvertor.exe";
            ////var lamepath = @"C:\Users\323122960\Documents\Projects\UoT - audio files\Codec\Solution_1\ffmpeg-0.5\ffmpeg-0.5\ffmpeg.exe";
            //Process p = new Process();
            //p.StartInfo.FileName = lamepath; //1st arg in cmd line, other args assigned to  p.StartInfo.Arguments ="";
            //p.StartInfo.UseShellExecute = false;
            //p.StartInfo.Arguments = String.Format("-b 128 \"{0}\" \"{1}\"", inFile, outFile);
            //p.StartInfo.CreateNoWindow = true;
            //p.Start();
            #endregion

            #region MuLawDecoder : http://stackoverflow.com/questions/9551011/naudio-decoding-ulaw-to-pcm
            //byte[] bytes = System.IO.File.ReadAllBytes(inFile);
            //using (var writer = new WaveFileWriter(textBox2.Text, new WaveFormat(8000, 16, 1)))
            ////using(var reader = new WaveFileReader(inFile))
            ////using(byte waveByte = new WaveFileReader(inFile).ReadByte())

            //for(int i=0;i<bytes.Length;i++)
            //{
            //    short pcm = MuLawDecoder.MuLawToLinearSample(bytes[i]);
            //    writer.WriteByte
            //}

            #endregion

            #region get 16bit http://stackoverflow.com/questions/6647730/change-wav-file-to-16khz-and-8bit-with-using-naudio

            //using (var reader = new WaveFileReader(inFile))
            //{
            //    var newFormat = new WaveFormat(8000, 16, 1);
            //    using (var conversionStream = new WaveFormatConversionStream(newFormat, reader)) //using() does the .Close() implicitly
            //    {
            //        WaveFileWriter.CreateWaveFile(outFile, conversionStream);
            //    }
            //}

            #endregion

            #region Working Solution BUT outFile is just noise! : http://stackoverflow.com/questions/6647730/change-wav-file-to-16khz-and-8bit-with-using-naudio
            ////for error "ACM format not possible" : http://stackoverflow.com/questions/13628145/acmnotpossible-calling-acmstreamopen-naudio
            ////http://stackoverflow.com/questions/6951949/how-to-decode-rtp-packets-and-save-it-has-wav-file
            ////converted file is just a noise/sizzle : http://stackoverflow.com/questions/6647730/change-wav-file-to-16khz-and-8bit-with-using-naudio


            FileStream fileStream = new FileStream(inFile, FileMode.Open);
            //WaveFileReader streamSansRIFF = new WaveFileReader(fileStream); // inserted to read ONLY the actual audio part of the file; a raw Stream would treat the RIFF header metadata as if it were audio data
            var waveFormat = WaveFormat.CreateMuLawFormat(8000, 2); // 2 = stereo, 1 = mono; no other channel counts are valid here

            var reader = new RawSourceWaveStream(fileStream, waveFormat);


            using (WaveStream convertedStream = WaveFormatConversionStream.CreatePcmStream(reader))
            //using(var upsampler = new WaveFormatConversionStream(new WaveFormat(8000, 16, 1) , convertedStream))
            //using (ISampleProvider convertedStream = WaveFormatConversionStream.CreatePcmStream(reader))
            {
                //WaveFileWriter.CreateWaveFile(inFile.Replace("vox", "wav"), convertedStream);//http://stackoverflow.com/questions/6647730/change-wav-file-to-16khz-and-8bit-with-using-naudio


                WaveFileWriter.CreateWaveFile(textBox2.Text, convertedStream);
                //WaveFileWriter.CreateWaveFile16(textBox2.Text, convertedStream);//http://stackoverflow.com/questions/22869594/how-to-play-isampleprovider
            }
            fileStream.Close();
            #endregion
        }
Example #11
File: Utils.cs Project: spFly/sip
        private static async Task SendRtp(RTPSession rtpSession, IPEndPoint dstRtpEndPoint, string audioFileName, CancellationTokenSource cts)
        {
            try
            {
                string audioFileExt = Path.GetExtension(audioFileName).ToLower();

                switch (audioFileExt)
                {
                case ".g722":
                case ".ulaw":
                {
                    uint timestamp = 0;
                    using (StreamReader sr = new StreamReader(audioFileName))
                    {
                        byte[] buffer    = new byte[320];
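                        // 320 bytes at 64 kbit/s (G.711 mu-law or G.722) is 40 ms of audio, matching the 40 ms Task.Delay pacing below.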
                        int    bytesRead = sr.BaseStream.Read(buffer, 0, buffer.Length);

                        while (bytesRead > 0 && !cts.IsCancellationRequested)
                        {
                            if (!dstRtpEndPoint.Address.Equals(IPAddress.Any))
                            {
                                rtpSession.SendAudioFrame(timestamp, buffer);
                            }

                            timestamp += (uint)buffer.Length;

                            await Task.Delay(40, cts.Token);

                            bytesRead = sr.BaseStream.Read(buffer, 0, buffer.Length);
                        }
                    }
                }
                break;

                case ".mp3":
                {
                    var pcmFormat  = new WaveFormat(8000, 16, 1);
                    var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);

                    uint timestamp = 0;

                    using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new Mp3FileReader(audioFileName)))
                    {
                        using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                        {
                            byte[] buffer    = new byte[320];
                            int    bytesRead = ulawStm.Read(buffer, 0, buffer.Length);

                            while (bytesRead > 0 && !cts.IsCancellationRequested)
                            {
                                byte[] sample = new byte[bytesRead];
                                Array.Copy(buffer, sample, bytesRead);

                                if (!dstRtpEndPoint.Address.Equals(IPAddress.Any))
                                {
                                    rtpSession.SendAudioFrame(timestamp, sample);
                                }

                                timestamp += (uint)buffer.Length;

                                await Task.Delay(40, cts.Token);

                                bytesRead = ulawStm.Read(buffer, 0, buffer.Length);
                            }
                        }
                    }
                }
                break;

                default:
                    throw new NotImplementedException($"The {audioFileExt} file type is not understood by this example.");
                }
            }
            catch (OperationCanceledException) { }
            catch (Exception excp)
            {
                SIPSorcery.Sys.Log.Logger.LogError($"Exception sending RTP. {excp.Message}");
            }
        }
        public AudioInfo Synthesize(string text)
        {
            Dictionary <uint, byte[]> audioBytes = new Dictionary <uint, byte[]>();

            var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            string tempFile = Path.GetTempFileName();

            try
            {
                SynthesizeSpeechRequest sreq = new SynthesizeSpeechRequest();
                sreq.Text         = text;
                sreq.OutputFormat = OutputFormat.Mp3;
                sreq.VoiceId      = VoiceId.Salli;
                SynthesizeSpeechResponse sres = _amazonPolly.SynthesizeSpeechAsync(sreq).GetAwaiter().GetResult();


                using (var pollyMemoryStream = new MemoryStream())
                {
                    sres.AudioStream.CopyTo(pollyMemoryStream);
                    pollyMemoryStream.Flush();

                    pollyMemoryStream.Position = 0;

                    using (Mp3FileReader reader = new Mp3FileReader(pollyMemoryStream, wave => new DmoMp3FrameDecompressor(wave)))
                    {
                        using (WaveStream pcmStream = WaveFormatConversionStream.CreatePcmStream(reader))
                        {
                            WaveFileWriter.CreateWaveFile(tempFile, pcmStream);
                        }
                    }
                }


                var pcmFormat = new WaveFormat(8000, 16, 1);

                List <byte[]> allBytes = new List <byte[]>();

                using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new WaveFileReader(tempFile)))
                {
                    using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                    {
                        byte[] buffer    = new byte[160];
                        int    bytesRead = ulawStm.Read(buffer, 0, 160);

                        while (bytesRead > 0)
                        {
                            byte[] sample = new byte[bytesRead];
                            Array.Copy(buffer, sample, bytesRead);
                            allBytes.Add(sample);

                            bytesRead = ulawStm.Read(buffer, 0, 160);
                        }

                        int secondsToAdd = 1;
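                        // NOTE: in G.711 mu-law, digital silence encodes as 0xFF (or 0x7F), so an
                        // all-zero buffer actually decodes to full-scale negative samples, not silence.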
                        var silentBytes  = new byte[ulawStm.WaveFormat.AverageBytesPerSecond * secondsToAdd];
                        allBytes.Add(silentBytes);
                    }
                }

                AudioInfo ai = new AudioInfo()
                {
                    AudioData = allBytes.SelectMany(p => p).ToArray(),
                    Codec     = Codec.G711U
                };

                return(ai);

                //string fileName = @".\TestAudio\output-from-polly-mp3-then-wav.wav";
                //using (WaveFileWriter writer = new WaveFileWriter(fileName, ulawFormat))
                //{
                //    var testSequence = audioBytes.SelectMany(p => p.Value).ToArray();
                //    writer.Write(testSequence, 0, testSequence.Length);
                //}
            }
            finally
            {
                try
                {
                    File.Delete(tempFile);
                }
                catch { }
            }
        }
Example #13
        public static void TestPolly3()
        {
            Dictionary <uint, byte[]> audioBytes = new Dictionary <uint, byte[]>();

            var ulawFormat = WaveFormat.CreateMuLawFormat(8000, 1);

            string tempFile = Path.GetTempFileName();

            try
            {
                using (AmazonPollyClient pc = new AmazonPollyClient())
                {
                    SynthesizeSpeechRequest sreq = new SynthesizeSpeechRequest();
                    sreq.Text         = "Something something something";
                    sreq.OutputFormat = OutputFormat.Mp3;
                    sreq.VoiceId      = VoiceId.Salli;
                    SynthesizeSpeechResponse sres = pc.SynthesizeSpeechAsync(sreq).GetAwaiter().GetResult();


                    using (var pollyMemoryStream = new MemoryStream())
                    {
                        sres.AudioStream.CopyTo(pollyMemoryStream);
                        pollyMemoryStream.Flush();

                        pollyMemoryStream.Position = 0;

                        using (Mp3FileReader reader = new Mp3FileReader(pollyMemoryStream, wave => new DmoMp3FrameDecompressor(wave)))
                        {
                            using (WaveStream pcmStream = WaveFormatConversionStream.CreatePcmStream(reader))
                            {
                                //WaveFileWriter.WriteWavFileToStream(pcmWaveStream, pcmStream);
                                WaveFileWriter.CreateWaveFile(tempFile, pcmStream);
                            }
                        }
                    }
                }


                var  pcmFormat = new WaveFormat(8000, 16, 1);
                uint timestamp = 0;

                using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new WaveFileReader(tempFile)))
                {
                    using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                    {
                        string fileName = @".\TestAudio\output-from-polly-mp3-then-wav.wav";
                        WaveFileWriter.CreateWaveFile(fileName, ulawStm);
                        //byte[] buffer = new byte[160];
                        //int bytesRead = ulawStm.Read(buffer, 0, 160);

                        //while (bytesRead > 0)
                        //{
                        //    byte[] sample = new byte[bytesRead];
                        //    Array.Copy(buffer, sample, bytesRead);
                        //    audioBytes.Add(timestamp, sample);
                        //    timestamp += 160;

                        //    bytesRead = ulawStm.Read(buffer, 0, 160);
                        //}
                    }
                }

                //string fileName = @".\TestAudio\output-from-polly-mp3-then-wav.wav";
                //using (WaveFileWriter writer = new WaveFileWriter(fileName, ulawFormat))
                //{
                //    var testSequence = audioBytes.SelectMany(p => p.Value).ToArray();
                //    writer.Write(testSequence, 0, testSequence.Length);
                //}
            }
            finally
            {
                try
                {
                    File.Delete(tempFile);
                }
                catch { }
            }

        }
Example #14
        private void StackRtp2Instance(RecordInfo_t _recInfo)
        {
            var _ingInstance = RecordIngList.FirstOrDefault(x => x.ext == _recInfo.extension && x.peer == _recInfo.peer_number);

            if (_ingInstance == null)
            {
                byte[] rtpbuff = new byte[_recInfo.size];
                Array.Copy(_recInfo.voice, 0, rtpbuff, 0, _recInfo.size);
                WinSound.RTPPacket rtp = new WinSound.RTPPacket(rtpbuff);

                WaveFormat _wavformat;

                switch (rtp.PayloadType)
                {
                case 0:
                    _wavformat = WaveFormat.CreateMuLawFormat(8000, 1);
                    break;

                case 8:
                    _wavformat = WaveFormat.CreateALawFormat(8000, 1);
                    break;

                case 4:
                    _wavformat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.G723, 8000, 1, 8000 * 1, 1, 8);
                    break;

                case 18:
                    _wavformat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.G729, 8000, 1, 8000 * 1, 1, 8);
                    break;

                default:
                    _wavformat = WaveFormat.CreateALawFormat(8000, 1);
                    break;
                }

                DateTime now = DateTime.Now;
                TimeSpan ts  = now - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Local);

                string _header      = string.Format("{0:0000}{1:00}{2:00}{3:00}{4:00}{5:00}{6:000}", now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second, now.Millisecond);
                string _datepath    = string.Format("{0:0000}-{1:00}-{2:00}", now.Year, now.Month, now.Day);
                string _fileName    = string.Format("{0}_{1}_{2}.wav", _header, _recInfo.extension, _recInfo.peer_number);
                string _wavFileName = string.Format(@"{0}\{1}\{2}", _option.SaveDirectory, _datepath, _fileName);

                string _path = string.Format(@"{0}\{1}", _option.SaveDirectory, _datepath);
                if (!Directory.Exists(_path))
                {
                    Directory.CreateDirectory(_path);
                }

                RtpRecordInfo RecInstance = new RtpRecordInfo(_wavformat, string.Format(@"{0}\{1}", _option.SaveDirectory, _datepath), _fileName)
                {
                    ext = _recInfo.extension, peer = _recInfo.peer_number, codec = _wavformat, idx = ts.TotalMilliseconds, savepath = string.Format(@"{0}\{1}", _option.SaveDirectory, _datepath), filename = _fileName
                };

                RecInstance.EndOfRtpStreamEvent += RecInstance_EndOfRtpStreamEvent;

                RecInstance.Add(_recInfo);
                lock (RecordIngList)
                {
                    RecordIngList.Add(RecInstance);
                }
            }
            else
            {
                _ingInstance.Add(_recInfo);
            }
        }
Example #15
        private static async Task SendRecvRtp(Socket rtpSocket, RTPSession rtpSession, IPEndPoint dstRtpEndPoint, string audioFileName, CancellationTokenSource cts)
        {
            try
            {
                SIPSorcery.Sys.Log.Logger.LogDebug($"Sending from RTP socket {rtpSocket.LocalEndPoint} to {dstRtpEndPoint}.");

                // Nothing is being done with the data being received from the client. But the remote rtp socket will
                // be switched if it differs from the one in the SDP. This helps cope with NAT.
                var rtpRecvTask = Task.Run(async() =>
                {
                    DateTime lastRecvReportAt = DateTime.Now;
                    uint packetReceivedCount  = 0;
                    uint bytesReceivedCount   = 0;
                    byte[] buffer             = new byte[512];
                    EndPoint remoteEP         = new IPEndPoint(IPAddress.Any, 0);

                    SIPSorcery.Sys.Log.Logger.LogDebug($"Listening on RTP socket {rtpSocket.LocalEndPoint}.");

                    var recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, remoteEP);

                    while (recvResult.ReceivedBytes > 0 && !cts.IsCancellationRequested)
                    {
                        RTPPacket rtpPacket = new RTPPacket(buffer.Take(recvResult.ReceivedBytes).ToArray());

                        packetReceivedCount++;
                        bytesReceivedCount += (uint)rtpPacket.Payload.Length;

                        recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, remoteEP);

                        if (DateTime.Now.Subtract(lastRecvReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                        {
                            lastRecvReportAt = DateTime.Now;
                            dstRtpEndPoint   = recvResult.RemoteEndPoint as IPEndPoint;

                            SIPSorcery.Sys.Log.Logger.LogDebug($"RTP recv {rtpSocket.LocalEndPoint}<-{dstRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                        }
                    }
                });

                string audioFileExt = Path.GetExtension(audioFileName).ToLower();

                switch (audioFileExt)
                {
                case ".g722":
                case ".ulaw":
                {
                    uint timestamp = 0;
                    using (StreamReader sr = new StreamReader(audioFileName))
                    {
                        DateTime lastSendReportAt    = DateTime.Now;
                        uint     packetReceivedCount = 0;
                        uint     bytesReceivedCount  = 0;
                        byte[]   buffer    = new byte[320];
                        int      bytesRead = sr.BaseStream.Read(buffer, 0, buffer.Length);

                        while (bytesRead > 0 && !cts.IsCancellationRequested)
                        {
                            packetReceivedCount++;
                            bytesReceivedCount += (uint)bytesRead;

                            if (!dstRtpEndPoint.Address.Equals(IPAddress.Any))
                            {
                                rtpSession.SendAudioFrame(rtpSocket, dstRtpEndPoint, timestamp, buffer);
                            }

                            timestamp += (uint)buffer.Length;

                            if (DateTime.Now.Subtract(lastSendReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                            {
                                lastSendReportAt = DateTime.Now;
                                SIPSorcery.Sys.Log.Logger.LogDebug($"RTP send {rtpSocket.LocalEndPoint}->{dstRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                            }

                            await Task.Delay(40, cts.Token);

                            bytesRead = sr.BaseStream.Read(buffer, 0, buffer.Length);
                        }
                    }
                }
                break;

                case ".mp3":
                {
                    DateTime lastSendReportAt    = DateTime.Now;
                    uint     packetReceivedCount = 0;
                    uint     bytesReceivedCount  = 0;
                    var      pcmFormat           = new WaveFormat(8000, 16, 1);
                    var      ulawFormat          = WaveFormat.CreateMuLawFormat(8000, 1);

                    uint timestamp = 0;

                    using (WaveFormatConversionStream pcmStm = new WaveFormatConversionStream(pcmFormat, new Mp3FileReader(audioFileName)))
                    {
                        using (WaveFormatConversionStream ulawStm = new WaveFormatConversionStream(ulawFormat, pcmStm))
                        {
                            byte[] buffer    = new byte[320];
                            int    bytesRead = ulawStm.Read(buffer, 0, buffer.Length);

                            while (bytesRead > 0 && !cts.IsCancellationRequested)
                            {
                                packetReceivedCount++;
                                bytesReceivedCount += (uint)bytesRead;

                                byte[] sample = new byte[bytesRead];
                                Array.Copy(buffer, sample, bytesRead);

                                if (!dstRtpEndPoint.Address.Equals(IPAddress.Any))
                                {
                                    rtpSession.SendAudioFrame(rtpSocket, dstRtpEndPoint, timestamp, sample);
                                }

                                timestamp += (uint)buffer.Length;

                                if (DateTime.Now.Subtract(lastSendReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                                {
                                    lastSendReportAt = DateTime.Now;
                                    SIPSorcery.Sys.Log.Logger.LogDebug($"RTP send {rtpSocket.LocalEndPoint}->{dstRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                                }

                                await Task.Delay(40, cts.Token);

                                bytesRead = ulawStm.Read(buffer, 0, buffer.Length);
                            }
                        }
                    }
                }
                break;

                default:
                    throw new NotImplementedException($"The {audioFileExt} file type is not understood by this example.");
                }
            }
            catch (OperationCanceledException) { }
            catch (Exception excp)
            {
                SIPSorcery.Sys.Log.Logger.LogError($"Exception sending RTP. {excp.Message}");
            }
        }
Example #16
 public AcmMuLawChatCodec()
     : base(new WaveFormat(8000, 16, 1), WaveFormat.CreateMuLawFormat(8000, 1))
 {
 }
Example #17
        private void StackRtp2Instance(RecordInfo_t recInfo, byte[] buffer)
        {
            var ingInstance = RecordIngList.FirstOrDefault(x => x.ext == recInfo.extension && x.peer == recInfo.peer_number);

            if (ingInstance == null)
            {
                byte[] rtpbuff = new byte[recInfo.size];
                Array.Copy(recInfo.voice, 0, rtpbuff, 0, recInfo.size);
                WinSound.RTPPacket rtp = new WinSound.RTPPacket(rtpbuff);

                WaveFormat wavformat;

                switch (rtp.PayloadType)
                {
                case 0:
                    wavformat = WaveFormat.CreateMuLawFormat(8000, 1);
                    break;

                case 8:
                    wavformat = WaveFormat.CreateALawFormat(8000, 1);
                    break;

                case 4:
                    wavformat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.G723, 8000, 1, 8000, 1, 8);
                    break;

                case 18:
                    wavformat = WaveFormat.CreateCustomFormat(WaveFormatEncoding.G729, 8000, 1, 8000, 1, 8);
                    break;

                default:
                    wavformat = WaveFormat.CreateALawFormat(8000, 1);
                    break;
                }

                DateTime now = DateTime.Now;
                TimeSpan ts  = now - new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Local);

                string header   = string.Format("{0:0000}{1:00}{2:00}{3:00}{4:00}{5:00}{6:000}", now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second, now.Millisecond);
                string datepath = string.Format("{0:0000}-{1:00}-{2:00}", now.Year, now.Month, now.Day);
                string fileName = string.Format("{0}_{1}_{2}.wav", header, recInfo.extension, recInfo.peer_number);

                string path = string.Format(@"{0}\{1}", Options.savedir, datepath);

                if (!Directory.Exists(path))
                {
                    Directory.CreateDirectory(path);
                }

                RtpRecordInfo RecInstance = new RtpRecordInfo(wavformat, path, fileName)
                {
                    ext = recInfo.extension, peer = recInfo.peer_number, codec = wavformat, idx = ts.TotalMilliseconds, savepath = path, filename = fileName
                };

                RecInstance.EndOfRtpStreamEvent += RecInstance_EndOfRtpStreamEvent;

                // util.WriteLogTest3(recInfo.isExtension.ToString() + " : >> RTPPacket Codec : " + rtp.PayloadType.ToString() + " // RecInfo Codec : " + recInfo.codec.ToString(), fileName + "_codec");
                RecInstance.chkcount++;
                RecInstance.firstIsExtension = recInfo.isExtension;

                RecInstance.Add(recInfo);
                lock (RecordIngList)
                {
                    RecordIngList.Add(RecInstance);
                }
            }
            else
            {
                //if (ingInstance.chkcount == 1 && ingInstance.firstIsExtension != recInfo.isExtension)
                //{
                //    byte[] rtpbuff = new byte[recInfo.size];
                //    Array.Copy(recInfo.voice, 0, rtpbuff, 0, recInfo.size);
                //    WinSound.RTPPacket rtp = new WinSound.RTPPacket(rtpbuff);

                //    util.WriteLogTest3(recInfo.isExtension.ToString() + " : >> RTPPacket Codec : " + rtp.PayloadType.ToString() + " // Structure Codec : " + recInfo.codec.ToString(), ingInstance.filename + "_codec");
                //    ingInstance.chkcount++;
                //}

                ingInstance.Add(recInfo);
            }
        }