Example #1
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void UDP_Receive()
        {
            try {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive data.
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    string c_ip = remoteEP.Address.ToString();


                    //G711 compresses the data by 50%, so we allocate a buffer of double
                    //the size to store the decompressed data.
                    byte[] byteDecodedData = new byte[byteData.Length * 2];

                    //Decompress data using the proper vocoder.
                    ALawDecoder.ALawDecode(byteData, out byteDecodedData);

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            } catch (Exception ex) {
                //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            } finally {
                nUdpClientFlag += 1;
            }
        }
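Example #1 uses a udpClient field and a bStop flag without showing how they are set up. A minimal sketch of the companion members it implies, assuming a plain .NET UdpClient (System.Net.Sockets) bound to port 1550 and a background thread (System.Threading) running the loop; the StartReceiving/StopReceiving names are hypothetical:

        // Hypothetical companion members for the UDP_Receive loop above.
        private UdpClient udpClient;
        private volatile bool bStop;

        private void StartReceiving()
        {
            udpClient = new UdpClient(1550);                         // listen on port 1550
            new Thread(UDP_Receive) { IsBackground = true }.Start(); // run the blocking loop off the UI thread
        }

        private void StopReceiving()
        {
            bStop = true;
            udpClient.Close();   // unblocks the pending Receive() so the loop can exit
        }

Closing the UdpClient makes the blocked Receive() throw, which is why the loop in Example #1 wraps everything in a try/catch.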
Example #2
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive data.
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //G711 compresses the data by 50%, so we allocate a buffer of double
                    //the size to store the decompressed data.
                    byte[] byteDecodedData = new byte[byteData.Length * 2];

                    //Decrypt the data, then decompress it with the A-law vocoder.
                    var decryptedData = AES_Crypto.Decrypt(byteData, CallCurrentPass, CallCurrentSalt);
                    ALawDecoder.ALawDecode(decryptedData, out byteDecodedData);

                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message, "VoiceChat-Receive ()", MessageBoxButton.OK, MessageBoxImage.Error, MessageBoxResult.OK);
            }
            finally
            {
                nUdpClientFlag += 1;
            }
        }
Example #3
        private void decodeG711(string filename, Codecs codec)
        {
            WaveFileReader reader = new WaveFileReader(filename);

            byte[]  buffer  = new byte[reader.Length];
            short[] samples = new short[buffer.Length];
            reader.Read(buffer, 0, buffer.Length);
            for (int i = 0; i < buffer.Length; i++)
            {
                if (codec == Codecs.ALAW)
                {
                    samples[i] = ALawDecoder.ALawToLinearSample(buffer[i]);
                }
                else if (codec == Codecs.MULAW)
                {
                    samples[i] = MuLawDecoder.MuLawToLinearSample(buffer[i]);
                }
            }

            WaveFileWriter writer = new WaveFileWriter("tmp_" + (++this.tmpCount) + ".wav", new WaveFormat(44100, 16, 2));

            writer.WriteSamples(samples, 0, samples.Length);
            writer.Close();

            WaveFileReader            tmpReader = new WaveFileReader("tmp_" + this.tmpCount + ".wav");
            WaveStream                pcm       = new WaveChannel32(tmpReader);
            BlockAlignReductionStream stream    = new BlockAlignReductionStream(pcm);
            AudioFile file = new WaveFile(tmpReader, stream, filename);

            this.files.Add(file);
            this.addFileToListView(file);
            this.initAudio(file);
        }
Example #4
 private static float[] ALawDecompress(byte[] input, int length)
 {
     float[] array = VoiceChatFloatPool.Instance.Get();
     for (int i = 0; i < length; i++)
     {
         short num = ALawDecoder.ALawToLinearSample(input[i]);
         array[i] = (float)num / 32767f;
     }
     return(array);
 }
Example #5
        public byte[] Decode(byte[] data, int offset, int length)
        {
            var decoded  = new byte[length * 2];
            var outIndex = 0;

            for (var i = 0; i < length; i++)
            {
                var decodedSample = ALawDecoder.ALawToLinearSample(data[i + offset]);
                decoded[outIndex++] = (byte)(decodedSample & 0xFF);
                decoded[outIndex++] = (byte)(decodedSample >> 8);
            }
            return(decoded);
        }
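Examples #5 and #7 expand each A-law byte into a 16-bit little-endian PCM sample. A minimal sketch of playing that output with NAudio (the library used in Example #12), assuming the usual G.711 format of 8000 Hz, 16-bit, mono; the PlayDecoded name is hypothetical:

        // Minimal NAudio playback of the PCM bytes returned by Decode() above.
        void PlayDecoded(byte[] pcm)
        {
            var provider = new BufferedWaveProvider(new WaveFormat(8000, 16, 1));
            using (var output = new WaveOutEvent())
            {
                output.Init(provider);
                output.Play();
                provider.AddSamples(pcm, 0, pcm.Length);     // queue the decoded block
                System.Threading.Thread.Sleep(500);          // crude: give the short buffer time to drain
            }
        }

In a real receive loop the provider and the output device would be created once and AddSamples called per packet, which is essentially what Example #12 does with DirectSoundOut.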
Example #6
    public void PlayReceivedVoice(byte[] byteData)
    {
        try
        {
            byte[] byteDecodedData = new byte[byteData.Length * 2];

            ALawDecoder.ALawDecode(byteData, out byteDecodedData);
            _playbackBuffer = new SecondaryBuffer(_playbackBufferDescription, _device);
            _playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
            _playbackBuffer.Play(0, BufferPlayFlags.Default);
        }
        catch  { }
    }
Example #7
        public byte[] Decode(byte[] data, int offset, int length)
        {
            byte[] decoded  = new byte[length * 2];
            int    outIndex = 0;

            for (int n = 0; n < length; n++)
            {
                short decodedSample = ALawDecoder.ALawToLinearSample(data[n + offset]);
                decoded[outIndex++] = (byte)(decodedSample & 0xFF);
                decoded[outIndex++] = (byte)(decodedSample >> 8);
            }
            return(decoded);
        }
    public void PlayReceivedVoice(byte[] byteData)
    {
        try
        {
            //G711 compresses the data by 50%, so we allocate a buffer of double
            //the size to store the decompressed data.
            byte[] byteDecodedData = new byte[byteData.Length * 2];

            ALawDecoder.ALawDecode(byteData, out byteDecodedData);      // G.711 decoding
            playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
            playbackBuffer.Write(0, byteDecodedData, LockFlag.None);    // 0 = starting offset within the buffer
            playbackBuffer.Play(0, BufferPlayFlags.Default);            // 0 = sound priority for hardware that mixes voice resources
        }
        catch  { }
    }
Example #9
        //Receive audio data coming on port 1550 and feed it to the speakers to be played.

        private void Receive()
        {
            try
            {
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                while (!bStop)
                {
                    //Receive data.
                    byte[] byteData = udpClient.Receive(ref remoteEP);

                    //G711 compresses the data by 50%, so we allocate a buffer of double
                    //the size to store the decompressed data.
                    byte[] byteDecodedData = new byte[byteData.Length * 2];

                    //Decompress data using the proper vocoder.
                    if (vocoder == Vocoder.ALaw)
                    {
                        ALawDecoder.ALawDecode(byteData, out byteDecodedData);
                    }

                    else
                    {
                        byteDecodedData = new byte[byteData.Length];
                        byteDecodedData = byteData;
                    }


                    //Play the data received to the user.
                    playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                    playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                    playbackBuffer.Play(0, BufferPlayFlags.Default);
                }
            }
            catch (Exception ex)
            {
            }
            finally
            {
                nUdpClientFlag += 1;
            }
        }
Example #10
        // Receive voice data from the server.
        private void ReceiveVoiceInfo(byte[] byteData)
        {
            try
            {
                //Initialize();
                bStop = false;

                //G711 compresses the data by 50%, so we allocate a buffer of double
                //the size to store the decompressed data.
                byte[] byteDecodedData = new byte[byteData.Length * 2];

                //Decompress data using the proper vocoder.
                if (vocoder == Vocoder.ALaw)
                {
                    ALawDecoder.ALawDecode(byteData, out byteDecodedData);
                }
                else if (vocoder == Vocoder.uLaw)
                {
                    MuLawDecoder.MuLawDecode(byteData, out byteDecodedData);
                }
                else
                {
                    byteDecodedData = new byte[byteData.Length];
                    byteDecodedData = byteData;
                }


                //Play the data received to the user.
                playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                playbackBuffer.Play(0, BufferPlayFlags.Default);
            }
            catch (Exception ex)
            {
            }
            finally
            {
                nUdpClientFlag += 1;
            }
        }
Example #11
 /// <summary>
 /// The purpose of this method is to decode the RTP payload into raw audio.
 /// </summary>
 private void DecodePayload()
 {
     // u-law decoding
     if (this.payloadType == 0)
     {
         decodedPayload = MuLawDecoder.MuLawDecode(payload);
     }
     // a-law decoding
     else if (this.payloadType == 1)
     {
         decodedPayload = ALawDecoder.ALawDecode(payload);
     }
     else if (this.payloadType == 101)
     {
         decodedPayload = null;
     }
     else
     {
         throw new InvalidRTPPacketException();
         // unknown decoding --- more may need to be added in the future.
     }
 }
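DecodePayload switches on an RTP payload-type field, and Example #20 below relies on helpers such as getPayloadRTP and getTimestampRTP. A sketch of what that header parsing can look like, following the fixed RTP header layout of RFC 3550; all names here are hypothetical, and header extensions and padding are ignored. (In the standard audio profile of RFC 3551, PCMU is payload type 0 and PCMA is 8, so mapping A-law to type 1 as above is application-specific.)

        // Hypothetical RTP header accessors (RFC 3550). The fixed header is 12 bytes,
        // followed by 4 bytes per CSRC entry; the payload starts after that.
        static int GetPayloadType(byte[] packet)
        {
            return packet[1] & 0x7F;                           // low 7 bits of byte 1
        }

        static ushort GetSequenceNumber(byte[] packet)
        {
            return (ushort)((packet[2] << 8) | packet[3]);     // big-endian 16-bit field
        }

        static uint GetTimestamp(byte[] packet)
        {
            return (uint)((packet[4] << 24) | (packet[5] << 16) | (packet[6] << 8) | packet[7]);
        }

        static byte[] GetPayload(byte[] packet)
        {
            int headerLength = 12 + 4 * (packet[0] & 0x0F);    // 12 + 4 * CC
            var payload = new byte[packet.Length - headerLength];
            System.Array.Copy(packet, headerLength, payload, 0, payload.Length);
            return payload;
        }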
Example #12
        private void AudioIn(Socket mySocket)
        {
            var            wf = new WaveFormat(8000, 16, 1);
            DirectSoundOut dso;

            if (String.IsNullOrEmpty(Parent.AudioOutDevice))
            {
                dso = new DirectSoundOut(100);
            }
            else
            {
                dso = new DirectSoundOut(Guid.Parse(Parent.AudioOutDevice));
            }
            var bwp = new BufferedWaveProvider(wf);

            dso.Init(bwp);
            dso.Play();
            var bBuffer = new byte[3200];

            try
            {
                while (mySocket.Connected)
                {
                    int    i = mySocket.Receive(bBuffer, 0, 3200, SocketFlags.None);
                    byte[] dec;
                    ALawDecoder.ALawDecode(bBuffer, i, out dec);
                    bwp.AddSamples(dec, 0, dec.Length);
                    Thread.Sleep(100);
                }
            }
            catch (Exception ex)
            {
                mySocket.Close();
                mySocket = null;
            }
            dso.Stop();
            dso.Dispose();
        }
Example #13
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        private void Receive()
        {
            try
            {
                IsThreadReceiveEnd = false;

                byte[] byteData;
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                if (eMode == Mode.Server)
                {
                    LogAppend("Server Started");
                    LogUsersConnected();
                }
                else
                {
                    LogAppend("Client Audio Connected");
                }

                while (!bStop)
                {
                    //Receive data.
                    try
                    {
                        //bytes_received = udp_socket.ReceiveFrom(data, ref ep);

                        try
                        {
                            byteData = udpClient.Receive(ref remoteEP);
                        }
                        catch (Exception)
                        {
                            return;
                        }

                        //G711 compresses the data by 50%, so we allocate a buffer of double
                        //the size to store the decompressed data.
                        byte[] byteDecodedData = new byte[byteData.Length * 2];

                        if (vocoder == Vocoder.ALaw)
                        {
                            ALawDecoder.ALawDecode(byteData, out byteDecodedData);  //Vocoder.ALaw
                        }
                        else if (vocoder == Vocoder.uLaw)
                        {
                            MuLawDecoder.MuLawDecode(byteData, out byteDecodedData);  //Vocoder.uLaw
                        }
                        else
                        {
                            byteDecodedData = new byte[byteData.Length];
                            byteDecodedData = byteData;
                        }


                        if (eMode == Mode.Server)
                        {
                            lock (otherPartyIPs)
                            {
                                for (int i = 0; i < otherPartyIPs.Count; i++)
                                {
                                    udpClient.Send(byteDecodedData, byteDecodedData.Length, otherPartyIPs[i].Address.ToString(), 1550);
                                }
                            }
                        }

                        //Play the data received to the user.
                        playbackBuffer = new SecondaryBuffer(playbackBufferDescription, device);
                        playbackBuffer.Write(0, byteDecodedData, LockFlag.None);
                        playbackBuffer.Play(0, BufferPlayFlags.Default);
                    }
                    catch (Exception)
                    {
                    }
                }

                if (eMode == Mode.Server)
                {
                    LogAppend("Server Stopped");
                    LogUsersConnected();
                }
                else
                {
                    LogAppend("Client Audio Disconnected");
                }
            }
            catch (Exception ex)
            {
                LogAppend("Voice Receive > " + ex.Message);
                //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                nUdpClientFlag += 1;
            }

            IsThreadReceiveEnd = true;
        }
Example #14
        private void SpyServerListener()
        {
            HttpWebRequest request  = null;
            WebResponse    response = null;
            Stream         stream   = null;

            var data = new byte[3200];

            try
            {
                request                  = (HttpWebRequest)WebRequest.Create(_source);
                request.Timeout          = 10000;
                request.ReadWriteTimeout = 5000;
                response                 = request.GetResponse();
                stream = response.GetResponseStream();

                if (stream != null)
                {
                    stream.ReadTimeout = 5000;
                    while (!_stopEvent.WaitOne(0, false))
                    {
                        if (DataAvailable != null)
                        {
                            int recbytesize = stream.Read(data, 0, 3200);
                            if (recbytesize == 0)
                            {
                                throw new Exception("lost stream");
                            }

                            byte[] dec;
                            ALawDecoder.ALawDecode(data, recbytesize, out dec);

                            if (_sampleChannel != null)
                            {
                                _waveProvider.AddSamples(dec, 0, dec.Length);

                                var sampleBuffer = new float[dec.Length];
                                _sampleChannel.Read(sampleBuffer, 0, dec.Length);

                                if (Listening && WaveOutProvider != null)
                                {
                                    WaveOutProvider.AddSamples(dec, 0, dec.Length);
                                }
                                var da = new DataAvailableEventArgs((byte[])dec.Clone());
                                DataAvailable(this, da);
                            }
                        }
                        else
                        {
                            break;
                        }
                        // need to stop ?
                        if (_stopEvent.WaitOne(0, false))
                        {
                            break;
                        }
                    }
                }

                if (AudioFinished != null)
                {
                    AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
                }
            }
            catch (Exception e)
            {
                if (AudioSourceError != null)
                {
                    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
                }
                MainForm.LogExceptionToFile(e);
            }
            if (stream != null)
            {
                try
                {
                    stream.Close();
                }
                catch
                {
                }
                stream = null;
            }
        }
Example #15
        private void SpyServerListener()
        {
            var data = new byte[3200];

            try
            {
                var request = (HttpWebRequest)WebRequest.Create(_source);
                request.Timeout          = 10000;
                request.ReadWriteTimeout = 5000;
                var response = request.GetResponse();
                using (Stream stream = response.GetResponseStream())
                {
                    if (stream == null)
                    {
                        throw new Exception("Stream is null");
                    }

                    stream.ReadTimeout = 5000;
                    while (!_stopEvent.WaitOne(0, false))
                    {
                        int recbytesize = stream.Read(data, 0, 3200);
                        if (recbytesize == 0)
                        {
                            throw new Exception("lost stream");
                        }

                        byte[] dec;
                        ALawDecoder.ALawDecode(data, recbytesize, out dec);
                        var da = DataAvailable;
                        if (da != null)
                        {
                            if (_sampleChannel != null)
                            {
                                _waveProvider.AddSamples(dec, 0, dec.Length);

                                var sampleBuffer = new float[dec.Length];
                                int read         = _sampleChannel.Read(sampleBuffer, 0, dec.Length);

                                da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));

                                if (Listening)
                                {
                                    WaveOutProvider?.AddSamples(dec, 0, read);
                                }
                            }
                        }
                        else
                        {
                            break;
                        }
                        // need to stop ?
                        if (_stopEvent.WaitOne(0, false))
                        {
                            break;
                        }
                    }
                }

                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            }
            catch (Exception e)
            {
                var af = AudioFinished;
                af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));

                //Logger.LogExceptionToFile(e,"ispyServer");
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                _sampleChannel = null;
            }

            if (_waveProvider?.BufferedBytes > 0)
            {
                _waveProvider.ClearBuffer();
            }

            if (WaveOutProvider?.BufferedBytes > 0)
            {
                WaveOutProvider?.ClearBuffer();
            }
        }
Example #16
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        public void Receive()
        {
            //TODO: Receive Sound DATA

            try
            {
                byte[] byteData;
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);
                cGlobalVars.AddLogChat("Client Audio Connected");

                while (!bStop)
                {
                    //Receive data.
                    try
                    {
                        //bytes_received = udp_socket.ReceiveFrom(data, ref ep);

                        try
                        {
                            byteData = udpClient.Receive(ref remoteEP);
                        }
                        catch (Exception)
                        {
                            return;
                        }

                        //G711 compresses the data by 50%, so we allocate a buffer of double
                        //the size to store the decompressed data.
                        byte[] byteDecodedData = new byte[byteData.Length * 2];

                        if (vocoder == VoiceCommon.Vocoder.ALaw)
                        {
                            ALawDecoder.ALawDecode(byteData, out byteDecodedData); //Vocoder.ALaw
                        }
                        else if (vocoder == VoiceCommon.Vocoder.uLaw)
                        {
                            MuLawDecoder.MuLawDecode(byteData, out byteDecodedData); //Vocoder.uLaw
                        }
                        else
                        {
                            byteDecodedData = new byte[byteData.Length];
                            byteDecodedData = byteData;
                        }

                        //Note: this queues the raw (still encoded) bytes; byteDecodedData computed above is never used.
                        bwp_internet.AddSamples(byteData, 0, byteData.Length);
                    }
                    catch (Exception)
                    {
                    }
                }
                cGlobalVars.AddLogChat("Client Audio Disconnected");
            }
            catch (Exception ex)
            {
                cGlobalVars.AddLogChat("Voice Receive > " + ex.Message);
                //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                nUdpClientFlag += 1;
            }
        }
Example #17
        /*
         * Receive audio data coming on port 1550 and feed it to the speakers to be played.
         */
        public void Receive()
        {
            //TODO: Receive Sound DATA

            try
            {
                byte[] byteData;
                bStop = false;
                IPEndPoint remoteEP = new IPEndPoint(IPAddress.Any, 0);

                if (eMode == Mode.Server)
                {
                    cGlobalVars.AddLogChat("Server Started");
                    LogUsersConnected();
                }
                else
                {
                    cGlobalVars.AddLogChat("Client Audio Connected");
                }

                while (!bStop)
                {
                    //Receive data.
                    try
                    {
                        //bytes_received = udp_socket.ReceiveFrom(data, ref ep);

                        try
                        {
                            byteData = udpClient.Receive(ref remoteEP);
                        }
                        catch (Exception)
                        {
                            return;
                        }

                        //G711 compresses the data by 50%, so we allocate a buffer of double
                        //the size to store the decompressed data.
                        byte[] byteDecodedData = new byte[byteData.Length * 2];

                        if (vocoder == Vocoder.ALaw)
                        {
                            ALawDecoder.ALawDecode(byteData, out byteDecodedData); //Vocoder.ALaw
                        }
                        else if (vocoder == Vocoder.uLaw)
                        {
                            MuLawDecoder.MuLawDecode(byteData, out byteDecodedData); //Vocoder.uLaw
                        }
                        else
                        {
                            byteDecodedData = new byte[byteData.Length];
                            byteDecodedData = byteData;
                        }

                        if (eMode == Mode.Server)
                        {
                            lock (clientIPs)
                            {
                                // udpClient.Send(byteData, byteData.Length, clientIPs[i].Address.ToString(), 1550);


                                //Parallel.For(0, clientIPs.Count, delegate(int i)
                                //     {
                                //
                                //     }
                                // );
                            }
                        }

                        //LogAppend("2Receibed Data!");

                        //Note: this queues the raw (still encoded) bytes; byteDecodedData computed above is never used.
                        bwp_internet.AddSamples(byteData, 0, byteData.Length);
                    }
                    catch (Exception)
                    {
                    }
                }

                if (eMode == Mode.Server)
                {
                    cGlobalVars.AddLogChat("Server Stopped");
                    LogUsersConnected();
                }
                else
                {
                    cGlobalVars.AddLogChat("Client Audio Disconnected");
                }
            }
            catch (Exception ex)
            {
                cGlobalVars.AddLogChat("Voice Receive > " + ex.Message);
                //MessageBox.Show(ex.Message, "VoiceChat-Receive ()", MessageBoxButtons.OK, MessageBoxIcon.Error);
            }
            finally
            {
                nUdpClientFlag += 1;
            }
        }
        private void SpyServerListener()
        {
            while (true)
            {
                if (sSocket != null)
                {
                    try
                    {
                        Socket clientSocket = sSocket.Accept();
                        Logger.Info(string.Format("来自【{0}】新的指挥请示已接入!开启计时器!", clientSocket.RemoteEndPoint.ToString()));
                        //获得请求后,开启计时器,显示指挥时间。
                        speakTime.Elapsed += SpeakTime_Elapsed;
                        sw.Start();
                        speakTime.Start();
                        SetForm(false);
                        //Once the timer is running, start receiving and playing the audio data.
                        while (true)
                        {
                            try
                            {
                                byte[] dataSize = RecerveVarData(clientSocket);
                                if (dataSize.Length <= 0)
                                {
                                    Logger.Info("无语音流,指挥结束!!!");
                                    speakTime.Stop();
                                    sw.Stop();
                                    sw.Reset();
                                    SetLB(string.Format("00:00:00"));
                                    SetForm(true);
                                    if (clientSocket != null)
                                    {
                                        //No voice stream received; close the socket.
                                        clientSocket.Shutdown(SocketShutdown.Both);
                                        clientSocket.Close();
                                        clientSocket.Dispose();
                                        clientSocket = null;
                                    }
                                    break;
                                }
                                else
                                {
                                    byte[] dec;
                                    ALawDecoder.ALawDecode(dataSize, dataSize.Length, out dec);
                                    var da = DataAvailable;
                                    if (da != null)
                                    {
                                        //Logger.Info("接受一段语音流,进入播放!!!");
                                        if (_sampleChannel != null)
                                        {
                                            _waveProvider.AddSamples(dec, 0, dec.Length);

                                            var sampleBuffer = new float[dec.Length];
                                            int read         = _sampleChannel.Read(sampleBuffer, 0, dec.Length);

                                            da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));

                                            if (Listening)
                                            {
                                                WaveOutProvider?.AddSamples(dec, 0, read);
                                            }
                                        }
                                    }
                                }
                            }
                            catch (SocketException se)
                            {
                                //sSocket.Shutdown(SocketShutdown.Both);
                                Logger.Error("通信出现异常,退出Socket. " + se.Message);
                                sSocket.Dispose();
                                sSocket = null;
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        if (sSocket != null)
                        {
                            Logger.Error("通信出现异常,关闭Socket. " + e.Message);
                            //接收不到语音流,关闭套接字
                            sSocket.Close();
                            sSocket.Dispose();
                            sSocket = null;
                        }
                    }
                }
                else
                {
                    if (speakTime != null)
                    {
                        Logger.Error("指挥端通信结束,计时器停止。");
                        speakTime.Stop();
                    }
                    if (sw != null)
                    {
                        sw.Stop();
                    }

                    SetLB(string.Format("00:00:00"));
                    SetForm(true);

                    Logger.Info("ServerStream ReStart!!!");
                    Start();
                }
            }
        }
Example #19
        private void SpyServerListener()
        {
            var data = new byte[3200];

            try
            {
                var request = (HttpWebRequest)WebRequest.Create(_source);
                request.Timeout          = 10000;
                request.ReadWriteTimeout = 5000;
                var response = request.GetResponse();
                using (Stream stream = response.GetResponseStream())
                {
                    if (stream == null)
                    {
                        throw new Exception("Stream is null");
                    }
                    stream.ReadTimeout = 5000;
                    while (!_stopEvent.WaitOne(0, false))
                    {
                        int recbytesize = stream.Read(data, 0, 3200);
                        if (recbytesize == 0)
                        {
                            throw new Exception("lost stream");
                        }
                        var recive = Encoding.UTF8.GetString(data, 0, recbytesize);
                        if (recive.Equals("setCurrentVolumeMute"))
                        {
                            setVolumeMute();
                        }
                        else if (recive.StartsWith("setCurrentVolume"))
                        {
                            if (recive.IndexOf("#") > -1)
                            {
                                var    cmd            = recive.Split(new[] { '#' });
                                Thread setValueThread = new Thread(new ParameterizedThreadStart(this.setVolumeFromServer));
                                setValueThread.IsBackground = true;
                                setValueThread.Start(cmd[1]);


                                Thread setValueThread1 = new Thread(new ParameterizedThreadStart(this.setVolumeWINMM));
                                setValueThread1.IsBackground = true;
                                setValueThread1.Start(cmd[1]);
                            }
                        }
                        else
                        {
                            byte[] dec;
                            ALawDecoder.ALawDecode(data, recbytesize, out dec);
                            var da = DataAvailable;
                            if (da != null)
                            {
                                if (_sampleChannel != null)
                                {
                                    _waveProvider.AddSamples(dec, 0, dec.Length);

                                    var sampleBuffer = new float[dec.Length];
                                    int read         = _sampleChannel.Read(sampleBuffer, 0, dec.Length);

                                    da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));


                                    if (Listening)
                                    {
                                        WaveOutProvider?.AddSamples(dec, 0, read);
                                    }
                                }
                            }
                            else
                            {
                                break;
                            }
                            // need to stop ?
                            if (_stopEvent.WaitOne(0, false))
                            {
                                break;
                            }
                        }
                    }
                }

                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            }
            catch (Exception e)
            {
                var af = AudioFinished;
                af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));

                //Logger.LogExceptionToFile(e,"ispyServer");
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                _sampleChannel = null;
            }

            if (_waveProvider?.BufferedBytes > 0)
            {
                _waveProvider.ClearBuffer();
            }

            if (WaveOutProvider?.BufferedBytes > 0)
            {
                WaveOutProvider?.ClearBuffer();
            }
        }
Example #20
        private void AudioThread()
        {
            try
            {
                byte[] byteData;

                while (!button3.Enabled)
                {
                    // Receive the data
                    try
                    {
                        try
                        {
                            byteData = audioclient.Receive(ref audioremote);

                            byte[] audioBytes = getPayloadRTP(byteData);
                            num_audio++;

                            // Store the RTP sequence number
                            sequence_audio.Add((int)getSequenceRTP(byteData));

                            // Get the arrival time and the RTP timestamp
                            long audio_time      = (long)((DateTime.Now - new DateTime(1970, 1, 1)).TotalMilliseconds) / 1000;
                            int  audio_timestamp = (int)getTimestampRTP(byteData);

                            // Compute delay + jitter
                            if (audio_prev_timestamp == 0)
                            {
                                audio_jitter = 0;
                            }
                            else
                            {
                                audio_delay  = (audio_time - audio_prev_time) - (audio_timestamp * 0.000125 - audio_prev_timestamp * 0.000125);
                                audio_jitter = audio_jitter + (Math.Abs(audio_delay) - audio_jitter) / 16;
                            }

                            // Save this packet's timestamp and arrival time for the next iteration
                            audio_prev_timestamp = audio_timestamp;
                            audio_prev_time      = audio_time;

                            // Update the labels
                            label5.Text = String.Format("Delay: {0:0.00} ms", audio_delay);
                            label6.Text = String.Format("Jitter: {0:0.00} ms", audio_jitter);

                            // Mark that RTP audio was received
                            checkBox1.Checked = true;

                            // G.711 compresses the data by 50%, so the decoded buffer must be twice as large
                            byte[] byteDecodedData = new byte[audioBytes.Length * 2];

                            //Using A-law
                            ALawDecoder.ALawDecode(audioBytes, out byteDecodedData);
                            //Without compression (alternative):
                            //byteDecodedData = new byte[audioBytes.Length];
                            //byteDecodedData = audioBytes;

                            // Play back the received data.
                            checkBox3.Checked = true;
                            bufferplayback    = new SecondaryBuffer(bufferDesc, device);
                            bufferplayback.Write(0, byteDecodedData, LockFlag.None);
                            bufferplayback.Play(0, BufferPlayFlags.Default);
                        }
                        catch (Exception)
                        {
                            return;
                        }
                    }
                    catch (Exception)
                    {
                    }
                }
            } catch (Exception)
            {
                MessageBox.Show("Error on audiothread.");
            }
        }
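The delay/jitter arithmetic in Example #20 is the interarrival-jitter estimator from RFC 3550: D is how much the arrival time advanced minus how much the RTP timestamp advanced (0.000125 s per tick corresponds to an 8 kHz clock), and the running jitter is smoothed as J += (|D| - J) / 16. Note that audio_delay above is computed in seconds (from an arrival clock truncated to whole seconds) even though the labels print "ms". A self-contained sketch of the same update, with hypothetical names:

        // RFC 3550 interarrival jitter: D = (Ri - Rj) - (Si - Sj), J += (|D| - J) / 16.
        // Assumes an 8 kHz RTP clock, as in Example #20.
        class JitterEstimator
        {
            const double SecondsPerTick = 1.0 / 8000.0;   // 0.000125 s per timestamp tick
            double prevArrival, prevTimestamp;
            bool hasPrevious;

            public double JitterSeconds { get; private set; }

            public void OnPacket(double arrivalSeconds, uint rtpTimestamp)
            {
                if (hasPrevious)
                {
                    double d = (arrivalSeconds - prevArrival)
                             - (rtpTimestamp - prevTimestamp) * SecondsPerTick;
                    JitterSeconds += (System.Math.Abs(d) - JitterSeconds) / 16.0;
                }
                prevArrival = arrivalSeconds;
                prevTimestamp = rtpTimestamp;
                hasPrevious = true;
            }
        }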