Beispiel #1
0
        /// <summary>
        /// Handles incoming microphone samples while talk mode is active: converts
        /// them to the target wave format if needed, A-law encodes them and writes
        /// the result to the device stream.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Carries the raw PCM samples in <c>e.RawData</c>.</param>
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc     = e.RawData;
                        int    totBytes = bSrc.Length;

                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            // Source format differs from the target: resample into a
                            // fixed-size scratch buffer.
                            var bDst = new byte[44100];
                            using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                            using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                            {
                                totBytes = 0;
                                int read;
                                // Clamp each request so we never write past the end of
                                // bDst (the original always asked for 10000 bytes and
                                // could overflow once totBytes exceeded 34100).
                                while ((read = helpStm.Read(bDst, totBytes, Math.Min(10000, bDst.Length - totBytes))) > 0)
                                {
                                    totBytes += read;
                                    if (totBytes == bDst.Length)
                                        break; // scratch buffer full
                                }
                            }
                            bSrc = bDst;
                        }

                        // A-law encoding emits one byte per 16-bit sample, so the
                        // output is half the size of the PCM input.
                        var enc = new byte[totBytes / 2];
                        ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                        try
                        {
                            _avstream.Write(enc, 0, enc.Length);
                            _avstream.Flush();
                        }
                        catch (SocketException)
                        {
                            // Connection dropped mid-write; end talk mode.
                            StopTalk();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
                StopTalk();
            }
        }
Beispiel #2
0
        /// <summary>
        /// Handles incoming microphone samples while talk mode is active: converts
        /// them to the target wave format if needed, A-law encodes them and writes
        /// the result to the device stream.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Carries the raw PCM samples in <c>e.RawData</c>.</param>
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc     = e.RawData;
                        int    totBytes = bSrc.Length;

                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            // Convert into a dedicated buffer instead of reading back into
                            // bSrc: the helper stream wraps bSrc, so the original in-place
                            // read could corrupt unconsumed source data and overflow when
                            // bSrc was shorter than 25000 bytes. using-blocks also ensure
                            // both streams are disposed if the conversion throws.
                            var bDst = new byte[25000];
                            using (var ws = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                            using (var helpStm = new WaveFormatConversionStream(_waveFormat, ws))
                            {
                                totBytes = helpStm.Read(bDst, 0, bDst.Length);
                            }
                            bSrc = bDst;
                        }

                        // A-law encoding emits one byte per 16-bit sample.
                        var enc = new byte[totBytes / 2];
                        ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                        try
                        {
                            _avstream.Write(enc, 0, enc.Length);
                            _avstream.Flush();
                        }
                        catch (SocketException)
                        {
                            // Connection dropped mid-write; end talk mode.
                            StopTalk();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Log.Error("", ex);
                StopTalk();
            }
        }
Beispiel #3
0
        /// <summary>
        /// Handles incoming microphone samples while talk mode is active: optionally
        /// converts them to <c>_waveFormat</c>, encodes them with the native Foscam
        /// encoder and writes them to the device as timestamped 160-byte packets.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Carries the raw PCM samples in <c>e.RawData</c>.</param>
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc     = e.RawData;
                        int    totBytes = bSrc.Length;

                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            // NOTE(review): the conversion reads back into bSrc, which the
                            // helper stream itself wraps — verify the converted data always
                            // fits in bSrc and that the in-place overlap is intentional.
                            var ws      = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat);
                            var helpStm = new WaveFormatConversionStream(_waveFormat, ws);
                            totBytes = helpStm.Read(bSrc, 0, 25000);

                            ws.Close();
                            ws.Dispose();
                            helpStm.Close();
                            helpStm.Dispose();
                        }

                        // One-time encoder initialisation: the first four bytes of the raw
                        // data are read as two Int16 values and passed to EncodeInit
                        // (presumably format parameters — confirm against the encoder API).
                        if (_needsencodeinit)
                        {
                            _enc.EncodeInit(BitConverter.ToInt16(e.RawData, 0), BitConverter.ToInt16(e.RawData, 2));
                            _needsencodeinit = false;
                        }

                        // Encode the (possibly converted) PCM into buff; c receives the
                        // number of encoded bytes produced. Both arrays are pinned for
                        // the duration of the native encoder call.
                        var buff = new byte[25000];
                        int c;
                        unsafe
                        {
                            fixed(byte *src = bSrc)
                            {
                                fixed(byte *dst = buff)
                                {
                                    c = (int)_enc.EncodeFoscam(src, totBytes, dst);
                                }
                            }
                        }
                        // Append the freshly encoded bytes to the pending talk buffer.
                        Buffer.BlockCopy(buff, 0, _talkBuffer, _talkDatalen, c);
                        _talkDatalen += c;

                        // Milliseconds since talk start — base timestamp for this batch.
                        var dtms = (int)(DateTime.Now - _dt).TotalMilliseconds;
                        int i    = 0;
                        int j    = 0;
                        try
                        {
                            while (j + 160 < _talkDatalen)
                            {
                                //need to write out in 160 byte packets for 40ms
                                byte[] cmd = SInit(TalkData, MoIPAvFlag);

                                // Packet header: timestamp (base + 40 ms per packet),
                                // sequence number, elapsed whole seconds, a zero byte,
                                // then the payload length (160).
                                cmd = AddNext(cmd, dtms + (i * 40));
                                cmd = AddNext(cmd, _seq);
                                cmd = AddNext(cmd, (int)(DateTime.Now - _dt).TotalSeconds);
                                cmd = AddNext(cmd, (byte)0x0);
                                cmd = AddNext(cmd, 160);

                                var pkt = new byte[160];
                                Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 160);
                                cmd = AddNext(cmd, pkt, 160);
                                Encode(ref cmd);

                                _avstream.Write(cmd, 0, cmd.Length);
                                j += 160;
                                _seq++;
                                i++;
                            }
                            // Shift any partial packet to the front of the talk buffer so
                            // it is completed and sent by the next callback.
                            if (j < _talkDatalen)
                            {
                                Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                                _talkDatalen = _talkDatalen - j;
                            }
                        }
                        catch (SocketException)
                        {
                            // Connection dropped mid-write; tear down talk mode.
                            StopTalk(true);
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                Log.Error("", ex);//MainForm.LogExceptionToFile(ex);
                StopTalk(true);
            }
        }
Beispiel #4
0
        /// <summary>
        /// Handles incoming microphone samples while talk mode is active: converts
        /// them to the target wave format if needed, encodes them, accumulates the
        /// result in <c>_talkBuffer</c> and writes it out in 240-byte packets.
        /// </summary>
        /// <param name="sender">Event source (unused).</param>
        /// <param name="e">Carries the raw PCM samples in <c>e.RawData</c>.</param>
        private void AudioSourceDataAvailable(object sender, DataAvailableEventArgs e)
        {
            try
            {
                lock (_obj)
                {
                    if (_bTalking && _avstream != null)
                    {
                        byte[] bSrc     = e.RawData;
                        int    totBytes = bSrc.Length;

                        if (!_audioSource.RecordingFormat.Equals(_waveFormat))
                        {
                            // Convert into a dedicated buffer instead of reading back
                            // into bSrc: the helper stream wraps bSrc, so the original
                            // in-place read could corrupt unconsumed source data and
                            // overflow when bSrc was shorter than 25000 bytes.
                            var bDst = new byte[25000];
                            using (var helper = new TalkHelperStream(bSrc, totBytes, _audioSource.RecordingFormat))
                            {
                                using (var helpStm = new WaveFormatConversionStream(_waveFormat, helper))
                                {
                                    totBytes = helpStm.Read(bDst, 0, bDst.Length);
                                }
                            }
                            bSrc = bDst;
                        }

                        // NOTE(review): enc is first produced by the mu-law codec and then
                        // immediately overwritten in place by the A-law encoder, so the
                        // mu-law result is discarded — confirm which encoding the device
                        // actually expects; one of the two calls looks redundant.
                        var enc = _muLawCodec.Encode(bSrc, 0, totBytes);
                        ALawEncoder.ALawEncode(bSrc, totBytes, enc);

                        // Append the encoded bytes to the pending talk buffer.
                        Buffer.BlockCopy(enc, 0, _talkBuffer, _talkDatalen, enc.Length);
                        _talkDatalen += enc.Length;

                        int j = 0;
                        try
                        {
                            // Drain complete 240-byte packets to the device.
                            while (j + 240 < _talkDatalen)
                            {
                                var pkt = new byte[240];
                                Buffer.BlockCopy(_talkBuffer, j, pkt, 0, 240);

                                _avstream.Write(pkt, 0, 240);
                                j += 240;
                            }
                            // Shift any partial packet to the front of the buffer so it
                            // is completed and sent by the next callback.
                            if (j < _talkDatalen)
                            {
                                Buffer.BlockCopy(_talkBuffer, j, _talkBuffer, 0, _talkDatalen - j);
                                _talkDatalen = _talkDatalen - j;
                            }
                        }
                        catch (SocketException)
                        {
                            // Connection dropped mid-write; end talk mode.
                            StopTalk();
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
                StopTalk();
            }
        }