/// <summary>
/// Runs the UDP receive loop on a dedicated thread, feeding received
/// datagrams into the buffered wave provider for playback.
/// </summary>
/// <param name="endPoint">The endpoint the listener is connected to [TYPE:IPEndPoint]</param>
private void ListenerThread(object endPoint)
{
    var remote = (IPEndPoint)endPoint;
    try
    {
        while (true)
        {
            byte[] datagram = udpListener.Receive(ref remote);

            // Clamp the write to however much room the provider currently has.
            var freeBytes = waveProvider.BufferLength - waveProvider.BufferedBytes;
            var bytesToAdd = Math.Min(datagram.Length, freeBytes);

            // Only append while the buffer is not already full.
            if (waveProvider.BufferedBytes < waveProvider.BufferLength)
            {
                waveProvider?.AddSamples(datagram, 0, bytesToAdd);
            }
        }
    }
    catch (SocketException)
    {
        // usually not a problem - just means we have disconnected
    }
}
/// <summary>
/// Handles a received RTP audio packet: decodes the payload with the
/// configured encoder and queues the resulting PCM bytes for playback.
/// </summary>
public void GotAudioRtp(IPEndPoint remoteEndPoint, uint ssrc, uint seqnum, uint timestamp, int payloadID, bool marker, byte[] payload)
{
    // Ignore packets until both the playback provider and a decoder that
    // supports the negotiated sink format are available.
    if (_waveProvider == null || _audioEncoder == null || !_audioEncoder.IsSupported(_selectedSinkFormat))
    {
        return;
    }

    var pcm = _audioEncoder.DecodeAudio(payload, _selectedSinkFormat, AudioPlaybackRate);
    _waveProvider?.AddSamples(pcm, 0, pcm.Length);
}
/// <summary>
/// Handles a received RTP audio packet: decodes the payload to 16-bit PCM
/// samples and queues their byte representation for playback.
/// </summary>
public void GotAudioRtp(IPEndPoint remoteEndPoint, uint ssrc, uint seqnum, uint timestamp, int payloadID, bool marker, byte[] payload)
{
    if (_waveProvider != null && _audioEncoder != null)
    {
        var pcmSample = _audioEncoder.DecodeAudio(payload, _audioFormatManager.SelectedFormat);

        // Convert the 16-bit samples to bytes with one bulk memory copy instead
        // of allocating a 2-byte array per sample via SelectMany/BitConverter.
        // Raw copy matches BitConverter's output on little-endian hosts, which
        // is every platform this playback path targets in practice.
        byte[] pcmBytes = new byte[pcmSample.Length * sizeof(short)];
        Buffer.BlockCopy(pcmSample, 0, pcmBytes, 0, pcmBytes.Length);

        _waveProvider?.AddSamples(pcmBytes, 0, pcmBytes.Length);
    }
}
/// <summary>
/// Pump loop that drains the video and audio queues until the stop event is
/// signalled or the application is shutting down, raising NewFrame /
/// DataAvailable events and feeding the audio providers.
/// </summary>
private void EventManager()
{
    Bitmap frame;
    while (!_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
    {
        try
        {
            // Video: hand the frame to subscribers, then dispose it here —
            // the event args do not take ownership of the bitmap.
            if (_videoQueue.TryDequeue(out frame))
            {
                if (frame != null)
                {
                    NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
                    frame.Dispose();
                }
            }
            byte[] audio;
            if (!_audioQueue.TryDequeue(out audio))
            {
                continue;
            }
            // Snapshot the delegate so a concurrent unsubscribe cannot race us.
            var da = DataAvailable;
            da?.Invoke(this, new DataAvailableEventArgs(audio));
            // Pull the data through SampleChannel to drive level metering.
            // NOTE(review): the float buffer is sized and read with the BYTE
            // count (audio.Length), not the sample count — confirm this matches
            // SampleChannel.Read's expectations.
            var sampleBuffer = new float[audio.Length];
            int read = SampleChannel.Read(sampleBuffer, 0, audio.Length);
            _waveProvider?.AddSamples(audio, 0, read);
            if (WaveOutProvider != null && Listening)
            {
                WaveOutProvider?.AddSamples(audio, 0, read);
            }
        }
        catch (Exception ex)
        {
            Logger.LogExceptionToFile(ex, "FFMPEG");
        }
    }
    // Drain and dispose any frames still queued so GDI resources are released.
    try
    {
        while (_videoQueue != null && _videoQueue.TryDequeue(out frame))
        {
            frame?.Dispose();
        }
    }
    catch (Exception ex)
    {
        Logger.LogExceptionToFile(ex, "FFMPEG");
    }
}
/// <summary>
/// Callback invoked with a captured native sound buffer: copies the samples
/// to managed memory, downmixes to stereo when needed, feeds metering and
/// playback, and raises DataAvailable with a defensive copy.
/// </summary>
private void SoundCallback(Sound soundData)
{
    // Snapshot the delegate; bail out early if nobody is subscribed or the
    // provider chain has not been (re)configured yet.
    var da = DataAvailable;
    if (da == null || _needsSetup)
    {
        return;
    }
    try
    {
        // Copy the unmanaged sample block into a managed array.
        var data = new byte[soundData.SamplesSize];
        Marshal.Copy(soundData.SamplesData, data, 0, (int)soundData.SamplesSize);
        if (_realChannels > 2)
        {
            //resample audio to 2 channels
            data = ToStereo(data, _realChannels);
        }
        _waveProvider?.AddSamples(data, 0, data.Length);

        //forces processing of volume level without piping it out
        var sampleBuffer = new float[data.Length];
        int read = _sampleChannel.Read(sampleBuffer, 0, data.Length);

        // Subscribers get their own copy so they cannot mutate our buffer.
        da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));
        if (Listening)
        {
            WaveOutProvider?.AddSamples(data, 0, read);
        }
    }
    catch
    {
        //can fail at shutdown
    }
}
/// <summary>
/// Capture callback: forwards each recorded buffer into the buffered wave provider.
/// </summary>
void wi_DataAvailable(object sender, WaveInEventArgs e)
{
    // Only the first BytesRecorded bytes of e.Buffer are valid audio.
    byte[] captured = e.Buffer;
    int count = e.BytesRecorded;
    bwp.AddSamples(captured, 0, count);
}
/// <summary>
/// Process the audio: mixes all 16 channels into one interleaved 16-bit
/// stereo frame per sample, applying the (possibly fading) master volume.
/// </summary>
/// <param name="output">If the audio should be output.</param>
/// <param name="recording">If the audio is being recorded.</param>
public void Process(bool output, bool recording)
{
    // Determine the master level at the start of the buffer and the per-sample
    // step used to interpolate it across the buffer during a fade.
    float masterStep;
    float masterLevel;
    if (_isFading && _fadeMicroFramesLeft == 0)
    {
        // Fade completed: output silence.
        masterStep = 0;
        masterLevel = 0;
    }
    else
    {
        float fromMaster = Volume;
        float toMaster = Volume;
        if (_fadeMicroFramesLeft > 0)
        {
            // pow(pos, 10/6) fade curve; negative positions clamp to silence.
            const float scale = 10f / 6f;
            fromMaster *= (_fadePos < 0f) ? 0f : (float)Math.Pow(_fadePos, scale);
            _fadePos += _fadeStepPerMicroframe;
            toMaster *= (_fadePos < 0f) ? 0f : (float)Math.Pow(_fadePos, scale);
            _fadeMicroFramesLeft--;
        }
        masterStep = (toMaster - fromMaster) * _samplesReciprocal;
        masterLevel = fromMaster;
    }
    // Scratch for one interleaved stereo frame: [L lo, L hi, R lo, R hi].
    byte[] b = new byte[4];
    for (int i = 0; i < _samplesPerBuffer; i++)
    {
        // Sum all channels into 32-bit accumulators to avoid overflow.
        int left = 0, right = 0;
        for (int j = 0; j < 0x10; j++)
        {
            Channel chan = Channels[j];
            if (chan.Owner != null)
            {
                bool muted = Mutes[chan.Owner.Index]; // Get mute first because chan.Process() can call chan.Stop() which sets chan.Owner to null
                chan.Process(out short channelLeft, out short channelRight);
                if (!muted)
                {
                    left += channelLeft;
                    right += channelRight;
                }
            }
        }
        // Apply master volume and clamp to the 16-bit range before packing
        // little-endian into the frame buffer.
        float f = left * masterLevel;
        if (f < short.MinValue)
        {
            f = short.MinValue;
        }
        else if (f > short.MaxValue)
        {
            f = short.MaxValue;
        }
        left = (int)f;
        b[0] = (byte)left;
        b[1] = (byte)(left >> 8);
        f = right * masterLevel;
        if (f < short.MinValue)
        {
            f = short.MinValue;
        }
        else if (f > short.MaxValue)
        {
            f = short.MaxValue;
        }
        right = (int)f;
        b[2] = (byte)right;
        b[3] = (byte)(right >> 8);
        masterLevel += masterStep;
        if (output)
        {
            _buffer.AddSamples(b, 0, 4);
        }
        if (recording)
        {
            _waveWriter.Write(b, 0, 4);
        }
    }
}
/// <summary>
/// Plays back an audio frame.
/// </summary>
/// <param name="buffer">The frame.</param>
public override void Render(AudioBuffer buffer)
{
    byte[] frameData = buffer.Data;
    int offset = buffer.Index;
    WaveProvider.AddSamples(frameData, offset, buffer.Length);
}
/// <summary>
/// Worker loop: streams MP3 from the configured source, decompresses frames
/// with ACM, and feeds the buffered provider / level meter until aborted or
/// the stream ends. Raises AudioFinished on exit.
/// </summary>
private void StreamMP3()
{
    HttpWebRequest request = null;
    try
    {
        var resp = _connFactory.GetResponse(_source, "GET", "", out request);
        var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
        IMp3FrameDecompressor decompressor = null;
        using (var responseStream = resp.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
            {
                // Back off when less than a quarter-second of space remains.
                if (_bufferedWaveProvider != null &&
                    _bufferedWaveProvider.BufferLength - _bufferedWaveProvider.BufferedBytes <
                    _bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                {
                    //Debug.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(100);
                }
                else
                {
                    var da = DataAvailable;
                    if (da != null)
                    {
                        Mp3Frame frame;
                        try
                        {
                            frame = Mp3Frame.LoadFromStream(readFullyStream);
                        }
                        catch (EndOfStreamException)
                        {
                            // reached the end of the MP3 file / stream
                            break;
                        }
                        catch (WebException)
                        {
                            // probably we have aborted download from the GUI thread
                            break;
                        }
                        if (decompressor == null || _bufferedWaveProvider == null)
                        {
                            // don't think these details matter too much - just help ACM select the right codec
                            // however, the buffered provider doesn't know what sample rate it is working at
                            // until we have a frame
                            WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate,
                                frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                            RecordingFormat = new WaveFormat(frame.SampleRate, 16,
                                frame.ChannelMode == ChannelMode.Mono ? 1 : 2);
                            decompressor = new AcmMp3FrameDecompressor(waveFormat);
                            _bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat)
                            {
                                BufferDuration = TimeSpan.FromSeconds(5)
                            };
                            _sampleChannel = new SampleChannel(_bufferedWaveProvider);
                            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                        }
                        int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                        _bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                        // Drive level metering.
                        // NOTE(review): reads buffer.Length rather than the
                        // 'decompressed' byte count — confirm this is intended.
                        var sampleBuffer = new float[buffer.Length];
                        int read = _sampleChannel.Read(sampleBuffer, 0, buffer.Length);
                        da(this, new DataAvailableEventArgs((byte[])buffer.Clone(), read));
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(buffer, 0, read);
                        }
                    }
                }
            }
            // was doing this in a finally block, but for some reason
            // we are hanging on response stream .Dispose so never get there
            if (decompressor != null)
            {
                decompressor.Dispose();
                decompressor = null;
            }
        }
    }
    catch (Exception ex)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(ex, "MP3Stream");
    }
    try
    {
        request?.Abort();
    }
    catch { }
    request = null;
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
/// <summary>
/// Queues a fragment for playback by decoding it and appending the resulting
/// samples to the output provider.
/// </summary>
/// <param name="fragment">The fragment to play.</param>
public void SetPlayFragment(IDataFragment fragment)
{
    byte[] decoded = fragment.GetDecodedData();
    _provider.AddSamples(decoded, 0, decoded.Length);
}
/// <summary>
/// Worker entry point: downloads MP3 from the URL passed in
/// <paramref name="state"/>, decompresses frames and buffers them for
/// playback until stopped or the stream ends.
/// </summary>
/// <param name="state">The stream URL (string), as passed to the thread start.</param>
private void StreamMP3(object state)
{
    fullyDownloaded = false;
    string url = (string)state;
    webRequest = (HttpWebRequest)WebRequest.Create(url); //why HttpWebRequest instead of WebRequest
    HttpWebResponse response;
    try
    {
        response = (HttpWebResponse)webRequest.GetResponse();
    }
    catch (WebException e)
    {
        // A cancelled request is the expected shutdown path; anything else is fatal.
        if (e.Status != WebExceptionStatus.RequestCanceled)
        {
            throw;
        }
        return;
    }
    byte[] buffer = new byte[16384 * 4]; //how to calc this number?
    IMp3FrameDecompressor decompressor = null;
    try
    {
        using (Stream responseStream = response.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            do
            {
                if (bufferedWaveProvider != null &&
                    bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes <
                    bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                {
                    //buffer getting full
                    Thread.Sleep(500);
                }
                else
                {
                    Mp3Frame frame;
                    try
                    {
                        frame = Mp3Frame.LoadFromStream(readFullyStream);
                    }
                    catch (EndOfStreamException)
                    {
                        fullyDownloaded = true; //file or stream reach end
                        break;
                    }
                    catch (WebException)
                    {
                        break;
                    }
                    if (decompressor == null)
                    {
                        // The first frame determines sample rate / channel layout,
                        // so the decoder and provider can only be created now.
                        WaveFormat format = new Mp3WaveFormat(frame.SampleRate,
                            frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                        decompressor = new AcmMp3FrameDecompressor(format);
                        bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat)
                        {
                            BufferDuration = TimeSpan.FromSeconds(20)
                        };
                    }
                    int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                    bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                }
            } while (playbackState != StreamingPlaybackState.Stopped);
            // FIX: the original called decompressor.Dispose() unconditionally here,
            // which threw NullReferenceException when the loop exited before the
            // first frame was decoded, and double-disposed otherwise (the finally
            // block disposes again). Disposal now happens exactly once, in finally.
        }
    }
    finally
    {
        if (decompressor != null)
        {
            decompressor.Dispose();
        }
    }
}
/// <summary>
/// Recorder callback: appends each captured buffer to the buffered wave
/// provider, if one has been created.
/// </summary>
private void OnRecorderDataAvailable(object sender, WaveInEventArgs e)
{
    // Snapshot the provider; it may be torn down concurrently.
    var provider = _bufferedWaveProvider;
    if (provider != null)
    {
        provider.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }
}
/// <summary>
/// Worker loop: reads raw WAV data from the configured HTTP source in
/// quarter-second chunks, feeds playback/metering, and raises DataAvailable
/// until stopped or the stream is lost. Always raises AudioFinished on exit.
/// </summary>
private void StreamWav()
{
    var res = ReasonToFinishPlaying.StoppedByUser;
    HttpWebRequest request = null;
    try
    {
        using (HttpWebResponse resp = ConnectionFactory.GetResponse(_source, false, out request))
        {
            //1/4 of a second, 16 byte buffer
            var data = new byte[((RecordingFormat.SampleRate / 4) * 2) * RecordingFormat.Channels];
            using (var stream = resp.GetResponseStream())
            {
                if (stream == null)
                {
                    throw new Exception("Stream is null");
                }
                while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
                {
                    // Snapshot the delegate; exit the loop when nobody listens.
                    var da = DataAvailable;
                    if (da != null)
                    {
                        int recbytesize = stream.Read(data, 0, data.Length);
                        if (recbytesize == 0)
                        {
                            // Zero-byte read means the remote closed the stream.
                            throw new Exception("lost stream");
                        }
                        if (_sampleChannel == null)
                        {
                            continue;
                        }
                        _waveProvider.AddSamples(data, 0, recbytesize);
                        // Pull through SampleChannel to drive level metering.
                        var sampleBuffer = new float[recbytesize];
                        _sampleChannel.Read(sampleBuffer, 0, recbytesize);
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(data, 0, recbytesize);
                        }
                        // Subscribers get a defensive copy of the chunk.
                        var dae = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                        da(this, dae);
                    }
                    else
                    {
                        break;
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        res = ReasonToFinishPlaying.DeviceLost;
        MainForm.LogExceptionToFile(ex, "WavStream");
    }
    finally
    {
        AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(res));
    }
}
/// <summary>
/// Handling packets received on the RTP socket. One of the simplest, if not the simplest, cases, is
/// PCMU audio packets. The handling can get substantially more complicated if the RTP socket is being
/// used to multiplex different protocols. This is what WebRTC does with STUN, RTP and RTCP.
/// </summary>
/// <param name="rtpSocket">The raw RTP socket.</param>
/// <param name="rtpRecvSession">The session info for the RTP packets being received.</param>
/// <param name="cts">Cancellation source used to stop the receive loop.</param>
private static async void RecvRtp(Socket rtpSocket, RTPSession rtpRecvSession, CancellationTokenSource cts)
{
    try
    {
        DateTime lastRecvReportAt = DateTime.Now;
        uint packetReceivedCount = 0;
        uint bytesReceivedCount = 0;
        byte[] buffer = new byte[512];
        IPEndPoint anyEndPoint = new IPEndPoint((rtpSocket.AddressFamily == AddressFamily.InterNetworkV6) ? IPAddress.IPv6Any : IPAddress.Any, 0);
        Log.LogDebug($"Listening on RTP socket {rtpSocket.LocalEndPoint}.");
        using (var waveOutEvent = new WaveOutEvent())
        {
            // 8 kHz, 16-bit, mono: the PCM format that PCMU (G.711 mu-law) decodes to.
            var waveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 1));
            waveProvider.DiscardOnBufferOverflow = true;
            waveOutEvent.Init(waveProvider);
            waveOutEvent.Play();
            var recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);
            Log.LogDebug($"Initial RTP packet recieved from {recvResult.RemoteEndPoint}.");
            // Latch the remote end point from the first packet so sends go back
            // to wherever the media actually arrives from.
            if (_remoteRtpEndPoint == null || !recvResult.RemoteEndPoint.Equals(_remoteRtpEndPoint))
            {
                _remoteRtpEndPoint = recvResult.RemoteEndPoint as IPEndPoint;
                Log.LogDebug($"Adjusting remote RTP end point for sends adjusted to {_remoteRtpEndPoint}.");
            }
            while (recvResult.ReceivedBytes > 0 && !cts.IsCancellationRequested)
            {
                var rtpPacket = new RTPPacket(buffer.Take(recvResult.ReceivedBytes).ToArray());
                packetReceivedCount++;
                bytesReceivedCount += (uint)rtpPacket.Payload.Length;
                // Decode each mu-law byte to a 16-bit PCM sample, packed little-endian.
                for (int index = 0; index < rtpPacket.Payload.Length; index++)
                {
                    short pcm = NAudio.Codecs.MuLawDecoder.MuLawToLinearSample(rtpPacket.Payload[index]);
                    byte[] pcmSample = new byte[] { (byte)(pcm & 0xFF), (byte)(pcm >> 8) };
                    waveProvider.AddSamples(pcmSample, 0, 2);
                }
                recvResult = await rtpSocket.ReceiveFromAsync(buffer, SocketFlags.None, anyEndPoint);
                if (DateTime.Now.Subtract(lastRecvReportAt).TotalSeconds > RTP_REPORTING_PERIOD_SECONDS)
                {
                    // This is typically where RTCP receiver (SR) reports would be sent. Omitted here for brevity.
                    lastRecvReportAt = DateTime.Now;
                    var remoteRtpEndPoint = recvResult.RemoteEndPoint as IPEndPoint;
                    Log.LogDebug($"RTP recv report {rtpSocket.LocalEndPoint}<-{remoteRtpEndPoint} pkts {packetReceivedCount} bytes {bytesReceivedCount}");
                }
            }
        }
    }
    catch (SocketException sockExcp)
    {
        Log.LogWarning($"RecvRTP socket error {sockExcp.SocketErrorCode}");
    }
    catch (ObjectDisposedException) { } // This is how .Net deals with an in use socket being closed. Safe to ignore.
    catch (Exception excp)
    {
        Log.LogError($"Exception RecvRTP. {excp.Message}");
    }
}
/// <summary>
/// Worker entry point: downloads MP3 from the URL passed in
/// <paramref name="state"/>, decompresses frames and buffers them for
/// playback until stopped or the stream ends.
/// </summary>
/// <param name="state">The stream URL (string), as passed to the thread start.</param>
private void StreamMp3(object state)
{
    fullyDownloaded = false;
    var url = (string)state;
    webRequest = (HttpWebRequest)WebRequest.Create(url);
    HttpWebResponse resp;
    try
    {
        resp = (HttpWebResponse)webRequest.GetResponse();
    }
    catch (WebException e)
    {
        // A cancelled request is the expected shutdown path; report anything else.
        if (e.Status != WebExceptionStatus.RequestCanceled)
        {
            ShowError(e.Message);
        }
        return;
    }
    var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
    IMp3FrameDecompressor decompressor = null;
    try
    {
        using (var responseStream = resp.GetResponseStream())
        {
            var readFullyStream = new ReadFullyStream(responseStream);
            do
            {
                if (IsBufferNearlyFull)
                {
                    Debug.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(500);
                }
                else
                {
                    Mp3Frame frame;
                    try
                    {
                        frame = Mp3Frame.LoadFromStream(readFullyStream);
                    }
                    catch (EndOfStreamException)
                    {
                        fullyDownloaded = true;
                        // reached the end of the MP3 file / stream
                        break;
                    }
                    catch (WebException)
                    {
                        // probably we have aborted download from the GUI thread
                        break;
                    }
                    if (decompressor == null)
                    {
                        // don't think these details matter too much - just help ACM select the right codec
                        // however, the buffered provider doesn't know what sample rate it is working at
                        // until we have a frame
                        decompressor = CreateFrameDecompressor(frame);
                        bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat);
                        bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(20); // allow us to get well ahead of ourselves
                    }
                    int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                    bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                }
            } while (playbackState != StreamingPlaybackState.Stopped);
            Debug.WriteLine("Exiting");
            // FIX: the original called decompressor.Dispose() unconditionally here,
            // which threw NullReferenceException when playback stopped before the
            // first frame was decoded, and double-disposed otherwise (the finally
            // block disposes again). Disposal now happens exactly once, in finally.
        }
    }
    finally
    {
        if (decompressor != null)
        {
            decompressor.Dispose();
        }
    }
}
/// <summary>
/// Streams an MP3 from <c>audioUrl</c>: downloads, decompresses frames and
/// buffers them for playback until stopped or the stream ends. Sets
/// <c>playbackState</c> to Stopped on any failure.
/// </summary>
public void StreamMp3()
{
    fullyDownloaded = false;
    if (audioUrl != null && !string.IsNullOrEmpty(audioUrl))
    {
        try
        {
            webRequest = (HttpWebRequest)WebRequest.Create(audioUrl);
        }
        catch (UriFormatException)
        {
            Debug.WriteLine("URL format is not valid");
            playbackState = StreamingPlaybackState.Stopped;
            return;
        }
        catch (Exception)
        {
            Debug.WriteLine("Error in web request create");
            playbackState = StreamingPlaybackState.Stopped;
            return;
        }
        HttpWebResponse resp;
        try
        {
            resp = (HttpWebResponse)webRequest.GetResponse();
        }
        catch (WebException e)
        {
            // A cancelled request is the expected shutdown path.
            if (e.Status != WebExceptionStatus.RequestCanceled)
            {
                Debug.WriteLine("Web Exception error : " + e.Message);
                playbackState = StreamingPlaybackState.Stopped;
            }
            return;
        }
        var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
        IMp3FrameDecompressor decompressor = null;
        try
        {
            using (var responseStream = resp.GetResponseStream())
            {
                var readFullyStream = new ReadFullyStream(responseStream);
                do
                {
                    if (IsBufferNearlyFull)
                    {
                        Debug.WriteLine("Buffer getting full, taking a break");
                        Thread.Sleep(500);
                    }
                    else
                    {
                        // read next mp3 frame
                        Mp3Frame frame;
                        try
                        {
                            frame = Mp3Frame.LoadFromStream(readFullyStream);
                        }
                        catch (EndOfStreamException)
                        {
                            fullyDownloaded = true;
                            // reached the end of the MP3 file / stream
                            playbackState = StreamingPlaybackState.Stopped;
                            break;
                        }
                        catch (WebException)
                        {
                            break;
                        }
                        if (decompressor == null)
                        {
                            // don't think these details matter too much - just help ACM select the right codec
                            // however, the buffered provider doesn't know what sample rate it is working at
                            // until we have a frame
                            decompressor = CreateFrameDecompressor(frame);
                            bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat);
                            bufferedWaveProvider.BufferDuration = TimeSpan.FromSeconds(20); // allow us to get well ahead of ourselves
                        }
                        if (frame != null)
                        {
                            int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                            bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                        }
                        else
                        {
                            fullyDownloaded = true;
                            break;
                        }
                    }
                } while (playbackState != StreamingPlaybackState.Stopped);
                Debug.WriteLine("Exiting");
                // FIX: the original called decompressor.Dispose() unconditionally
                // here, which threw NullReferenceException (swallowed by the broad
                // catch below as a "Generic Error") when playback stopped before
                // the first frame, and double-disposed otherwise. Disposal now
                // happens exactly once, in finally.
            }
        }
        catch (Exception ex)
        {
            Debug.WriteLine("Generic Error: " + ex.Message);
            playbackState = StreamingPlaybackState.Stopped;
        }
        finally
        {
            if (decompressor != null)
            {
                decompressor.Dispose();
                decompressor = null;
            }
        }
    }
}
/// <summary>
/// Worker entry point: streams MP3 with ICY (SHOUTcast) metadata support from
/// the URL passed in <paramref name="state"/>, decompresses frames and buffers
/// them for playback until stopped or the stream ends.
/// </summary>
/// <param name="state">The stream URL (string), as passed to the thread start.</param>
private void StreamMp3(object state)
{
    fullyDownloaded = false;
    string url = (string)state;
    webRequest = (HttpWebRequest)WebRequest.Create(url);
    int metaInt = 0; // blocksize of mp3 data
    webRequest.Headers.Clear();
    webRequest.Headers.Add("GET", "/ HTTP/1.0");
    // Ask the server to interleave ICY stream-title metadata.
    webRequest.Headers.Add("Icy-MetaData", "1");
    webRequest.UserAgent = "WinampMPEG/5.09";
    HttpWebResponse resp;
    try
    {
        resp = (HttpWebResponse)webRequest.GetResponse();
    }
    catch (WebException e)
    {
        if (e.Status != WebExceptionStatus.RequestCanceled)
        {
            System.Console.WriteLine(e.Message);
        }
        return;
    }
    byte[] buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
    try
    {
        // read blocksize to find metadata block
        metaInt = Convert.ToInt32(resp.GetResponseHeader("icy-metaint"));
    }
    catch
    {
    }
    IMp3FrameDecompressor decompressor = null;
    try
    {
        using (Stream responseStream = resp.GetResponseStream())
        {
            ReadFullyStream readFullyStream = new ReadFullyStream(responseStream);
            readFullyStream.MetaInt = metaInt;
            do
            {
                if (IsBufferNearlyFull)
                {
                    System.Console.WriteLine("Buffer getting full, taking a break");
                    Thread.Sleep(1000);
                }
                else
                {
                    Mp3Frame frame;
                    try
                    {
                        frame = Mp3Frame.LoadFromStream(readFullyStream);
                        if (metaInt > 0 && !subbedToEvent)
                        {
                            subbedToEvent = true;
                            readFullyStream.StreamTitleChanged += ReadFullyStream_StreamTitleChanged;
                        }
                        else if (metaInt <= 0)
                        {
                            song_info = "none";
                        }
                    }
                    catch (EndOfStreamException)
                    {
                        fullyDownloaded = true;
                        // reached the end of the MP3 file / stream
                        break;
                    }
                    catch (WebException)
                    {
                        // probably we have aborted download from the GUI thread
                        break;
                    }
                    if (frame == null)
                    {
                        break;
                    }
                    if (decompressor == null)
                    {
                        // don't think these details matter too much - just help ACM select the right codec
                        // however, the buffered provider doesn't know what sample rate it is working at
                        // until we have a frame
                        decompressor = CreateFrameDecompressor(frame);
                        bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat)
                        {
                            BufferDuration = TimeSpan.FromSeconds(30) // allow us to get well ahead of ourselves
                        };
                        //this.bufferedWaveProvider.BufferedDuration = 250;
                        decomp = true; //hack to tell main Unity Thread to create AudioClip
                    }
                    int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                    bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                }
            } while (playbackState != StreamingPlaybackState.Stopped);
            System.Console.WriteLine("Exiting Thread");
            // FIX: the original called decompressor.Dispose() unconditionally here,
            // which threw NullReferenceException when playback stopped before the
            // first frame (also skipping readFullyStream.Close()), and
            // double-disposed otherwise. Disposal now happens once, in finally.
            readFullyStream.Close();
        }
    }
    finally
    {
        if (decompressor != null)
        {
            decompressor.Dispose();
        }
    }
}
/// <summary>
/// Background loop: pulls MP3 frames off the shared stream, decompresses them
/// into the buffered provider and lazily starts playback, until IsPlaying
/// becomes false. Cleans up audio resources on exit.
/// </summary>
private async Task DecompressFrames()
{
    byte[] buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
    do
    {
        try
        {
            //WaveBuffer getting full, taking a break
            if (bufferedWaveProvider != null &&
                bufferedWaveProvider.BufferLength - bufferedWaveProvider.BufferedBytes <
                bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 2)
            {
                await Task.Delay(500);
            }
            //StreamBuffer empty, taking a break
            else if (stream.Length < 16384 / 4)
            {
                await Task.Delay(500);
            }
            else
            {
                Mp3Frame frame = Mp3Frame.LoadFromStream(stream);
                if (frame == null)
                {
                    continue;
                }
                if (decompressor == null)
                {
                    // The first frame fixes the stream's sample rate / channels,
                    // so the decoder and provider can only be created now.
                    WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate,
                        frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);
                    decompressor = new AcmMp3FrameDecompressor(waveFormat);
                    bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat);
                }
                try
                {
                    int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                    if (decompressed > 0)
                    {
                        bufferedWaveProvider.AddSamples(buffer, 0, decompressed);
                    }
                }
                catch (NAudio.MmException)
                {
                    // Just ignore the frame if a MmException occurs
                }
                // Lazily start the output device once audio is flowing.
                if (waveOut == null)
                {
                    waveOut = new WaveOutEvent();
                    VolumeWaveProvider16 volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);
                    volumeProvider.Volume = 0.5f;
                    waveOut.Init(volumeProvider);
                    waveOut.Play();
                }
            }
        }
        catch (EndOfStreamException)
        {
            // NOTE(review): cleanup runs here but the loop continues while
            // IsPlaying is true — confirm this re-entry is intended.
            CleanUpAudio();
        }
    } while (IsPlaying);
    CleanUpAudio();
}
/// <summary>
/// Demo entry point: starts a UDP "host" task that plays back received audio,
/// then captures audio from every active WASAPI capture device and sends it
/// to the host over UDP until Enter is pressed.
/// </summary>
static void Main(string[] args)
{
    MMDeviceEnumerator mmDeviceEnum = new MMDeviceEnumerator();
    bool run = true;
    // Pressing Enter flips the shared flag that winds everything down.
    Task.Factory.StartNew(() => { Console.ReadLine(); run = false; });
    // Receiver task: plays whatever arrives on UDP port 8080.
    Task.Factory.StartNew(async () =>
    {
        UdpClient udpHost = new UdpClient(8080);
        BufferedWaveProvider MDWaveProvider = new BufferedWaveProvider(new WaveFormat());
        MDWaveProvider.DiscardOnBufferOverflow = true;
        WaveOut windowsPlayerMic = new WaveOut();
        Console.WriteLine("Connected");
        windowsPlayerMic.Init(MDWaveProvider);
        windowsPlayerMic.Play();
        bool runClient = true;
        // After run goes false, keep draining for up to the cool-down period.
        TimeSpan timeCool = new TimeSpan(0, 0, 2);
        TimeSpan? timeEnd = null;
        DateTime? time = null;
        while (runClient)
        {
            if (udpHost.Available > 0)
            {
                Console.WriteLine($"Awaiting network, {udpHost.Available}");
                IPEndPoint RemoteIpEndPoint = new IPEndPoint(IPAddress.Any, 0);
                byte[] buffer = udpHost.Receive(ref RemoteIpEndPoint);
                MDWaveProvider.AddSamples(buffer, 0, buffer.Length);
                Console.WriteLine($"Recieved bytes {buffer.Length} from network");
            }
            else { }
            if (!run)
            {
                if (time == null)
                {
                    time = DateTime.Now;
                }
                timeEnd = (DateTime.Now - time);
                runClient = (timeEnd ?? new TimeSpan(0, 0, 5)) < timeCool;
            }
        }
        // NOTE(review): this sleep runs after the loop exits, not per
        // iteration — confirm that placement is intended.
        Thread.Sleep(55);
        Console.WriteLine($"stopped client after {(timeEnd ?? new TimeSpan(1, 0, 0)).TotalSeconds} seconds");
    });
    // Sender side: capture from each active device and push to the receiver.
    UdpClient udpClient = new UdpClient();
    udpClient.Connect("localhost", 8080);
    foreach (MMDevice d in mmDeviceEnum.EnumerateAudioEndPoints(DataFlow.Capture, DeviceState.Active))
    {
        Console.WriteLine(d.DeviceFriendlyName);
        WasapiCapture ws = new WasapiCapture(d);
        ws.WaveFormat = new WaveFormat();
        ws.ShareMode = AudioClientShareMode.Exclusive;
        try
        {
            ws.DataAvailable += (s, a) =>
            {
                udpClient.SendAsync(a.Buffer, a.BytesRecorded);
                Console.WriteLine($"Send bytes {a.BytesRecorded} to network");
            };
            Console.WriteLine(ws.CaptureState);
            ws.StartRecording();
            while ((ws.CaptureState != NAudio.CoreAudioApi.CaptureState.Stopped || udpClient.Client.Connected) && run)
            {
                Thread.Sleep(500);
            }
        }
        finally
        {
            ws.Dispose();
            d.Dispose();
        }
        // Cool-down before closing the client, mirroring the receiver side.
        TimeSpan timeCool = new TimeSpan(0, 0, 2);
        TimeSpan? timeEnd = null;
        DateTime? time = null;
        bool runHost = true;
        while (runHost)
        {
            if (time == null)
            {
                time = DateTime.Now;
            }
            timeEnd = (DateTime.Now - time);
            runHost = (timeEnd ?? new TimeSpan(0, 0, 5)) < timeCool;
        }
        udpClient.Close();
        Console.WriteLine($"stopped host after {(timeEnd ?? new TimeSpan(1, 0, 0)).TotalSeconds} seconds");
    }
    Console.ReadLine();
    // (Removed: a large block of commented-out experimental code that played
    // local MP3 files through a BufferedWaveProvider via a MusicRecoder helper.
    // It was dead code referencing hard-coded local file paths.)
}
/// <summary>
/// Worker loop: reads A-law encoded audio from the HTTP source in 3200-byte
/// chunks, decodes to PCM, feeds playback/metering and raises DataAvailable
/// until aborted or the stream is lost. Tears down the sample channel and
/// raises AudioFinished on exit.
/// </summary>
private void SpyServerListener()
{
    var data = new byte[3200];
    try
    {
        var request = (HttpWebRequest)WebRequest.Create(_source);
        request.Timeout = 10000;
        request.ReadWriteTimeout = 5000;
        var response = request.GetResponse();
        using (Stream stream = response.GetResponseStream())
        {
            if (stream == null)
            {
                throw new Exception("Stream is null");
            }
            stream.ReadTimeout = 5000;
            while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
            {
                // Snapshot the delegate; exit when nobody is subscribed.
                var da = DataAvailable;
                if (da != null)
                {
                    int recbytesize = stream.Read(data, 0, 3200);
                    if (recbytesize == 0)
                    {
                        // Zero-byte read means the remote closed the stream.
                        throw new Exception("lost stream");
                    }
                    byte[] dec;
                    ALawDecoder.ALawDecode(data, recbytesize, out dec);
                    if (_sampleChannel != null)
                    {
                        _waveProvider.AddSamples(dec, 0, dec.Length);
                        // Pull through SampleChannel to drive level metering.
                        var sampleBuffer = new float[dec.Length];
                        int read = _sampleChannel.Read(sampleBuffer, 0, dec.Length);
                        // Subscribers get their own copy of the decoded audio.
                        da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));
                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(dec, 0, read);
                        }
                    }
                }
                else
                {
                    break;
                }
            }
        }
    }
    catch (Exception e)
    {
        _res = ReasonToFinishPlaying.DeviceLost;
        Logger.LogException(e, "ispyServer");
    }
    // Detach metering and flush any buffered audio before signalling finish.
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    if (WaveOutProvider?.BufferedBytes > 0)
    {
        WaveOutProvider?.ClearBuffer();
    }
    AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
}
/// <summary>
/// Capture callback: forwards the recorded bytes into the playback provider.
/// </summary>
private void WaveSourceOnDataAvailable(object sender, WaveInEventArgs e)
{
    // Only e.BytesRecorded bytes of e.Buffer hold valid audio data.
    var provider = waveProvider;
    if (provider != null)
    {
        provider.AddSamples(e.Buffer, 0, e.BytesRecorded);
    }
}
/// <summary>
/// Appends the given raw audio bytes to the input buffer.
/// </summary>
/// <param name="bytes">The raw audio data to enqueue.</param>
private void PutAudioInInputbuffer(byte[] bytes)
{
    int count = bytes.Length;
    inputBuffer.AddSamples(bytes, 0, count);
}
/// <summary>
/// Worker loop: opens the audio source via FFmpeg (AudioFileReader), builds
/// the provider/metering chain from the source format, then reads frames —
/// pacing playback for file sources and running flat-out for network URIs —
/// until stopped, timed out, or the source fails.
/// </summary>
private void FfmpegListener()
{
    AudioFileReader afr = null;
    // Serialize FFmpeg open calls across the process.
    Program.WriterMutex.WaitOne();
    try
    {
        afr = new AudioFileReader();
        afr.Open(_source);
    }
    catch (Exception ex)
    {
        Log.Error("", ex);//MainForm.LogExceptionToFile(ex);
    }
    Program.WriterMutex.ReleaseMutex();
    if (afr == null || !afr.IsOpen)
    {
        if (AudioFinished != null)
        {
            AudioFinished(this, ReasonToFinishPlaying.AudioSourceError);
        }
        return;
    }
    RecordingFormat = new WaveFormat(afr.SampleRate, 16, afr.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    byte[] data;
    // Bytes-per-second of the source, used to pace file playback in real time.
    int mult = afr.BitsPerSample / 8;
    double btrg = Convert.ToDouble(afr.SampleRate * mult * afr.Channels);
    DateTime lastPacket = DateTime.Now;
    // A "://" in the source means a live network stream: no artificial pacing.
    bool realTime = _source.IndexOf("://") != -1;
    try
    {
        DateTime req = DateTime.Now;
        while (!_stopEvent.WaitOne(0, false))
        {
            data = afr.ReadAudioFrame();
            if (data.Length > 0)
            {
                lastPacket = DateTime.Now;
                if (DataAvailable != null)
                {
                    //forces processing of volume level without piping it out
                    _waveProvider.AddSamples(data, 0, data.Length);
                    var sampleBuffer = new float[data.Length];
                    _sampleChannel.Read(sampleBuffer, 0, data.Length);
                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                    // Subscribers get a defensive copy of the frame.
                    var da = new DataAvailableEventArgs((byte[])data.Clone());
                    DataAvailable(this, da);
                }
                if (realTime)
                {
                    if (_stopEvent.WaitOne(10, false))
                    {
                        break;
                    }
                }
                else
                {
                    // Sleep off the remainder of this frame's real-time duration
                    // so file playback runs at normal speed.
                    double f = (data.Length / btrg) * 1000;
                    if (f > 0)
                    {
                        var span = DateTime.Now.Subtract(req);
                        var msec = Convert.ToInt32(f - (int)span.TotalMilliseconds);
                        if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                        {
                            break;
                        }
                        req = DateTime.Now;
                    }
                }
            }
            else
            {
                // No data for 5 seconds: treat the source as dead.
                if ((DateTime.Now - lastPacket).TotalMilliseconds > 5000)
                {
                    afr.Close();
                    Stop();
                    throw new Exception("Audio source timeout");
                }
                if (_stopEvent.WaitOne(30, false))
                {
                    break;
                }
            }
        }
        if (AudioFinished != null)
        {
            AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
        }
    }
    catch (Exception e)
    {
        if (AudioSourceError != null)
        {
            AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
        }
        Log.Error("", e);//MainForm.LogExceptionToFile(e);
    }
}
/// <summary>
/// Wires up all DiscordSharp client event handlers (messages, voice, guilds, socket
/// lifecycle, logging) on a background task and then initiates login/connect.
/// </summary>
/// <param name="token">Cancellation token passed to the background task.</param>
/// <returns>The task running the event registration and connect sequence.</returns>
private Task SetupEvents(CancellationToken token)
{
    Console.ForegroundColor = ConsoleColor.White;
    return (Task.Run(() =>
    {
        client.MessageReceived += (sender, e) =>
        {
            if (owner == null)
            {
                // NOTE(review): Find(...) may return null if the owner is in no visible server,
                // which would throw here — kept as-is to preserve behavior.
                owner = client.GetServersList().Find(x => x.GetMemberByKey(config.OwnerID) != null).GetMemberByKey(config.OwnerID); //prays
            }
            if (e.Author == null)
            {
                // Diagnostic path: forward malformed messages (no author) to the owner via DM.
                string msg = $"Author had null id in message received!\nRaw JSON:\n```\n{e.RawJson}\n```\n";
                msg += $"Args\nChannel: {e.Channel.Name}/{e.Channel.ID}\nMessage: {e.Message}";
                try
                {
                    owner.SlideIntoDMs(msg);
                }
                catch { }
            }
            else
            {
                Console.WriteLine($"[-- Message from {e.Author.Username} in #{e.Channel.Name} on {e.Channel.Parent.Name}: {e.Message.Content}");
                if (doingInitialRun)
                {
                    // First-run owner authentication: whoever posts the generated code becomes owner.
                    if (e.Message.Content.StartsWith("?authenticate"))
                    {
                        string[] split = e.Message.Content.Split(new char[] { ' ' }, 2);
                        if (split.Length > 1)
                        {
                            if (codeToEnter.Trim() == split[1].Trim())
                            {
                                config.OwnerID = e.Author.ID;
                                doingInitialRun = false;
                                e.Channel.SendMessage("Authentication successful! **You are now my owner, " + e.Author.Username + ".**");
                                CommandsManager.AddPermission(e.Author, PermissionType.Owner);
                                owner = e.Author;
                            }
                        }
                    }
                }
                else
                {
                    // Normal operation: dispatch prefixed messages to the command manager.
                    if (e.Message.Content.Length > 0 && (e.Message.Content[0] == config.CommandPrefix))
                    {
                        string rawCommand = e.Message.Content.Substring(1);
                        try
                        {
                            CommandsManager.ExecuteOnMessageCommand(rawCommand, e.Channel, e.Author);
                        }
                        catch (UnauthorizedAccessException ex)
                        {
                            e.Channel.SendMessage(ex.Message);
                        }
                        catch (ModuleNotEnabledException x)
                        {
                            e.Channel.SendMessage($"{x.Message}");
                        }
                        catch (Exception ex)
                        {
                            e.Channel.SendMessage("Exception occurred while running command:\n```" + ex.Message + "\n```");
                        }
                    }
                }
                // Hard-coded easter egg for a specific channel.
                if (e.Channel.ID == "91265608326324224") //discord-sharp on discordapi
                {
                    if (e.Author != owner)
                    {
                        if (e.Message.Content != null && e.Message.Content.ToLower().Contains("how"))
                        {
                            if (e.Message.Content.ToLower().Contains("bot") && e.Message.Content.ToLower().Contains("tag"))
                            {
                                e.Channel.SendMessage($"<#124294271900712960>");//#api-changes
                            }
                        }
                    }
                }
            }
        };
        client.VoiceClientDebugMessageReceived += (sender, e) =>
        {
            if (e.message.Level != MessageLevel.Unecessary)
            {
                Console.WriteLine($"[{e.message.Level} {e.message.TimeStamp.ToString()}] {e.message.Message}");
            }
        };
        client.VoiceClientConnected += (sender, e) =>
        {
            try
            {
                owner.SlideIntoDMs($"Voice connection complete.");
            }
            catch { }
        };
        client.AudioPacketReceived += (sender, e) =>
        {
            // Decode incoming Opus frames into PCM and queue them for playback.
            if (bufferedWaveProvider != null)
            {
                byte[] potential = new byte[4000];
                int decodedFrames = client.GetVoiceClient().Decoder.DecodeFrame(e.OpusAudio, 0, e.OpusAudioLength, potential);
                bufferedWaveProvider.AddSamples(potential, 0, decodedFrames);
            }
        };
        client.GuildCreated += (sender, e) =>
        {
            if (owner == null)
            {
                owner = client.GetServersList().Find(x => x.GetMemberByKey(config.OwnerID) != null).GetMemberByKey(config.OwnerID);
            }
            Console.WriteLine($"Joined server {e.Server.Name} ({e.Server.ID})");
            try
            {
                owner.SlideIntoDMs($"Joined server {e.Server.Name} ({e.Server.ID})");
            }
            catch { }
        };
        client.GuildAvailable += (sender, e) =>
        {
            Console.WriteLine($"Guild {e.Server.Name} became available.");
        };
        client.SocketClosed += (sender, e) =>
        {
            Console.Title = "Luigibot - Discord - Socket Closed..";
            if (!actuallyExit)
            {
                // Unexpected disconnect: wait briefly, then attempt a full reconnect.
                WriteError($"\n\nSocket Closed Unexpectedly! Code: {e.Code}. Reason: {e.Reason}. Clear: {e.WasClean}.\n\n");
                Console.WriteLine("Waiting 6 seconds to reconnect..");
                Thread.Sleep(6 * 1000);
                LetsGoAgain();
            }
            else
            {
                Console.WriteLine($"Shutting down ({e.Code}, {e.Reason}, {e.WasClean})");
            }
        };
        client.UnknownMessageTypeReceived += (sender, e) =>
        {
            if (!Directory.Exists("dumps"))
            {
                Directory.CreateDirectory("dumps");
            }
            string message = $"Ahoy! An unknown message type `{e.RawJson["t"].ToString()}` was discovered with the contents: \n\n";
            message += $"```\n{e.RawJson.ToString()}\n```\nIt's been dumped to `dumps/{e.RawJson["t"].ToString()}.json` for your viewing pleasure.";
            string filename = $"{Environment.CurrentDirectory}{Path.DirectorySeparatorChar}dumps{Path.DirectorySeparatorChar}{e.RawJson["t"].ToString()}.json";
            if (!File.Exists(filename))
            {
                // BUG FIX: arguments were swapped (File.WriteAllText(contents, path)),
                // so the JSON was written to a file named after its own contents and the
                // advertised dumps/<type>.json file was never created.
                File.WriteAllText(filename, e.RawJson.ToString());
                try
                {
                    owner.SlideIntoDMs(message);
                }
                catch { }
            }
        };
        client.TextClientDebugMessageReceived += (sender, e) =>
        {
            if (e.message.Level == MessageLevel.Error || e.message.Level == MessageLevel.Critical)
            {
                WriteError($"(Logger Error) {e.message.Message}");
                try
                {
                    owner.SlideIntoDMs($"Bot error ocurred: ({e.message.Level.ToString()})```\n{e.message.Message}\n```");
                }
                catch { }
            }
            if (e.message.Level == MessageLevel.Warning)
            {
                WriteWarning($"(Logger Warning) {e.message.Message}");
            }
        };
        client.Connected += (sender, e) =>
        {
            Console.Title = "Luigibot - Discord - Logged in as " + e.User.Username;
            Console.WriteLine("Connected as " + e.User.Username);
            if (String.IsNullOrEmpty(config.OwnerID))
            {
                // No owner configured yet: generate a one-time code and prompt for ?authenticate.
                doingInitialRun = true;
                RandomCodeGenerator rcg = new RandomCodeGenerator();
                codeToEnter = rcg.GenerateRandomCode();
                Console.ForegroundColor = ConsoleColor.Cyan;
                Console.WriteLine("Important: ");
                Console.ForegroundColor = ConsoleColor.White;
                Console.WriteLine("\tPlease authenticate yourself as owner by typing the following into any Discord server you and the bot are in: ");
                Console.WriteLine($"\t{config.CommandPrefix}authenticate " + codeToEnter);
            }
            CommandsManager = new CommandsManager(client);
            // Restore persisted permissions, seeding the owner if the file was empty.
            if (File.Exists("permissions.json"))
            {
                var permissionsDictionary = JsonConvert.DeserializeObject<Dictionary<string, PermissionType>>(File.ReadAllText("permissions.json"));
                if (permissionsDictionary == null)
                {
                    permissionsDictionary = new Dictionary<string, PermissionType>();
                }
                if (permissionsDictionary.Count == 0 && owner != null)
                {
                    permissionsDictionary.Add(owner.ID, PermissionType.Owner);
                }
                CommandsManager.OverridePermissionsDictionary(permissionsDictionary);
            }
            SetupCommands();
            if (config.ModulesDictionary != null)
            {
                CommandsManager.OverrideModulesDictionary(config.ModulesDictionary);
            }
        };
        if (client.SendLoginRequest() != null)
        {
            client.Connect();
        }
    }, token));
}
/// <summary>
/// WASAPI capture callback: resamples the raw capture buffer to 16-bit mono at
/// <c>AudioManager.INPUT_SAMPLE_RATE</c>, queues the samples, and for every full
/// segment applies mic boost and Speex processing, updates the peak level (dB),
/// Opus-encodes the segment, and sends it over UDP (also mirroring to the local
/// mic monitor buffer on successful send).
/// </summary>
/// <param name="sender">Unused capture source.</param>
/// <param name="e">Captured audio buffer and byte count.</param>
private void WasapiCaptureOnDataAvailable(object sender, WaveInEventArgs e)
{
    if (_resampler == null)
    {
        //create and use in the same thread or COM issues
        _resampler = new EventDrivenResampler(windowsN, _wasapiCapture.WaveFormat, new WaveFormat(AudioManager.INPUT_SAMPLE_RATE, 16, 1));
    }
    if (e.BytesRecorded > 0)
    {
        //Logger.Info($"Time: {_stopwatch.ElapsedMilliseconds} - Bytes: {e.BytesRecorded}");
        short[] resampledPCM16Bit = _resampler.Resample(e.Buffer, e.BytesRecorded);
        // Logger.Info($"Time: {_stopwatch.ElapsedMilliseconds} - Bytes: {resampledPCM16Bit.Length}");
        //fill sound buffer
        for (var i = 0; i < resampledPCM16Bit.Length; i++)
        {
            _micInputQueue.Enqueue(resampledPCM16Bit[i]);
        }
        // Drain the queue one fixed-size segment at a time; leftover samples wait
        // in the queue for the next callback.
        while (_micInputQueue.Count >= AudioManager.SEGMENT_FRAMES)
        {
            short[] pcmShort = new short[AudioManager.SEGMENT_FRAMES];
            for (var i = 0; i < AudioManager.SEGMENT_FRAMES; i++)
            {
                pcmShort[i] = _micInputQueue.Dequeue();
            }
            try
            {
                //volume boost pre-processing
                for (var i = 0; i < pcmShort.Length; i++)
                {
                    // n.b. no clipping test going on here
                    pcmShort[i] = (short)(pcmShort[i] * MicBoost);
                }
                //process with Speex
                _speex.Process(new ArraySegment<short>(pcmShort));
                float max = 0;
                for (var i = 0; i < pcmShort.Length; i++)
                {
                    //determine peak
                    if (pcmShort[i] > max)
                    {
                        max = pcmShort[i];
                    }
                }
                //convert peak to dB for the mic level meter
                MicMax = (float)VolumeConversionHelper.ConvertFloatToDB(max / 32768F);
                // Repack shorts as little-endian bytes for the encoder.
                var pcmBytes = new byte[pcmShort.Length * 2];
                Buffer.BlockCopy(pcmShort, 0, pcmBytes, 0, pcmBytes.Length);
                //encode as opus bytes
                int len;
                var buff = _encoder.Encode(pcmBytes, pcmBytes.Length, out len);
                if ((_udpVoiceHandler != null) && (buff != null) && (len > 0))
                {
                    //create copy sized exactly to the encoded length
                    var encoded = new byte[len];
                    Buffer.BlockCopy(buff, 0, encoded, 0, len);
                    // Console.WriteLine("Sending: " + e.BytesRecorded);
                    if (_udpVoiceHandler.Send(encoded, len))
                    {
                        //sent ok - also play the (unencoded) segment locally for sidetone
                        _micWaveOutBuffer?.AddSamples(pcmBytes, 0, pcmBytes.Length);
                    }
                }
                else
                {
                    Logger.Error($"Invalid Bytes for Encoding - {pcmShort.Length} should be {SEGMENT_FRAMES} ");
                }
                _errorCount = 0;
            }
            catch (Exception ex)
            {
                // Rate-limit encode-error logging: log the first 9, then one final entry.
                _errorCount++;
                if (_errorCount < 10)
                {
                    Logger.Error(ex, "Error encoding Opus! " + ex.Message);
                }
                else if (_errorCount == 10)
                {
                    Logger.Error(ex, "Final Log of Error encoding Opus! " + ex.Message);
                }
            }
        }
    }
}
private void StreamMp3() { _fullyDownloaded = false; IMp3FrameDecompressor deCompressor = null; try { do { if (IsBufferNearlyFull) { Debug.WriteLine("Buffer getting full, taking a break"); Thread.Sleep(500); } else { try { lock (_repositionLocker) { if (deCompressor == null) { // don't think these details matter too much - just help ACM select the right codec // however, the buffered provider doesn't know what sample rate it is working at // until we have a frame deCompressor = CreateFrameDeCompressor(); _bufferedWaveProvider = new BufferedWaveProvider(deCompressor.OutputFormat) { // allow us to get well ahead of ourselves BufferDuration = TimeSpan.FromSeconds(MaxBufferSizeSeconds), DiscardOnBufferOverflow = true, ReadFully = true }; } else { var frame = Mp3Frame.LoadFromStream(_reader); if (frame != null) { int decompressed = deCompressor.DecompressFrame(frame, _decompressBuffer, 0); _bufferedWaveProvider.AddSamples(_decompressBuffer, 0, decompressed); } else // end of stream { _fullyDownloaded = true; if (_bufferedWaveProvider.BufferedBytes == 0) { break; } } } } } catch (EndOfStreamException) { _fullyDownloaded = true; // reached the end of the MP3 file / stream // break; } } } while (_playbackState != StreamingPlaybackState.Stopped); Debug.WriteLine("Exiting"); // was doing this in a finally block, but for some reason // we are hanging on response stream .Dispose so never get there deCompressor?.Dispose(); } catch (WebException e) when(e.Status == WebExceptionStatus.RequestCanceled) { // ignored cancel exceptions Stop(); } catch (Exception e) { Debug.WriteLine(e.Message); Stop(); } finally { _fullyDownloaded = true; deCompressor?.Dispose(); } }
/// <summary>
/// Worker thread: connects over HTTP to the configured source and parses the
/// multipart stream (boundary "--myboundary") into typed parts — "image/jpeg"
/// frames raised via <c>NewFrame</c>, "audio/raw" (fixed 16 kHz mono) fed to the
/// wave provider and raised via <c>DataAvailable</c>, and "alert/text" raised via
/// <c>AlertHandler</c>. Reconnects on failure until aborted or the device is lost.
/// </summary>
private void WorkerThread()
{
    _abort = new ManualResetEvent(false);
    // buffer to read stream
    var buffer = new byte[BufSize];
    var encoding = new ASCIIEncoding();
    while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
    {
        HttpWebRequest request = null;
        WebResponse response = null;
        Stream stream = null;
        try
        {
            // create request
            request = (HttpWebRequest)WebRequest.Create(_source.settings.videosourcestring);
            // set user agent
            if (_userAgent != null)
            {
                request.UserAgent = _userAgent;
            }
            // set proxy
            if (_proxy != null)
            {
                request.Proxy = _proxy;
            }
            if (_usehttp10)
            {
                request.ProtocolVersion = HttpVersion.Version10;
            }
            // set timeout value for the request
            request.Timeout = request.ServicePoint.ConnectionLeaseTimeout = request.ServicePoint.MaxIdleTime = _requestTimeout;
            request.AllowAutoRedirect = true;
            // set login and password
            if ((_login != null) && (_password != null) && (_login != string.Empty))
            {
                request.Credentials = new NetworkCredential(_login, _password);
            }
            // set connection group name
            if (_useSeparateConnectionGroup)
            {
                request.ConnectionGroupName = GetHashCode().ToString();
            }
            // get response
            response = request.GetResponse();
            // get response stream
            stream = response.GetResponseStream();
            stream.ReadTimeout = _requestTimeout;
            byte[] boundary = encoding.GetBytes("--myboundary");
            byte[] sep = encoding.GetBytes("\r\n\r\n");
            // parse loop: accumulate bytes in buffer (ttl = bytes held), locate a
            // start boundary then the following end boundary, then dispatch the part.
            int startPacket = -1;
            int endPacket = -1;
            int ttl = 0;
            bool hasaudio = false;
            while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
            {
                int read;
                if ((read = stream.Read(buffer, ttl, ReadSize)) == 0)
                {
                    // Zero-byte read = connection dropped; trigger the reconnect path below.
                    throw new ApplicationException();
                }
                ttl += read;
                if (startPacket == -1)
                {
                    startPacket = ByteArrayUtils.Find(buffer, boundary, 0, ttl);
                }
                else
                {
                    if (endPacket == -1)
                    {
                        endPacket = ByteArrayUtils.Find(buffer, boundary, startPacket + boundary.Length, ttl - (startPacket + boundary.Length));
                    }
                }
                var nf = NewFrame;
                if (startPacket > -1 && endPacket > startPacket)
                {
                    // br = position of the header/body separator ("\r\n\r\n") within the part.
                    int br = ByteArrayUtils.Find(buffer, sep, startPacket, 100);
                    if (br != -1)
                    {
                        // Extract the part headers and pull out the Content-type value.
                        // NOTE(review): arr is sized br but only br - startPacket bytes are
                        // copied, leaving trailing NULs in s — header parse still works; confirm intended.
                        var arr = new byte[br];
                        Array.Copy(buffer, startPacket, arr, 0, br - startPacket);
                        string s = Encoding.ASCII.GetString(arr);
                        int k = s.IndexOf("Content-type: ", StringComparison.Ordinal);
                        if (k != -1)
                        {
                            s = s.Substring(k + 14);
                            s = s.Substring(0, s.IndexOf("\r\n", StringComparison.Ordinal));
                            s = s.Trim();
                        }
                        switch (s)
                        {
                            case "image/jpeg":
                                try
                                {
                                    if (ShouldEmitFrame)
                                    {
                                        // Body spans br+4 .. endPacket-4 (skip separator, trim trailing CRLFs).
                                        using (var ms = new MemoryStream(buffer, br + 4, endPacket - br - 8))
                                        {
                                            using (var bmp = (Bitmap)Image.FromStream(ms))
                                            {
                                                var dae = new NewFrameEventArgs(bmp);
                                                // NOTE(review): nf is not null-checked before Invoke,
                                                // unlike DataAvailable below — confirm NewFrame is always subscribed.
                                                nf.Invoke(this, dae);
                                            }
                                        }
                                    }
                                }
                                catch (Exception ex)
                                {
                                    //sometimes corrupted packets come through...
                                    Logger.LogException(ex, "KinectNetwork");
                                }
                                break;
                            case "audio/raw":
                                if (!hasaudio)
                                {
                                    hasaudio = true;
                                    //fixed 16khz 1 channel format
                                    RecordingFormat = new WaveFormat(16000, 16, 1);
                                    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
                                    _sampleChannel = new SampleChannel(_waveProvider);
                                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                    // One-shot notification that this source carries audio.
                                    if (HasAudioStream != null)
                                    {
                                        HasAudioStream(this, EventArgs.Empty);
                                        HasAudioStream = null;
                                    }
                                }
                                var da = DataAvailable;
                                if (da != null)
                                {
                                    int l = endPacket - br - 8;
                                    var data = new byte[l];
                                    int d;
                                    using (var ms = new MemoryStream(buffer, br + 4, l))
                                    {
                                        d = ms.Read(data, 0, l);
                                    }
                                    if (d > 0)
                                    {
                                        _waveProvider.AddSamples(data, 0, data.Length);
                                        // NOTE(review): WaveOutProvider is not null-checked here
                                        // (other handlers in this file check it) — confirm Listening implies non-null.
                                        if (Listening)
                                        {
                                            WaveOutProvider.AddSamples(data, 0, data.Length);
                                        }
                                        //forces processing of volume level without piping it out
                                        var sampleBuffer = new float[data.Length];
                                        int r = _sampleChannel.Read(sampleBuffer, 0, data.Length);
                                        da(this, new DataAvailableEventArgs((byte[])data.Clone(), r));
                                    }
                                }
                                break;
                            case "alert/text":
                                // code to handle alert notifications goes here
                                if (AlertHandler != null)
                                {
                                    int dl = endPacket - br - 8;
                                    var data2 = new byte[dl];
                                    using (var ms = new MemoryStream(buffer, br + 4, dl))
                                    {
                                        ms.Read(data2, 0, dl);
                                    }
                                    string alerttype = Encoding.ASCII.GetString(data2);
                                    AlertHandler(this, new AlertEventArgs(alerttype));
                                }
                                break;
                        }
                    }
                    // Shift the unconsumed tail (starting at the end boundary) to the front
                    // of the buffer and resume scanning for the next part.
                    ttl -= endPacket;
                    Array.Copy(buffer, endPacket, buffer, 0, ttl);
                    startPacket = -1;
                    endPacket = -1;
                }
            }
        }
        catch (ApplicationException)
        {
            // do nothing for Application Exception, which we raised on our own
            // wait for a while before the next try
            Thread.Sleep(250);
        }
        catch (ThreadAbortException)
        {
            break;
        }
        catch (Exception ex)
        {
            // provide information to clients
            Logger.LogException(ex, "KinectNetwork");
            _res = ReasonToFinishPlaying.DeviceLost;
            _abort.WaitOne(250);
            break;
            // wait for a while before the next try
        }
        finally
        {
            // NOTE(review): Flush on a response (read) stream can throw on some
            // implementations; assumed safe for this stream type — confirm.
            request?.Abort();
            stream?.Flush();
            stream?.Close();
            response?.Close();
        }
    }
    if (_waveProvider?.BufferedBytes > 0)
    {
        _waveProvider.ClearBuffer();
    }
    Listening = false;
    PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
    _abort.Close();
}
/// <summary>
/// Worker loop that opens the FFmpeg-backed audio reader (serialized through
/// <c>Program.FFMPEGMutex</c>), then reads frames, feeds them to the wave provider /
/// sample channel for level metering and raises <c>DataAvailable</c>, pacing file
/// sources to real time, until stopped, the source ends, or it times out.
/// </summary>
private void FfmpegListener()
{
    _isrunning = true;
    _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    _afr = null;
    bool open = false;
    string errmsg = "";
    try
    {
        Program.FFMPEGMutex.WaitOne();
        _afr = new AudioFileReader();
        // Lower-case only the scheme portion of a URI source (paths after "://" may be case-sensitive).
        int i = _source.IndexOf("://", StringComparison.Ordinal);
        if (i > -1)
        {
            _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
        }
        _afr.Timeout = Timeout;
        _afr.AnalyzeDuration = AnalyseDuration;
        _afr.Open(_source);
        open = true;
    }
    catch (Exception ex)
    {
        MainForm.LogErrorToFile(ex.Message);
    }
    finally
    {
        try
        {
            Program.FFMPEGMutex.ReleaseMutex();
        }
        catch (ObjectDisposedException)
        {
            //can happen on shutdown
        }
    }
    if (_afr == null || !_afr.IsOpen || !open)
    {
        ShutDown("Could not open audio stream" + ": " + _source);
        return;
    }
    RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
    _waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
    _sampleChannel = new SampleChannel(_waveProvider);
    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
    // Bytes per second of audio, used to pace playback of file (non-realtime) sources.
    int mult = _afr.BitsPerSample / 8;
    double btrg = Convert.ToDouble(_afr.SampleRate * mult * _afr.Channels);
    LastFrame = Helper.Now;
    bool realTime = !IsFileSource;
    try
    {
        DateTime req = Helper.Now;
        while (!_stopEvent.WaitOne(10, false) && !MainForm.Reallyclose)
        {
            byte[] data = _afr.ReadAudioFrame();
            // BUG FIX: was "data.Equals(0)", which compares the byte[] reference
            // against a boxed int and is always false — so a file source returning an
            // empty (non-null) frame never broke out here and instead fell into the
            // timeout path below, ending with a spurious "Audio source timeout".
            if (data == null || data.Length == 0)
            {
                if (!realTime)
                {
                    // End of file: finish cleanly.
                    break;
                }
            }
            if (data != null && data.Length > 0)
            {
                LastFrame = Helper.Now;
                var da = DataAvailable;
                if (da != null)
                {
                    //forces processing of volume level without piping it out
                    _waveProvider.AddSamples(data, 0, data.Length);
                    var sampleBuffer = new float[data.Length];
                    _sampleChannel.Read(sampleBuffer, 0, data.Length);
                    // Hand subscribers their own copy so they cannot mutate our buffer.
                    da(this, new DataAvailableEventArgs((byte[])data.Clone()));
                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                }
                if (realTime)
                {
                    if (_stopEvent.WaitOne(30, false))
                    {
                        break;
                    }
                }
                else
                {
                    // File source: sleep for the frame's real duration (minus elapsed time)
                    // so playback proceeds at roughly 1x speed.
                    double f = (data.Length / btrg) * 1000;
                    if (f > 0)
                    {
                        var span = Helper.Now.Subtract(req);
                        var msec = Convert.ToInt32(f - (int)span.TotalMilliseconds);
                        if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                        {
                            break;
                        }
                        req = Helper.Now;
                    }
                }
            }
            else
            {
                // Realtime source produced no data: allow up to Timeout ms before failing.
                if ((Helper.Now - LastFrame).TotalMilliseconds > Timeout)
                {
                    throw new Exception("Audio source timeout");
                }
                if (_stopEvent.WaitOne(30, false))
                {
                    break;
                }
            }
        }
    }
    catch (Exception e)
    {
        MainForm.LogExceptionToFile(e);
        errmsg = e.Message;
    }
    // Tear down metering and drain any buffered audio before reporting shutdown.
    if (_sampleChannel != null)
    {
        _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
        _sampleChannel = null;
    }
    if (_waveProvider != null)
    {
        if (_waveProvider.BufferedBytes > 0)
        {
            _waveProvider.ClearBuffer();
        }
    }
    ShutDown(errmsg);
}
//Stopwatch _stopwatch = new Stopwatch();
/// <summary>
/// WaveIn capture callback: assembles fixed-size PCM segments (fast path when the
/// callback delivers exactly one segment and the queue is empty, otherwise via the
/// input queue), applies mic boost and Speex processing, updates the peak level (dB),
/// Opus-encodes each segment and sends it over TCP (mirroring to the local mic
/// monitor buffer on successful send).
/// </summary>
/// <param name="sender">Unused capture source.</param>
/// <param name="e">Captured audio buffer and byte count.</param>
private void _waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // if(_stopwatch.ElapsedMilliseconds > 22)
    //Console.WriteLine($"Time: {_stopwatch.ElapsedMilliseconds} - Bytes: {e.BytesRecorded}");
    // _stopwatch.Restart();
    short[] pcmShort = null;
    if ((e.BytesRecorded / 2 == SEGMENT_FRAMES) && (_micInputQueue.Count == 0))
    {
        //perfect! exactly one segment and nothing pending - skip the queue entirely
        pcmShort = new short[SEGMENT_FRAMES];
        Buffer.BlockCopy(e.Buffer, 0, pcmShort, 0, e.BytesRecorded);
    }
    else
    {
        // NOTE(review): this enqueues individual *bytes* of the capture buffer,
        // while the sibling WASAPI handler enqueues 16-bit samples — looks like each
        // byte is being treated as one sample here; confirm the queue's element type
        // and intended semantics before relying on this path.
        for (var i = 0; i < e.BytesRecorded; i++)
        {
            _micInputQueue.Enqueue(e.Buffer[i]);
        }
    }
    //read out the queue
    while ((pcmShort != null) || (_micInputQueue.Count >= AudioManager.SEGMENT_FRAMES))
    {
        //null sound buffer so read from the queue
        // (FIX: an identical copy of this block appeared twice; the second was dead
        // code since pcmShort is always non-null after the first - removed.)
        if (pcmShort == null)
        {
            pcmShort = new short[AudioManager.SEGMENT_FRAMES];
            for (var i = 0; i < AudioManager.SEGMENT_FRAMES; i++)
            {
                pcmShort[i] = _micInputQueue.Dequeue();
            }
        }
        try
        {
            //volume boost pre-processing
            for (var i = 0; i < pcmShort.Length; i++)
            {
                // n.b. no clipping test going on here
                pcmShort[i] = (short)(pcmShort[i] * MicBoost);
            }
            //process with Speex
            _speex.Process(new ArraySegment<short>(pcmShort));
            float max = 0;
            for (var i = 0; i < pcmShort.Length; i++)
            {
                //determine peak
                if (pcmShort[i] > max)
                {
                    max = pcmShort[i];
                }
            }
            //convert peak to dB for the mic level meter
            MicMax = (float)VolumeConversionHelper.ConvertFloatToDB(max / 32768F);
            // Repack shorts as little-endian bytes for the encoder.
            var pcmBytes = new byte[pcmShort.Length * 2];
            Buffer.BlockCopy(pcmShort, 0, pcmBytes, 0, pcmBytes.Length);
            //encode as opus bytes
            int len;
            var buff = _encoder.Encode(pcmBytes, pcmBytes.Length, out len);
            if ((_tcpVoiceHandler != null) && (buff != null) && (len > 0))
            {
                //create copy sized exactly to the encoded length
                var encoded = new byte[len];
                Buffer.BlockCopy(buff, 0, encoded, 0, len);
                // Console.WriteLine("Sending: " + e.BytesRecorded);
                if (_tcpVoiceHandler.Send(encoded, len))
                {
                    //sent ok - also play the (unencoded) segment locally for sidetone
                    _micWaveOutBuffer?.AddSamples(pcmBytes, 0, pcmBytes.Length);
                }
            }
            else
            {
                Logger.Error($"Invalid Bytes for Encoding - {e.BytesRecorded} should be {SEGMENT_FRAMES} ");
            }
            _errorCount = 0;
        }
        catch (Exception ex)
        {
            // Rate-limit encode-error logging: log the first 9, then one final entry.
            _errorCount++;
            if (_errorCount < 10)
            {
                Logger.Error(ex, "Error encoding Opus! " + ex.Message);
            }
            else if (_errorCount == 10)
            {
                Logger.Error(ex, "Final Log of Error encoding Opus! " + ex.Message);
            }
        }
        // Force the next iteration to refill from the queue.
        pcmShort = null;
    }
}
/// <summary> /// Speichert Audiodaten im Buffer /// </summary> /// <param name="data">Das Audiosampel</param> public void BufferAudio(byte[] data) { outputStream.AddSamples(data, 0, data.Length); }