Example #1
        private void SoundCallback(Sound soundData)
        {
            if (DataAvailable == null || _needsSetup)
            {
                return;
            }

            var samples = new byte[soundData.SamplesSize];

            Marshal.Copy(soundData.SamplesData, samples, 0, (int)soundData.SamplesSize);

            _waveProvider.AddSamples(samples, 0, samples.Length);

            if (Listening && WaveOutProvider != null)
            {
                WaveOutProvider.AddSamples(samples, 0, samples.Length);
            }

            //forces processing of volume level without piping it out
            var sampleBuffer = new float[samples.Length];

            _meteringProvider.Read(sampleBuffer, 0, samples.Length);

            if (DataAvailable != null)
            {
                DataAvailable(this, new DataAvailableEventArgs((byte[])samples.Clone()));
            }
        }
Example #2
        private void AudioThread()
        {
            while (_stopEvent != null && !_stopEvent.WaitOne(0, false))
            {
                int dataLength = _audioStream.Read(_audioBuffer, 0, _audioBuffer.Length);
                if (DataAvailable != null)
                {
                    _waveProvider.AddSamples(_audioBuffer, 0, dataLength);

                    if (Listening)
                    {
                        WaveOutProvider.AddSamples(_audioBuffer, 0, dataLength);
                    }

                    //forces processing of volume level without piping it out
                    var sampleBuffer = new float[dataLength];
                    _sampleChannel.Read(sampleBuffer, 0, dataLength);

                    if (DataAvailable != null)
                    {
                        DataAvailable(this, new DataAvailableEventArgs((byte[])_audioBuffer.Clone()));
                    }
                }
            }
        }
Example #3
        private void AudioThread()
        {
            _abort = new ManualResetEvent(false);
            while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
            {
                int dataLength = _audioStream.Read(_audioBuffer, 0, _audioBuffer.Length);
                if (DataAvailable != null)
                {
                    _waveProvider.AddSamples(_audioBuffer, 0, dataLength);

                    if (Listening)
                    {
                        WaveOutProvider.AddSamples(_audioBuffer, 0, dataLength);
                    }

                    //forces processing of volume level without piping it out
                    var sampleBuffer = new float[dataLength];
                    int read         = _sampleChannel.Read(sampleBuffer, 0, dataLength);

                    DataAvailable?.Invoke(this, new DataAvailableEventArgs((byte[])_audioBuffer.Clone(), read));
                }
            }


            try
            {
                if (_sensor != null)
                {
                    _sensor.AudioSource?.Stop();

                    _sensor.Stop();
                    _sensor.SkeletonFrameReady -= SensorSkeletonFrameReady;
                    _sensor.ColorFrameReady    -= SensorColorFrameReady;
                    _sensor.DepthFrameReady    -= SensorDepthFrameReady;

                    _sensor.Dispose();

                    _sensor = null;
                }
            }
            catch
            {
                // ignored
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            }

            if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
            {
                _waveProvider.ClearBuffer();
            }

            Listening = false;

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            _abort.Close();
        }
Example #4
        void WaveInDataAvailable(object sender, WaveInEventArgs e)
        {
            if (_waveIn == null)
            {
                return;
            }

            var da = DataAvailable;

            if (da == null)
            {
                return;
            }
            var sc = _sampleChannel;

            if (sc == null)
            {
                return;
            }
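            //forces processing of volume level without piping it out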
            var sampleBuffer = new float[e.BytesRecorded];

            sc.Read(sampleBuffer, 0, e.BytesRecorded);

            if (Listening)
            {
                WaveOutProvider?.AddSamples(e.Buffer, 0, e.BytesRecorded);
            }
            var dae = new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded);

            da(this, dae);
        }
Example #5
        //void WaveInDataAvailable(object sender, WaveInEventArgs e)
        //{
        //    _isrunning = true;
        //    if (DataAvailable != null)
        //    {
        //        //forces processing of volume level without piping it out
        //        if (_sampleChannel != null)
        //        {
        //            var sampleBuffer = new float[e.BytesRecorded];
        //            _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);

        //            if (Listening && WaveOutProvider!=null)
        //            {
        //                WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
        //            }
        //            var da = new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded);
        //            DataAvailable(this, da);
        //        }
        //    }
        //}

        //private long l = 0;
        //private DateTime d = DateTime.Now;

        void WaveInDataAvailable(object sender, WaveInEventArgs e)
        {
            _isrunning = true;
            if (DataAvailable != null)
            {
                //forces processing of volume level without piping it out
                if (_sampleChannel != null)
                {
                    var sampleBuffer = new float[e.BytesRecorded];
                    _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);

                    if (Listening && WaveOutProvider != null)
                    {
                        //if (l == 0)
                        //{
                        //    d = DateTime.Now;
                        //}
                        //l += e.BytesRecorded;
                        WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);

                        //double s = (DateTime.Now - d).TotalSeconds;
                        //Debug.WriteLine(l + " bytes in " + s + " seconds, should be " + RecordingFormat.AverageBytesPerSecond * s);
                    }
                    //if (!Listening && l > 0)
                    //{
                    //    l = 0;
                    //}
                    var da = new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded);
                    DataAvailable(this, da);
                }
            }
        }
Example #6
        private void SoundCallback(Sound soundData)
        {
            if (DataAvailable == null || _needsSetup)
            {
                return;
            }

            if (_sampleChannel != null)
            {
                var samples = new byte[soundData.SamplesSize];
                Marshal.Copy(soundData.SamplesData, samples, 0, (int)soundData.SamplesSize);

                _waveProvider.AddSamples(samples, 0, samples.Length);

                var sampleBuffer = new float[samples.Length];
                _sampleChannel.Read(sampleBuffer, 0, samples.Length);

                if (Listening && WaveOutProvider != null)
                {
                    WaveOutProvider.AddSamples(samples, 0, samples.Length);
                }
                var da = new DataAvailableEventArgs((byte[])samples.Clone());
                if (DataAvailable != null)
                {
                    DataAvailable(this, da);
                }
            }
        }
Example #7
        void ProcessAudio(byte[] data)
        {
            try
            {
                if (DataAvailable != null)
                {
                    _waveProvider.AddSamples(data, 0, data.Length);

                    var sampleBuffer = new float[data.Length];
                    SampleChannel.Read(sampleBuffer, 0, data.Length);

                    DataAvailable(this, new DataAvailableEventArgs((byte[])data.Clone()));

                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(data, 0, data.Length);
                    }
                }
            }
            catch (NullReferenceException)
            {
                //DataAvailable can be removed at any time
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex);
            }
        }
Example #8
        void WaveInDataAvailable(object sender, WaveInEventArgs e)
        {
            if (_waveIn == null)
            {
                return;
            }
            try {
                var da = DataAvailable;
                if (da == null)
                {
                    return;
                }
                var sc = _sampleChannel;
                if (sc == null)
                {
                    return;
                }
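                //forces processing of volume level without piping it out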
                var sampleBuffer = new float[e.BytesRecorded];
                int read         = sc.Read(sampleBuffer, 0, e.BytesRecorded);

                da(this, new DataAvailableEventArgs((byte[])e.Buffer.Clone(), read));

                if (Listening)
                {
                    WaveOutProvider?.AddSamples(e.Buffer, 0, read);
                }
            }
            catch (Exception ex)
            {
                var af = AudioFinished;
                af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));

                Logger.LogExceptionToFile(ex, "AudioDevice");
            }
        }
Example #9
        private void SoundCallback(Sound soundData)
        {
            if (DataAvailable == null || _needsSetup)
            {
                return;
            }

            var data = new byte[soundData.SamplesSize];

            Marshal.Copy(soundData.SamplesData, data, 0, (int)soundData.SamplesSize);

            if (_realChannels > 2)
            {
                //resample audio to 2 channels
                data = ToStereo(data, _realChannels);
            }

            _waveProvider.AddSamples(data, 0, data.Length);

            if (Listening && WaveOutProvider != null)
            {
                WaveOutProvider.AddSamples(data, 0, data.Length);
            }

            //forces processing of volume level without piping it out
            var sampleBuffer = new float[data.Length];

            _sampleChannel.Read(sampleBuffer, 0, data.Length);

            if (DataAvailable != null)
            {
                DataAvailable(this, new DataAvailableEventArgs((byte[])data.Clone()));
            }
        }
Example #10
        private void WebStreamListener()
        {
            try
            {
                var data = new byte[6400];
                if (_socket != null)
                {
                    while (!stopEvent.WaitOne(0, false))
                    {
                        if (DataAvailable != null)
                        {
                            int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);

                            if (_sampleChannel != null)
                            {
                                _waveProvider.AddSamples(data, 0, recbytesize);

                                var sampleBuffer = new float[recbytesize];
                                _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                if (Listening && WaveOutProvider != null)
                                {
                                    WaveOutProvider.AddSamples(data, 0, recbytesize);
                                }
                                var da = new DataAvailableEventArgs((byte[])data.Clone());
                                DataAvailable(this, da);
                            }
                        }
                        else
                        {
                            break;
                        }
                        // need to stop ?
                        if (stopEvent.WaitOne(0, false))
                        {
                            break;
                        }
                    }
                }

                if (AudioFinished != null)
                {
                    AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
                }
            }
            catch (Exception e)
            {
                if (AudioSourceError != null)
                {
                    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
                }
                Log.Error("", e);//MainForm.LogExceptionToFile(e);
            }
            if (_socket != null)
            {
                _socket.Close();
                _socket = null;
            }
        }
Example #11
        private void EventManager()
        {
            Bitmap frame;

            while (_stopEvent != null && !_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
            {
                try
                {
                    var da = DataAvailable;
                    var nf = NewFrame;

                    if (_videoQueue.TryDequeue(out frame))
                    {
                        if (frame != null)
                        {
                            using (var b = (Bitmap)frame.Clone())
                            {
                                //new frame
                                nf?.Invoke(this, new NewFrameEventArgs(b));
                            }
                        }
                    }


                    byte[] audio;
                    if (!_audioQueue.TryDequeue(out audio))
                    {
                        continue;
                    }
                    da?.Invoke(this, new DataAvailableEventArgs(audio));

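                    //forces processing of volume level without piping it out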
                    var sampleBuffer = new float[audio.Length];
                    _sampleChannel.Read(sampleBuffer, 0, audio.Length);

                    _waveProvider.AddSamples(audio, 0, audio.Length);

                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(audio, 0, audio.Length);
                    }
                }
                catch (Exception ex)
                {
                    MainForm.LogExceptionToFile(ex, "VLC");
                }
            }
            try
            {
                while (_videoQueue != null && _videoQueue.TryDequeue(out frame))
                {
                    frame?.Dispose();
                }
            }
            catch
            {
                // ignored
            }
        }
Example #12
        private void DirectStreamListener()
        {
            try
            {
                var data = new byte[PacketSize];
                if (_stream != null)
                {
                    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
                    {
                        var da = DataAvailable;
                        if (da != null)
                        {
                            int recbytesize = _stream.Read(data, 0, PacketSize);
                            if (recbytesize > 0)
                            {
                                if (_sampleChannel != null)
                                {
                                    _waveProvider.AddSamples(data, 0, recbytesize);

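                                    //forces processing of volume level without piping it out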
                                    var sampleBuffer = new float[recbytesize];
                                    int read         = _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                    da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));

                                    if (Listening)
                                    {
                                        WaveOutProvider?.AddSamples(data, 0, read);
                                    }
                                }
                            }
                            else
                            {
                                break;
                            }


                            if (_stopEvent.WaitOne(Interval, false))
                            {
                                break;
                            }
                        }
                    }
                }

                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            }
            catch (Exception e)
            {
                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                //if (AudioSourceError!=null)
                //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
                MainForm.LogExceptionToFile(e, "Direct");
            }

            _stream?.Close();
            _stream = null;
        }
Example #13
        private void WebStreamListener()
        {
            try
            {
                var data = new byte[6400];
                if (_socket != null)
                {
                    while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
                    {
                        var da = DataAvailable;
                        if (da != null)
                        {
                            int recbytesize = _socket.Receive(data, 0, 6400, SocketFlags.None);

                            if (_sampleChannel != null)
                            {
                                _waveProvider.AddSamples(data, 0, recbytesize);

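                                //forces processing of volume level without piping it out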
                                var sampleBuffer = new float[recbytesize];
                                int read         = _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));

                                if (Listening)
                                {
                                    WaveOutProvider?.AddSamples(data, 0, read);
                                }
                            }
                        }
                        else
                        {
                            break;
                        }
                        // need to stop ?
                        if (_stopEvent.WaitOne(0, false))
                        {
                            break;
                        }
                    }
                }

                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            }
            catch (Exception e)
            {
                //if (AudioSourceError!=null)
                //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));
                Logger.LogExceptionToFile(e, "WebStream");
            }
            if (_socket != null)
            {
                _socket.Close();
                _socket = null;
            }
        }
Example #14
        private void EventManager()
        {
            Bitmap frame;

            while (!_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
            {
                try
                {
                    if (_videoQueue.TryDequeue(out frame))
                    {
                        if (frame != null)
                        {
                            NewFrame?.Invoke(this, new NewFrameEventArgs(frame));
                            frame.Dispose();
                        }
                    }


                    byte[] audio;
                    if (!_audioQueue.TryDequeue(out audio))
                    {
                        continue;
                    }

                    var da = DataAvailable;
                    da?.Invoke(this, new DataAvailableEventArgs(audio));

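                    //forces processing of volume level without piping it out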
                    var sampleBuffer = new float[audio.Length];
                    int read         = SampleChannel.Read(sampleBuffer, 0, audio.Length);

                    _waveProvider?.AddSamples(audio, 0, read);

                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(audio, 0, read);
                    }
                }
                catch (Exception ex)
                {
                    Logger.LogExceptionToFile(ex, "FFMPEG");
                }
            }
            try
            {
                while (_videoQueue != null && _videoQueue.TryDequeue(out frame))
                {
                    frame?.Dispose();
                }
            }
            catch (Exception ex)
            {
                Logger.LogExceptionToFile(ex, "FFMPEG");
            }
        }
Example #15
        private void EventManager()
        {
            byte[] audio;
            Bitmap frame;

            while (!_stopEvent.WaitOne(5, false) && !MainForm.ShuttingDown)
            {
                var da = DataAvailable;
                var nf = NewFrame;

                if (_videoQueue.TryDequeue(out frame))
                {
                    //needs to be cloned for some weird reason
                    var b = (Bitmap)frame.Clone();
                    //new frame
                    if (nf != null)
                    {
                        nf.Invoke(this, new NewFrameEventArgs(b));
                    }

                    b.Dispose();
                    b = null;
                }


                if (_audioQueue.TryDequeue(out audio))
                {
                    if (da != null)
                    {
                        da.Invoke(this, new DataAvailableEventArgs(audio));
                    }

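                    //forces processing of volume level without piping it out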
                    var sampleBuffer = new float[audio.Length];
                    _sampleChannel.Read(sampleBuffer, 0, audio.Length);

                    _waveProvider.AddSamples(audio, 0, audio.Length);

                    if (WaveOutProvider != null && Listening)
                    {
                        WaveOutProvider.AddSamples(audio, 0, audio.Length);
                    }
                }
            }
            while (_videoQueue.TryDequeue(out frame))
            {
                frame.Dispose();
                frame = null;
            }
        }
Example #16
        private void ProcessAudio(IntPtr data, IntPtr samples, uint count, long pts)
        {
            if (!IsRunning || _ignoreAudio || _quit)
            {
                return;
            }
            _lastFrame  = DateTime.UtcNow;
            _connecting = false;
            var da    = DataAvailable;
            int bytes = (int)count * 2;//(16 bit, 1 channel)

            if (HasAudioStream != null)
            {
                HasAudioStream?.Invoke(this, EventArgs.Empty);
                HasAudioStream = null;
            }

            if (da != null)
            {
                var buf = new byte[bytes];
                Marshal.Copy(samples, buf, 0, bytes);

                if (!_audioInited)
                {
                    _audioInited  = true;
                    _waveProvider = new BufferedWaveProvider(RecordingFormat)
                    {
                        DiscardOnBufferOverflow = true,
                        BufferDuration          = TimeSpan.FromMilliseconds(200)
                    };
                    _sampleChannel = new SampleChannel(_waveProvider);

                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                }

                _waveProvider.AddSamples(buf, 0, bytes);

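                //forces processing of volume level without piping it out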
                var sampleBuffer = new float[bytes];
                var read         = _sampleChannel.Read(sampleBuffer, 0, bytes);

                da(this, new DataAvailableEventArgs(buf, bytes));

                if (Listening)
                {
                    WaveOutProvider?.AddSamples(buf, 0, bytes);
                }
            }
        }
Example #17
 void WaveInDataAvailable(object sender, WaveInEventArgs e)
 {
     _isrunning = true;
     if (DataAvailable != null)
     {
         //forces processing of volume level without piping it out
         if (_sampleChannel != null)
         {
             var sampleBuffer = new float[e.BytesRecorded];
             _sampleChannel.Read(sampleBuffer, 0, e.BytesRecorded);
         }
         if (Listening && WaveOutProvider != null)
         {
             WaveOutProvider.AddSamples(e.Buffer, 0, e.BytesRecorded);
         }
         var da = new DataAvailableEventArgs((byte[])e.Buffer.Clone(), e.BytesRecorded);
         DataAvailable(this, da);
     }
 }
Example #18
        private void SoundCallback(Sound soundData)
        {
            var da = DataAvailable;

            if (da == null || _needsSetup)
            {
                return;
            }

            try
            {
                var data = new byte[soundData.SamplesSize];
                Marshal.Copy(soundData.SamplesData, data, 0, (int)soundData.SamplesSize);

                if (_realChannels > 2)
                {
                    //resample audio to 2 channels
                    data = ToStereo(data, _realChannels);
                }

                _waveProvider?.AddSamples(data, 0, data.Length);

                //forces processing of volume level without piping it out
                var sampleBuffer = new float[data.Length];
                int read         = _sampleChannel.Read(sampleBuffer, 0, data.Length);

                da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));

                if (Listening)
                {
                    WaveOutProvider?.AddSamples(data, 0, read);
                }
            }
            catch
            {
                //can fail at shutdown
            }
        }
Example #19
        private void StreamWav()
        {
            var            res     = ReasonToFinishPlaying.StoppedByUser;
            HttpWebRequest request = null;

            try
            {
                using (HttpWebResponse resp = ConnectionFactory.GetResponse(_source, out request))
                {
                    //quarter of a second of 16-bit samples
                    var data = new byte[((RecordingFormat.SampleRate / 4) * 2) * RecordingFormat.Channels];

                    using (var stream = resp.GetResponseStream())
                    {
                        if (stream == null)
                        {
                            throw new Exception("Stream is null");
                        }

                        while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
                        {
                            var da = DataAvailable;
                            if (da != null)
                            {
                                int recbytesize = stream.Read(data, 0, data.Length);
                                if (recbytesize == 0)
                                {
                                    throw new Exception("lost stream");
                                }


                                if (_sampleChannel != null)
                                {
                                    _waveProvider.AddSamples(data, 0, recbytesize);

                                    var sampleBuffer = new float[recbytesize];
                                    _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                    if (Listening && WaveOutProvider != null)
                                    {
                                        WaveOutProvider.AddSamples(data, 0, recbytesize);
                                    }
                                    var dae = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                                    da(this, dae);
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                }

                if (AudioFinished != null)
                {
                    AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
                }
            }
            catch (Exception ex)
            {
                var af = AudioFinished;
                if (af != null)
                {
                    af(this, ReasonToFinishPlaying.DeviceLost);
                }

                MainForm.LogExceptionToFile(ex, "WavStream");
            }
            finally
            {
                // abort request
                if (request != null)
                {
                    try
                    {
                        request.Abort();
                    }
                    catch { }
                    request = null;
                }
            }
        }
Example #20
        private void ReadFrames()
        {
            AVFrame *   pConvertedFrame       = null;
            sbyte *     pConvertedFrameBuffer = null;
            SwsContext *pConvertContext       = null;

            BufferedWaveProvider waveProvider  = null;
            SampleChannel        sampleChannel = null;

            bool audioInited = false;
            bool videoInited = false;
            var  packet      = new AVPacket();

            do
            {
                ffmpeg.av_init_packet(&packet);

                AVFrame *frame = ffmpeg.av_frame_alloc();
                ffmpeg.av_frame_unref(frame);

                if (ffmpeg.av_read_frame(_formatContext, &packet) < 0)
                {
                    _stopReadingFrames = true;
                    _res = ReasonToFinishPlaying.VideoSourceError;
                    break;
                }

                if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
                {
                    break;
                }

                AVPacket packetTemp = packet;
                var      nf         = NewFrame;
                var      da         = DataAvailable;

                _lastPacket = DateTime.UtcNow;
                if (_audioStream != null && packetTemp.stream_index == _audioStream->index)
                {
                    if (HasAudioStream != null)
                    {
                        HasAudioStream?.Invoke(this, EventArgs.Empty);
                        HasAudioStream = null;
                    }
                    if (da != null)
                    {
                        int  s       = 0;
                        var  buffer  = new sbyte[_audioCodecContext->sample_rate * 2];
                        var  tbuffer = new sbyte[_audioCodecContext->sample_rate * 2];
                        bool b       = false;

                        fixed(sbyte **outPtrs = new sbyte *[32])
                        {
                            fixed(sbyte *bPtr = &tbuffer[0])
                            {
                                outPtrs[0] = bPtr;
                                do
                                {
                                    int gotFrame = 0;
                                    int inUsed   = ffmpeg.avcodec_decode_audio4(_audioCodecContext, frame, &gotFrame,
                                                                                &packetTemp);

                                    if (inUsed < 0 || gotFrame == 0)
                                    {
                                        b = true;
                                        break;
                                    }

                                    int numSamplesOut = ffmpeg.swr_convert(_swrContext,
                                                                           outPtrs,
                                                                           _audioCodecContext->sample_rate,
                                                                           &frame->data0,
                                                                           frame->nb_samples);

                                    var l = numSamplesOut * 2 * _audioCodecContext->channels;
                                    Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                    s += l;


                                    packetTemp.data += inUsed;
                                    packetTemp.size -= inUsed;
                                } while (packetTemp.size > 0);
                            }
                        }

                        if (b)
                        {
                            break;
                        }

                        ffmpeg.av_free_packet(&packet);
                        ffmpeg.av_frame_free(&frame);


                        if (!audioInited)
                        {
                            audioInited     = true;
                            RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16,
                                                             _audioCodecContext->channels);
                            waveProvider = new BufferedWaveProvider(RecordingFormat)
                            {
                                DiscardOnBufferOverflow = true,
                                BufferDuration          =
                                    TimeSpan.FromMilliseconds(500)
                            };
                            sampleChannel = new SampleChannel(waveProvider);

                            sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                        }

                        byte[] ba = new byte[s];
                        Buffer.BlockCopy(buffer, 0, ba, 0, s);


                        waveProvider.AddSamples(ba, 0, s);

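                        //forces processing of volume level without piping it out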
                        var sampleBuffer = new float[s];
                        int read         = sampleChannel.Read(sampleBuffer, 0, s);


                        da(this, new DataAvailableEventArgs(ba, read));


                        if (Listening)
                        {
                            WaveOutProvider?.AddSamples(ba, 0, read);
                        }
                    }
                }

                if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index)
                {
                    int frameFinished = 0;
                    //decode video frame

                    int ret = ffmpeg.avcodec_decode_video2(_codecContext, frame, &frameFinished, &packetTemp);
                    if (ret < 0)
                    {
                        ffmpeg.av_free_packet(&packet);
                        ffmpeg.av_frame_free(&frame);
                        break;
                    }

                    if (frameFinished == 1)
                    {
                        if (!videoInited)
                        {
                            videoInited     = true;
                            pConvertedFrame = ffmpeg.av_frame_alloc();
                            var convertedFrameBufferSize = ffmpeg.avpicture_get_size(AVPixelFormat.AV_PIX_FMT_BGR24,
                                                                                     _codecContext->width, _codecContext->height);

                            pConvertedFrameBuffer = (sbyte *)ffmpeg.av_malloc((ulong)convertedFrameBufferSize);

                            ffmpeg.avpicture_fill((AVPicture *)pConvertedFrame, pConvertedFrameBuffer,
                                                  AVPixelFormat.AV_PIX_FMT_BGR24, _codecContext->width, _codecContext->height);

                            pConvertContext = ffmpeg.sws_getContext(_codecContext->width, _codecContext->height,
                                                                    _codecContext->pix_fmt, _codecContext->width, _codecContext->height,
                                                                    AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                        }
                        var src       = &frame->data0;
                        var dst       = &pConvertedFrame->data0;
                        var srcStride = frame->linesize;
                        var dstStride = pConvertedFrame->linesize;
                        ffmpeg.sws_scale(pConvertContext, src, srcStride, 0, _codecContext->height, dst, dstStride);

                        var convertedFrameAddress = pConvertedFrame->data0;
                        if (convertedFrameAddress != null)
                        {
                            var imageBufferPtr = new IntPtr(convertedFrameAddress);

                            var linesize = dstStride[0];

                            if (frame->decode_error_flags > 0)
                            {
                                ffmpeg.av_free_packet(&packet);
                                ffmpeg.av_frame_free(&frame);
                                break;
                            }

                            using (
                                var mat = new Bitmap(_codecContext->width, _codecContext->height, linesize,
                                                     PixelFormat.Format24bppRgb, imageBufferPtr))
                            {
                                var nfe = new NewFrameEventArgs((Bitmap)mat.Clone());
                                nf.Invoke(this, nfe);
                            }

                            _lastVideoFrame = DateTime.UtcNow;
                        }
                    }
                }

                if (_videoStream != null)
                {
                    if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds > _timeout)
                    {
                        _res = ReasonToFinishPlaying.DeviceLost;
                        _stopReadingFrames = true;
                    }
                }

                ffmpeg.av_free_packet(&packet);
                ffmpeg.av_frame_free(&frame);
            } while (!_stopReadingFrames && !MainForm.ShuttingDown);


            try
            {
                Program.FfmpegMutex.WaitOne();

                if (pConvertedFrame != null)
                {
                    ffmpeg.av_free(pConvertedFrame);
                }

                if (pConvertedFrameBuffer != null)
                {
                    ffmpeg.av_free(pConvertedFrameBuffer);
                }

                if (_formatContext != null)
                {
                    if (_formatContext->streams != null)
                    {
                        int j = (int)_formatContext->nb_streams;
                        for (var i = j - 1; i >= 0; i--)
                        {
                            AVStream *stream = _formatContext->streams[i];

                            if (stream != null && stream->codec != null && stream->codec->codec != null)
                            {
                                stream->discard = AVDiscard.AVDISCARD_ALL;
                                ffmpeg.avcodec_close(stream->codec);
                            }
                        }
                    }
                    fixed(AVFormatContext **f = &_formatContext)
                    {
                        ffmpeg.avformat_close_input(f);
                    }
                    _formatContext = null;
                }

                _videoStream       = null;
                _audioStream       = null;
                _audioCodecContext = null;
                _codecContext      = null;

                if (_swrContext != null)
                {
                    fixed(SwrContext **s = &_swrContext)
                    {
                        ffmpeg.swr_free(s);
                    }
                    _swrContext = null;
                }

                if (pConvertContext != null)
                {
                    ffmpeg.sws_freeContext(pConvertContext);
                }

                if (sampleChannel != null)
                {
                    sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                    sampleChannel = null;
                }
            }
            catch (Exception ex)
            {
                Logger.LogException(ex, "Media Stream (close)");
            }
            finally
            {
                try
                {
                    Program.FfmpegMutex.ReleaseMutex();
                }
                catch
                {
                }
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
        }
Example #21
        private void DirectStreamListener()
        {
            try
            {
                var data = new byte[PacketSize];
                if (_stream != null)
                {
                    while (!_stopEvent.WaitOne(0, false) && !MainForm.Reallyclose)
                    {
                        if (DataAvailable != null)
                        {
                            int recbytesize = _stream.Read(data, 0, PacketSize);
                            if (recbytesize > 0)
                            {
                                if (_sampleChannel != null)
                                {
                                    _waveProvider.AddSamples(data, 0, recbytesize);

                                    var sampleBuffer = new float[recbytesize];
                                    _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                    if (Listening && WaveOutProvider != null)
                                    {
                                        WaveOutProvider.AddSamples(data, 0, recbytesize);
                                    }
                                    var da = new DataAvailableEventArgs((byte[])data.Clone(), recbytesize);
                                    DataAvailable(this, da);
                                }
                            }
                            else
                            {
                                break;
                            }


                            if (_stopEvent.WaitOne(Interval, false))
                            {
                                break;
                            }
                        }
                    }
                }

                if (AudioFinished != null)
                {
                    AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
                }
            }
            catch (Exception e)
            {
                if (AudioFinished != null)
                {
                    AudioFinished(this, ReasonToFinishPlaying.DeviceLost);
                }
                //if (AudioSourceError!=null)
                //    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
                MainForm.LogExceptionToFile(e);
            }
            if (_stream != null)
            {
                _stream.Close();
                _stream = null;
            }
        }
Example #22
        private void ReadFrames()
        {
            pConvertedFrameBuffer = IntPtr.Zero;
            pConvertContext       = null;

            var audioInited = false;
            var videoInited = false;

            byte[] buffer      = null, tbuffer = null;
            var    dstData     = new byte_ptrArray4();
            var    dstLinesize = new int_array4();
            BufferedWaveProvider waveProvider = null;

            sampleChannel = null;
            var packet = new AVPacket();

            do
            {
                ffmpeg.av_init_packet(&packet);
                if (_audioCodecContext != null && buffer == null)
                {
                    buffer  = new byte[_audioCodecContext->sample_rate * 2];
                    tbuffer = new byte[_audioCodecContext->sample_rate * 2];
                }

                if (Log("AV_READ_FRAME", ffmpeg.av_read_frame(_formatContext, &packet)))
                {
                    break;
                }


                if ((packet.flags & ffmpeg.AV_PKT_FLAG_CORRUPT) == ffmpeg.AV_PKT_FLAG_CORRUPT)
                {
                    break;
                }

                var nf = NewFrame;
                var da = DataAvailable;

                _lastPacket = DateTime.UtcNow;

                var ret = -11; //EAGAIN
                if (_audioStream != null && packet.stream_index == _audioStream->index && _audioCodecContext != null && !_ignoreAudio)
                {
                    if (HasAudioStream != null)
                    {
                        HasAudioStream?.Invoke(this, EventArgs.Empty);
                        HasAudioStream = null;
                    }

                    if (da != null)
                    {
                        var s = 0;
                        fixed(byte **outPtrs = new byte *[32])
                        {
                            fixed(byte *bPtr = &tbuffer[0])
                            {
                                outPtrs[0] = bPtr;
                                var af = ffmpeg.av_frame_alloc();

                                ffmpeg.avcodec_send_packet(_audioCodecContext, &packet);
                                do
                                {
                                    ret = ffmpeg.avcodec_receive_frame(_audioCodecContext, af);

                                    if (ret == 0)
                                    {
                                        int numSamplesOut = 0;
                                        try
                                        {
                                            if (_swrContext == null)
                                            {
                                                //need to do this here as send_packet can change channel layout and throw an exception below
                                                initSWR();
                                            }
                                            var dat = af->data[0];

                                            numSamplesOut = ffmpeg.swr_convert(_swrContext,
                                                                               outPtrs,
                                                                               _audioCodecContext->sample_rate,
                                                                               &dat,
                                                                               af->nb_samples);
                                        }
                                        catch (Exception ex)
                                        {
                                            Logger.LogException(ex, "MediaStream - Audio Read");
                                            _ignoreAudio = true;
                                            break;
                                        }

                                        if (numSamplesOut > 0)
                                        {
                                            var l = numSamplesOut * 2 * OutFormat.Channels;
                                            Buffer.BlockCopy(tbuffer, 0, buffer, s, l);
                                            s += l;
                                        }
                                        else
                                        {
                                            ret = numSamplesOut; //(error)
                                        }
                                    }
                                    if (af->decode_error_flags > 0)
                                    {
                                        break;
                                    }
                                } while (ret == 0);
                                ffmpeg.av_frame_free(&af);
                                if (s > 0)
                                {
                                    var ba = new byte[s];
                                    Buffer.BlockCopy(buffer, 0, ba, 0, s);

                                    if (!audioInited)
                                    {
                                        audioInited     = true;
                                        RecordingFormat = new WaveFormat(_audioCodecContext->sample_rate, 16,
                                                                         _audioCodecContext->channels);

                                        waveProvider = new BufferedWaveProvider(RecordingFormat)
                                        {
                                            DiscardOnBufferOverflow = true,
                                            BufferDuration          = TimeSpan.FromMilliseconds(200)
                                        };
                                        sampleChannel = new SampleChannel(waveProvider);

                                        sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                    }


                                    waveProvider.AddSamples(ba, 0, s);

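                                    //forces processing of volume level without piping it out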
                                    var sampleBuffer = new float[s];
                                    var read         = sampleChannel.Read(sampleBuffer, 0, s);


                                    da(this, new DataAvailableEventArgs(ba, s));


                                    if (Listening)
                                    {
                                        WaveOutProvider?.AddSamples(ba, 0, read);
                                    }
                                }
                            }
                        }
                    }
                }

                if (nf != null && _videoStream != null && packet.stream_index == _videoStream->index &&
                    _videoCodecContext != null)
                {
                    var ef = ShouldEmitFrame;
                    ffmpeg.avcodec_send_packet(_videoCodecContext, &packet);
                    do
                    {
                        var vf = ffmpeg.av_frame_alloc();
                        ret = ffmpeg.avcodec_receive_frame(_videoCodecContext, vf);
                        if (ret == 0 && ef)
                        {
                            AVPixelFormat srcFmt;
                            if (_hwDeviceCtx != null)
                            {
                                srcFmt = AVPixelFormat.AV_PIX_FMT_NV12;
                                var output = ffmpeg.av_frame_alloc();
                                ffmpeg.av_hwframe_transfer_data(output, vf, 0);
                                ffmpeg.av_frame_copy_props(output, vf);
                                ffmpeg.av_frame_free(&vf);
                                vf = output;
                            }
                            else
                            {
                                srcFmt = (AVPixelFormat)vf->format;
                            }

                            if (!videoInited)
                            {
                                videoInited = true;

                                _finalSize = Helper.CalcResizeSize(_source.settings.resize, new Size(_videoCodecContext->width, _videoCodecContext->height), new Size(_source.settings.resizeWidth, _source.settings.resizeHeight));

                                var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                                pConvertedFrameBuffer = Marshal.AllocHGlobal(convertedFrameBufferSize);
                                ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte *)pConvertedFrameBuffer, AVPixelFormat.AV_PIX_FMT_BGR24, _finalSize.Width, _finalSize.Height, 1);
                                pConvertContext = ffmpeg.sws_getContext(_videoCodecContext->width, _videoCodecContext->height, NormalizePixelFormat(srcFmt), _finalSize.Width, _finalSize.Height, AVPixelFormat.AV_PIX_FMT_BGR24, ffmpeg.SWS_FAST_BILINEAR, null, null, null);
                            }

                            Log("SWS_SCALE", ffmpeg.sws_scale(pConvertContext, vf->data, vf->linesize, 0, _videoCodecContext->height, dstData, dstLinesize));


                            if (vf->decode_error_flags > 0)
                            {
                                ffmpeg.av_frame_free(&vf);
                                break;
                            }

                            using (
                                var mat = new Bitmap(_finalSize.Width, _finalSize.Height, dstLinesize[0],
                                                     PixelFormat.Format24bppRgb, pConvertedFrameBuffer))
                            {
                                var nfe = new NewFrameEventArgs(mat);
                                nf.Invoke(this, nfe);
                            }

                            _lastVideoFrame = DateTime.UtcNow;
                            ffmpeg.av_frame_free(&vf);
                            break;
                        }
                        ffmpeg.av_frame_free(&vf);
                    } while (ret == 0);
                }

                if (nf != null && _videoStream != null)
                {
                    if ((DateTime.UtcNow - _lastVideoFrame).TotalMilliseconds * 1000 > _timeoutMicroSeconds)
                    {
                        _res   = ReasonToFinishPlaying.DeviceLost;
                        _abort = true;
                    }
                }

                ffmpeg.av_packet_unref(&packet);
                if (ret == -11)
                {
                    Thread.Sleep(10);
                }
            } while (!_abort && !MainForm.ShuttingDown);

            NewFrame?.Invoke(this, new NewFrameEventArgs(null));

            CleanUp();
        }
Example #23
        private void FfmpegListener()
        {
            ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

            VideoFileReader vfr = null;

            Program.WriterMutex.WaitOne();
            try
            {
                vfr = new VideoFileReader();
                vfr.Open(_source);
            }
            catch (Exception ex)
            {
                Log.Error("", ex);//MainForm.LogExceptionToFile(ex);
            }
            Program.WriterMutex.ReleaseMutex();
            if (vfr == null || !vfr.IsOpen)
            {
                if (PlayingFinished != null)
                {
                    PlayingFinished(this, ReasonToFinishPlaying.VideoSourceError);
                }
                return;
            }
            bool hasaudio = false;

            if (vfr.Channels > 0)
            {
                hasaudio        = true;
                RecordingFormat = new WaveFormat(vfr.SampleRate, 16, vfr.Channels);

                WaveOutProvider = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true
                };
                _waveProvider = new BufferedWaveProvider(RecordingFormat)
                {
                    DiscardOnBufferOverflow = true
                };


                _sampleChannel    = new SampleChannel(_waveProvider);
                _meteringProvider = new MeteringSampleProvider(_sampleChannel);
                _meteringProvider.StreamVolume += MeteringProviderStreamVolume;

                if (HasAudioStream != null)
                {
                    HasAudioStream(this, EventArgs.Empty);
                }
            }

            int interval = 1000 / ((vfr.FrameRate == 0) ? 25 : vfr.FrameRate);

            byte[] data;
            Bitmap frame;

            try
            {
                while (!_stopEvent.WaitOne(0, false))
                {
                    DateTime start = DateTime.Now;
                    frame = vfr.ReadVideoFrame();
                    if (frame == null)
                    {
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }

                    if (NewFrame != null)
                    {
                        NewFrame(this, new NewFrameEventArgs(frame));
                    }
                    frame.Dispose();

                    if (hasaudio)
                    {
                        data = vfr.ReadAudioFrame();
                        if (DataAvailable != null)
                        {
                            _waveProvider.AddSamples(data, 0, data.Length);

                            if (Listening)
                            {
                                WaveOutProvider.AddSamples(data, 0, data.Length);
                            }

                            _mFramesReceived++;

                            //forces processing of volume level without piping it out
                            var sampleBuffer = new float[data.Length];

                            _meteringProvider.Read(sampleBuffer, 0, data.Length);
                            DataAvailable(this, new DataAvailableEventArgs((byte[])data.Clone()));
                        }
                    }

                    if (interval > 0)
                    {
                        // get frame extract duration
                        TimeSpan span = DateTime.Now.Subtract(start);

                        // milliseconds to sleep
                        int msec = interval - (int)span.TotalMilliseconds;

                        if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                        {
                            break;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                if (VideoSourceError != null)
                {
                    VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
                }
                Log.Error("", e);//MainForm.LogExceptionToFile(e);
                reasonToStop = ReasonToFinishPlaying.DeviceLost;
            }
            if (PlayingFinished != null)
            {
                PlayingFinished(this, reasonToStop);
            }
        }
Example #24
        private void DirectStreamListener()
        {
            _abort = new ManualResetEvent(false);
            try
            {
                var data = new byte[PacketSize];
                if (_stream != null)
                {
                    while (!_abort.WaitOne(0) && !MainForm.ShuttingDown)
                    {
                        var da = DataAvailable;
                        if (da != null)
                        {
                            int recbytesize = _stream.Read(data, 0, PacketSize);
                            if (recbytesize > 0)
                            {
                                if (_sampleChannel != null)
                                {
                                    _waveProvider.AddSamples(data, 0, recbytesize);

                                    // forces processing of volume level without piping it out
                                    var sampleBuffer = new float[recbytesize];
                                    int read         = _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                    da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));

                                    if (Listening)
                                    {
                                        WaveOutProvider?.AddSamples(data, 0, read);
                                    }
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                }
            }
            catch (Exception e)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                Logger.LogException(e, "Direct");
            }

            _stream?.Close();
            _stream = null;


            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
            }

            if (_waveProvider != null && _waveProvider.BufferedBytes > 0)
            {
                _waveProvider.ClearBuffer();
            }

            if (WaveOutProvider?.BufferedBytes > 0)
            {
                WaveOutProvider.ClearBuffer();
            }

            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            _abort.Close();
        }
Example #25
        private void StreamMP3()
        {
            _abort = new ManualResetEvent(false);
            HttpWebRequest request = null;

            try
            {
                var resp   = _connFactory.GetResponse(_source, "GET", "", out request);
                var buffer = new byte[16384 * 4]; // needs to be big enough to hold a decompressed frame
                IMp3FrameDecompressor decompressor = null;

                using (var responseStream = resp.GetResponseStream())
                {
                    var readFullyStream = new ReadFullyStream(responseStream);
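                    // ReadFullyStream keeps reading until the requested byte count arrives,
                    // so Mp3Frame.LoadFromStream below always sees complete frames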
                    while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
                    {
                        if (_bufferedWaveProvider != null &&
                            _bufferedWaveProvider.BufferLength - _bufferedWaveProvider.BufferedBytes <
                            _bufferedWaveProvider.WaveFormat.AverageBytesPerSecond / 4)
                        {
                            //Debug.WriteLine("Buffer getting full, taking a break");
                            Thread.Sleep(100);
                        }
                        else
                        {
                            var da = DataAvailable;
                            if (da != null)
                            {
                                Mp3Frame frame;
                                try
                                {
                                    frame = Mp3Frame.LoadFromStream(readFullyStream);
                                }
                                catch (EndOfStreamException)
                                {
                                    // reached the end of the MP3 file / stream
                                    break;
                                }
                                catch (WebException)
                                {
                                    // probably we have aborted download from the GUI thread
                                    break;
                                }
                                if (decompressor == null || _bufferedWaveProvider == null)
                                {
                                    // don't think these details matter too much - just help ACM select the right codec
                                    // however, the buffered provider doesn't know what sample rate it is working at
                                    // until we have a frame
                                    WaveFormat waveFormat = new Mp3WaveFormat(frame.SampleRate,
                                                                              frame.ChannelMode == ChannelMode.Mono ? 1 : 2, frame.FrameLength, frame.BitRate);

                                    RecordingFormat = new WaveFormat(frame.SampleRate, 16,
                                                                     frame.ChannelMode == ChannelMode.Mono ? 1 : 2);

                                    decompressor          = new AcmMp3FrameDecompressor(waveFormat);
                                    _bufferedWaveProvider = new BufferedWaveProvider(decompressor.OutputFormat)
                                    {
                                        BufferDuration = TimeSpan.FromSeconds(5)
                                    };

                                    _sampleChannel = new SampleChannel(_bufferedWaveProvider);
                                    _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                }

                                int decompressed = decompressor.DecompressFrame(frame, buffer, 0);
                                _bufferedWaveProvider.AddSamples(buffer, 0, decompressed);

                                // run the decoded audio through the sample channel so volume metering fires
                                var sampleBuffer = new float[buffer.Length];
                                int read         = _sampleChannel.Read(sampleBuffer, 0, buffer.Length);

                                da(this, new DataAvailableEventArgs((byte[])buffer.Clone(), read));

                                if (Listening)
                                {
                                    WaveOutProvider?.AddSamples(buffer, 0, read);
                                }
                            }
                        }
                    }

                    // was doing this in a finally block, but for some reason
                    // we are hanging on response stream .Dispose so never get there
                    if (decompressor != null)
                    {
                        decompressor.Dispose();
                        decompressor = null;
                    }
                }
            }
            catch (Exception ex)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                Logger.LogException(ex, "MP3Stream");
            }
            try
            {
                request?.Abort();
            }
            catch { }
            request = null;
            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            _abort.Close();
        }
Example #26
        private void SpyServerListener()
        {
            var data = new byte[3200];

            try
            {
                var request = (HttpWebRequest)WebRequest.Create(_source);
                request.Timeout          = 10000;
                request.ReadWriteTimeout = 5000;
                var response = request.GetResponse();
                using (Stream stream = response.GetResponseStream())
                {
                    if (stream == null)
                    {
                        throw new Exception("Stream is null");
                    }

                    stream.ReadTimeout = 5000;
                    while (!_stopEvent.WaitOne(0, false))
                    {
                        int recbytesize = stream.Read(data, 0, 3200);
                        if (recbytesize == 0)
                        {
                            throw new Exception("lost stream");
                        }

                        byte[] dec;
                        ALawDecoder.ALawDecode(data, recbytesize, out dec);
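                        // dec now holds 16-bit PCM expanded from the 8-bit A-law payload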
                        var da = DataAvailable;
                        if (da != null)
                        {
                            if (_sampleChannel != null)
                            {
                                _waveProvider.AddSamples(dec, 0, dec.Length);

                                var sampleBuffer = new float[dec.Length];
                                int read         = _sampleChannel.Read(sampleBuffer, 0, dec.Length);

                                da(this, new DataAvailableEventArgs((byte[])dec.Clone(), read));

                                if (Listening)
                                {
                                    WaveOutProvider?.AddSamples(dec, 0, read);
                                }
                            }
                        }
                        else
                        {
                            break;
                        }
                        // need to stop ?
                        if (_stopEvent.WaitOne(0, false))
                        {
                            break;
                        }
                    }
                }

                AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.StoppedByUser));
            }
            catch (Exception e)
            {
                var af = AudioFinished;
                af?.Invoke(this, new PlayingFinishedEventArgs(ReasonToFinishPlaying.DeviceLost));

                //Logger.LogExceptionToFile(e,"ispyServer");
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                _sampleChannel = null;
            }

            if (_waveProvider?.BufferedBytes > 0)
            {
                _waveProvider.ClearBuffer();
            }

            if (WaveOutProvider?.BufferedBytes > 0)
            {
                WaveOutProvider?.ClearBuffer();
            }
        }
Example #27
        private void FfmpegListener()
        {
            _reasonToStop = ReasonToFinishPlaying.StoppedByUser;
            _afr          = null;
            bool   open   = false;
            string errmsg = "";

            try
            {
                Program.FFMPEGMutex.WaitOne();
                _afr = new AudioFileReader();
                int i = _source.IndexOf("://", StringComparison.Ordinal);
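                // lower-case the URI scheme only, leaving the rest of the source untouched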
                if (i > -1)
                {
                    _source = _source.Substring(0, i).ToLower() + _source.Substring(i);
                }
                _afr.Timeout         = Timeout;
                _afr.AnalyzeDuration = AnalyseDuration;
                _afr.Open(_source);

                open = true;
            }
            catch (Exception ex)
            {
                MainForm.LogExceptionToFile(ex, "FFMPEG");
            }
            finally
            {
                try
                {
                    Program.FFMPEGMutex.ReleaseMutex();
                }
                catch (ObjectDisposedException)
                {
                    //can happen on shutdown
                }
            }

            if (_afr == null || !_afr.IsOpen || !open)
            {
                ShutDown("Could not open audio stream" + ": " + _source);
                return;
            }


            RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
            _waveProvider   = new BufferedWaveProvider(RecordingFormat)
            {
                DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)
            };

            _sampleChannel = new SampleChannel(_waveProvider);
            _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;

            int    mult = _afr.BitsPerSample / 8;
            double btrg = Convert.ToDouble(_afr.SampleRate * mult * _afr.Channels);
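            // btrg = bytes of decoded PCM per second; used below to pace playback of file sources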

            LastFrame = DateTime.UtcNow;
            bool realTime = !IsFileSource;

            try
            {
                DateTime req = DateTime.UtcNow;
                while (!_stopEvent.WaitOne(10, false) && !MainForm.Reallyclose)
                {
                    byte[] data = _afr.ReadAudioFrame();
                    if (data == null || data.Length == 0)
                    {
                        if (!realTime)
                        {
                            break;
                        }
                    }
                    if (data != null && data.Length > 0)
                    {
                        LastFrame = DateTime.UtcNow;
                        var da = DataAvailable;
                        if (da != null)
                        {
                            //forces processing of volume level without piping it out
                            _waveProvider.AddSamples(data, 0, data.Length);

                            var sampleBuffer = new float[data.Length];
                            _sampleChannel.Read(sampleBuffer, 0, data.Length);

                            da(this, new DataAvailableEventArgs((byte[])data.Clone()));

                            if (WaveOutProvider != null && Listening)
                            {
                                WaveOutProvider.AddSamples(data, 0, data.Length);
                            }
                        }

                        if (realTime)
                        {
                            if (_stopEvent.WaitOne(30, false))
                            {
                                break;
                            }
                        }
                        else
                        {
                            // duration of this chunk in milliseconds, used to pace playback of file sources
                            double f = (data.Length / btrg) * 1000;
                            if (f > 0)
                            {
                                var span = DateTime.UtcNow.Subtract(req);
                                var msec = Convert.ToInt32(f - (int)span.TotalMilliseconds);
                                if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
                                {
                                    break;
                                }
                                req = DateTime.UtcNow;
                            }
                        }
                    }
                    else
                    {
                        if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
                        {
                            throw new Exception("Audio source timeout");
                        }
                        if (_stopEvent.WaitOne(30, false))
                        {
                            break;
                        }
                    }
                }
            }
            catch (Exception e)
            {
                MainForm.LogExceptionToFile(e, "FFMPEG");
                errmsg = e.Message;
            }

            if (_sampleChannel != null)
            {
                _sampleChannel.PreVolumeMeter -= SampleChannelPreVolumeMeter;
                _sampleChannel = null;
            }

            if (_waveProvider != null)
            {
                if (_waveProvider.BufferedBytes > 0)
                {
                    _waveProvider.ClearBuffer();
                }
            }

            ShutDown(errmsg);
        }
Example #28
        private void StreamWav()
        {
            _abort = new ManualResetEvent(false);
            HttpWebRequest request = null;

            try
            {
                using (HttpWebResponse resp = _connectionFactory.GetResponse(_source, "GET", "", out request))
                {
                    // buffer for 1/4 of a second of 16-bit samples
                    var data = new byte[((RecordingFormat.SampleRate / 4) * 2) * RecordingFormat.Channels];

                    using (var stream = resp.GetResponseStream())
                    {
                        if (stream == null)
                        {
                            throw new Exception("Stream is null");
                        }

                        while (!_abort.WaitOne(20) && !MainForm.ShuttingDown)
                        {
                            var da = DataAvailable;
                            if (da != null)
                            {
                                int recbytesize = stream.Read(data, 0, data.Length);
                                if (recbytesize == 0)
                                {
                                    throw new Exception("lost stream");
                                }


                                if (_sampleChannel == null)
                                {
                                    continue;
                                }
                                _waveProvider.AddSamples(data, 0, recbytesize);

                                var sampleBuffer = new float[recbytesize];
                                int read         = _sampleChannel.Read(sampleBuffer, 0, recbytesize);

                                da(this, new DataAvailableEventArgs((byte[])data.Clone(), read));

                                if (Listening)
                                {
                                    WaveOutProvider?.AddSamples(data, 0, read);
                                }
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                }
            }
            catch (Exception ex)
            {
                _res = ReasonToFinishPlaying.DeviceLost;
                Logger.LogException(ex, "WavStream");
            }

            AudioFinished?.Invoke(this, new PlayingFinishedEventArgs(_res));
            _abort.Close();
        }
Example #29
        // Worker thread
        private void WorkerThread()
        {
            // buffer to read stream
            var buffer   = new byte[BufSize];
            var encoding = new ASCIIEncoding();
            var res      = ReasonToFinishPlaying.StoppedByUser;

            while (!_stopEvent.WaitOne(0, false) && !MainForm.ShuttingDown)
            {
                // reset reload event
                _reloadEvent.Reset();

                // HTTP web request
                HttpWebRequest request = null;
                // web response
                WebResponse response = null;
                // stream for MJPEG downloading
                Stream stream = null;
                // boundary between images (string and binary versions)

                try
                {
                    // create request
                    request = (HttpWebRequest)WebRequest.Create(_source);
                    // set user agent
                    if (_userAgent != null)
                    {
                        request.UserAgent = _userAgent;
                    }

                    // set proxy
                    if (_proxy != null)
                    {
                        request.Proxy = _proxy;
                    }

                    if (_usehttp10)
                    {
                        request.ProtocolVersion = HttpVersion.Version10;
                    }

                    // set timeout value for the request
                    request.Timeout           = request.ServicePoint.ConnectionLeaseTimeout = request.ServicePoint.MaxIdleTime = _requestTimeout;
                    request.AllowAutoRedirect = true;

                    // set login and password
                    if ((_login != null) && (_password != null) && (_login != string.Empty))
                    {
                        request.Credentials = new NetworkCredential(_login, _password);
                    }
                    // set connection group name
                    if (_useSeparateConnectionGroup)
                    {
                        request.ConnectionGroupName = GetHashCode().ToString();
                    }
                    // get response
                    response = request.GetResponse();

                    // get response stream
                    stream             = response.GetResponseStream();
                    stream.ReadTimeout = _requestTimeout;

                    byte[] boundary = encoding.GetBytes("--myboundary");
                    byte[] sep      = encoding.GetBytes("\r\n\r\n");
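                    // each multipart section starts with the boundary, then headers ending in a blank
                    // line (\r\n\r\n), then the payload, which runs up to the next boundary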

                    // loop

                    int startPacket = -1;
                    int endPacket   = -1;
                    int ttl         = 0;

                    bool hasaudio = false;

                    while ((!_stopEvent.WaitOne(0, false)) && (!_reloadEvent.WaitOne(0, false)))
                    {
                        int read;
                        if ((read = stream.Read(buffer, ttl, ReadSize)) == 0)
                        {
                            throw new ApplicationException();
                        }

                        ttl += read;

                        if (startPacket == -1)
                        {
                            startPacket = ByteArrayUtils.Find(buffer, boundary, 0, ttl);
                        }
                        else
                        {
                            if (endPacket == -1)
                            {
                                endPacket = ByteArrayUtils.Find(buffer, boundary, startPacket + boundary.Length, ttl - (startPacket + boundary.Length));
                            }
                        }

                        var nf = NewFrame;


                        if (startPacket > -1 && endPacket > startPacket)
                        {
                            int br = ByteArrayUtils.Find(buffer, sep, startPacket, 100);
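                            // br marks the end of the part headers; the payload starts at br + 4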

                            if (br != -1)
                            {
                                var arr = new byte[br];
                                System.Array.Copy(buffer, startPacket, arr, 0, br - startPacket);
                                string s = Encoding.ASCII.GetString(arr);
                                int    k = s.IndexOf("Content-type: ", StringComparison.Ordinal);
                                if (k != -1)
                                {
                                    s = s.Substring(k + 14);
                                    s = s.Substring(0, s.IndexOf("\r\n", StringComparison.Ordinal));
                                    s = s.Trim();
                                }
                                switch (s)
                                {
                                case "image/jpeg":
                                    try
                                    {
                                        using (var ms = new MemoryStream(buffer, br + 4, endPacket - br - 8))
                                        {
                                            using (var bmp = (Bitmap)Image.FromStream(ms))
                                            {
                                                var dae = new NewFrameEventArgs(bmp);
                                                nf?.Invoke(this, dae);
                                            }
                                        }
                                    }
                                    catch (Exception ex)
                                    {
                                        //sometimes corrupted packets come through...
                                        Logger.LogExceptionToFile(ex, "KinectNetwork");
                                    }


                                    break;

                                case "audio/raw":
                                    if (!hasaudio)
                                    {
                                        hasaudio = true;
                                        //fixed 16khz 1 channel format
                                        RecordingFormat = new WaveFormat(16000, 16, 1);

                                        _waveProvider = new BufferedWaveProvider(RecordingFormat)
                                        {
                                            DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500)
                                        };

                                        _sampleChannel = new SampleChannel(_waveProvider);
                                        _sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
                                        if (HasAudioStream != null)
                                        {
                                            HasAudioStream(this, EventArgs.Empty);
                                            HasAudioStream = null;
                                        }
                                    }

                                    var da = DataAvailable;
                                    if (da != null)
                                    {
                                        int l    = endPacket - br - 8;
                                        var data = new byte[l];
                                        int d;
                                        using (var ms = new MemoryStream(buffer, br + 4, l))
                                        {
                                            d = ms.Read(data, 0, l);
                                        }
                                        if (d > 0)
                                        {
                                            _waveProvider.AddSamples(data, 0, data.Length);

                                            if (Listening)
                                            {
                                                WaveOutProvider.AddSamples(data, 0, data.Length);
                                            }

                                            //forces processing of volume level without piping it out
                                            var sampleBuffer = new float[data.Length];
                                            int r            = _sampleChannel.Read(sampleBuffer, 0, data.Length);

                                            da(this, new DataAvailableEventArgs((byte[])data.Clone(), r));
                                        }
                                    }

                                    break;

                                case "alert/text":
                                    // code to handle alert notifications goes here
                                    if (AlertHandler != null)
                                    {
                                        int dl    = endPacket - br - 8;
                                        var data2 = new byte[dl];
                                        using (var ms = new MemoryStream(buffer, br + 4, dl))
                                        {
                                            ms.Read(data2, 0, dl);
                                        }
                                        string alerttype = Encoding.ASCII.GetString(data2);
                                        AlertHandler(this, new AlertEventArgs(alerttype));
                                    }
                                    break;
                                }
                            }

                            // consume the processed part and shift any remaining bytes to the front of the buffer
                            ttl -= endPacket;
                            System.Array.Copy(buffer, endPacket, buffer, 0, ttl);
                            startPacket = -1;
                            endPacket   = -1;
                        }
                    }
                }
                catch (ApplicationException)
                {
                    // do nothing for Application Exception, which we raised on our own
                    // wait for a while before the next try
                    Thread.Sleep(250);
                }
                catch (ThreadAbortException)
                {
                    break;
                }
                catch (Exception ex)
                {
                    // provide information to clients
                    Logger.LogExceptionToFile(ex, "KinectNetwork");
                    res = ReasonToFinishPlaying.DeviceLost;
                    break;
                    // wait for a while before the next try
                    //Thread.Sleep(250);
                }
                finally
                {
                    request?.Abort();
                    stream?.Flush();
                    stream?.Close();
                    response?.Close();
                }

                // need to stop ?
                if (_stopEvent.WaitOne(0, false))
                {
                    break;
                }
            }

            PlayingFinished?.Invoke(this, new PlayingFinishedEventArgs(res));
        }
Example #30
        private void SpyServerListener()
        {
            HttpWebRequest request  = null;
            WebResponse    response = null;
            Stream         stream   = null;

            var data = new byte[3200];

            try
            {
                request                  = (HttpWebRequest)WebRequest.Create(_source);
                request.Timeout          = 10000;
                request.ReadWriteTimeout = 5000;
                response                 = request.GetResponse();
                stream = response.GetResponseStream();

                if (stream != null)
                {
                    stream.ReadTimeout = 5000;
                    while (!_stopEvent.WaitOne(0, false))
                    {
                        if (DataAvailable != null)
                        {
                            int recbytesize = stream.Read(data, 0, 3200);
                            if (recbytesize == 0)
                            {
                                throw new Exception("lost stream");
                            }

                            byte[] dec;
                            ALawDecoder.ALawDecode(data, recbytesize, out dec);

                            if (_sampleChannel != null)
                            {
                                _waveProvider.AddSamples(dec, 0, dec.Length);

                                var sampleBuffer = new float[dec.Length];
                                _sampleChannel.Read(sampleBuffer, 0, dec.Length);

                                if (Listening && WaveOutProvider != null)
                                {
                                    WaveOutProvider.AddSamples(dec, 0, dec.Length);
                                }
                                var da = new DataAvailableEventArgs((byte[])dec.Clone());
                                DataAvailable(this, da);
                            }
                        }
                        else
                        {
                            break;
                        }
                        // need to stop ?
                        if (_stopEvent.WaitOne(0, false))
                        {
                            break;
                        }
                    }
                }

                if (AudioFinished != null)
                {
                    AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
                }
            }
            catch (Exception e)
            {
                if (AudioSourceError != null)
                {
                    AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
                }
                MainForm.LogExceptionToFile(e);
            }
            if (stream != null)
            {
                try
                {
                    stream.Close();
                }
                catch
                {
                }
                stream = null;
            }
        }