Example #1
        // Handles an encoded H.264 frame produced by the video encoder.
        private void VideoEncoder_DataEncoded(byte[] buf, double _time)
        {
            // Timestamp the frame against the shared media clock rather than
            // the encoder-supplied time.
            var time = MediaTimer.GetRelativeTime();

            // Hand the encoded frame off to the RTP sender for packetization.
            RtpSender.Push(buf, time);

            // Record how long this frame spent in the pipeline and update the
            // stream statistics (timestamp, payload size, processing time).
            var processingTime = sw.ElapsedMilliseconds;
            streamStats.Update(time, buf.Length, processingTime);
        }
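
The handler above is presumably subscribed to the encoder's encoded-data event elsewhere in the class. A minimal wiring sketch, assuming a hypothetical videoEncoder field whose DataEncoded event matches the handler signature, and that sw is the Stopwatch read inside the handler (both names are illustrative, not confirmed by the source):

        // Sketch only: videoEncoder and its DataEncoded event are assumed to match
        // the handler signature above; sw is the Stopwatch the handler reads.
        private void StartVideoStreaming()
        {
            sw = System.Diagnostics.Stopwatch.StartNew();
            videoEncoder.DataEncoded += VideoEncoder_DataEncoded;
        }

        private void StopVideoStreaming()
        {
            videoEncoder.DataEncoded -= VideoEncoder_DataEncoded;
        }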
Example #2
        // Handles a block of captured PCM audio from the audio source.
        private void AudioSource_DataAvailable(byte[] data)
        {
            if (closing)
            {
                return;
            }

            if (data.Length > 0)
            {
                // Resample the captured block to the target format before sending.
                byte[] dest = null;
                audioResampler.Resample2(data, out dest);
                if (dest != null && dest.Length > 0)
                {
                    // Advance the RTP timestamp by the elapsed wall-clock time,
                    // converted to 8 kHz sample ticks (8 ticks per millisecond).
                    rtpTimestamp += (uint)(sw.ElapsedMilliseconds * 8.0);
                    sw.Restart();

                    // Timestamp against the shared media clock and hand the
                    // resampled block off to the RTP sender.
                    double relativeTime = MediaTimer.GetRelativeTime();
                    RtpSender.Push(dest, relativeTime);
                }
            }
        }
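
The expression rtpTimestamp += (uint)(sw.ElapsedMilliseconds * 8.0) advances the RTP timestamp at an 8 kHz audio clock rate, i.e. 8 ticks per elapsed millisecond, which is the clock rate used by common narrowband codecs such as G.711. A self-contained sketch of that conversion, with a hypothetical helper name not taken from the original source:

        // Illustrative helper, not part of the original source: converts elapsed
        // wall-clock milliseconds into RTP timestamp ticks for a given clock rate.
        private static uint MillisecondsToRtpTicks(long elapsedMs, int clockRateHz = 8000)
        {
            // 8000 Hz -> 8 ticks per millisecond; e.g. a 20 ms block advances by 160 ticks.
            return (uint)(elapsedMs * clockRateHz / 1000);
        }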