Example #1
        private void PresentNV12P010(MediaFrame frame, bool dispose = true)
        {
            try
            {
                Utilities.Dispose(ref vpiv);

                videoDevice1.CreateVideoProcessorInputView(frame.textureHW, vpe, vpivd, out vpiv);

                VideoProcessorStream vps = new VideoProcessorStream()
                {
                    PInputSurface = vpiv,
                    Enable        = new RawBool(true)
                };
                vpsa[0] = vps;
                videoContext1.VideoProcessorBlt(videoProcessor, vpov, 0, 1, vpsa);

                context.PixelShader.SetShaderResource(0, srvRGB);
                context.PixelShader.Set(pixelShader);
            } catch (Exception e) {
                Console.WriteLine(e.Message);
            } finally {
                if (dispose)
                {
                    Utilities.Dispose(ref frame.textureHW);
                }
            }
        }
Example #2
        public static MediaFrameEntity MediaFrame2MediaSteamEntity(MediaFrame frame)
        {
            var result = new MediaFrameEntity()
            {
                Buffer     = frame.Data,
                Length     = (ushort)frame.nSize,
                EncodTime  = frame.nTimetick,
                MediaType  = frame.nIsAudio == 1 ? MediaType.AudioES : MediaType.VideoES,
                SampleRate = 32000,
                FrameRate  = 25,
                Width      = (ushort)frame.nWidth,
                Height     = (ushort)frame.nHeight,
                KeyFrame   = (byte)(frame.nIsAudio == 0 && frame.nIsKeyFrame == 1 ? 1 : 0),
            };

            if (frame.nIsAudio == 1 && frame.nIsKeyFrame == 1)
            {
                result.SampleRate = (ushort)frame.nFrequency;
            }
            if (frame.IsCommandMediaFrame())
            {
                result.Buffer = new byte[0];
                result.Length = 0;
            }
            return(result);
        }
Example #3
        private void PresentYUV(MediaFrame frame, bool dispose = true)
        {
            try
            {
                srvY = new ShaderResourceView(device, frame.textureY, srvDescYUV);
                srvU = new ShaderResourceView(device, frame.textureU, srvDescYUV);
                srvV = new ShaderResourceView(device, frame.textureV, srvDescYUV);

                context.PixelShader.SetShaderResources(0, srvY, srvU, srvV);
                context.PixelShader.Set(pixelShaderYUV);
            } catch (Exception) {
            } finally
            {
                if (dispose)
                {
                    Utilities.Dispose(ref frame.textureY);
                    Utilities.Dispose(ref frame.textureU);
                    Utilities.Dispose(ref frame.textureV);
                }

                Utilities.Dispose(ref srvY);
                Utilities.Dispose(ref srvU);
                Utilities.Dispose(ref srvV);
            }
        }
Example #4
        // Audio        [Send / Sync]
        private void SendAudioFrames()
        {
            double curRate = audioFlowBytes / ((DateTime.UtcNow.Ticks - audioFlowTicks) / 10000000.0);

            if (curRate > audioBytesPerSecond)
            {
                return;
            }

            lock (aFrames)
            {
                int count = 0;
                while (aFrames.Count > 0 && isPlaying && decoder.isRunning) //(limit < 1 || count < limit))
                {
                    MediaFrame aFrame = aFrames.Dequeue();
                    AudioFrameClbk(aFrame.data, 0, aFrame.data.Length);
                    audioFlowBytes += aFrame.data.Length;
                    count++;

                    // Check on every frame that we send to ensure the buffer will not be full
                    curRate = audioFlowBytes / ((DateTime.UtcNow.Ticks - audioFlowTicks) / 10000000.0);
                    if (curRate > audioBytesPerSecond)
                    {
                        return;
                    }
                }
            }
        }
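The throttle above divides the bytes sent so far by the elapsed seconds; DateTime ticks are 100 ns units, hence the 10000000.0 divisor. A minimal standalone restatement of that check as a sketch (the parameter names are hypothetical, mirroring audioFlowTicks, audioFlowBytes, and audioBytesPerSecond from the example):

    // Sketch: returns true when the sender is ahead of the target byte rate
    // and should back off. DateTime.Ticks are 100 ns; 10,000,000 ticks == 1 s.
    static bool ShouldThrottle(long startTicks, long bytesSent, double targetBytesPerSecond)
    {
        double elapsedSeconds = (DateTime.UtcNow.Ticks - startTicks) / 10000000.0;
        if (elapsedSeconds <= 0) return false;   // avoid divide-by-zero right at start
        return bytesSent / elapsedSeconds > targetBytesPerSecond;
    }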
Example #5
        private void OnCaptured(MediaFrame mf)
        {
            lock (this)
            {
                if (!mf.IsCommandMediaFrame())
                {
                    if (mf.nTimetick == _lastTick)
                    {
                        mf.nTimetick++;
                    }
                    if (mf.nTimetick > _lastTick)
                    {
                        _lastTick = mf.nTimetick;
                    }
                }
            }
            if (!IsAudioPub && mf.nIsAudio == 1)
            {
                return;
            }
            if (!IsVideoPub && mf.nIsAudio == 0)
            {
                return;
            }

            if (Captured != null)
            {
                Captured(this, new EventArgsEx <MediaFrame>(mf));
            }
        }
Example #6
 private void OnCaptured(MediaFrame mf)
 {
     if (Captured != null)
     {
         Captured(this, new EventArgsEx <MediaFrame>(mf));
     }
 }
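On C# 6 or later the null-guarded raise above is usually written with the null-conditional operator, which also reads the delegate field once and so avoids a race if the last handler unsubscribes between the check and the call:

    private void OnCaptured(MediaFrame mf)
    {
        Captured?.Invoke(this, new EventArgsEx<MediaFrame>(mf));
    }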
Example #7
        private void Init(MediaFrame frame, bool reinit = false)
        {
            if (_inited && !reinit)
            {
                return;
            }

            if (_ffimp != null)
            {
                _ffimp.Release();
            }

            _inited = true;
            _width  = frame.nWidth;
            _height = frame.nHeight;
            _ffimp  = new FFImp(AVCodecCfg.CreateVideo(_width, _height), true, true);
            if (_yuvDraw == null && _control != null)
            {
                _yuvDraw = new YUVGDIDraw(_control);
            }
            _yuvDraw.SetSize(_width, _height);

            _yuvDataBuffer = new byte[_width * _height * 3 / 2];

            _drawHandle = new Action <byte[]>(Draw);
        }
Example #8
        protected virtual void PlayThread()
        {
            MediaFrame frame = null;

            while (_isworking)
            {
                lock (_queue) {
                    if (_queue.Count > 0 && !_isPaused)
                    {
                        frame = _queue.Dequeue();
                    }
                    else
                    {
                        frame = null;
                    }
                }
                if (frame != null)
                {
                    if (Speed == 1)
                    {
                        var sleep = (int)(frame.NTimetick - _syncPlayTime);

                        if (sleep < -3000)
                        {
                            lock (_queue)
                            {
                                _queue.Clear();
                            }
                        }
                        if (sleep > 0 && !_reseting)
                        {
                            Thread.Sleep(sleep);
                        }
                    }
                    else
                    {
                        var sleep = (int)(frame.NTimetick - _syncPlayTime);
                        if (sleep > 0 && !_reseting)
                        {
                            Thread.Sleep(sleep);
                        }
                    }
                    if (!_reseting)
                    {
                        _Play(frame);
                        lock (_queue) {
                            if (_curPlayTime < frame.NTimetick && !_reseting)
                            {
                                _curPlayTime = frame.NTimetick;
                            }
                        }
                    }
                    _reseting = false;
                }
                else
                {
                    ThreadEx.Sleep(10);
                }
            }
        }
Example #9
        private bool CheckInit(MediaFrame frame, bool reinit = false)
        {
            if (!_inited)
            {
                if (frame.nIsKeyFrame == 1)
                {
                    Init(frame);
                }
            }

            if (!_inited)
            {
                return(false);
            }
            if (frame.IsCommandMediaFrame() && frame.GetCommandType() == MediaFrameCommandType.ResetCodec)
            {
                _inited = false;
                return(false);
            }
            else if (frame.nIsKeyFrame == 1)
            {
                if (_width != -1 && _height != -1 && (frame.nWidth != _width || frame.nHeight != _height))
                {
                    Init(frame, true);
                }
            }
            return(true);
        }
Example #10
        public void PushFrame(
            IFrameInfo infoFrame,
            IFrameVideo videoFrame,
            IFrameSound soundFrame)
        //IVideoFrame videoFrame, ISoundFrame soundFrame)
        {
            if (_threadRecord == null)
            {
                return;
            }
            var frame = new MediaFrame();

            frame.Width  = videoFrame.Size.Width;
            frame.Height = videoFrame.Size.Height;
            frame.Ratio  = videoFrame.Ratio;
            frame.Image  = new int[frame.Width * frame.Height];
            Array.Copy(videoFrame.Buffer, frame.Image, frame.Image.Length);

            frame.SampleRate = _sampleRate;// soundFrame.SampleRate;
            //frame.Audio = new uint[frame.SampleRate / 50];
            //Array.Copy(soundFrame.Buffer, frame.Audio, frame.Audio.Length);
            _queueMP4.Enqueue(frame);

            // Process audio...
            var bufferSrc = soundFrame.GetBuffer();
            var bufferWr  = new uint[bufferSrc.Length];

            Array.Copy(bufferSrc, bufferWr, bufferSrc.Length);
            _queueWAV.Enqueue(bufferWr);

            _eventFrame.Set();
        }
Example #11
        public void Play(MediaFrame frame)
        {
            if (!_isworking)
            {
                return;
            }
            lock (_queue) {
                if (_lastReceiveFrameTick < frame.nTimetick)
                {
                    _lastReceiveFrameTick = frame.nTimetick;
                }

                if (frame.nIsAudio == 1 && IsAudioPlay)
                {
                    _queue.Enqueue(frame);
                }

                if (frame.nIsAudio == 0 && IsVideoPlay)
                {
                    if (hasKey || frame.nIsKeyFrame == 1)
                    {
                        _queue.Enqueue(frame);
                        hasKey = true;
                    }
                }

                if (!IsReadPlay)
                {
                    _cache.Add(frame);
                }
            }
        }
Example #12
            private MediaFrame CreateAudioMediaFrame(byte[] data, long tick)
            {
                if (_firstAudioFrame)
                {
                    var adts       = new AAC_ADTS(data);
                    var frequencys = new int[] { 96000, 88200, 64000, 48000, 44100, 32000, 24000, 22050, 16000, 12000, 11025 };
                    Frequency   = frequencys[adts.MPEG_4_Sampling_Frequency_Index];
                    Channel     = adts.MPEG_4_Channel_Configuration;
                    AudioFormat = 16;
                    Samples     = 0;
                }
                VideoFrameCount++;
                var mf = new MediaFrame()
                {
                    nIsAudio          = 1,
                    nIsKeyFrame       = 1,
                    Data              = data,
                    nSize             = data.Length,
                    nChannel          = Channel,
                    nFrequency        = Frequency,
                    nAudioFormat      = AudioFormat,
                    nTimetick         = tick,
                    nOffset           = 0,
                    nEncoder          = MediaFrame.AAC_Encoder,
                    nEx               = (byte)(_firstAudioFrame ? 0 : 1),
                    MediaFrameVersion = 1,
                };

                _firstAudioFrame = false;

                return(mf);
            }
Example #13
 protected override void NewMediaFrame(MediaFrame frame)
 {
     if (_firstTimeTick == 0 && frame.nIsKeyFrame == 1 && frame.nIsAudio == 0)
     {
         _firstTimeTick = frame.nTimetick;
         _startTimeTick = DateTime.Now.Ticks / 10000;
     }
     if (_firstTimeTick == 0)
     {
         return;
     }
     if (_lastTimeTick <= frame.nTimetick)
     {
         _lastTimeTick = frame.nTimetick;
         var span  = DateTime.Now.Ticks / 10000 - _startTimeTick;
         int sleep = (int)((_lastTimeTick - _firstTimeTick) - span);
         //Console.WriteLine((_lastTimeTick - _firstTimeTick) + "   " + span);
         if (sleep > 40)
         {
             sleep = 40;
         }
         if (PlaySync)
         {
             if (sleep > 0)
             {
                 ThreadEx.Sleep(sleep);
             }
         }
     }
     base.NewMediaFrame(frame);
 }
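The pacing logic above sleeps off the difference between elapsed media time and elapsed wall-clock time, capped at 40 ms so one bad timestamp cannot stall playback. A worked example with hypothetical values (all in milliseconds, i.e. ticks / 10000):

    long mediaElapsed = 1000;  // _lastTimeTick - _firstTimeTick
    long wallElapsed  = 970;   // DateTime.Now.Ticks / 10000 - _startTimeTick
    int  sleep = (int)(mediaElapsed - wallElapsed);  // 30: the frame is 30 ms early
    if (sleep > 40) sleep = 40;                      // cap applied before ThreadEx.Sleep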
Example #14
        //private void PlayForward(MediaFrame frame)
        //{
        //    if (frame != null)
        //    {
        //        if (frame.nTimetick < _curPlayMediaTimetick)
        //        {
        //            _isHasBFrame = true;
        //        }
        //        if (_curPlayMediaTimetick < frame.nTimetick)
        //            _curPlayMediaTimetick = frame.nTimetick;
        //    }

        //    if (frame != null)
        //    {
        //        if (!_isHasBFrame)
        //        {
        //            if (Speed == 1)
        //            {
        //                var sleep = (int)(_syncPlayTime);
        //                //if (_queue.Count > (4 + errorsleep))
        //                //{
        //                //    errorsleep +=1;
        //                //    iptvdefaultsleep -= 4;
        //                //}

        //                if (queueCount>6)
        //                {

        //                   // errorsleep = decsleep / 2 + decsleep % 2;
        //                    //if (decsleep != 0)
        //                    //    iptvdefaultsleep -= decsleep / 2 ;
        //                    //else
        //                        iptvdefaultsleep -= 1;// +errorsleep % 2;
        //                    min = iptvdefaultsleep;
        //                    decsleep = 0;
        //                    errorsleep += 1;
        //                }

        //                if (_queue.Count <3)
        //                {
        //                    // decsleep = errorsleep / 2 + errorsleep % 2;
        //                    if (errorsleep != 0)
        //                    {
        //                        iptvdefaultsleep += errorsleep / 2;
        //                    }
        //                    else
        //                        iptvdefaultsleep += 1;// +decsleep % 2; ;
        //                    max = iptvdefaultsleep;

        //                    errorsleep = 0;
        //                    decsleep++;
        //                }

        //                if (iptvdefaultsleep > 100)
        //                {
        //                    iptvdefaultsleep = 100;
        //                }

        //                if (iptvdefaultsleep * queueCount > 1000)
        //                {
        //                    iptvdefaultsleep -= 1;
        //                }

        //                if (iptvdefaultsleep <= 0)
        //                    iptvdefaultsleep = (min + max) / 2 <= 0 ? 40 : (min + max) / 2;
        //                //if (iptvdefaultsleep>40)
        //                //    iptvdefaultsleep = 40;
        //                Thread.Sleep(iptvdefaultsleep);
        //                //Console.Clear();
        //                //Console.WriteLine("tick:" + iptvdefaultsleep + " cachecount:" + queueCount + " lastcount:" + lastcount + " sleep:" + sleep);
        //                lastcount = queueCount;
        //                _lastPlayMediaTimetick = frame.nTimetick;

        //                //}
        //                //else
        //                //{
        //                //    var sleep = (int)((frame.nTimetick - _syncPlayTime));
        //                //    if (sleep > 200)
        //                //        sleep = 40;
        //                //    if (sleep < 0)
        //                //        sleep = 0;
        //                //    Thread.Sleep(sleep);
        //                //    _lastPlayMediaTimetick = frame.nTimetick;
        //                //}
        //            }
        //            else
        //            {
        //                var sysSpan = Environment.TickCount - _lastPlaySystemTime;
        //                var sleep = (int)((frame.nTimetick - _lastPlayMediaTimetick - sysSpan) / Speed);
        //                if (sleep > 200 || sleep < 0)
        //                    sleep = 40;
        //                Thread.Sleep(sleep);
        //                _lastPlayMediaTimetick = frame.nTimetick;
        //            }
        //        }
        //        if (!CheckInit(frame))
        //            return;

        //        _lastPlaySystemTime = Environment.TickCount;

        //        byte[] yuv = _ffimp.VideoDec(frame.Data, _yuvDataBuffer);

        //        //_drawHandle.BeginInvoke(frame.Data, null, null);

        //        Draw(yuv);

        //    }
        //    else
        //    {
        //        ThreadEx.Sleep(10);
        //    }

        //}

        private void PlayBackward(MediaFrame frame)
        {
            if (frame != null)
            {
                if (frame.nIsKeyFrame == 1)
                {
                    if (_stackRewindFrame != null)
                    {
                        _stackRewindFrame.Push(frame);
                        PlayBackward(_stackRewindFrame);
                    }
                    _stackRewindFrame = new Stack <MediaFrame>();
                }
                else
                {
                    if (_stackRewindFrame == null)
                    {
                        _stackRewindFrame = new Stack <MediaFrame>();
                    }
                    _stackRewindFrame.Push(frame);
                }
                _PlayBackwardResetPos = false;
            }
            else
            {
                ThreadEx.Sleep(10);
            }
        }
Example #15
 /// <summary>
 /// Create a new EncoderPackage
 /// </summary>
 public EncoderPackage(PackageSpec SrcSpec, int Index, EncoderJob Job, MediaFrame Frame)
 {
     Buffers       = new List <IEncoderBuffer>();
     this.JobIndex = Index;
     this.Job      = Job;
     this.Frame    = Frame;
     Specification = SrcSpec;
 }
Example #16
        public void PresentFrame(MediaFrame frame = null)
        {
            if (device == null)
            {
                return;
            }

            // Drop Frames | Priority on video frames
            bool gotIn = frame == null ? Monitor.TryEnter(device, 1) : Monitor.TryEnter(device, 5);   // Should be calculated based on fps (also calculate time of present)

            if (gotIn)
            {
                try
                {
                    if (frame != null)
                    {
                        // NV12 | P010
                        if (frame.textureHW != null)
                        {
                            PresentNV12P010(frame);
                        }

                        // YUV420P
                        else if (frame.textureY != null)
                        {
                            PresentYUV(frame);
                        }

                        // RGB
                        else if (frame.textureRGB != null)
                        {
                            PresentRGB(frame);
                        }
                    }

                    context.OutputMerger.SetRenderTargets(rtv);
                    context.ClearRenderTargetView(rtv, clearColor);
                    context.Draw(6, 0);

                    if (OSDEnabled)
                    {
                        rtv2d.BeginDraw();
                        try
                        {
                            PresentOSD();
                        } finally {
                            rtv2d.EndDraw();
                        }
                    }

                    swapChain.Present(vsync, PresentFlags.None);
                } finally { Monitor.Exit(device); }
            }
            else
            {
                Console.WriteLine("[RENDERER] Drop Frame - Lock timeout " + (frame != null ? Utils.TicksToTime(frame.timestamp) : "")); player.ClearVideoFrame(frame);
            }
        }
Example #17
 public static void DisposeVideoFrame(MediaFrame frame)
 {
     if (frame != null && frame.textures != null)
     {
         for (int i = 0; i < frame.textures.Length; i++)
         {
             Utilities.Dispose(ref frame.textures[i]);
         }
     }
 }
Example #18
 public void Play(MediaFrame frame)
 {
     if (!_isworking)
     {
         return;
     }
     lock (_queue) {
         _queue.Enqueue(frame);
     }
 }
Example #19
        /// <summary>
        /// Adds a block to the playback blocks by converting the given frame.
        /// If there are no more blocks in the pool, the oldest block is returned to the pool
        /// and reused for the new block. The source frame is automatically disposed.
        /// </summary>
        /// <param name="source">The source.</param>
        /// <param name="container">The container.</param>
        /// <returns>The filled block.</returns>
        internal MediaBlock Add(MediaFrame source, MediaContainer container)
        {
            if (source == null)
            {
                return(null);
            }

            lock (SyncLock)
            {
                try
                {
                    // Check if we already have a block at the given time
                    if (IsInRange(source.StartTime) && source.HasValidStartTime)
                    {
                        var repeatedBlock = PlaybackBlocks.FirstOrDefault(f => f.StartTime.Ticks == source.StartTime.Ticks);
                        if (repeatedBlock != null)
                        {
                            PlaybackBlocks.Remove(repeatedBlock);
                            PoolBlocks.Enqueue(repeatedBlock);
                        }
                    }

                    // if there are no available blocks, make room!
                    if (PoolBlocks.Count <= 0)
                    {
                        // Remove the first block from playback
                        var firstBlock = PlaybackBlocks[0];
                        PlaybackBlocks.RemoveAt(0);
                        PoolBlocks.Enqueue(firstBlock);
                    }

                    // Get a block reference from the pool and convert it!
                    var targetBlock = PoolBlocks.Dequeue();
                    var lastBlock   = PlaybackBlocks.Count > 0 ? PlaybackBlocks[PlaybackBlocks.Count - 1] : null;

                    if (container.Convert(source, ref targetBlock, true, lastBlock) == false)
                    {
                        // return the converted block to the pool
                        PoolBlocks.Enqueue(targetBlock);
                        return(null);
                    }

                    // Add the target block to the playback blocks
                    PlaybackBlocks.Add(targetBlock);

                    // return the new target block
                    return(targetBlock);
                }
                finally
                {
                    // update collection-wide properties
                    UpdateCollectionProperties();
                }
            }
        }
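A minimal sketch of the decode loop this method is designed to serve; decoder, DecodeNextFrame, and blockBuffer are hypothetical names, not part of the class above:

    // Hypothetical driver: decode a frame, hand it to the block buffer,
    // and let Add() recycle or dispose blocks as documented above.
    MediaFrame frame = decoder.DecodeNextFrame();
    if (frame != null)
    {
        MediaBlock block = blockBuffer.Add(frame, container);
        if (block == null)
        {
            // Conversion failed; Add() already returned the pooled block.
        }
    }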
Example #20
        private bool ResynchSubs(long syncTimestamp)
        {
            lock (sFrames)
            {
                Log("[SUBS] Resynch Request to -> " + syncTimestamp / 10000);

                sFrames = new Queue <MediaFrame>();
                decoder.SeekAccurate((int)((syncTimestamp / 10000) - 50), FFmpeg.AutoGen.AVMediaType.AVMEDIA_TYPE_SUBTITLE);
                decoder.RunSubs();

                // Fill Sub Frames
                int escapeInfinity = 0;
                while (sFrames.Count < SUBS_MIN_QUEUE_SIZE && isPlaying && decoder.isRunning)
                {
                    escapeInfinity++;
                    Thread.Sleep(10);
                    if (escapeInfinity > 50)
                    {
                        Log("[ERROR EI2] Sub Frames Queue will not be filled by decoder"); return(false);
                    }
                }
                if (!isPlaying || !decoder.isRunning)
                {
                    return(false);
                }

                MediaFrame sFrame     = sFrames.Peek();
                MediaFrame sFrameNext = sFrame;

                // Find Closest Subs Timestamp
                while (isPlaying)
                {
                    if (sFrameNext.timestamp > syncTimestamp)
                    {
                        break;
                    }
                    sFrame = sFrames.Dequeue();
                    if (sFrames.Count < 1)
                    {
                        return(false);
                    }
                    sFrameNext = sFrames.Peek();
                }
                if (!isPlaying)
                {
                    return(false);
                }

                Log("[SUBS] Resynch Successfully to -> " + sFrame.timestamp / 10000 + " ++");

                SendSubFrame();

                return(true);
            }
        }
Example #21
        private bool InternalSeek(ref double absoluteTimestamp)
        {
            //0. We have to send codecs again
            _audioVideoCodecsSent = false;

            //1. Switch to millisecond.FrameIndex table
            if (!_pSeekFile.SeekTo(_timeToIndexOffset))
            {
                Logger.FATAL("Failed to seek to ms.FrameIndex table");
                return(false);
            }

            //2. Read the sampling rate
            var samplingRate = _pSeekFile.Br.ReadUInt32();

            //3. compute the index in the time2frameindex
            var tableIndex = (uint)(absoluteTimestamp / samplingRate);

            //4. Seek to that corresponding index
            _pSeekFile.SeekAhead(tableIndex * 4);

            //5. Read the frame index
            var frameIndex = _pSeekFile.Br.ReadUInt32();

            //7. Position the seek file to that particular frame
            if (!_pSeekFile.SeekTo(_framesBaseOffset + frameIndex * MediaFrame.MediaFrameSize))
            {
                Logger.FATAL("Unablt to seek inside seek file");
                return(false);
            }

            //8. Read the frame
            if (!MediaFrame.ReadFromMediaFile(_pSeekFile, out _currentFrame))
            {
                Logger.FATAL("Unable to read frame from seeking file");
                return(false);
            }

            //9. update the stream counters
            _startFeedingTime  = DateTime.Now;
            _totalSentTime     = 0;
            _currentFrameIndex = frameIndex;
            _totalSentTimeBase = (uint)(_currentFrame.AbsoluteTime / 1000);
            absoluteTimestamp  = _currentFrame.AbsoluteTime;

            //10. Go back on the frame of interest
            if (!_pSeekFile.SeekTo(_framesBaseOffset + frameIndex * MediaFrame.MediaFrameSize))
            {
                Logger.FATAL("Unablt to seek inside seek file");
                return(false);
            }

            //11. Done
            return(true);
        }
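The table lookup in steps 3-5 is plain integer arithmetic; a worked example with hypothetical values:

    // One uint frame index is stored per samplingRate ms of media time.
    uint samplingRate = 1000;                 // hypothetical granularity read in step 2
    double absoluteTimestamp = 12500;         // requested seek position (ms)
    var tableIndex = (uint)(absoluteTimestamp / samplingRate);  // 12
    var byteOffset = tableIndex * 4;          // step 4: each table entry is a 4-byte uint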
Example #22
        private static Mat BgraToMat(MediaFrame frame)
        {
            Mat mat    = new Mat(frame.AVFrame.height, frame.AVFrame.width, DepthType.Cv8U, 4);
            int stride = mat.Step;

            for (int i = 0; i < frame.AVFrame.height; i++)
            {
                FFmpegHelper.CopyMemory(mat.DataPointer + i * stride, frame.Data[0] + i * frame.AVFrame.linesize[0], (uint)stride);
            }
            return(mat);
        }
Example #23
        private static Mat BgraToMat(MediaFrame frame)
        {
            Mat mat    = new Mat(frame.AVFrame.height, frame.AVFrame.width, MatType.CV_8UC4);
            int stride = (int)(uint)mat.Step();

            for (int i = 0; i < frame.AVFrame.height; i++)
            {
                FFmpegHelper.CopyMemory(mat.Data + i * stride, frame.Data[0] + i * frame.AVFrame.linesize[0], (uint)stride);
            }
            return(mat);
        }
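Both variants copy row by row because the Mat pitch (Step) and the AVFrame pitch (linesize[0]) may each carry alignment padding. Copying a full stride per row assumes the source row is at least that long; a slightly more defensive sketch, reusing the names from the variant above, copies only the visible width * 4 BGRA bytes:

    int rowBytes = frame.AVFrame.width * 4;  // BGRA: 4 bytes per pixel
    for (int i = 0; i < frame.AVFrame.height; i++)
    {
        FFmpegHelper.CopyMemory(
            mat.Data + i * stride,                          // destination row in the Mat
            frame.Data[0] + i * frame.AVFrame.linesize[0],  // source row in the frame
            (uint)rowBytes);
    }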
Example #24
        public void Test264FileFrame()
        {
            int width = 320, height = 240;
            var x264 = new X264Native(new X264Params(width, height, 10, 320));

            //x264.SetIKeyIntMax(10);
            x264.Init();
            var        fs    = new FileStream("./testfile.test", FileMode.CreateNew);
            var        ls    = StreamingKit.Media.ReadFile.GetBuffByFile1(@".\test.yuv");
            AVCodecCfg cf    = AVCodecCfg.CreateVideo(width, height, (int)StreamingKit.AVCode.CODEC_ID_H264, 100000);
            FFImp      ffimp = new FFImp(cf, true);
            //FFScale ffscale = new FFScale(width, height, 26, 12, width, height, 12, 12);
            FFScale ffscale = new FFScale(width, height, 0, 12, width, height, 3, 24);

            foreach (var item1 in ls)
            {
                var item    = ffscale.FormatS(item1);
                var in_buf  = FunctionEx.BytesToIntPtr(item);
                var out_buf = Marshal.AllocHGlobal(item.Length);
                //var bKeyFrame = false;
                //var nOutLen = 0;
                var nInLen = item.Length;
                //  var size = X264Encode(x264.obj, in_buf, ref nInLen, out_buf, ref nOutLen, ref bKeyFrame);
                // var buf = FunctionEx.IntPtrToBytes(out_buf, 0, size);
                var buf = x264.Encode(item);
                Console.WriteLine(buf.To16Strs(0, 16));
                var size = buf.Length;

                if (w == null)  //OK
                {
                    w = new BinaryWriter(new FileStream("4567.es", FileMode.Create));
                }
                w.Write(buf);

                var mf = new MediaFrame();
                mf.IsKeyFrame = (byte)(x264.IsKeyFrame() ? 1 : 0);
                mf.Width      = width;
                mf.Height     = height;
                mf.Encoder    = MediaFrame.H264Encoder;
                //mf.Timetick = 0;
                mf.Size = size;
                mf.SetData(buf);
                buf = mf.GetBytes();
                fs.Write(BitConverter.GetBytes(buf.Length), 0, 4);
                fs.Write(buf, 0, buf.Length);
                fs.Flush();
                // IntPtr intt = IntPtr.Zero;
                //var sssss = ffimp.VideoDec(buf, ref intt);
                //Console.WriteLine(buf.Take(32).ToArray().To16Strs());
                // var size = Encode1(ii, in_buf, ref nInLen, out_buf);
            }

            fs.Close();
        }
Example #25
 public override void Play(MediaFrame frame)
 {
     if (!_isworking)
     {
         return;
     }
     lock (_queue) {
         _queue.Enqueue(frame);
     }
     _lastMediaFrameTime = frame.NTimetick;
 }
Example #26
 public virtual bool BuildFrame(MediaFile file, MediaFrame mediaFrame, Stream buffer)
 {
     if (!file.SeekTo((long)mediaFrame.Start))
     {
         FATAL("Unable to seek to position {0}", mediaFrame.Start);
         return(false);
     }
     //3. Read the data
     file.DataStream.CopyPartTo(buffer, (int)mediaFrame.Length);
     buffer.Position = 0;
     return(true);
 }
Example #27
 /// <summary>
 /// Convert to Mat
 /// <para>
 /// video frame: convert to AV_PIX_FMT_BGRA and return new Mat(frame.Height, frame.Width, MatType.CV_8UC4)
 /// </para>
 /// <para>
 /// audio frame:
 /// <list type="bullet">
 /// <item>if planar, returns new Mat(frame.AVFrame.nb_samples, frame.AVFrame.channels, MatType.MakeType(depth, 1));</item>
 /// <item>if packed, returns new Mat(frame.AVFrame.nb_samples, 1, MatType.MakeType(depth, frame.AVFrame.channels));</item>
 /// </list>
 /// <para><see cref="AVSampleFormat"/> to <see cref="MatType.Depth"/> mapping table</para>
 /// <list type="table" >
 /// <item>
 /// <term><see cref="AVSampleFormat.AV_SAMPLE_FMT_U8"/>/<see cref="AVSampleFormat.AV_SAMPLE_FMT_U8P"/></term>
 /// <description><see cref="MatType.CV_8U"/></description>
 /// </item>
 /// <item>
 /// <term><see cref="AVSampleFormat.AV_SAMPLE_FMT_S16"/>/<see cref="AVSampleFormat.AV_SAMPLE_FMT_S16P"/></term>
 /// <description><see cref="MatType.CV_16S"/></description>
 /// </item>
 /// <item>
 /// <term><see cref="AVSampleFormat.AV_SAMPLE_FMT_S32"/>/<see cref="AVSampleFormat.AV_SAMPLE_FMT_S32P"/></term>
 /// <description><see cref="MatType.CV_32S"/></description>
 /// </item>
 /// <item>
 /// <term><see cref="AVSampleFormat.AV_SAMPLE_FMT_FLT"/>/<see cref="AVSampleFormat.AV_SAMPLE_FMT_FLTP"/></term>
 /// <description><see cref="MatType.CV_32F"/></description>
 /// </item>
 /// <item>
 /// <term><see cref="AVSampleFormat.AV_SAMPLE_FMT_DBL"/>/<see cref="AVSampleFormat.AV_SAMPLE_FMT_DBLP"/></term>
 /// <description><see cref="MatType.CV_64F"/></description>
 /// </item>
 /// <item>
 /// <term><see cref="AVSampleFormat.AV_SAMPLE_FMT_S64"/>/<see cref="AVSampleFormat.AV_SAMPLE_FMT_S64P"/></term>
 /// <description><see cref="MatType.CV_64F"/></description>
 /// </item>
 /// <item>NOTE: OpenCV has no 64-bit signed integer type, so CV_64F is used instead; reinterpret the raw bytes as int64 when reading, otherwise values come back as <see cref="double.NaN"/>
 /// </item>
 /// </list>
 /// </para>
 /// </summary>
 /// <param name="frame"></param>
 /// <returns></returns>
 public static Mat ToMat(this MediaFrame frame)
 {
     if (frame.IsVideoFrame)
     {
         return(VideoFrameToMat(frame as VideoFrame));
     }
     else if (frame.IsAudioFrame)
     {
         return(AudioFrameToMat(frame as AudioFrame));
     }
     throw new FFmpegException(FFmpegException.InvalidFrame);
 }
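A minimal usage sketch; the decoder and its DecodeFrames() loop are hypothetical stand-ins for however the surrounding wrapper produces frames:

    foreach (MediaFrame frame in decoder.DecodeFrames())
    {
        using (Mat mat = frame.ToMat())   // Mat is IDisposable
        {
            // Video frames arrive as BGRA images; audio frames follow the
            // AVSampleFormat-to-MatType mapping in the table above.
            Console.WriteLine($"{mat.Rows}x{mat.Cols}");
        }
    }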
Example #28
 protected virtual void NewMediaFrame(MediaFrame frame)
 {
     if (!EnabledFrameSequence)
     {
          // no ordering: pass the frame straight through
         OnNewMediaFrame(frame);
     }
     else
     {
          // order by timestamp
         if (frame.nIsAudio == 0)
         {
             _qVideoMediaFrame.Enqueue(frame);
         }
         else if (frame.nIsAudio == 1)
         {
             _qAudioMediaFrame.Enqueue(frame);
         }
         while (true)
         {
             if (_qVideoMediaFrame.Count > 0 && _qAudioMediaFrame.Count > 0)
             {
                 var v = _qVideoMediaFrame.Peek();
                 var a = _qAudioMediaFrame.Peek();
                 if (v.nTimetick < a.nTimetick)
                 {
                     v = _qVideoMediaFrame.Dequeue();
                     OnNewMediaFrame(v);
                 }
                 else
                 {
                     a = _qAudioMediaFrame.Dequeue();
                     OnNewMediaFrame(a);
                 }
             }
             else if (_qVideoMediaFrame.Count > 5)
             {
                 var v = _qVideoMediaFrame.Dequeue();
                 OnNewMediaFrame(v);
             }
             else if (_qAudioMediaFrame.Count > 50)
             {
                 var a = _qAudioMediaFrame.Dequeue();
                 OnNewMediaFrame(a);
             }
             else
             {
                 break;
             }
         }
     }
 }
Example #29
 public override bool BuildFrame(MediaFile file, MediaFrame mediaFrame, Stream buffer)
 {
     //1. add the binary header
     if (mediaFrame.IsBinaryHeader)
     {
         buffer.Write(_audioCodecHeaderInit, 0, _audioCodecHeaderInit.Length);
     }
     else
     {
         buffer.Write(_audioCodecHeader, 0, _audioCodecHeader.Length);
     }
     return(base.BuildFrame(file, mediaFrame, buffer));
 }
Example #30
        protected override bool FeedMetaData(MediaFile pFile, MediaFrame mediaFrame)
        {
            //1. Seek into the data file at the correct position
            if (!pFile.SeekTo(mediaFrame.Start))
            {
                FATAL("Unable to seek to position {0}", mediaFrame.Start);
                return(false);
            }
            var endPosition = pFile.Position + (long)mediaFrame.Length;

            //2. Read the data
            //_metadataBuffer.IgnoreAll();
            //if (!_metadataBuffer.ReadFromFs(pFile, (int) mediaFrame.Length)) {
            //    Logger.FATAL("Unable to read {0} bytes from offset {1}", mediaFrame.Length, mediaFrame.Start);
            //    return false;
            //}

            //3. Parse the metadata
            _metadataName = "";
            _metadataParameters.SetValue();

            var _tempVariant = _amf0Reader.ReadVariant();

            //if (!_amfSerializer.Read(_metadataBuffer, _tempVariant)) {
            //    Logger.WARN("Unable to read metadata");
            //    return true;
            //}
            if (_tempVariant != VariantType.String)
            {
                WARN("Unable to read metadata");
                return(true);
            }
            _metadataName = _tempVariant;

            while (pFile.Position < endPosition)
            {
                _metadataParameters.Add(_amf0Reader.ReadVariant());
            }

            var message = GenericMessageFactory.GetNotify(
                ((BaseOutNetRTMPStream )OutStreams.Last()).CommandsChannelId,
                ((BaseOutNetRTMPStream )OutStreams.Last()).RTMPStreamId,
                mediaFrame.AbsoluteTime,
                true,
                _metadataName,
                _metadataParameters);

            //5. Send it
            return(((BaseRTMPProtocol )Protocol).SendMessage(message, true));
        }
Example #31
        /// <summary>
        /// Release memory previously locked by LoadToFrame()
        /// </summary>
        public void UnloadFrame(ref MediaFrame Frame)
        {
            // Check how much was used, and re-integrate unused samples.
            lock (_sampleBuffer) {
                if (_sampleBuffer.Count > 0 && Frame.AudioSamplesConsumed == Frame.AudioSize) {
                    _sampleBuffer.RemoveAt(0); // Buffer was fully used. Drop it
                } else {
                    PrependUnusedSamples(Frame);
                }
            }

            Frame.AudioBuffer = IntPtr.Zero;
            Frame.AudioSize = 0;
            Frame.AudioSamplesConsumed = 0;

            if (_pinSamples.IsAllocated) _pinSamples.Free();
            _samples = null;
        }
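UnloadFrame() above frees the GCHandle that LoadToFrame() allocates, so the pair must be called in order and one at a time. A minimal standalone illustration of that pin/use/unpin lifecycle (buffer size hypothetical; requires System.Runtime.InteropServices):

    short[] samples = new short[4608];
    GCHandle pin = GCHandle.Alloc(samples, GCHandleType.Pinned);
    try
    {
        IntPtr nativePtr = pin.AddrOfPinnedObject();
        // ... hand nativePtr to native code while the array is pinned ...
    }
    finally
    {
        if (pin.IsAllocated) pin.Free();  // unpin so the GC may move the array again
    }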
Example #32
        /// <summary>
        /// Release memory previously locked by LoadToFrame()
        /// </summary>
        public void UnloadFrame(ref MediaFrame Frame)
        {
            if (pinY.IsAllocated) pinY.Free();
            if (pinU.IsAllocated) pinU.Free();
            if (pinV.IsAllocated) pinV.Free();
            Frame.VideoSize = 0;

            Y = null;
            u = null;
            v = null;
        }
Example #33
        /// <summary>
        /// Initialise an encoder job based on previously setup capture devices.
        /// Need to have one job per 'ReductionFactor' in the config.
        /// </summary>
        private void EncoderSetup()
        {
            var factors = config.EncoderSettings.ReductionFactors;

            Packages = new List<EncoderPackage>();

            int fps = (cam != null) ? (cam.m_frameRate) : (config.Video.InputFrameRate);

            var needed_packages = ListRequiredPackages();

            int pkg_id = 0;
            foreach (var np in needed_packages) {
                EncoderJob job = new EncoderJob();
                job.OldSegmentNumber = 1;
                string joined = Path.Combine(config.EncoderSettings.LocalSystemOutputFolder, config.EncoderSettings.LocalSystemFilePrefix);

                joined += "_" + pkg_id;
                int bitrate = (int)(config.EncoderSettings.VideoBitrate * np.Quality); // linear scale

                int error = EncoderBridge.InitialiseEncoderJob(
                    ref job,											// job to complete
                    np.VideoSize.Width,									// OUTPUT video width
                    np.VideoSize.Height,								// OUTPUT video height
                    joined,												// OUTPUT folder + filename prefix
                    fps,												// INPUT frame rate (output will match)
                    bitrate,											// OUTPUT video bit rate
                    config.EncoderSettings.FragmentSeconds);			// Fragment length (seconds)

                if (error != 0) throw new Exception("Encoder setup error #" + error);
                if (!job.IsValid) throw new Exception("Job rejected");

                var mf = new MediaFrame();
                mf.ForceAudioConsumption = (np.HasVideo) ? ((byte)0) : ((byte)1); // don't sync if no video.

                var pkg = new EncoderPackage(np, pkg_id, job, mf);

                ConnectPackageToBuffers(pkg, np);

                Packages.Add(pkg); pkg_id++;
            }
        }
Example #34
        /// <summary>
        /// Load the buffer into a MediaFrame for the encoder.
        /// IMPORTANT: You must call UnloadFrame after this method is called.
        /// For efficiency, unload as soon as possible.
        /// </summary>
        public void LoadToFrame(ref MediaFrame Frame)
        {
            try {
                TimedSample ts = null;
                lock (_sampleBuffer) {
                    if (_sampleBuffer.Count < 1) {
                        Frame.AudioSize = 0UL;
                        Frame.AudioSamplesConsumed = 0;
                        return;
                    }
                    _sampleBuffer.RemoveAll(a => a == null);
                    _sampleBuffer.Sort((a, b) => a.Seconds.CompareTo(b.Seconds));

                    if (_sampleBuffer[0].Samples.Length < FrameSize) MergeFirstSample(); // Make sure frames are large enough!

                    if (_sampleBuffer.Count > 0) {
                        ts = _sampleBuffer[0];
                    } else {
                        Frame.AudioSize = 0;
                        Frame.AudioBuffer = IntPtr.Zero;
                        return;
                    }
                }

                _samples = ts.Samples;

                Frame.AudioSampleTime = ts.Seconds;
                Frame.AudioSize = (ulong)_samples.LongLength;
                Frame.AudioSamplesConsumed = 0;

                // Outgoing sample rate is always 44100, to support iPhone
                Frame.AudioSampleRate = 44100; // this is used to correct timing on the encoder.

                _pinSamples = GCHandle.Alloc(_samples, GCHandleType.Pinned);
                Frame.AudioBuffer = _pinSamples.AddrOfPinnedObject();
            } catch (Exception ex) {
                UnloadFrame(ref Frame);
                Console.WriteLine("Loading audio frame failed: " + ex.Message);
            }
        }
Example #35
        /// <summary>
        /// Add unused samples back into the sample buffer.
        /// </summary>
        /// <remarks>Time stamps need to be properly re-integrated!</remarks>
        private void PrependUnusedSamples(MediaFrame Frame)
        {
            // checks:
            if (_sampleBuffer == null) return;
            _sampleBuffer.RemoveAll((a) => a == null); // clean out bad transfers
            if (_sampleBuffer.Count < 1) return;
            if ((ulong)_sampleBuffer[0].Samples.LongLength != Frame.AudioSize) throw new Exception("Frames unloaded out-of-sync. Frames must be loaded then unloaded in order and one-at-a-time!"); // wrong frame!

            // Build new truncated sample:
            ulong new_sample_count = Frame.AudioSize - Frame.AudioSamplesConsumed;
            if (new_sample_count < 1) {
                _sampleBuffer.RemoveAt(0);
                return;
            }

            short[] cut = new short[new_sample_count]; // pun intended ;-)
            Array.Copy(_sampleBuffer[0].Samples, (long)Frame.AudioSamplesConsumed, cut, 0, (long)new_sample_count);
            double new_time_stamp = Frame.AudioSampleTime + (Frame.AudioSamplesConsumed / 44100.0);
            TimedSample sample = new TimedSample(cut, new_time_stamp);

            lock (_sampleBuffer) {
                // Over-write the old sample with the new, shorter version
                _sampleBuffer[0] = sample;

                // clean out bad transfers:
                _sampleBuffer.Sort((a, b) => a.Seconds.CompareTo(b.Seconds));
            }

            // merge function to join first two samples if the first is small.
            if (_sampleBuffer.Count >= 2) {
                if (_sampleBuffer[0].Samples.Length < 4608) MergeFirstSample(); // 4 frames
            }
        }
Example #36
        /// <summary>
        /// Load the buffer into a MediaFrame for the encoder.
        /// IMPORTANT: You must call UnloadFrame after this method is called.
        /// For efficiency, unload as soon as possible.
        /// </summary>
        public void LoadToFrame(ref MediaFrame Frame)
        {
            try {
                if (WaitingFrames.Count > 0) {
                    TimedImage img = null;
                    lock (WaitingFrames) {
                        WaitingFrames.RemoveAll(a => a == null);
                        WaitingFrames.Sort((a, b) => a.Seconds.CompareTo(b.Seconds));

                        img = WaitingFrames[0];
                        WaitingFrames.RemoveAt(0);
                    }

                    if (img.Luma == null || img.Cr == null || img.Cb == null) return; // crap frame

                    Y = img.Luma;
                    u = img.Cr;
                    v = img.Cb;

                    Frame.VideoSize = (ulong)Y.Length;
                    Frame.VideoSampleTime = img.Seconds;

                    pinY = GCHandle.Alloc(Y, GCHandleType.Pinned);
                    Frame.Yplane = pinY.AddrOfPinnedObject();

                    pinU = GCHandle.Alloc(u, GCHandleType.Pinned);
                    Frame.Uplane = pinU.AddrOfPinnedObject();

                    pinV = GCHandle.Alloc(v, GCHandleType.Pinned);
                    Frame.Vplane = pinV.AddrOfPinnedObject();
                } else {
                    Frame.Yplane = IntPtr.Zero;
                    Frame.Uplane = IntPtr.Zero;
                    Frame.Vplane = IntPtr.Zero;
                    Frame.VideoSize = 0;
                    Console.WriteLine("Frame buffer was empty (in ImageToYUV_Buffer.LoadToFrame())");
                }
            } catch {
                // Drop the bad frame data:
                UnloadFrame(ref Frame); // this can still be sent to the encoder, it should just mean a dropped frame
                Console.WriteLine("Lost a frame (no image)");
            }
        }