Example #1
 private void _ups_Unpacked(object arg1, Nalu arg2)
 {
     if (_decoder == null)
     {
         _decoder = new VideoDecoder();
         var codecId = Constants.AVCodecID.AV_CODEC_ID_H264;
         _decoder.Create(codecId);
     }
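     // NAL unit types 7 (SPS) and 8 (PPS) carry the stream's parameter sets; feed them to the decoder but do not render a frame.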
     if (arg2.Header.Type == 7 || arg2.Header.Type == 8)
     {
         _decoder.Decode(arg2.BytesWithStartCode(), out _curWidth, out _curHeight);
     }
     else
     {
         byte[] frameData = _decoder.Decode(arg2.BytesWithStartCode(), out _curWidth, out _curHeight);
         if (frameData != null)
         {
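             // Reconfigure the render surface whenever the decoded resolution changes.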
             if (_curWidth != _width || _curHeight != _height)
             {
                 _width  = _curWidth;
                 _height = _curHeight;
                 _renderSource.SetupSurface(_curWidth, _curHeight);
             }
             renderFrame(frameData);
         }
     }
 }
Example #2
        internal VideoTexture(string videoFile, SkinSource source, Clocks clock)
        {
            this.clock = clock;

            videoFile = GeneralHelper.PathSanitise(videoFile);
            byte[] videoBytes = null;
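            // Resolve the video bytes in priority order: skin folder first, then the built-in osu! resources, then the beatmap package.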
            if ((source & SkinSource.Skin) > 0)
            {
                string filename = Path.Combine(SkinManager.Current.FullPath, videoFile.IndexOf('.') < 0 ? videoFile + @".mp4" : videoFile);
                videoBytes = GetFileBytes(filename);
            }
            if (videoBytes == null && (source & SkinSource.Osu) > 0)
            {
                videoBytes = (byte[])osu_ui.ResourcesStore.ResourceManager.GetObject(videoFile);
            }
            if (videoBytes == null && (source & SkinSource.Beatmap) > 0)
            {
                videoBytes = BeatmapManager.Current.GetFileBytes(videoFile);
            }

            vd = new VideoDecoder(clock == Clocks.Game ? 4 : (int)(AudioEngine.CurrentPlaybackRate / 100f * 4));
            if (!vd.Open(videoBytes))
            {
                return;
            }

            width  = vd.width;
            height = vd.height;

            videoTextures = new pTexture[FRAME_BUFFER];

            Initialize();

            length = (int)vd.Length;
        }
Example #3
        public void Load(int id)
        {
            string filename, path;

            var directory = ServiceLocator.FileStorage.GetDirectoryName(_vm.Settings.Game.Path);

            LoadSubtitles(id);

            // For the PSX version, we'll try the PlayStation stream files
            if (SystemVars.Platform == Core.IO.Platform.PSX)
            {
                _vm.GraphicsManager.PixelFormat = PixelFormat.Rgb16;

                // The demo uses the normal file names
                filename = (SystemVars.IsDemo ? SequenceList[id] : SequenceListPsx[id]) + ".str";
                _decoder = new PsxStreamDecoder(_vm.Mixer, CDSpeed.CD2x, _vm.GraphicsManager.PixelFormat);
            }
            else
            {
                filename = $"{SequenceList[id]}.smk";
                _decoder = new SmackerDecoder(_vm.Mixer);
            }

            path = ScummHelper.LocatePath(directory, filename);
            var stream = ServiceLocator.FileStorage.OpenFileRead(path);

            _decoder.LoadStream(stream);
            _decoder.Start();
        }
Example #4
        public void Dispose()
        {
            if (_hik != null)
            {
                _hik.StreamEvent -= onHikStream;
                _hik.Dispose();
                _hik = null;
            }

            if (_renderSource != null)
            {
                _renderSource.ImageSourceChanged -= onImageSource;
                _renderSource.Dispose();
                _renderSource = null;
            }

            if (_decoder != null)
            {
                _decoder.Dispose();
                _decoder = null;
            }

            if (_rtsp != null)
            {
                _rtsp.Dispose();
                _rtsp = null;
            }
        }
Example #5
        public bool TryDecode(ref VideoPacket packet, out VideoFrame frame)
        {
            if (_videoDecoder == null)
            {
                _videoDecoder = new VideoDecoder();
            }

            frame = new VideoFrame();
            AVFrame avFrame;

            if (_videoDecoder.TryDecode(ref packet.Data, out avFrame))
            {
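                // Decode succeeded: lazily create the pixel-format converter, copy the packet metadata onto the outgoing frame, and store the converted pixel data.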
                if (_videoConverter == null)
                {
                    _videoConverter = new VideoConverter(_pixelFormat.ToAVPixelFormat());
                }

                frame.Timestamp   = packet.Timestamp;
                frame.FrameNumber = packet.FrameNumber;
                frame.Width       = packet.Width;
                frame.Height      = packet.Height;
                frame.PixelFormat = _pixelFormat;
                frame.Data        = _videoConverter.ConvertFrame(avFrame);

                return true;
            }
            return false;
        }
Example #6
        private void SetupPlayer()
        {
            if (_decoder != null)
            {
                _decoder.Stop();
                _decoder.ClearBuffer();
                _decoder.Dispose();
                _decoder = null;
            }

            string file = "";

            Dispatcher.Invoke(() => file = txtFileName.Text);

            _decoder = new VideoDecoder();
            _decoder.Open(file);
            _decoder.PlayerOutputWidth  = _decoder.VideoInfo.Width;
            _decoder.PlayerOutputHeight = _decoder.VideoInfo.Height;

            Dispatcher.Invoke(() =>
            {
                sliderTime.Minimum = 0;
                sliderTime.Maximum = _decoder.VideoInfo.TotalFrames - 1;
            });

            if (iterator == null)
            {
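                // Build the iterator once, converting the configured start/end times into frame indices via the stream FPS.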
                iterator = new FrameIterator(_decoder.VideoInfo.TotalFrames,
                                             templateView.TemplatePaths.Keys.Count,
                                             AppSettings.Default.BurstSize
                                             );

                var startFrame = (int)Math.Round(AppSettings.Default.StartTime * _decoder.VideoInfo.FPS);
                var endFrame   = (int)Math.Round(AppSettings.Default.EndTime * _decoder.VideoInfo.FPS);

                if (AppSettings.Default.UseRandom)
                {
                    iterator.InitRandomSequence(
                        AppSettings.Default.MaxFrames,
                        Math.Max(0, startFrame),
                        Math.Min(_decoder.VideoInfo.TotalFrames, endFrame) - 1,
                        AppSettings.Default.UseSeed ? AppSettings.Default.Seed : (int?)null
                        );
                }
                else
                {
                    iterator.InitLinearSequence(
                        AppSettings.Default.EveryNth,
                        Math.Max(0, startFrame),
                        Math.Min(_decoder.VideoInfo.TotalFrames - 1, endFrame)
                        );
                }
            }

            _decoder.SeekTo(iterator.BurstBeginFrameIndex);
            _decoder.Start();
            _decoder.FrameDecoder.FrameBufferCapacity  = 1;
            _decoder.FrameDecoder.MinimumWorkingFrames = 1;
        }
Example #7
 public void Release()
 {
     if (_decoder != null)
     {
         _decoder.Dispose();
     }
     _decoder = null;
 }
Example #8
        /// <summary>
        /// <p>[This documentation is preliminary and is subject to change.]</p><p><strong>Applies to: </strong>desktop apps | Metro style apps</p><p>Gets a reference to a DirectX Video Acceleration (DXVA) decoder buffer.</p>
        /// </summary>
        /// <param name="decoder"><dd> <p>A reference to the <strong><see cref="SharpDX.Direct3D11.VideoDecoder"/></strong> interface. To get this reference, call <strong><see cref="SharpDX.Direct3D11.VideoDevice.CreateVideoDecoder"/></strong>.</p> </dd></param>
        /// <param name="type"><dd> <p>The type of buffer to retrieve, specified as a member of the <strong><see cref="SharpDX.Direct3D11.VideoDecoderBufferType"/></strong> enumeration.</p> </dd></param>
        /// <returns>A <see cref="DataPointer"/> to the memory buffer.</returns>
        /// <remarks>
        /// <p>The graphics driver allocates the buffers that are used for DXVA decoding. This method locks the Microsoft Direct3D surface that contains the buffer. When you are done using the buffer, call <strong><see cref="SharpDX.Direct3D11.VideoContext.ReleaseDecoderBuffer"/></strong> to unlock the surface. </p>
        /// </remarks>
        /// <include file='.\Documentation\CodeComments.xml' path="/comments/comment[@id='ID3D11VideoContext::GetDecoderBuffer']/*"/>
        /// <msdn-id>hh447711</msdn-id>
        /// <unmanaged>HRESULT ID3D11VideoContext::GetDecoderBuffer([In] ID3D11VideoDecoder* pDecoder,[In] D3D11_VIDEO_DECODER_BUFFER_TYPE Type,[Out] unsigned int* pBufferSize,[Out] void** ppBuffer)</unmanaged>
        /// <unmanaged-short>ID3D11VideoContext::GetDecoderBuffer</unmanaged-short>
        public DataPointer GetDecoderBuffer(VideoDecoder decoder, VideoDecoderBufferType type)
        {
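            // The native call returns the raw buffer pointer and its size; wrap both in a managed DataPointer for the caller.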
            int    size;
            IntPtr dataPtr;

            GetDecoderBuffer(decoder, type, out size, out dataPtr);

            return new DataPointer(dataPtr, size);
        }
Example #10
        private void ShowSettings()
        {
            pnlSettings.Visibility = Visibility.Visible;

            using (var decoder = new VideoDecoder())
            {
                decoder.Open(txtFileName.Text);
                videoSettingsControl.LoadSettings(decoder.VideoInfo);
            }
        }
Example #11
    void Awake()
    {
        videoDecoder = GetComponent<VideoDecoder>();
        videoDecoder.PlayingCompleted += OnPlayingCompleted;
        string gameData = Path.Combine(DataStorage.Instance.GetPath(), SaveLoadXML.gameData);

        if (File.Exists(gameData))
        {
            StartCoroutine(BlockCoroutine(2f));
        }
    }
Example #12
 private void stop()
 {
     VideoInfo.IsEnabled = true;
     disposeHikSource();
     disposeHikDecoder();
     if (_decoder != null)
     {
         _decoder.Dispose();
     }
     _decoder = null;
     disposeRenderSource();
     ImageSrc = null;
 }
Example #13
        public void Init(Constants.AVCodecID codecID, int width, int height)
        {
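            // Recreate the decoder only if none exists yet or the codec or frame dimensions have changed.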
            if (_decoder == null || _codecId != codecID || _width != width || _Height != height)
            {
                Release();

                _decoder = new VideoDecoder();
                _decoder.Create(codecID);
                _codecId = codecID;
                _width   = width;
                _Height  = height;
            }
        }
Example #14
 // Token: 0x060035A4 RID: 13732
 // RVA: 0x0016F304 File Offset: 0x0016D504
 internal Class915(string string_0)
 {
     byte[] array = Class466.Current.method_63(string_0);
     this.videoDecoder_0 = new VideoDecoder((int)((float)Class331.smethod_67() / 100f * 4f));
     this.videoDecoder_0.Open(array);
     this.int_2 = this.videoDecoder_0.get_width();
     this.int_0 = this.videoDecoder_0.get_height();
     this.class731_0 = new Class731[1];
     this.Initialize();
     this.int_1 = (int)this.videoDecoder_0.get_Length() * 1000;
     Class115.smethod_53(new Delegate1(this.method_1));
     Class115.smethod_50(new Delegate1(this.method_0));
 }
Example #15
    // Start is called before the first frame update
    void Start()
    {
        _videoDecoder   = new VideoDecoder();
        _spriteRenderer = GetComponent<SpriteRenderer>();
        _videoDecoder.OnFrameRendered += OnFrameRendered;

        _backgroundTask = Task.Run(() =>
        {
            var sourceFile = "rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov";
            var frameIndex = 1000;
            _videoDecoder.Run(sourceFile, frameIndex);
        });
        // _backgroundTask.Wait();
    }
Example #16
 private void onHeader(IHeaderPacket packet)
 {
     lock (_objLock)
     {
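         // If the stream is transformed, decode it with the generic H.264 decoder; otherwise initialise the Hik decoder from the header buffer.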
         checkStyleChanged(packet);
         if (_isTransform)
         {
             _decoder = new VideoDecoder();
             _decoder.Create(Constants.AVCodecID.AV_CODEC_ID_H264);
         }
         else
         {
             initHikDecoder((packet as HikHeaderPacket).Buffer);
         }
     }
 }
Example #17
        public IEnumerator DecodeFrameToTexture2D_Frame5_ShouldSaveImage()
        {
            // Given
            // var sourceFile = "/Users/madison/Dropbox/game_development/VideoSnapshot/Assets/Tests/capture123.h264";
            var videoDecoder = new VideoDecoder();
            var sourceFile   = "rtsp://wowzaec2demo.streamlock.net/vod/mp4:BigBuckBunny_115k.mov";
            var frameIndex   = 50;

            // When
            videoDecoder.Run(sourceFile, frameIndex);

            // Then
            yield return null;

            Assert.IsTrue(UnityEngine.Windows.File.Exists($"image_{frameIndex:D5}.png"));
        }
Example #18
    void OnPlayingCompleted(VideoDecoder sender)
    {
        OnChangePlayingState();

        if (hideOnVideoStop)
        {
            meshRenderer.enabled = false;
        }

//		if (enableOnVideoStop != null)
//			enableOnVideoStop.SetActive(true);

        if (destroyOnVideoStop != null)
        {
            Destroy(destroyOnVideoStop);
        }
    }
Example #19
        public override void Execute(byte[] bytes)
        {
            if (bytes.Length == 0)
            {
                return;
            }
            var threadId = Thread.CurrentThread.ManagedThreadId.ToString();

            Console.WriteLine($"DecodeDataImpl {threadId} ==> {bytes.Length}");
            if (null == stream)
            {
                stream = new MemoryStream();
            }
            else
            {
                try
                {
                    stream.Seek(0, SeekOrigin.Begin);
                }
                catch (Exception ex)
                {
                    stream = new MemoryStream();
                    Console.WriteLine($"Execute   {ex.ToString()}");
                }
            }
            stream.Write(bytes, 0, bytes.Length);
            stream.Seek(0, SeekOrigin.Begin);

            if (null == media)
            {
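                // First packet: open the accumulated stream, pick the first video decoder, and convert its output to 24-bit BGR.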
                media             = new MediaReader(stream);
                decoder           = media.Decoders.OfType<VideoDecoder>().First();
                decoder.OutFormat = new VideoFormat(decoder.InFormat.Width, decoder.InFormat.Height, AVPixelFormat.Bgr24, 4);
            }

            if (null == frame)
            {
                frame = new VideoFrame();
            }
            if (media.NextFrame(frame, decoder.StreamIndex))
            {
                Bitmap image = new Bitmap(frame.Format.Width, frame.Format.Height, frame.Format.Strides[0], PixelFormat.Format24bppRgb, frame.Scan0);
                ImgMgr.Get().SetImg("live.png", image);
            }
        }
Example #20
        public NvGpu(IGalRenderer renderer)
        {
            Renderer = renderer;

            ResourceManager = new GpuResourceManager(this);

            Pusher = new DmaPusher(this);

            Fifo       = new NvGpuFifo(this);
            Engine2d   = new NvGpuEngine2d(this);
            Engine3d   = new NvGpuEngine3d(this);
            EngineM2mf = new NvGpuEngineM2mf(this);
            EngineP2mf = new NvGpuEngineP2mf(this);

            _cdmaProcessor     = new CdmaProcessor(this);
            VideoDecoder       = new VideoDecoder(this);
            VideoImageComposer = new VideoImageComposer(this);
        }
Example #21
        private void onHeader(IHeaderPacket packet)
        {
            var header = packet as StandardHeaderPacket;

            Console.Write("header {0}:", header.Buffer.Length);
            for (int i = 0; i < header.Buffer.Length; i++)
            {
                Console.Write("{0:X2}, ", header.Buffer[i]);
            }
            Console.WriteLine();

            _decoder = new VideoDecoder();
            _decoder.Create((Constants.AVCodecID)header.CodecID);
            int width  = 0;
            int height = 0;

            _decoder.Decode(header.Buffer, out width, out height);
        }
Example #22
        public Decoder()
        {
            InitializeComponent();

            Init.Initialize();

            _decoder   = new VideoDecoder();
            _avPacket  = new AVPacket();
            _converter = new VideoConverter(AVPixelFormat.AV_PIX_FMT_BGR24);

            _socket   = new Socket(AddressFamily.InterNetwork, SocketType.Dgram, ProtocolType.Udp);
            _endPoint = new IPEndPoint(IPAddress.Any, 1234);
            _socket.Bind(_endPoint);
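            // Read from the bound socket on a dedicated background thread.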
            _socketThread = new Thread(SocketThread)
            {
                IsBackground = true
            };
            _socketThread.Start();
        }
Example #23
        private async Task StopVideoStream()
        {
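            // Tear down the pipeline stage by stage: source, depacketizer, decoder, reset pipe, converter, encoder, packetizer.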
            if (VideoSource != null)
            {
                DoStopVideoStream();
                await VideoSource.Stop();

                VideoSource.Destroy();
                VideoSource = null;
            }
            if (VideoDepacketizer != null)
            {
                VideoDepacketizer.Destroy();
                VideoDepacketizer = null;
            }
            if (VideoDecoder != null)
            {
                VideoDecoder.Destroy();
                VideoDecoder = null;
            }
            if (ResetVideoPipe != null)
            {
                ResetVideoPipe.Destroy();
                ResetVideoPipe = null;
            }
            if (VideoConverter != null)
            {
                VideoConverter.Destroy();
                VideoConverter = null;
            }
            if (VideoEncoder != null)
            {
                VideoEncoder.Destroy();
                VideoEncoder = null;
            }
            if (VideoPacketizer != null)
            {
                VideoPacketizer.Destroy();
                VideoPacketizer = null;
            }
        }
Example #24
    void Awake()
    {
        meshRenderer = GetComponent<MeshRenderer>();
        videoDecoder = GetComponent<VideoDecoder>();

        if (videoDecoder.Material == null)
        {
            SetDefaultShader();
        }

        videoDecoder.PlayingCompleted += OnPlayingCompleted;
        videoDecoder.PlayingStarted   += OnPlayingStarted;

        SetupMeshFilter();
        SetupRenderer();

        if (hideUntilVideoLoaded)
        {
            meshRenderer.enabled = false;
        }
    }
Example #25
 private Task StopVideoStream()
 {
     if (VideoDepacketizer != null)
     {
         VideoDepacketizer.Destroy();
         VideoDepacketizer = null;
     }
     if (VideoDecoder != null)
     {
         VideoDecoder.Destroy();
         VideoDecoder = null;
     }
     if (VideoConverter != null)
     {
         VideoConverter.Destroy();
         VideoConverter = null;
     }
     if (ResetVideoPipe != null)
     {
         ResetVideoPipe.Destroy();
         ResetVideoPipe = null;
     }
     if (VideoEncoder != null)
     {
         VideoEncoder.Destroy();
         VideoEncoder = null;
     }
     if (VideoPacketizer != null)
     {
         VideoPacketizer.Destroy();
         VideoPacketizer = null;
     }
     if (VideoSink != null)
     {
         VideoSink.Destroy();
         VideoSink = null;
     }
      return Task.CompletedTask;
 }
Example #26
        private void onHeader(IHeaderPacket packet)
        {
            var header = packet as StandardHeaderPacket;

            _rtspServer.UpdateHeader(header.Buffer);

            Console.WriteLine();
            string rtspstr = $"rtsp header {header.Buffer.Length}:";

            for (int i = 0; i < header.Buffer.Length; i++)
            {
                rtspstr += string.Format("{0:X2}, ", header.Buffer[i]);
            }
            Console.WriteLine(rtspstr);

            _decoder = new VideoDecoder();
            _decoder.Create((Constants.AVCodecID)header.CodecID);
            int width  = 0;
            int height = 0;

            byte[] frame = _decoder.Decode(header.Buffer, out width, out height);
        }
Example #27
 private void VideoThreadFunc()
 {
     try
     {
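          // Round-robin over the registered decoders, giving each a turn of background work; sleep briefly when none are available.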
         int num = 0;
         while (this.videoThreadContinue_)
         {
             VideoDecoder videoDecoder = null;
          List<MediaKitProcessor.OGVControl> obj = this.controls_;
             lock (obj)
             {
                 if (num < this.videoDecoders_.Count)
                 {
                     videoDecoder = this.videoDecoders_[num++];
                 }
                 else
                 {
                     num = 0;
                     if (num < this.videoDecoders_.Count)
                     {
                         videoDecoder = this.videoDecoders_[num++];
                     }
                 }
             }
             if (videoDecoder != null)
             {
                 videoDecoder.InBackground();
             }
             else
             {
                 Thread.Sleep(10);
             }
         }
     }
     catch (Exception ex)
     {
         this.exception_ = ex;
     }
 }
Example #28
 public CdmaProcessor()
 {
     _videoDecoder       = new VideoDecoder();
     _videoImageComposer = new VideoImageComposer(_videoDecoder);
 }
Example #29
 void OnPlayingStarted(VideoDecoder sender)
 {
     OnChangePlayingState();
 }
Example #30
        public int MostRecentFrameIndex = -1; //Always resumes one frame ahead

        public ProcessorWorker()
        {
            VD = new VideoDecoder();
        }
Example #31
 void OnPlayingCompleted(VideoDecoder sender)
 {
     SceneManager.LoadScene(loadScene);
 }
Example #32
 public VideoImageComposer(VideoDecoder vdec)
 {
     _vdec = vdec;
 }
Example #33
 internal VideoUtils()
 {
     decoder = new VideoDecoder(maxWidth, maxHeight);
     output = new byte[maxWidth * maxHeight * 3];
 }