Beispiel #1
0
    /// <summary>
    /// Captures the current frame from the local render texture, JPEG-encodes
    /// it off the main thread and wraps it in a <see cref="VideoPacket"/>.
    /// </summary>
    /// <returns>Encoded frame packet stamped with the local participant id.</returns>
    public async UniTask<VideoPacket> GetVideo()
    {
        // GPU readback must wait until rendering of the frame has finished.
        await UniTask.WaitForEndOfFrame();

        var request = AsyncGPUReadback.Request(localRenderTexture);
        await UniTask.WaitUntil(() => request.done);

        rawData = request.GetData<Color32>().ToArray();

        // Encode on the thread pool so JPEG compression does not stall the
        // main thread. (The original additionally wrapped the encode in
        // UniTask.Run, which dispatched to the pool a second time for no gain.)
        await UniTask.SwitchToThreadPool();
        jpgData = ImageConversion.EncodeArrayToJPG(rawData, GraphicsFormat.R8G8B8A8_UNorm, width, height);

        // NOTE(review): DateTime.Now is local time; if receivers compare
        // timestamps across machines, DateTime.UtcNow would be safer — confirm.
        VideoPacket packet = new VideoPacket();
        packet.Id        = ConfigManager.LOCAL_ID;
        packet.Width     = width;
        packet.Height    = height;
        packet.Timestamp = ConvertDateTimeToLong(DateTime.Now);
        packet.Data      = jpgData;

        return packet;
    }
 private void OnVideoPacketAcquired(VideoPacket packet)
 {
     // Raise the event through a local copy so a concurrent unsubscribe
     // between the null check and the call cannot cause a null dereference.
     var handler = VideoPacketAcquired;
     if (handler != null)
     {
         handler(packet);
     }
 }
Beispiel #3
0
    public Texture2D DecodeVideoData(VideoPacket packet)
    {
        // LoadImage replaces the texture's size and format with those of the
        // encoded image, so the initial RGB565/width/height are placeholders.
        var texture = new Texture2D(width, height, TextureFormat.RGB565, false);
        texture.LoadImage(packet.Data);

        return texture;
    }
Beispiel #4
0
        /// <summary>
        /// Deserializes a raw byte buffer into a <see cref="VideoPacket"/>.
        /// Wire layout: Int64 timestamp, UInt32 frame number, UInt16 height,
        /// UInt16 width, Int32 frame type, then the raw video payload.
        /// </summary>
        /// <param name="data">Buffer containing one serialized packet.</param>
        /// <returns>The reconstructed packet.</returns>
        VideoPacket ConvertVideoPacket(byte[] data)
        {
            // Named header offsets instead of the original repeated magic sums.
            const int TimestampOffset   = 0;
            const int FrameNumberOffset = TimestampOffset + sizeof(long);    // 8
            const int HeightOffset      = FrameNumberOffset + sizeof(uint);  // 12
            const int WidthOffset       = HeightOffset + sizeof(ushort);     // 14
            const int FrameTypeOffset   = WidthOffset + sizeof(ushort);      // 16
            const int HeaderSize        = FrameTypeOffset + sizeof(int);     // 20

            if (data == null)
            {
                throw new ArgumentNullException("data");
            }
            if (data.Length < HeaderSize)
            {
                throw new ArgumentException("Buffer too small to contain a video packet header.", "data");
            }

            long           timestamp   = BitConverter.ToInt64(data, TimestampOffset);
            uint           framenumber = BitConverter.ToUInt32(data, FrameNumberOffset);
            ushort         height      = BitConverter.ToUInt16(data, HeightOffset);
            ushort         width       = BitConverter.ToUInt16(data, WidthOffset);
            VideoFrameType ft          = (VideoFrameType)BitConverter.ToInt32(data, FrameTypeOffset);

            // Everything after the fixed-size header is the video payload.
            var viddata = new byte[data.Length - HeaderSize];
            Array.Copy(data, HeaderSize, viddata, 0, viddata.Length);

            var packet = new VideoPacket
            {
                Timestamp   = timestamp,
                FrameNumber = framenumber,
                Height      = height,
                Width       = width,
                FrameType   = ft,
                Data        = viddata
            };

            return packet;
        }
        /// <summary>
        /// Worker loop: replays packets from the recorded file, dispatching
        /// each one to the navigation or video callback, until the file is
        /// exhausted or cancellation is requested.
        /// </summary>
        /// <param name="token">Cancellation token checked before each packet.</param>
        protected override void Loop(CancellationToken token)
        {
            // Open the recorded file for reading.
            using (var stream = new FileStream(_path, FileMode.Open))
                // Wrap the stream in the packet parser.
                using (var reader = new PacketReader(stream))
                {
                    while (stream.Position < stream.Length && token.IsCancellationRequested == false)
                    {
                        PacketType packetType = reader.ReadPacketType();
                        switch (packetType)
                        {
                        case PacketType.Navigation:
                            NavigationPacket navigationPacket = reader.ReadNavigationPacket();
                            _navigationPacketAcquired(navigationPacket);
                            break;

                        case PacketType.Video:
                            VideoPacket videoPacket = reader.ReadVideoPacket();
                            _videoPacketAcquired(videoPacket);
                            break;

                        default:
                            // Unknown packet type: the file is corrupt or from a newer format.
                            throw new ArgumentOutOfRangeException();
                        }
                    }
                }
            // Notify subscribers that the whole file has been replayed.
            if (OnFileEnd != null)
            {
                OnFileEnd();
            }
        }
Beispiel #6
0
        /// <summary>
        /// Attempts to decode an encoded video packet into a raw frame.
        /// </summary>
        /// <param name="packet">Encoded packet to decode.</param>
        /// <param name="frame">Receives the decoded frame on success.</param>
        /// <returns>true when decoding and conversion succeeded.</returns>
        public bool TryDecode(ref VideoPacket packet, out VideoFrame frame)
        {
            // Lazily create the decoder on first use.
            if (_videoDecoder == null)
            {
                _videoDecoder = new VideoDecoder();
            }

            frame = new VideoFrame();

            AVFrame avFrame;
            if (!_videoDecoder.TryDecode(ref packet.Data, out avFrame))
            {
                return false;
            }

            // Lazily create the converter targeting the configured pixel format.
            if (_videoConverter == null)
            {
                _videoConverter = new VideoConverter(_pixelFormat.ToAVPixelFormat());
            }

            // Carry the packet metadata over onto the decoded frame.
            frame.Timestamp   = packet.Timestamp;
            frame.FrameNumber = packet.FrameNumber;
            frame.Width       = packet.Width;
            frame.Height      = packet.Height;
            frame.PixelFormat = _pixelFormat;
            frame.Data        = _videoConverter.ConvertFrame(avFrame);

            return true;
        }
Beispiel #7
0
        /// <summary>
        /// sceMpegAvcDecode HLE syscall: decodes one AVC (H.264) access unit
        /// into the caller-supplied guest output buffer.
        /// </summary>
        /// <param name="SceMpegPointer">Pointer to the guest SceMpeg structure.</param>
        /// <param name="MpegAccessUnit">Access unit to decode.</param>
        /// <param name="FrameWidth">Width of the destination frame buffer.</param>
        /// <param name="OutputBufferPointer">Guest pointer receiving the decoded pixels.</param>
        /// <param name="Init">Init flag (currently unused — see commented-out check below).</param>
        /// <returns>0 on success.</returns>
        //[HlePspNotImplemented]
        public int sceMpegAvcDecode(SceMpegPointer *SceMpegPointer, SceMpegAu *MpegAccessUnit, int FrameWidth,
                                    PspPointer *OutputBufferPointer, PspPointer *Init)
        {
            //if (*Init == 1)
            //{
            //	throw (new SceKernelException(SceKernelErrors.ERROR_MPEG_NO_DATA));
            //}

            var SceMpegData = GetSceMpegData(SceMpegPointer);
            var Mpeg        = GetMpeg(SceMpegPointer);

            // Dummy packet; not actually used by the decode call below.
            var VideoPacket = new VideoPacket();

            //Console.Error.WriteLine("0x{0:X}", PspMemory.PointerToPspAddress(OutputBuffer));
            //Console.WriteLine("{0:X8}", (*OutputBufferPointer).Address);
            Mpeg.AvcDecode(
                MpegAccessUnit,
                FrameWidth,
                SceMpegData->SceMpegAvcMode.PixelFormat,
                *OutputBufferPointer
                );

            // Mark the frame as decoded so later status queries report it.
            SceMpegData->AvcFrameStatus = 1;
            //Init = SceMpegData->AvcFrameStatus;

            //throw (new SceKernelException(SceKernelErrors.ERROR_MPEG_NO_DATA));
            return(0);
        }
Beispiel #8
0
    /// <summary>
    /// Captures a video frame from the SDK and streams it to the current
    /// call's peer over UDP, split into chunk-sized packets.
    /// </summary>
    void SendVideo()
    {
        // Grab the latest encoded frame captured by the chat SDK.
        VideoPacket packet = UnityChatSDK.Instance.GetVideo();
        if (packet == null)
        {
            return;
        }

        packet.Id = ChatManager.Instance.UserID;
        byte[] video = GetPbVideoPacket(packet).ToByteArray();

        udpPacketIndex++;
        List<UdpPacket> chunks = UdpPacketSpliter.Split(udpPacketIndex, video, ChunkLength);
        foreach (UdpPacket chunk in chunks)
        {
            // Each chunk carries the call routing information alongside the payload.
            CallInfo info = new CallInfo();
            info.UserID = ChatManager.Instance.UserID;
            info.CallID = ChatManager.Instance.CallID;
            info.PeerList.Add(ChatManager.Instance.ChatPeerID);

            UdplDataModel model = new UdplDataModel();
            model.Request      = RequestByte.REQUEST_VIDEO;
            model.ChatInfoData = info.ToByteArray();
            model.ChatData     = UdpPacketEncode(chunk);

            UdpSocketManager.Instance.Send(UdpMessageCodec.Encode(model));
        }
    }
        /// <summary>
        /// Inserts a block into the stream, keeping blocks sorted by start time.
        /// Throws if a block with the same time already exists.
        /// </summary>
        public void AddBlock(VideoPacket NewBlock)
        {
            var blocks = this.Blocks;

            // Accessor and comparer used by the binary search.
            System.Func<int, VideoPacket> elementAt = (index) =>
            {
                return blocks[index];
            };
            System.Func<VideoPacket, BinaryChop.CompareDirection> compareToNew = (other) =>
            {
                return other.GetTimeDirection(NewBlock.GetStartTime());
            };

            int? match;
            int? nearestPrev;

            if (blocks.Count == 0)
            {
                // Empty stream: insert at the front.
                match       = null;
                nearestPrev = -1;
            }
            else
            {
                BinaryChop.Search(0, blocks.Count - 1, elementAt, compareToNew, out nearestPrev, out match);
                if (match.HasValue)
                {
                    throw new System.Exception("Block already exists in stream");
                }
            }

            blocks.Insert(nearestPrev.Value + 1, NewBlock);
        }
Beispiel #10
0
    /// <summary>
    /// send video data
    /// </summary>
    void SendVideo()
    {
        // capture video data by SDK
        VideoPacket packet = UnityChatSDK.Instance.GetVideo();
        bool hasData = packet != null && packet.Data != null && packet.Data.Length > 0;
        if (!hasData)
        {
            return;
        }

        if (UnityChatSDK.Instance.EnableSync)
        {
            // Buffer frames so video emission stays in step with the audio sampling.
            videoPacketQueue.Enqueue(packet);

            int framesPerAudioSample = UnityChatSDK.Instance.Framerate / UnityChatSDK.Instance.AudioSample;
            if (videoPacketQueue.Count < framesPerAudioSample)
            {
                return;
            }
            packet = videoPacketQueue.Dequeue();
        }

        packet.Id = TestUid; //use your userID
        byte[] video = GetVideoPacketData(packet);
        SendDataByYourNetwork(video);

        //On receiving video data,just for testing
        ReceivedVideoDataQueue.Enqueue(video);
    }
Beispiel #11
0
    /// <summary>
    /// send video data
    /// </summary>
    void SendVideo()
    {
        // Capture the latest frame from the SDK (may be null).
        VideoPacket packet = UnityChatSDK.Instance.GetVideo();

        if (UnityChatSDK.Instance.EnableSync)
        {
            if (packet != null)
            {
                videoPacketQueue.Enqueue(packet);
            }

            // Only emit once enough frames have accumulated for one audio sample.
            int framesPerAudioSample = UnityChatSDK.Instance.Framerate / UnityChatSDK.Instance.AudioSample;
            if (videoPacketQueue.Count < framesPerAudioSample)
            {
                return;
            }
            packet = videoPacketQueue.Dequeue();
        }

        if (packet == null)
        {
            return;
        }

        packet.Id = 1001; //use your userID
        byte[] video = GetPbVideoPacket(packet);
        SendDataByYourNetwork(video);

        //just for testing
        ReceivedVideoDataQueue.Enqueue(video);
    }
Beispiel #12
0
 /// <summary>
 /// Creates a new <see cref="VideoFrameTag"/> instance.
 /// </summary>
 /// <param name="streamId">Id of the stream this frame belongs to.</param>
 /// <param name="frameNum">Sequence number of the frame within the stream.</param>
 /// <param name="video">The screen-video payload for this frame.</param>
 public VideoFrameTag(ushort streamId, ushort frameNum, ScreenVideoPacket video)
 {
     this._tagCode = (int)TagCodeEnum.VideoFrame;

     // 'this.' disambiguates the fields from the identically named parameters.
     this.streamId = streamId;
     this.frameNum = frameNum;
     this.video    = video;
 }
Beispiel #13
0
    /// <summary>
    /// Called when raw video data is received from the network.
    /// </summary>
    /// <param name="data">Serialized video packet bytes.</param>
    public void OnReceiveVideo(byte[] data)
    {
        // Deserialize, then hand the packet to the SDK to decode and render.
        VideoPacket packet = DecodeVideoPacket(data);
        UnityChatSDK.Instance.DecodeVideoData(packet);
    }
        /// <summary>
        /// Attempts to decode a video packet into a raw frame via the native decoder.
        /// </summary>
        /// <param name="packet">Encoded packet; its Data buffer is pinned for the native call.</param>
        /// <param name="frame">Receives the decoded frame on success.</param>
        /// <returns>true when a frame was decoded and converted.</returns>
        public unsafe bool TryDecode(ref VideoPacket packet, out VideoFrame frame)
        {
            // Lazily create the native decoder on first use.
            // BUG FIX: in the original, the entire decode body was nested inside
            // this null check (a closing brace was missing), so packets were only
            // decoded on the very first call and 'frame' was not assigned on all paths.
            if (_videoDecoder == null)
            {
                _videoDecoder = new VideoDecoder();
            }

            frame = new VideoFrame();

            // Pin the managed payload so the native decoder can read it in place.
            fixed(byte *pData = &packet.Data[0])
            {
                _avPacket.data = pData;
                _avPacket.size = packet.Data.Length;

                if (_videoDecoder.TryDecode(ref _avPacket, ref _avFrame))
                {
                    if (_videoConverter == null)
                    {
                        _videoConverter = new VideoConverter(_pixelFormat.ToAVPixelFormat());
                    }

                    byte[] data = _videoConverter.ConvertFrame(ref _avFrame);

                    frame.Timestamp   = packet.Timestamp;
                    frame.Number      = packet.FrameNumber;
                    frame.Width       = packet.Width;
                    frame.Height      = packet.Height;
                    // Bytes per pixel, derived from the converted buffer size.
                    frame.Depth       = data.Length / (packet.Width * packet.Height);
                    frame.PixelFormat = _pixelFormat;
                    frame.Data        = data;

                    return true;
                }
            }

            return false;
        }
 // Decode incoming live-stream data and display the result on the UI image.
 public void DecodeVideoData(VideoPacket videoPacket)
 {
     if (LiveImage == null)
     {
         return;
     }
     LiveImage.texture = UnityChatSDK.Instance.DecodeVideoData(videoPacket);
 }
Beispiel #16
0
 ////////////////////////////////////////////////////
 ////////////////////////////////////////////////////
 ////////////////////// VIDEO ///////////////////////
 ////////////////////////////////////////////////////
 ////////////////////////////////////////////////////
 private unsafe void OnVideoPacketAcquired(VideoPacket packet)
 {
     // Drop packets once the decoder worker has stopped.
     if (!_videoPacketDecoderWorker.IsAlive)
     {
         return;
     }
     _videoPacketDecoderWorker.EnqueuePacket(packet);
 }
Beispiel #17
0
 // NOTE(review): the method name misspells "Acquired" ("Aqcuired"); renaming
 // would break whatever delegate/event subscription references it, so fix it
 // together with the callers.
 private void OnVideoPacketAqcuired(VideoPacket packet)
 {
     // If the video decoder is alive, enqueue the packet to be decoded.
     if (videoDecoder.IsAlive)
     {
         videoDecoder.EnqueuePacket(packet);
     }
 }
    // Wraps the sample in a video packet and appends it to the named stream.
    public void PushPacket(Mpeg4.TSample Sample, string StreamName)
    {
        var stream = GetStream(StreamName);
        var packet = new VideoPacket { Sample = Sample };
        stream.AddBlock(packet);
    }
Beispiel #19
0
 private void MyFrameEncodedCallback(IntPtr hEncoder, IntPtr hPacket)
 {
     // Ignore callbacks that belong to a different encoder instance.
     if (hEncoder != _encoderHandle)
     {
         return;
     }
     var videoPacket = new VideoPacket(hPacket);
     OnFrameEncoded(new FrameEncodedEventArgs(videoPacket));
 }
Beispiel #20
0
 public void EnqueuePacket(VideoPacket packet)
 {
     // An incoming I (intra) frame lets decoding restart cleanly, so when
     // frame-skipping is enabled any backlog can be dropped first.
     bool dropBacklog = _skipFrames
                        && packet.FrameType == VideoFrameType.I
                        && _packetQueue.Count > 0;
     if (dropBacklog)
     {
         _packetQueue.Flush();
     }

     _packetQueue.Enqueue(packet);
 }
Beispiel #21
0
        /// <summary>
        /// Writes one video packet out as an image file: decodes the packet
        /// and, when decoding succeeds, persists the resulting frame as a bitmap.
        /// </summary>
        /// <param name="videoPacket">Packet to decode and write.</param>
        private void Write(VideoPacket videoPacket)
        {
            VideoFrame frame;

            // Packets the decoder cannot turn into a frame are silently skipped.
            if (_videoDecoder.TryDecode(ref videoPacket, out frame))
            {
                WriteBitmap(frame);
            }
        }
Beispiel #22
0
 public void EnqueuePacket(VideoPacket packet)
 {
     bool isKeyFrame     = packet.FrameType == VideoFrameType.I;
     bool backlogTooLong = _packetQueue.Count > SkipFramesThreshold;

     // An I-frame lets decoding restart cleanly, so a long backlog can be
     // discarded without corrupting the picture.
     if (_skipFrames && isKeyFrame && backlogTooLong)
     {
         Trace.TraceInformation("Skipping {0} frames.", _packetQueue.Count);
         _packetQueue.Flush();
     }

     _packetQueue.Enqueue(packet);
 }
Beispiel #23
0
 /// <summary>
 /// Serializes a video packet: Int64 timestamp, UInt32 frame number,
 /// UInt16 height, UInt16 width, one frame-type byte, Int32 payload
 /// length, then the payload bytes. Field order is the wire format —
 /// the reader must consume fields in exactly this order.
 /// </summary>
 public static void WritePacket(this BinaryWriter writer, VideoPacket packet)
 {
     writer.Write(packet.Timestamp);
     writer.Write(packet.FrameNumber);
     writer.Write(packet.Height);
     writer.Write(packet.Width);
     writer.Write((byte) packet.FrameType);
     // Length prefix lets the reader consume exactly the payload size.
     writer.Write(packet.Data.Length);
     writer.Write(packet.Data);
 }
 /// <summary>
 /// Serializes a video packet to the writer. The field sequence
 /// (timestamp, frame number, height, width, frame-type byte, payload
 /// length, payload) is the wire format and must not be reordered.
 /// </summary>
 public static void WriteVideoPacket(BinaryWriter writer, VideoPacket packet)
 {
     writer.Write(packet.Timestamp);
     writer.Write(packet.FrameNumber);
     writer.Write(packet.Height);
     writer.Write(packet.Width);
     writer.Write((byte)packet.FrameType);
     // Length prefix so the reader knows how many payload bytes follow.
     writer.Write(packet.Data.Length);
     writer.Write(packet.Data);
 }
 /// <summary>
 /// Writes one video packet in the recording wire format: timestamp,
 /// frame number, height, width, frame-type byte, payload length, payload.
 /// The write order must match the corresponding reader exactly.
 /// </summary>
 public void Write(VideoPacket packet)
 {
     Write(packet.Timestamp);
     Write(packet.FrameNumber);
     Write(packet.Height);
     Write(packet.Width);
     Write((byte)packet.FrameType);
     // Length prefix precedes the raw payload bytes.
     Write(packet.Data.Length);
     Write(packet.Data);
 }
Beispiel #26
0
 /// <summary>
 /// Forwards each newly encoded frame to the pusher.
 /// </summary>
 private void Encoder_FrameEncoded(object sender, FrameEncodedEventArgs e)
 {
     try
     {
         VideoPacket packet = e.Packet;
         _pusher.PushPacket(packet);
     }
     catch (Exception ex)
     {
         // Best-effort push: a failed frame must not kill the encoder callback.
         // BUG FIX: the original empty catch swallowed every error invisibly;
         // at minimum the failure is now traced.
         System.Diagnostics.Trace.TraceError("Failed to push encoded frame: {0}", ex);
     }
 }
 void _droneClient_VideoPacketAcquired(VideoPacket packet)
 {
     // Feed the recorder first (when one is running), then the decoder.
     bool recorderRunning = _packetRecorderWorker != null && _packetRecorderWorker.IsAlive;
     if (recorderRunning)
     {
         _packetRecorderWorker.EnqueuePacket(packet);
     }

     if (_videoPacketDecoderWorker.IsAlive)
     {
         _videoPacketDecoderWorker.EnqueuePacket(packet);
     }
 }
Beispiel #28
0
        /// <summary>
        /// Pushes a packet through the native pusher, selecting the interop
        /// layer that matches the current process bitness.
        /// </summary>
        public void PushPacket(VideoPacket packet)
        {
            int result;
            if (Environment.Is64BitProcess)
            {
                result = Interop64.PushPacket(_pusherHandle, packet.Handle);
            }
            else
            {
                result = Interop32.PushPacket(_pusherHandle, packet.Handle);
            }

            // Non-zero native result codes are surfaced as exceptions.
            if (result != 0)
            {
                throw new PusherException(result);
            }
        }
Beispiel #29
0
 private void OnVideoPacketAcquired(VideoPacket packet)
 {
     // Pass the raw packet to the recorder when one is running.
     var recorder = _packetRecorderWorker;
     if (recorder != null && recorder.IsAlive)
     {
         recorder.EnqueuePacket(packet);
     }

     // Always offer the packet to the decoder while it is alive.
     var decoder = _videoPacketDecoderWorker;
     if (decoder.IsAlive)
     {
         decoder.EnqueuePacket(packet);
     }
 }
Beispiel #30
0
 /// <summary>
 /// Reads one serialized video packet — the mirror of the writer:
 /// Int64 timestamp, UInt32 frame number, UInt16 height, UInt16 width,
 /// one frame-type byte, Int32 payload length, then the payload bytes.
 /// </summary>
 public static VideoPacket ReadVideoPacket(this BinaryReader reader)
 {
     var packet = new VideoPacket();
     packet.Timestamp = reader.ReadInt64();
     packet.FrameNumber = reader.ReadUInt32();
     packet.Height = reader.ReadUInt16();
     packet.Width = reader.ReadUInt16();
     packet.FrameType = (FrameType) reader.ReadByte();
     // The payload size is read first so exactly that many bytes are consumed.
     int dataSize = reader.ReadInt32();
     packet.Data = reader.ReadBytes(dataSize);
     return packet;
 }
Beispiel #31
0
 public static void OnVideoPacketAcquired(VideoPacket packet)
 {
     // Optionally record the raw packet before handing it to the decoder.
     var recorder = _packetRecorderWorker;
     bool canRecord = recorder != null && recorder.IsAlive;
     if (canRecord)
     {
         recorder.EnqueuePacket(packet);
     }

     if (VideoPacketDecoderWorker.IsAlive)
     {
         VideoPacketDecoderWorker.EnqueuePacket(packet);
     }
 }
 private void Update()
 {
     //test==
     if (!Living)
     {
         return;
     }

     // Pull the latest frame from the SDK and render it when data is present.
     VideoPacket packet = UnityChatSDK.Instance.GetVideo();
     bool usable = packet != null && packet.Data != null;
     if (usable)
     {
         DecodeVideoData(packet);
     }
     //==test
 }
        /// <summary>
        /// Decodes a packet into a frame; returns false when decoding fails.
        /// </summary>
        public bool TryDecode(ref VideoPacket packet, out VideoFrame frame)
        {
            // Create the decoder on demand.
            _videoDecoder = _videoDecoder ?? new VideoDecoder();

            frame = new VideoFrame();

            AVFrame avFrame;
            bool decoded = _videoDecoder.TryDecode(ref packet.Data, out avFrame);
            if (!decoded)
            {
                return false;
            }

            if (_videoConverter == null)
            {
                _videoConverter = new VideoConverter(_pixelFormat.ToAVPixelFormat());
            }

            // Copy the packet metadata onto the decoded frame.
            frame.Timestamp = packet.Timestamp;
            frame.FrameNumber = packet.FrameNumber;
            frame.Width = packet.Width;
            frame.Height = packet.Height;
            frame.PixelFormat = _pixelFormat;
            frame.Data = _videoConverter.ConvertFrame(avFrame);

            return true;
        }
Beispiel #34
0
 private void OnVideoPacketAcquired(VideoPacket packet)
 {
     // Raise the event through a local copy of the delegate.
     var handler = VideoPacketAcquired;
     if (handler != null)
     {
         handler(packet);
     }
 }
 // Queues a packet for asynchronous processing by the worker.
 public void EnqueuePacket(VideoPacket packet)
 {
     _packetQueue.Enqueue(packet);
 }
Beispiel #36
0
 private void OnVideoPacketAcquired(VideoPacket packet)
 {
     // Fan the packet out to the recorder and the decoder workers.
     if (_packetRecorderWorker.IsAlive)
     {
         _packetRecorderWorker.EnqueuePacket(packet);
     }
     if (_videoPacketDecoderWorker.IsAlive)
     {
         _videoPacketDecoderWorker.EnqueuePacket(packet);
     }
 }
        /// <summary>
        /// Video-stream worker loop: reads the drone's TCP video stream, scans
        /// for "PaVE" (Parrot Video Encapsulation) headers, assembles each
        /// complete frame into a VideoPacket and publishes it via the acquired
        /// callback, until cancellation is requested.
        /// </summary>
        /// <param name="token">Token used to stop the loop.</param>
        protected override unsafe void Loop(CancellationToken token)
        {
            using (var tcpClient = new TcpClient(_configuration.DroneHostname, VideoPort))
            using (NetworkStream stream = tcpClient.GetStream())
            {
                var packet = new VideoPacket();
                byte[] packetData = null;   // payload buffer of the frame being assembled; null = still searching for a header
                int offset = 0;             // number of valid bytes currently held in 'buffer'
                int frameStart = 0;         // index in 'buffer' where the current frame's payload begins
                int frameEnd = 0;           // index one past the end of the current frame's payload
                var buffer = new byte[FrameBufferSize];
                fixed (byte* pBuffer = &buffer[0])
                    while (token.IsCancellationRequested == false)
                    {
                        offset += stream.Read(buffer, offset, NetworkStreamReadSize);
                        if (packetData == null)
                        {
                            // lookup for a frame start: scan for the ASCII signature "PaVE",
                            // leaving room after it for one complete header struct
                            int maxSearchIndex = offset - sizeof (parrot_video_encapsulation_t);
                            for (int i = 0; i < maxSearchIndex; i++)
                            {
                                if (buffer[i] == 'P' &&
                                    buffer[i + 1] == 'a' &&
                                    buffer[i + 2] == 'V' &&
                                    buffer[i + 3] == 'E')
                                {
                                    // Reinterpret the bytes at the signature as the native header struct.
                                    parrot_video_encapsulation_t pve = *(parrot_video_encapsulation_t*) (pBuffer + i);
                                    packetData = new byte[pve.payload_size];
                                    packet = new VideoPacket
                                        {
                                            Timestamp = DateTime.UtcNow.Ticks,
                                            FrameNumber = pve.frame_number,
                                            Width = pve.display_width,
                                            Height = pve.display_height,
                                            FrameType = Convert(pve.frame_type),
                                            Data = packetData
                                        };
                                    frameStart = i + pve.header_size;
                                    frameEnd = frameStart + packet.Data.Length;
                                    break;
                                }
                            }
                            if (packetData == null)
                            {
                                // frame is not detected: keep only the unsearched tail of the
                                // buffer so the signature scan can resume where it left off
                                offset -= maxSearchIndex;
                                Array.Copy(buffer, maxSearchIndex, buffer, 0, offset);
                            }
                        }
                        if (packetData != null && offset >= frameEnd)
                        {
                            // frame acquired: copy the payload out and publish the packet
                            Array.Copy(buffer, frameStart, packetData, 0, packetData.Length);
                            _videoPacketAcquired(packet);

                            // clean up acquired frame: shift remaining bytes to the front
                            packetData = null;
                            offset -= frameEnd;
                            Array.Copy(buffer, frameEnd, buffer, 0, offset);
                        }
                        // NOTE(review): fixed 10 ms sleep throttles the read loop —
                        // presumably tuned to the drone's frame rate; confirm before changing.
                        Thread.Sleep(10);
                    }
            }
        }