Example #1
 void OnStart(PipelineProfile obj)
 {
     q = new FrameQueue(1);
     using (var depth = obj.Streams.FirstOrDefault(s => s.Stream == Stream.Depth && s.Format == Format.Z16).As<VideoStreamProfile>())
         CreateResources(depth.Width, depth.Height);
     pointSource.OnNewSample += OnNewSample;
 }
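Not shown in the snippet above: the handler and consumer it wires up. A minimal sketch, assuming the librealsense .NET wrapper's FrameQueue (Enqueue/PollForFrame); ProcessFrame is a hypothetical hook, not from the source.
 void OnNewSample(Frame frame)
 {
     q.Enqueue(frame); // Enqueue takes its own reference to the frame, so this is safe across threads
 }

 void Update()
 {
     if (q == null)
         return;

     Frame frame;
     if (q.PollForFrame(out frame)) // non-blocking; returns false when no frame is ready
         using (frame)              // frames wrap native memory, so dispose promptly
             ProcessFrame(frame);   // hypothetical per-frame processing hook
 }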
Example #2
        internal void Update(float time)
        {
            for (int i = 0; i < _gameObjects.Count; i++)
            {
                if (_gameObjects[i].IsActive)
                {
                    _gameObjects[i].Update(time);
                }
            }

            FrameQueue.ConsumeActions();

            for (int i = 0; i < _updatables.Count; i++)
            {
                _updatables[i].Update(time);
            }

            var toRemove = new List<GameObject>(_toRemove.Count);

            for (int i = 0; i < _toRemove.Count; i++)
            {
                var obj = _toRemove[i];
                if (obj.HasJobs)
                {
                    toRemove.Add(obj);
                }
                else
                {
                    obj.RemovedFromCurrentScene();
                }
            }
            _toRemove = toRemove;
        }
Example #3
        void Start()
        {
            _colorQueue = new FrameQueue(1);
            _pointQueue = new FrameQueue(1);

            _colorSource.OnNewSample += OnNewColorSample;
            _pointSource.OnNewSample += OnNewPointSample;
        }
Example #4
    private void OnStartStreaming(PipelineProfile obj)
    {
        q = new FrameQueue(1);

        using (var depth = obj.Streams.FirstOrDefault(s => s.Stream == Stream.Depth) as VideoStreamProfile)
            ResetMesh(depth.Width, depth.Height);

        source.OnNewSample += OnNewSample;
    }
Example #5
 public void Stop()
 {
     if (!Timer.IsRunning)
     {
         return;
     }
     Timer.Stop();
     FrameQueue.CompleteAdding();
 }
Example #6
 protected void OnStopStreaming()
 {
     Source.OnNewSample -= OnNewSample;
     if (q != null)
     {
         q.Dispose();
         q = null;
     }
 }
Example #7
 public void init(float i_DepthScale)
 {
     m_Units     = i_DepthScale;
     m_Ransac    = new Ransac();
     m_Queue     = new FrameQueue<DepthFrame>(1);
     m_IsRunning = true;
     m_Thread    = new Thread(AnalyzeDepthImage);
     m_Thread.Start();
 }
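The FrameQueue<DepthFrame> here is a custom generic type, so its dequeue API cannot be read off the snippet; a hypothetical worker loop for AnalyzeDepthImage might look like this (TryDequeue is assumed, not a confirmed member):
 private void AnalyzeDepthImage()
 {
     while (m_IsRunning)
     {
         DepthFrame frame;
         if (m_Queue.TryDequeue(out frame)) // assumed API on the custom queue
         {
             // fit a ground plane with m_Ransac on depth values scaled by m_Units
         }
     }
 }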
Example #8
        public Http2Connection (Http2ConnectionSettings connectionSettings, IStreamManager streamManager, IFlowControlManager flowControlManager)
        {
            this.flowControlManager = flowControlManager;
            this.streamManager = streamManager;

            ConnectionSettings = connectionSettings;
            Settings = new Http2Settings ();

            queue = new FrameQueue (flowControlManager);
        }
Example #9
    private void Dispose()
    {
        Source.OnNewSample -= OnNewSample;

        if (q != null)
        {
            q.Dispose();
            q = null;
        }
    }
Example #10
    public void OnStartStreaming(PipelineProfile activeProfile)
    {
        q = new FrameQueue(1);

        matcher = new Predicate<Frame>(Matches);

        Source.OnNewSample += OnNewSample;
        // e.Reset();

        // RsDevice.Instance.OnNewSampleSet += OnNewSampleSet;
    }
Example #11
        /// <summary>
        /// Callback when the Unity component is disabled. This is the proper way to disable the
        /// video source and get it to stop video capture.
        /// </summary>
        protected void OnDisable()
        {
            var nativePeer = PeerConnection.Peer;

            if ((nativePeer != null) && nativePeer.Initialized)
            {
                VideoStreamStopped.Invoke();
                nativePeer.I420LocalVideoFrameReady -= I420LocalVideoFrameReady;
                nativePeer.RemoveLocalVideoTrack();
                FrameQueue.Clear();
            }
        }
Example #12
        internal void SaveFrame(byte[] frameBytes, int left, int top, int width, int height, string viewerID, string machineName, DateTimeOffset startTime)
        {
            var rcFrame = new RemoteControlFrame(frameBytes, left, top, width, height, viewerID, machineName, startTime);

            FrameQueue.Enqueue(rcFrame);

            lock (LockObject)
            {
                if (ProcessingTask?.IsCompleted ?? true)
                {
                    ProcessingTask = Task.Run(new Action(StartProcessing));
                }
            }
        }
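A hypothetical sketch of the StartProcessing loop that SaveFrame schedules, assuming FrameQueue is a ConcurrentQueue<RemoteControlFrame>:
        private void StartProcessing()
        {
            RemoteControlFrame frame;
            while (FrameQueue.TryDequeue(out frame))
            {
                // encode and forward the frame to the viewer here
            }
            // once the queue drains, the task completes; the next SaveFrame call
            // sees ProcessingTask.IsCompleted and starts a fresh task under LockObject
        }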
Example #13
        private void AddLocalVideoTrackImpl(WebRTC.PeerConnection nativePeer)
        {
            string videoProfileId   = VideoProfileId;
            var    videoProfileKind = VideoProfileKind;
            int    width            = Constraints.width;
            int    height           = Constraints.height;
            double framerate        = Constraints.framerate;

#if ENABLE_WINMD_SUPPORT
            if (Mode == LocalVideoSourceFormatMode.Automatic)
            {
                // Do not constrain resolution by default, unless the device calls for it (see below).
                width  = 0; // auto
                height = 0; // auto

                // Avoid constraining the framerate; this is generally not necessary (formats are listed
                // with higher framerates first) and is error-prone as some formats report 30.0 FPS while
                // others report 29.97 FPS.
                framerate = 0; // auto

                // For HoloLens, use video profile to reduce resolution and save power/CPU/bandwidth
                if (Windows.Graphics.Holographic.HolographicSpace.IsAvailable)
                {
                    if (!Windows.Graphics.Holographic.HolographicDisplay.GetDefault().IsOpaque)
                    {
                        if (Windows.ApplicationModel.Package.Current.Id.Architecture == Windows.System.ProcessorArchitecture.X86)
                        {
                            // Holographic AR (transparent) x86 platform - Assume HoloLens 1
                            videoProfileKind = WebRTC.PeerConnection.VideoProfileKind.VideoRecording; // No profile in VideoConferencing
                            width            = 896;                                                   // Target 896 x 504
                        }
                        else
                        {
                            // Holographic AR (transparent) non-x86 platform - Assume HoloLens 2
                            videoProfileKind = WebRTC.PeerConnection.VideoProfileKind.VideoConferencing;
                            width            = 1280; // Target 1280 x 720
                        }
                    }
                }
            }
#endif
            // Force again PreferredVideoCodec right before starting the local capture,
            // so that modifications to the property done after OnPeerInitialized() are
            // accounted for.
            nativePeer.PreferredVideoCodec = PreferredVideoCodec;

            FrameQueue.Clear();
            var trackSettings = new WebRTC.PeerConnection.LocalVideoTrackSettings
            {
                videoDevice                 = default,
Example #14
    private void OnStopStreaming()
    {
        source.OnNewSample -= OnNewSample;
        if (q != null)
        {
            q.Dispose();
            q = null;
        }

        if (handle.IsAllocated)
        {
            handle.Free();
        }
    }
Example #15
    private void Dispose()
    {
        Source.OnNewSample -= OnNewSample;

        if (q != null)
        {
            q.Dispose();
            q = null;
        }

        if (handle.IsAllocated)
        {
            handle.Free();
        }
    }
Example #16
        public void Reset(bool inBios = false, Cartridge cartridge = null)
        {
            CyclesCount = 0;
            Cpu.Reset(inBios, cartridge);
            Mmu.Reset();
            Ppu.Reset();
            Apu.Reset();
            Input.Reset();
            Timer.Reset();
            Dma.Reset();
            LinkCable.Reset();
            IsAlive = true;

            frameQueue = new FrameQueue();
        }
Example #17
        /// <summary>
        /// Encodes and sends a packet with the Herkulex protocol
        /// </summary>
        /// <param name="port">Serial port to use</param>
        /// <param name="pID">Servo ID</param>
        /// <param name="CMD">Command ID</param>
        private void EncodeAndSendPacket(SerialPort port, byte pID, byte CMD)
        {
            byte[] packet = new byte[7];

            packet[0] = 0xFF;
            packet[1] = 0xFF;
            packet[2] = 7;
            packet[3] = pID;
            packet[4] = CMD;
            packet[5] = CommonMethods.CheckSum1(packet[2], packet[3], packet[4]);
            packet[6] = CommonMethods.CheckSum2(packet[5]);

            FrameQueue.Enqueue(packet);
            MessageEnqueuedEvent.Set();
            //port.Write(packet, 0, packet.Length);
        }
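For reference, the Herkulex protocol defines the two checksums used above as CS1 = (size ^ id ^ cmd ^ data bytes) & 0xFE and CS2 = ~CS1 & 0xFE; a sketch of what CommonMethods presumably implements, with signatures inferred from the call sites:
        public static byte CheckSum1(byte size, byte pID, byte cmd, byte[] data = null)
        {
            int cs = size ^ pID ^ cmd;
            if (data != null)
            {
                foreach (byte b in data)
                {
                    cs ^= b; // fold every payload byte into the XOR
                }
            }
            return (byte)(cs & 0xFE); // bit 0 masked off per the Herkulex spec
        }

        public static byte CheckSum2(byte checkSum1)
        {
            return (byte)(~checkSum1 & 0xFE);
        }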
Example #18
        //private Rect BoundingBoxAroundEyes(EyePoints leftEye, EyePoints rightEye, int buffer = 0)
        //{
        //    // get upper left corner
        //    int upperLeftX = leftEye.Points[0].X - buffer;
        //    List<OpenCvSharp.Point> lst = new List<OpenCvSharp.Point>();
        //    lst.Add(leftEye.Points[1]);
        //    lst.Add(leftEye.Points[2]);
        //    lst.Add(rightEye.Points[1]);
        //    lst.Add(rightEye.Points[2]);
        //    int upperLeftY = lst.Min(n => n.Y) - buffer;

        //    int lowerRightX = rightEye.Points[3].X + buffer;
        //    lst.Clear();
        //    lst.Add(leftEye.Points[4]);
        //    lst.Add(leftEye.Points[5]);
        //    lst.Add(rightEye.Points[4]);
        //    lst.Add(rightEye.Points[5]);

        //    int lowerRightY = lst.Max(n => n.Y) + buffer;

        //    Rect boundingBox = new Rect(upperLeftX, upperLeftY, lowerRightX - upperLeftX, lowerRightY - upperLeftY);
        //    return boundingBox;
        //}

        private void WebCamBackgroundWorker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
        {
            if (e.Cancelled || e.Error != null)
            {
                FrameQueue.Clear();
                //cap.Release();
                IsRunning = false;
            }
            else
            {
                //if (cap != null)
                //{
                //    cap.Release();
                //}
            }
        }
Example #19
    protected void OnStopStreaming()
    {
        Source.OnNewSample -= OnNewSample;
        // RsDevice.Instance.OnNewSampleSet -= OnNewSampleSet;

        // e.Set();

        if (q != null)
        {
            // foreach (var f in q)
            // f.Dispose();

            q.Dispose();
            q = null;
        }
    }
Example #20
        private void Application_Startup(object sender, StartupEventArgs e)
        {
            Thread thread = new Thread(() =>
            {
                FrameQueue.Create();
                Backend.InitBackend();
                Client.Create();
                Client.instance.ConnectToServer();
                while (true)
                {
                    ThreadManager.UpdateMain();
                    Thread.Sleep(10);
                }
            });

            thread.Start();
        }
Example #21
    void Start()
    {
        Thread videoPlayer = new Thread(() => RunVideo(gap_h, gap_w, width, height));

        videoPlayer.IsBackground = true; // set before Start() so the thread cannot keep the app alive on quit
        videoPlayer.Start();
        yuvm        = GameObject.FindGameObjectWithTag("BaseLayer").GetComponent<MeshRenderer>().material;
        fq          = new FrameQueue();
        texY        = new Texture2D(width * gap_w, height * gap_h, TextureFormat.Alpha8, false);
        texU        = new Texture2D(width * gap_w / 2, height * gap_h / 2, TextureFormat.Alpha8, false);
        texV        = new Texture2D(width * gap_w / 2, height * gap_h / 2, TextureFormat.Alpha8, false);
        Y_raw       = new byte[width * gap_w * height * gap_h];
        U_raw       = new byte[width * gap_w * height * gap_h / 4];
        V_raw       = new byte[width * gap_w * height * gap_h / 4];
        currentPath = Application.dataPath;
        StartCoroutine(Refresh());
    }
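Not part of the snippet: a hypothetical sketch of the Refresh coroutine it starts, uploading the Y/U/V planes the decoder thread is assumed to fill (the shader property names are assumptions):
    IEnumerator Refresh()
    {
        while (true)
        {
            texY.LoadRawTextureData(Y_raw); // Alpha8: one byte per pixel
            texU.LoadRawTextureData(U_raw);
            texV.LoadRawTextureData(V_raw);
            texY.Apply();
            texU.Apply();
            texV.Apply();
            yuvm.SetTexture("_YTex", texY); // property names assumed
            yuvm.SetTexture("_UTex", texU);
            yuvm.SetTexture("_VTex", texV);
            yield return null; // once per rendered frame
        }
    }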
Example #22
    // Use this for initialization
    void Start()
    {
        if (FrameQueue == null)
        {
            FrameQueue = new FrameQueue(5, false);
        }


        tex = new Texture2D(2, 2);
        tex.SetPixel(0, 0, Color.blue);
        tex.SetPixel(1, 1, Color.blue);
        tex.Apply();
        GetComponent<RawImage>().texture = tex;

        //change the material to the Remote material
        //GetComponent<RawImage>().material = SepctatorMat;
        GetComponent<RectTransform>().localRotation = Quaternion.Euler(new Vector3(0, 0, 180));
    }
Example #23
        private async void DoAutoStartActions(WebRTC.PeerConnection nativePeer)
        {
            if (AutoStartCapture)
            {
                nativePeer.I420LocalVideoFrameReady += I420LocalVideoFrameReady;

                // TODO - Currently AddLocalVideoTrackAsync() both open the capture device AND add a video track
            }

            if (AutoAddTrack)
            {
                // Force again PreferredVideoCodec right before starting the local capture,
                // so that modifications to the property done after OnPeerInitialized() are
                // accounted for.
                nativePeer.PreferredVideoCodec = PreferredVideoCodec;

                FrameQueue.Clear();
                await nativePeer.AddLocalVideoTrackAsync(default, EnableMixedRealityCapture);
Example #24
        void OnDestroy()
        {
            if (_colorQueue != null)
            {
                _colorQueue.Dispose();
                _colorQueue = null;
            }

            if (_pointQueue != null)
            {
                _pointQueue.Dispose();
                _pointQueue = null;
            }

            if (_colorBuffer != null)
            {
                _colorBuffer.Dispose();
                _colorBuffer = null;
            }

            if (_positionBuffer != null)
            {
                _positionBuffer.Dispose();
                _positionBuffer = null;
            }

            if (_remapBuffer != null)
            {
                _remapBuffer.Dispose();
                _remapBuffer = null;
            }

            if (_tempColorMap != null)
            {
                Destroy(_tempColorMap);
                _tempColorMap = null;
            }

            if (_tempPositionMap != null)
            {
                Destroy(_tempPositionMap);
                _tempPositionMap = null;
            }
        }
Example #25
 void Start()
 {
     JpegQueue = new FrameQueue<PixelFrame, byte[]>(
         (Frame) =>
         {
             return PopEncodeJpeg.EncodeToJpeg(Frame.Pixels, (int)Frame.Size.x, (int)Frame.Size.y, Frame.Channels, Frame.Rgb);
         },
         (Bytes, OnCompleted) =>
         {
             try
             {
                 Socket.Send(Bytes, OnCompleted);
             }
             catch
             {
                 OnCompleted.Invoke(false);
             }
         });
 }
Example #26
        /// <summary>
        /// Adds a frame to the queue
        /// </summary>
        /// <param name="frame">The frame to add</param>
        public void Enqueue(T frame)
        {
            if (!Timer.IsRunning)
            {
                return;
            }

            try
            {
                if (FrameQueue.IsCompleted || FrameQueue.IsAddingCompleted || cancellationToken.IsCancellationRequested)
                {
                    return;
                }
                FrameQueue.TryAdd(frame, -1, cancellationToken.Token);
            }
            catch (InvalidOperationException ex)
            {
                Debug.WriteLine("Failed to add to queue:" + ex.ToString());
            }
        }
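Since FrameQueue here behaves like a BlockingCollection<T> (TryAdd, IsAddingCompleted, and the CompleteAdding call in the Stop example above), a matching consumer loop could be sketched as follows (method name assumed):
        private void ProcessFrames()
        {
            try
            {
                foreach (T frame in FrameQueue.GetConsumingEnumerable(cancellationToken.Token))
                {
                    // handle one frame; the enumerable blocks until a frame
                    // arrives and exits cleanly once CompleteAdding() is called
                }
            }
            catch (OperationCanceledException)
            {
                // shutdown requested via the CancellationTokenSource
            }
        }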
Example #27
        /// <summary>
        /// Encodes and sends a packet with the Herkulex protocol
        /// </summary>
        /// <param name="port">Serial port to use</param>
        /// <param name="pID">Servo ID</param>
        /// <param name="CMD">Command ID</param>
        /// <param name="dataToSend">Data</param>
        private void EncodeAndSendPacket(SerialPort port, byte pID, byte CMD, byte[] dataToSend)
        {
            byte packetSize = (byte)(7 + dataToSend.Length);

            byte[] packet = new byte[packetSize];

            packet[0] = 0xFF;
            packet[1] = 0xFF;
            packet[2] = packetSize;
            packet[3] = pID;
            packet[4] = CMD;
            packet[5] = CommonMethods.CheckSum1(packet[2], packet[3], packet[4], dataToSend);
            packet[6] = CommonMethods.CheckSum2(packet[5]);

            for (int i = 0; i < dataToSend.Length; i++)
            {
                packet[7 + i] = dataToSend[i];
            }

            FrameQueue.Enqueue(packet);
            //Console.WriteLine("inQueue : " + FrameQueue.Count);
            MessageEnqueuedEvent.Set();
            //port.Write(packet, 0, packet.Length);
        }
Example #28
 private void OnStartStreaming(PipelineProfile profile)
 {
     q = new FrameQueue(1);
     Source.OnNewSample += OnNewSample;
 }
Example #29
    private void ResetMesh(int width, int height)
    {
        Assert.IsTrue(SystemInfo.SupportsTextureFormat(TextureFormat.RGFloat));
        uvmap = new Texture2D(width, height, TextureFormat.RGFloat, false, true)
        {
            wrapMode   = TextureWrapMode.Clamp,
            filterMode = FilterMode.Point,
        };
        GetComponent<MeshRenderer>().sharedMaterial.SetTexture("_UVMap", uvmap);

        if (mesh != null)
        {
            mesh.Clear();
        }
        else
        {
            mesh = new Mesh()
            {
                indexFormat = IndexFormat.UInt32,
            };
        }

        vertices = new Vector3[width * height];

        var indices = new int[vertices.Length];

        for (int i = 0; i < vertices.Length; i++)
        {
            indices[i] = i;
        }

        mesh.MarkDynamic();
        mesh.vertices = vertices;

        var uvs = new Vector2[width * height];

        Array.Clear(uvs, 0, uvs.Length);
        for (int j = 0; j < height; j++)
        {
            for (int i = 0; i < width; i++)
            {
                uvs[i + j * width].x = i / (float)width;
                uvs[i + j * width].y = j / (float)height;
            }
        }

        mesh.uv = uvs;

        mesh.SetIndices(indices, MeshTopology.Points, 0, false);
        mesh.bounds = new Bounds(Vector3.zero, Vector3.one * 10f);

        GetComponent<MeshFilter>().sharedMesh = mesh;
    }

    void OnDestroy()
    {
        if (q != null)
        {
            q.Dispose();
            q = null;
        }

        if (mesh != null)
        {
            Destroy(mesh); // destroy the mesh itself, then drop the reference
            mesh = null;
        }
    }
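A companion sketch, not from this source: the per-frame consumer that typically pairs with ResetMesh in the librealsense Unity samples, polling the queue and re-uploading vertices (PollForFrame<Points> and CopyVertices per that wrapper; treat as an assumption):
    void LateUpdate()
    {
        if (q == null)
            return;

        Points points;
        if (q.PollForFrame<Points>(out points)) // non-blocking poll
            using (points)
            {
                points.CopyVertices(vertices); // copy native XYZ data into the managed array
                mesh.vertices = vertices;      // re-upload; topology was built in ResetMesh
            }
    }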
Example #30
 public void OnStartStreaming(PipelineProfile activeProfile)
 {
     q                   = new FrameQueue(1);
     matcher             = new Predicate<Frame>(Matches);
     Source.OnNewSample += OnNewSample;
 }
Example #31
 /// <summary>
 /// Protected Singleton constructor for an instance of the MJPEG HTTP Server
 /// </summary>
 public VideoServer()
 {
     socketList = new ConcurrentDictionary<int, VideoSocketHandler>();
     state      = ServerState.STOPPED;
     frameQueue = new FrameQueue(socketList);
 }