        public void DidUpdateFrame(ARSession session, ARFrame frame)
        {
            // Do not enqueue another buffer for processing while a Vision task is still running.
            // The camera stream has only a finite number of buffers available; holding on to too many for analysis would starve the camera.
            if (this.currentBuffer == null && frame.Camera.TrackingState == ARTrackingState.Normal)
            {
                // Retain the image buffer for Vision processing.
                this.currentBuffer = frame.CapturedImage;
                this.ClassifyCurrentImage();
            }

            frame.Dispose();
        }
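The handler above relies on a `currentBuffer` field and a `ClassifyCurrentImage` helper that are not shown. A minimal sketch of that helper, assuming a `VNCoreMLRequest` stored in a hypothetical `classificationRequest` field (requires the Vision, CoreFoundation, Foundation, and ImageIO namespaces):
        // Minimal sketch, not the original helper: run Vision on the retained buffer
        // off the main thread, then release the buffer so the camera can reuse it.
        private void ClassifyCurrentImage()
        {
            // classificationRequest is a hypothetical VNCoreMLRequest field prepared elsewhere.
            var handler = new VNImageRequestHandler(this.currentBuffer, CGImagePropertyOrientation.Up, new NSDictionary());
            DispatchQueue.GetGlobalQueue(DispatchQualityOfService.UserInitiated).DispatchAsync(() =>
            {
                handler.Perform(new VNRequest[] { this.classificationRequest }, out NSError error);

                // Release the buffer whether or not Vision succeeded; see the comment in DidUpdateFrame.
                this.currentBuffer.Dispose();
                this.currentBuffer = null;
            });
        }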
        public void DidUpdateFrame(ARSession session, ARFrame frame)
        {
            switch (frame.WorldMappingStatus)
            {
            case ARWorldMappingStatus.NotAvailable:
            case ARWorldMappingStatus.Limited:
                this.sendMapButton.Enabled = false;
                break;

            case ARWorldMappingStatus.Extending:
            case ARWorldMappingStatus.Mapped:
                // Sharing the map is only useful once at least one peer is connected.
                this.sendMapButton.Enabled = this.multipeerSession.ConnectedPeers.Any();
                break;
            }

            this.mappingStatusLabel.Text = frame.WorldMappingStatus.GetDescription();
            this.UpdateSessionInfoLabel(frame, frame.Camera.TrackingState, frame.Camera.TrackingStateReason);

            frame.Dispose();
        }
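`GetDescription()` is not part of the `ARWorldMappingStatus` enum; the snippet presumably relies on an extension method along these lines (the exact strings are an assumption):
        // Hypothetical extension assumed by the snippet above.
        public static class ARWorldMappingStatusExtensions
        {
            public static string GetDescription(this ARWorldMappingStatus status)
            {
                switch (status)
                {
                case ARWorldMappingStatus.NotAvailable: return "Not Available";
                case ARWorldMappingStatus.Limited:      return "Limited";
                case ARWorldMappingStatus.Extending:    return "Extending";
                case ARWorldMappingStatus.Mapped:       return "Mapped";
                default:                                return status.ToString();
                }
            }
        }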
#pragma warning disable IDE0060 // Remove unused parameter
        public void DidUpdateFrame(ARSession session, ARFrame frame)
#pragma warning restore IDE0060 // Remove unused parameter
        {
            DispatchQueue.MainQueue.DispatchAsync(() =>
            {
                foreach (ARAnchor anchor in frame.Anchors)
                {
                    if (anchor is ARFaceAnchor faceAnchor)
                    {
                        if (faceAnchor.IsTracked)
                        {
                            if (!IsTracked)
                            {
                                sensor.OnTrackingStarted(EventArgs.Empty);
                                IsTracked = true;
                            }
                            // Publish the new reading: face pose, per-eye transforms, look-at point, and blend-shape coefficients.
                            sensor.OnReadingTaken(new FaceAnchorChangedEventArgs(
                                                      new FaceAnchorReading(TimerClock.Now,
                                                                            faceAnchor.Transform.ToFloatMatrix4(),
                                                                            faceAnchor.LeftEyeTransform.ToFloatMatrix4(),
                                                                            faceAnchor.RightEyeTransform.ToFloatMatrix4(),
                                                                            faceAnchor.LookAtPoint.ToFloatVector3(),
                                                                            faceAnchor.BlendShapes.ToDictionary()
                                                                            )));
                        }
                        else if (IsTracked)
                        {
                            sensor.OnTrackingStopped(EventArgs.Empty);
                            IsTracked = false;
                        }
                    }
                }
                // Dispose here, inside the dispatched block; a retained ARFrame is never freed otherwise and starves the camera of buffers.
                frame.Dispose();
            });
        }
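`sensor` and `IsTracked` belong to the surrounding class. A plausible sketch of the event surface the handler raises into; only the members called above are shown, and the class shape itself is an assumption:
        public class FaceAnchorSensor
        {
            public event EventHandler TrackingStarted;
            public event EventHandler TrackingStopped;
            public event EventHandler<FaceAnchorChangedEventArgs> ReadingTaken;

            public void OnTrackingStarted(EventArgs e) => this.TrackingStarted?.Invoke(this, e);

            public void OnTrackingStopped(EventArgs e) => this.TrackingStopped?.Invoke(this, e);

            public void OnReadingTaken(FaceAnchorChangedEventArgs e) => this.ReadingTaken?.Invoke(this, e);
        }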
Example #4
        public unsafe void ProcessARFrame(ARSession session, ARFrame frame)
        {
            if (frame == null)
                return;

            var arcamera  = frame.Camera;
            var transform = arcamera.Transform;
            var prj       = arcamera.GetProjectionMatrix(UIInterfaceOrientation.LandscapeRight, new CoreGraphics.CGSize(Graphics.Width, Graphics.Height), 0.01f, 30f);

            // Urho expects the projection matrix in DirectX format (negated third row + transpose).
            var urhoProjection = new Matrix4(
                prj.M11, prj.M21, -prj.M31, prj.M41,
                prj.M12, prj.M22, -prj.M32, prj.M42,
                prj.M13, prj.M23, -prj.M33, prj.M43,
                prj.M14, prj.M24, -prj.M34, prj.M44);

            Camera.SetProjection(urhoProjection);
            ApplyTransform(CameraNode, transform);

            if (!yuvTexturesInited)
            {
                var img = frame.CapturedImage;

                // Texture for the Y (luminance) plane.
                cameraYtexture = new Texture2D();
                cameraYtexture.SetNumLevels(1);
                cameraYtexture.FilterMode = TextureFilterMode.Bilinear;
                cameraYtexture.SetAddressMode(TextureCoordinate.U, TextureAddressMode.Clamp);
                cameraYtexture.SetAddressMode(TextureCoordinate.V, TextureAddressMode.Clamp);
                cameraYtexture.SetSize((int)img.Width, (int)img.Height, Graphics.LuminanceFormat, TextureUsage.Dynamic);
                cameraYtexture.Name = nameof(cameraYtexture);
                ResourceCache.AddManualResource(cameraYtexture);

                // Texture for the interleaved CbCr (UV) plane.
                cameraUVtexture = new Texture2D();
                cameraUVtexture.SetNumLevels(1);
                cameraUVtexture.SetSize((int)img.GetWidthOfPlane(1), (int)img.GetHeightOfPlane(1), Graphics.LuminanceAlphaFormat, TextureUsage.Dynamic);
                cameraUVtexture.FilterMode = TextureFilterMode.Bilinear;
                cameraUVtexture.SetAddressMode(TextureCoordinate.U, TextureAddressMode.Clamp);
                cameraUVtexture.SetAddressMode(TextureCoordinate.V, TextureAddressMode.Clamp);
                cameraUVtexture.Name = nameof(cameraUVtexture);
                ResourceCache.AddManualResource(cameraUVtexture);

                RenderPath rp = new RenderPath();
                rp.Load(ResourceCache.GetXmlFile("ARRenderPath.xml"));
                var cmd = rp.GetCommand(1);                                    //see ARRenderPath.xml, second command.
                cmd->SetTextureName(TextureUnit.Diffuse, cameraYtexture.Name); //sDiffMap
                cmd->SetTextureName(TextureUnit.Normal, cameraUVtexture.Name); //sNormalMap

                var   capturedImage = frame.CapturedImage;
                var   nativeBounds  = UIScreen.MainScreen.NativeBounds;
                float imageAspect   = (float)capturedImage.Width / (float)capturedImage.Height;
                float screenAspect  = (float)nativeBounds.Size.Height / (float)nativeBounds.Size.Width;

                cmd->SetShaderParameter("CameraScale", screenAspect / imageAspect);

                //rp.Append(CoreAssets.PostProcess.FXAA2);
                Viewport.RenderPath = rp;
                yuvTexturesInited   = true;
            }

            if (ContinuesHitTestAtCenter)
            {
                LastHitTest = HitTest();
            }

            // Display the tracking state (quality).
            DebugHud.AdditionalText = $"{arcamera.TrackingState}\n";
            if (arcamera.TrackingStateReason != ARTrackingStateReason.None)
            {
                DebugHud.AdditionalText += arcamera.TrackingStateReason;
            }

            // see "Render with Realistic Lighting"
            // https://developer.apple.com/documentation/arkit/displaying_an_ar_experience_with_metal
            // LightEstimate is null when light estimation is disabled in the session configuration.
            var ambientIntensity = (float)(frame.LightEstimate?.AmbientIntensity ?? 1000f) / 1000f;

            //Light.Brightness = 0.5f + ambientIntensity / 2;
            DebugHud.AdditionalText += "\nAmb: " + ambientIntensity.ToString("F1");

            // TODO: could UpdateBackground run outside InvokeOnMain?
            if (yuvTexturesInited)
            {
                UpdateBackground(frame);
            }

            // Required: dispose the frame to release its camera buffer.
            frame.Dispose();
        }
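Once the YUV textures exist, the frame is handed to an `UpdateBackground` helper that is not shown (the next example does the same). A minimal sketch, assuming UrhoSharp's unsafe `Texture2D.SetData` overload and tightly packed plane rows:
        unsafe void UpdateBackground(ARFrame frame)
        {
            using (var img = frame.CapturedImage)
            {
                if (img.Lock(CVPixelBufferLock.ReadOnly) != CVReturn.Success)
                    return;

                var yPtr  = img.GetBaseAddress(0);
                var uvPtr = img.GetBaseAddress(1);
                if (yPtr != IntPtr.Zero && uvPtr != IntPtr.Zero)
                {
                    // Copy the luminance plane and the interleaved CbCr plane into the two textures.
                    cameraYtexture.SetData(0, 0, 0, (int)img.GetWidthOfPlane(0), (int)img.GetHeightOfPlane(0), (void*)yPtr);
                    cameraUVtexture.SetData(0, 0, 0, (int)img.GetWidthOfPlane(1), (int)img.GetHeightOfPlane(1), (void*)uvPtr);
                }

                img.Unlock(CVPixelBufferLock.ReadOnly);
            }
        }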
Example #5
        public unsafe void ProcessARFrame(ARSession session, ARFrame frame)
        {
            if (frame == null)
                return;

            var arcamera  = frame.Camera;
            var transform = arcamera.Transform;

            var   viewportSize = new CoreGraphics.CGSize(Application.Graphics.Width, Application.Graphics.Height);
            float near         = 0.001f;
            float far          = 1000f;
            var   prj          = arcamera.GetProjectionMatrix(Orientation.Value, viewportSize, near, far);
            var   dt           = frame.GetDisplayTransform(Orientation.Value, viewportSize);

            // Reinterpret ARKit's NMatrix4 as Urho's Matrix4, then adjust the
            // depth-related terms to Urho's clip-space conventions before transposing.
            var urhoProjection = *(Matrix4 *)(void *)&prj;

            urhoProjection.M43 /= 2f;
            urhoProjection.M33  = far / (far - near);
            urhoProjection.M34 *= -1;
            //prj.M13 = 0; //center of projection
            //prj.M23 = 0;
            //urhoProjection.Row2 *= -1;
            urhoProjection.Transpose();

            Camera.SetProjection(urhoProjection);
            ApplyOpenTkTransform(Camera.Node, transform);

            if (!yuvTexturesInited)
            {
                var img = frame.CapturedImage;

                // Texture for the interleaved CbCr (UV) plane.
                cameraUVtexture = new Texture2D();
                cameraUVtexture.SetNumLevels(1);
                cameraUVtexture.SetSize((int)img.GetWidthOfPlane(1), (int)img.GetHeightOfPlane(1), Graphics.LuminanceAlphaFormat, TextureUsage.Dynamic);
                cameraUVtexture.FilterMode = TextureFilterMode.Bilinear;
                cameraUVtexture.SetAddressMode(TextureCoordinate.U, TextureAddressMode.Clamp);
                cameraUVtexture.SetAddressMode(TextureCoordinate.V, TextureAddressMode.Clamp);
                cameraUVtexture.Name = nameof(cameraUVtexture);
                Application.ResourceCache.AddManualResource(cameraUVtexture);

                // Texture for the Y (luminance) plane.
                cameraYtexture = new Texture2D();
                cameraYtexture.SetNumLevels(1);
                cameraYtexture.FilterMode = TextureFilterMode.Bilinear;
                cameraYtexture.SetAddressMode(TextureCoordinate.U, TextureAddressMode.Clamp);
                cameraYtexture.SetAddressMode(TextureCoordinate.V, TextureAddressMode.Clamp);
                cameraYtexture.SetSize((int)img.Width, (int)img.Height, Graphics.LuminanceFormat, TextureUsage.Dynamic);
                cameraYtexture.Name = nameof(cameraYtexture);
                Application.ResourceCache.AddManualResource(cameraYtexture);

                var viewport = Application.Renderer.GetViewport(0);

                var videoRp = new RenderPathCommand(RenderCommandType.Quad);
                videoRp.PixelShaderName  = (UrhoString)ArkitShader;
                videoRp.VertexShaderName = (UrhoString)ArkitShader;
                videoRp.SetOutput(0, "viewport");
                videoRp.SetTextureName(TextureUnit.Diffuse, cameraYtexture.Name);                 //sDiffMap
                videoRp.SetTextureName(TextureUnit.Normal, cameraUVtexture.Name);                 //sNormalMap

                if (Orientation != UIInterfaceOrientation.Portrait)
                {
                    videoRp.PixelShaderDefines = new UrhoString("ARKIT_LANDSCAPE");
                }

                viewport.RenderPath.InsertCommand(1, videoRp);

                var vrp = viewport.RenderPath.GetCommand(1);
                vrp->SetShaderParameter("Tx", (float)dt.x0);
                vrp->SetShaderParameter("Ty", (float)dt.y0);
                vrp->SetShaderParameter("ScaleX", (float)dt.xx);
                vrp->SetShaderParameter("ScaleY", (float)dt.yy);
                vrp->SetShaderParameter("ScaleYX", (float)dt.yx);
                vrp->SetShaderParameter("ScaleXY", (float)dt.xy);

                float yoffset;
                if (ARConfiguration is ARFaceTrackingConfiguration)
                {
                    yoffset = 0.013f;
                }
                else
                {
                    yoffset = 64.0f / Math.Max(img.Width, img.Height);
                }
                vrp->SetShaderParameter("YOffset", yoffset);

                yuvTexturesInited = true;
            }

            if (yuvTexturesInited)
            {
                UpdateBackground(frame);
            }

            // Notify subscribers before the frame is disposed below.
            ARFrame?.Invoke(frame);

            // Required: dispose the frame to release its camera buffer.
            frame.Dispose();
        }
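These handlers are not called by ARKit directly; each frame arrives through an `ARSessionDelegate`. An illustrative way to forward it (the `SessionDelegate` name is hypothetical, not from the source):
        public class SessionDelegate : ARSessionDelegate
        {
            private readonly Action<ARSession, ARFrame> onFrame;

            public SessionDelegate(Action<ARSession, ARFrame> onFrame) => this.onFrame = onFrame;

            public override void DidUpdateFrame(ARSession session, ARFrame frame) => this.onFrame(session, frame);
        }

        // Usage, e.g. from a view controller that owns an ARSCNView:
        // this.sceneView.Session.Delegate = new SessionDelegate(this.DidUpdateFrame);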
        public void DidUpdateFrame(ARSession session, ARFrame frame)
        {
            this.UpdateSceneEnvironmentProbe(frame);
            this.UpdateSessionInfoLabel(frame.Camera);
            frame.Dispose();
        }
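`UpdateSessionInfoLabel(ARCamera)` is not shown; a plausible sketch that maps tracking state to a user-facing message, assuming a hypothetical `sessionInfoLabel` UILabel field:
        private void UpdateSessionInfoLabel(ARCamera camera)
        {
            string message;
            switch (camera.TrackingState)
            {
            case ARTrackingState.NotAvailable:
                message = "Tracking unavailable.";
                break;
            case ARTrackingState.Limited:
                message = $"Tracking limited: {camera.TrackingStateReason}";
                break;
            default:
                message = string.Empty;
                break;
            }

            this.sessionInfoLabel.Text = message;
            this.sessionInfoLabel.Hidden = string.IsNullOrEmpty(message);
        }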