Example #1
        private void SetupViewport(string renderPath = null)
        {
            var renderer = Renderer;
            var rp       = new RenderPath();

            rp.Load(ResourceCache.GetXmlFile(renderPath ?? "RenderPaths/Forward.xml"));
            renderer.SetViewport(0, new Viewport(Context, _scene, CameraNode.GetComponent<Camera>(), rp));
        }
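The render path loaded here can also be extended with post-process passes before it is handed to the viewport. A minimal sketch of that variant, reusing the _scene and CameraNode fields assumed by Example #1 (CoreAssets.PostProcess.FXAA2 is the antialiasing pass bundled with UrhoSharp; any PostProcess/*.xml can be appended the same way):

        private void SetupViewportWithFxaa()
        {
            var rp = new RenderPath();
            rp.Load(ResourceCache.GetXmlFile("RenderPaths/Forward.xml"));

            // post-process commands are appended after the base path's own commands
            rp.Append(CoreAssets.PostProcess.FXAA2);

            Renderer.SetViewport(0, new Viewport(Context, _scene, CameraNode.GetComponent<Camera>(), rp));
        }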
Example #2
        void SetupViewport()
        {
            var renderer = GetSubsystem<Renderer>();
            var cache    = GetSubsystem<ResourceCache>();

            renderer.SetNumViewports(2);                               // use 2 viewports, 1 for 3d and 1 for the 2d hud
            var viewport2_        = new Viewport(hudScene, hudCamera); // hud orthographic viewport, scene and camera
            var overlayRenderPath = new RenderPath();

            overlayRenderPath.Load(cache.GetResource<XMLFile>("PostProcess/FrontPath.xml")); //special renderpath that does not clear
            viewport2_.SetRenderPath(overlayRenderPath);                                      // apply to hud viewport, so the background is transparent
            renderer.SetViewport(0, new Viewport(scene, camera));                             // perspective viewport, scene and camera
            renderer.SetViewport(1, viewport2_);                                              // and add in the HUD viewport
        }
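Example #2 takes the hudScene/hudCamera pair as given. A minimal sketch of how such an overlay camera is typically configured (the field and node names here are assumptions; Orthographic and OrthoSize are the standard Urho camera properties for a flat 2D projection, and Application.PixelSize is Urho's pixel-to-world-unit constant):

        void CreateHudCamera()
        {
            var graphics = GetSubsystem<Graphics>();

            hudScene = new Scene();
            hudScene.CreateComponent<Octree>();

            var hudCameraNode = hudScene.CreateChild("HudCamera");
            hudCameraNode.Position = new Vector3(0, 0, -10);

            hudCamera = hudCameraNode.CreateComponent<Camera>();
            hudCamera.Orthographic = true;                                    // no perspective for the overlay
            hudCamera.OrthoSize    = graphics.Height * Application.PixelSize; // one ortho unit per screen pixel
        }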
Example #3
        public unsafe void ProcessARFrame(ARSession session, ARFrame frame)
        {
            var arcamera = frame?.Camera;
            if (arcamera == null)
                return; // no camera data in this frame, nothing to process

            var transform = arcamera.Transform;
            var prj       = arcamera.GetProjectionMatrix(UIInterfaceOrientation.LandscapeRight, new CoreGraphics.CGSize(Graphics.Width, Graphics.Height), 0.01f, 30f);

            //Urho accepts projection matrix in DirectX format (negative row3 + transpose)
            var urhoProjection = new Matrix4(
                prj.M11, prj.M21, -prj.M31, prj.M41,
                prj.M12, prj.M22, -prj.M32, prj.M42,
                prj.M13, prj.M23, -prj.M33, prj.M43,
                prj.M14, prj.M24, -prj.M34, prj.M44);

            Camera.SetProjection(urhoProjection);
            ApplyTransform(CameraNode, transform);

            if (!yuvTexturesInited)
            {
                var img = frame.CapturedImage;

                // texture for the Y (luminance) plane
                cameraYtexture = new Texture2D();
                cameraYtexture.SetNumLevels(1);
                cameraYtexture.FilterMode = TextureFilterMode.Bilinear;
                cameraYtexture.SetAddressMode(TextureCoordinate.U, TextureAddressMode.Clamp);
                cameraYtexture.SetAddressMode(TextureCoordinate.V, TextureAddressMode.Clamp);
                cameraYtexture.SetSize((int)img.Width, (int)img.Height, Graphics.LuminanceFormat, TextureUsage.Dynamic);
                cameraYtexture.Name = nameof(cameraYtexture);
                ResourceCache.AddManualResource(cameraYtexture);

                // texture for the UV (chrominance) plane
                cameraUVtexture = new Texture2D();
                cameraUVtexture.SetNumLevels(1);
                cameraUVtexture.SetSize((int)img.GetWidthOfPlane(1), (int)img.GetHeightOfPlane(1), Graphics.LuminanceAlphaFormat, TextureUsage.Dynamic);
                cameraUVtexture.FilterMode = TextureFilterMode.Bilinear;
                cameraUVtexture.SetAddressMode(TextureCoordinate.U, TextureAddressMode.Clamp);
                cameraUVtexture.SetAddressMode(TextureCoordinate.V, TextureAddressMode.Clamp);
                cameraUVtexture.Name = nameof(cameraUVtexture);
                ResourceCache.AddManualResource(cameraUVtexture);

                RenderPath rp = new RenderPath();
                rp.Load(ResourceCache.GetXmlFile("ARRenderPath.xml"));
                var cmd = rp.GetCommand(1);                                    //see ARRenderPath.xml, second command.
                cmd->SetTextureName(TextureUnit.Diffuse, cameraYtexture.Name); //sDiffMap
                cmd->SetTextureName(TextureUnit.Normal, cameraUVtexture.Name); //sNormalMap

                var   capturedImage = frame.CapturedImage;
                var   nativeBounds  = UIScreen.MainScreen.NativeBounds;
                float imageAspect   = (float)capturedImage.Width / (float)capturedImage.Height;
                float screenAspect  = (float)nativeBounds.Size.Height / (float)nativeBounds.Size.Width;

                cmd->SetShaderParameter("CameraScale", screenAspect / imageAspect);

                //rp.Append(CoreAssets.PostProcess.FXAA2);
                Viewport.RenderPath = rp;
                yuvTexturesInited   = true;
            }

            if (ContinuesHitTestAtCenter)
            {
                LastHitTest = HitTest();
            }

            // display tracking state (quality)
            DebugHud.AdditionalText = $"{arcamera.TrackingState}\n";
            if (arcamera.TrackingStateReason != ARTrackingStateReason.None)
            {
                DebugHud.AdditionalText += arcamera.TrackingStateReason;
            }

            // see "Render with Realistic Lighting"
            // https://developer.apple.com/documentation/arkit/displaying_an_ar_experience_with_metal
            var ambientIntensity = (float)frame.LightEstimate.AmbientIntensity / 1000f;

            //Light.Brightness = 0.5f + ambientIntensity / 2;
            DebugHud.AdditionalText += "\nAmb: " + ambientIntensity.ToString("F1");

            //use outside of InvokeOnMain?
            if (yuvTexturesInited)
            {
                UpdateBackground(frame);
            }

            // required! keeping old ARFrames alive stops ARKit from delivering new ones
            frame.Dispose();
        }
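Example #3 calls two helpers that are not shown: ApplyTransform and UpdateBackground. The sketches below are assumptions modeled on the UrhoSharp ARKit sample, not the verbatim helpers; ToQuaternion() stands in for a helper that extracts the rotation quaternion from the upper-left 3x3 of the ARKit transform, and the texture fields are the ones created in ProcessARFrame above.

        // sketch: ARKit transforms are right-handed, Urho is left-handed,
        // so Z is mirrored for both the position and the rotation
        static void ApplyTransform(Node node, OpenTK.NMatrix4 transform)
        {
            var rot = transform.ToQuaternion(); // assumed helper, see note above
            node.Rotation = new Quaternion(rot.X, rot.Y, -rot.Z, -rot.W);
            node.Position = new Vector3(transform.M14, transform.M24, -transform.M34);
        }

        // sketch: push the Y and UV planes of the captured CVPixelBuffer
        // into the two dynamic textures created in ProcessARFrame
        unsafe void UpdateBackground(ARFrame frame)
        {
            using (var img = frame.CapturedImage)
            {
                img.Lock(CVPixelBufferLock.ReadOnly);
                try
                {
                    var yPtr  = img.BaseAddress;       // plane 0: luminance
                    var uvPtr = img.GetBaseAddress(1); // plane 1: interleaved CbCr
                    if (yPtr == IntPtr.Zero || uvPtr == IntPtr.Zero)
                        return;

                    cameraYtexture.SetData(0, 0, 0, (int)img.Width, (int)img.Height, (void*)yPtr);
                    cameraUVtexture.SetData(0, 0, 0, (int)img.GetWidthOfPlane(1), (int)img.GetHeightOfPlane(1), (void*)uvPtr);
                }
                finally
                {
                    img.Unlock(CVPixelBufferLock.ReadOnly);
                }
            }
        }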