Example #1
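A GTK#-style `OnConfigure` handler (inferred from `ConfigureEventArgs` and `Allocation`): it makes the GL context current, flags it valid through `GlUtil.ContextValid`, and configures 2D rendering state with alpha blending, a viewport matching the widget allocation, and a top-left-origin orthographic projection.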
    private void OnConfigure(object o, ConfigureEventArgs args)
    {
        if (!MakeCurrent())
        {
            LogManager.Log(LogLevel.Warning, "MakeCurrent() - OnConfigure failed");
            return;
        }

        GlUtil.ContextValid = true;

        // setup opengl state and transform
        gl.Disable(gl.DEPTH_TEST);
        gl.Disable(gl.CULL_FACE);
        gl.Enable(gl.TEXTURE_2D);
        gl.Enable(gl.BLEND);
        gl.BlendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);

        gl.ClearColor(0f, 0f, 0f, 0f);
        gl.Viewport(0, 0, Allocation.Width, Allocation.Height);
        gl.MatrixMode(gl.PROJECTION);
        gl.LoadIdentity();
        gl.Ortho(0, Allocation.Width, Allocation.Height, 0,
                 -1.0f, 1.0f);
        gl.MatrixMode(gl.MODELVIEW);
        gl.LoadIdentity();

        GlUtil.Assert("After setting opengl transforms");

        GlUtil.ContextValid = false;
    }
Example #2
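Initializes a `GlWorldRenderStrategy` for the Junkbot game: it stores the game reference and loads the actor sprite atlas via `GlUtil.LoadAtlas`.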
        /// <summary>
        /// Initializes this <see cref="GlWorldRenderStrategy"/>.
        /// </summary>
        /// <param name="gameReference">
        /// A reference to the Junkbot game engine.
        /// </param>
        /// <returns>True if the initialization process was successful.</returns>
        public override bool Initialize(JunkbotGame gameReference)
        {
            Game = gameReference;

            ActorAtlas = GlUtil.LoadAtlas(Environment.CurrentDirectory + @"\Content\Atlas\actors-atlas");

            return true;
        }
Example #3
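A panel constructor (apparently Windows Forms, given `InitializeComponent` and `Invalidate`): outside the designer it initializes OpenGL through `GlUtil.Init()`, makes the implementation's context current, runs `InitGl()`, and starts a timed callback that redraws at a default frame rate.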
        protected BGlPanel()
        {
            InitializeComponent();

            if (!DesignModeUtil.InDesignMode)
            {
                GlUtil.Init();
                this.impl_.CreateGraphics();
                this.impl_.MakeCurrent();

                this.InitGl();

                this.timedCallback =
                    TimedCallback.WithFrequency(this.Invalidate, DEFAULT_FRAMERATE_);
            }
        }
Example #4
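Starts a dedicated render thread that creates a Veldrid OpenGL graphics device with vsync enabled, makes an SDL GL context current, verifies the setup with `GlUtil.CheckGlError()`, then launches the update thread and enters the draw loop.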
 public void Init()
 {
     new Thread(
         () => {
             device = VeldridStartup.CreateGraphicsDevice(
                 window,
                 new GraphicsDeviceOptions {
                     SyncToVerticalBlank = true
                 },
                 GraphicsBackend.OpenGL
             );
             Sdl2Native.SDL_GL_MakeCurrent(window.SdlWindowHandle, Sdl2Native.SDL_GL_CreateContext(window.SdlWindowHandle));
             Sdl2Native.SDL_GL_SetSwapInterval(1);
             PoolTouhou.Logger.Log($"Using : {device.BackendType}");
             GlUtil.CheckGlError();
             PoolTouhou.GameState = new LoadingMenuState();
             new Thread(UpdateLoop).Start();
             DrawLoop();
         }
     ).Start();
 }
Example #5
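The draw loop used by the previous example: each frame it clears the screen, draws the current game state, swaps the SDL window buffers, and calls `GlUtil.CheckGlError()`; any exception stops the loop and is logged.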
 private void DrawLoop()
 {
     PoolTouhou.Logger.Log("开始渲染线程循环");
     try {
         long last = 0;
         while (window.Exists && running)
         {
             long   now   = Watch.ElapsedTicks;
             // Frequency / elapsed ticks is the instantaneous frame rate
             // (the reciprocal of the elapsed time in seconds) passed to Draw().
             double delta = Stopwatch.Frequency / (double)(now - last);
             OpenGLNative.glClearColor(0, 1, 1, 0.5f);
             OpenGLNative.glClear(ClearBufferMask.ColorBufferBit);
             PoolTouhou.GameState.Draw(delta);
             Sdl2Native.SDL_GL_SwapWindow(window.SdlWindowHandle);
             GlUtil.CheckGlError();
             last = now;
         }
     } catch (Exception e) {
         running = false;
         PoolTouhou.Logger.Log(e.Message + Environment.NewLine + e.StackTrace);
     }
 }
Example #6
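The matching expose handler for Example #1: it makes the context current, applies zoom and translation to the modelview matrix, draws the scene, checks state with `GlUtil.Assert`, and swaps buffers.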
    private void OnExposed(object o, ExposeEventArgs args)
    {
        if (!MakeCurrent())
        {
            LogManager.Log(LogLevel.Warning, "Make Current - OnExposed failed");
            return;
        }

        GlUtil.ContextValid = true;

        gl.MatrixMode(gl.MODELVIEW);
        gl.LoadIdentity();
        gl.Scalef(Zoom, Zoom, 1f);
        gl.Translatef(Translation.X, Translation.Y, 0f);

        DrawGl();
        GlUtil.Assert("After Drawing");

        SwapBuffers();

        GlUtil.ContextValid = false;
    }
Example #7
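An ARCore `OnDrawFrame` implementation (Xamarin.Android bindings): it draws the camera background to both the screen and an offscreen framebuffer, creates anchors for taps that hit a plane, and, when frame capture is enabled, sends the camera pose over WebRTC and converts the offscreen render texture to an I420 video frame. `GlUtil.CheckNoGLES2Error` validates the framebuffer switches.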
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear screen to notify driver it should not load any pixels from previous frame.
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
            GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (arSession == null)
            {
                return;
            }

            // Notify ARCore session that the view size changed so that the perspective matrix and the video background
            // can be properly adjusted
            // displayRotationHelper.UpdateSessionIfNeeded(arSession);

            try
            {
                // Obtain the current frame from ARSession. When the configuration is set to
                // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
                // camera framerate.
                Frame frame = arSession.Update();
                Google.AR.Core.Camera camera = frame.Camera;


                // Draw background.
                GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
                backgroundRenderer.Draw(frame);

                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
                GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
                GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);
                backgroundRenderer.Draw(frame);
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
                GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
                GlUtil.CheckNoGLES2Error("Switch framebuffers.");

                // Handle taps. Handling only one tap per frame, as taps are usually low frequency
                // compared to frame rate.
                MotionEvent tap = null;
                queuedSingleTaps.TryDequeue(out tap);

                if (tap != null && camera.TrackingState == TrackingState.Tracking)
                {
                    foreach (var hit in frame.HitTest(tap))
                    {
                        var trackable = hit.Trackable;

                        // Check if any plane was hit, and if it was hit inside the plane polygon.
                        if (trackable is Plane && ((Plane)trackable).IsPoseInPolygon(hit.HitPose))
                        {
                            // Cap the number of objects created. This avoids overloading both the
                            // rendering system and ARCore.
                            if (anchors.Count >= 16)
                            {
                                anchors[0].Detach();
                                anchors.RemoveAt(0);
                            }
                            // Adding an Anchor tells ARCore that it should track this position in
                            // space.  This anchor is created on the Plane to place the 3d model
                            // in the correct position relative to both the world and to the plane
                            anchors.Add(hit.CreateAnchor());

                            // Hits are sorted by depth. Consider only closest hit on a plane.
                            break;
                        }
                    }
                }

                // If not tracking, don't draw 3d objects.
                if (camera.TrackingState == TrackingState.Paused)
                {
                    return;
                }

                // Get projection matrix.
                float[] projmtx = new float[16];
                camera.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

                // Get camera matrix and draw.
                float[] viewmtx = new float[16];
                camera.GetViewMatrix(viewmtx, 0);

                // Compute lighting from average intensity of the image.
                var lightIntensity = frame.LightEstimate.PixelIntensity;

                // Visualize tracked points.
                var pointCloud = frame.AcquirePointCloud();
                pointCloudRenderer.Update(pointCloud);

                // App is responsible for releasing point cloud resources after using it
                pointCloud.Release();

                var planes = new List <Plane>();
                foreach (var p in arSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
                {
                    var plane = (Plane)p;
                    planes.Add(plane);
                }

                // Check if we detected at least one plane. If so, hide the loading message.
                if (loadingMessageSnackbar != null)
                {
                    foreach (var plane in planes)
                    {
                        if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                            plane.TrackingState == TrackingState.Tracking)
                        {
                            HideLoadingMessage();
                            break;
                        }
                    }
                }

                // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);


                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
                GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
                // Restore the depth state for further drawing.
                GLES20.GlDepthMask(true);
                GLES20.GlEnable(GLES20.GlDepthTest);
                // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);
                // DrawModels(projmtx, viewmtx, lightIntensity);


                if (doCaptureCameraFrame)
                {
                    var displayOrientedPose = camera.DisplayOrientedPose;
                    var pose = new VirtualStudio.Shared.DTOs.Tracking.Pose
                    {
                        Position    = new System.Numerics.Vector3(displayOrientedPose.Tx(), displayOrientedPose.Ty(), displayOrientedPose.Tz()),
                        Orientation = new System.Numerics.Vector4(displayOrientedPose.Qx(), displayOrientedPose.Qy(), displayOrientedPose.Qz(), displayOrientedPose.Qw()),
                        Projection  = new System.Numerics.Matrix4x4(
                            projmtx[0], projmtx[1], projmtx[2], projmtx[3],
                            projmtx[4], projmtx[5], projmtx[6], projmtx[7],
                            projmtx[8], projmtx[9], projmtx[10], projmtx[11],
                            projmtx[12], projmtx[13], projmtx[14], projmtx[15]
                            )
                    };
                    webRtcClient.SendMessage(pose.ToBinary());
                    counter = 0;

                    var textureBuffer = new TextureBufferImpl(targetResolution.Width, targetResolution.Height, VideoFrame.TextureBufferType.Rgb, renderTextureId, new Android.Graphics.Matrix(), null, null, null);
                    var i420Buffer    = yuvConverter.Convert(textureBuffer);
                    VideoFrameAvailable?.Invoke(this, i420Buffer);
                }
            }
            catch (System.Exception ex)
            {
                // Avoid crashing the application due to unhandled exceptions.
                Log.Error(TAG, "Exception on the OpenGL thread", ex);
            }
        }
Example #8
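The companion `OnSurfaceCreated` callback for Example #7: it creates the offscreen framebuffer and render texture (validated with `GlUtil.CheckNoGLES2Error`), hands a camera texture to the ARCore session, and prepares the object, shadow, plane, and point-cloud renderers.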
        public void OnSurfaceCreated(IGL10 gl, Javax.Microedition.Khronos.Egl.EGLConfig config)
        {
            GLES20.GlClearColor(0.9f, 0.1f, 0.1f, 1.0f);
            // GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);

            textureSize = arSession.CameraConfig.TextureSize;
            arSession.SetDisplayGeometry(1, targetResolution.Width, targetResolution.Height);

            int[] glObjs = new int[1];
            GLES20.GlGenFramebuffers(1, glObjs, 0);
            fboId = glObjs[0];
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
            GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
            GLES20.GlGenTextures(1, glObjs, 0);
            renderTextureId = glObjs[0];
            GLES20.GlBindTexture(GLES20.GlTexture2d, renderTextureId);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);
            GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlRgba, targetResolution.Width, targetResolution.Height, 0, GLES20.GlRgba, GLES20.GlUnsignedByte, null);

            GLES20.GlBindTexture(GLES20.GlTexture2d, 0);
            GLES20.GlFramebufferTexture2D(GLES20.GlFramebuffer, GLES20.GlColorAttachment0, GLES20.GlTexture2d, renderTextureId, 0);
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);

            GlUtil.CheckNoGLES2Error("Create render texture.");

            // Create the texture and pass it to ARCore session to be filled during update().
            backgroundRenderer.CreateOnGlThread(/*context=*/ this);
            if (arSession != null)
            {
                arSession.SetCameraTextureName(BackgroundRenderer.TextureId);
            }


            // Prepare the other rendering objects.
            try
            {
                virtualObject.CreateOnGlThread(/*context=*/ this, "andy.obj", "andy.png");
                virtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);

                virtualObjectShadow.CreateOnGlThread(/*context=*/ this,
                                                     "andy_shadow.obj", "andy_shadow.png");
                virtualObjectShadow.SetBlendMode(ObjectRenderer.BlendMode.Shadow);
                virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
            }
            catch (Java.IO.IOException e)
            {
                Log.Error(TAG, "Failed to read obj file");
            }

            try
            {
                planeRenderer.CreateOnGlThread(/*context=*/ this, "trigrid.png");
            }
            catch (Java.IO.IOException e)
            {
                Log.Error(TAG, "Failed to read plane texture");
            }
            pointCloudRenderer.CreateOnGlThread(/*context=*/ this);
        }