Code example #1
        public void OnDrawFrame(IGL10 gl)
        {
            // Clear screen to notify driver it should not load any pixels from previous frame.
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
            GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (arSession == null)
            {
                return;
            }

            // Notify the ARCore session that the view size changed, so that the perspective
            // matrix and the video background can be properly adjusted.
            // displayRotationHelper.UpdateSessionIfNeeded(arSession);

            try
            {
                // Obtain the current frame from ARSession. When the configuration is set to
                // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
                // camera framerate.
                Frame frame = arSession.Update();
                Google.AR.Core.Camera camera = frame.Camera;


                // Draw background.
                GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
                backgroundRenderer.Draw(frame);

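                // Draw the camera image a second time into the offscreen framebuffer at the
                // capture resolution, so the streamed frame is independent of the view size.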
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
                GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
                GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);
                backgroundRenderer.Draw(frame);
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
                GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
                GlUtil.CheckNoGLES2Error("Switch framebuffers.");

                // Handle taps. Handling only one tap per frame, as taps are usually low frequency
                // compared to frame rate.
                queuedSingleTaps.TryDequeue(out MotionEvent tap);

                if (tap != null && camera.TrackingState == TrackingState.Tracking)
                {
                    foreach (var hit in frame.HitTest(tap))
                    {
                        var trackable = hit.Trackable;

                        // Check if any plane was hit, and if it was hit inside the plane polygon.
                        if (trackable is Plane plane && plane.IsPoseInPolygon(hit.HitPose))
                        {
                            // Cap the number of objects created. This avoids overloading both the
                            // rendering system and ARCore.
                            if (anchors.Count >= 16)
                            {
                                anchors[0].Detach();
                                anchors.RemoveAt(0);
                            }
                            // Adding an Anchor tells ARCore that it should track this position in
                            // space. This anchor is created on the Plane to place the 3D model
                            // in the correct position relative to both the world and the plane.
                            anchors.Add(hit.CreateAnchor());

                            // Hits are sorted by depth. Consider only closest hit on a plane.
                            break;
                        }
                    }
                }

                // If not tracking, don't draw 3d objects.
                if (camera.TrackingState == TrackingState.Paused)
                {
                    return;
                }

                // Get projection matrix.
                float[] projmtx = new float[16];
                camera.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

                // Get camera matrix and draw.
                float[] viewmtx = new float[16];
                camera.GetViewMatrix(viewmtx, 0);

                // Compute lighting from average intensity of the image.
                var lightIntensity = frame.LightEstimate.PixelIntensity;

                // Visualize tracked points.
                var pointCloud = frame.AcquirePointCloud();
                pointCloudRenderer.Update(pointCloud);

                // The app is responsible for releasing point cloud resources after using them.
                pointCloud.Release();

                var planes = new List<Plane>();
                foreach (var p in arSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
                {
                    var plane = (Plane)p;
                    planes.Add(plane);
                }

                // Check if we detected at least one plane. If so, hide the loading message.
                if (loadingMessageSnackbar != null)
                {
                    foreach (var plane in planes)
                    {
                        if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                            plane.TrackingState == TrackingState.Tracking)
                        {
                            HideLoadingMessage();
                            break;
                        }
                    }
                }

                // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);


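                // Bind the offscreen framebuffer again so any 3D content drawn below also
                // lands in the capture target.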
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
                GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
                // Restore the depth state for further drawing.
                GLES20.GlDepthMask(true);
                GLES20.GlEnable(GLES20.GlDepthTest);
                // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);
                // DrawModels(projmtx, viewmtx, lightIntensity);


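                // Send the current camera pose and projection matrix through webRtcClient,
                // then wrap the offscreen render texture and convert it to an I420 frame
                // for the video pipeline.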
                if (doCaptureCameraFrame)
                {
                    var displayOrientedPose = camera.DisplayOrientedPose;
                    var pose = new VirtualStudio.Shared.DTOs.Tracking.Pose
                    {
                        Position    = new System.Numerics.Vector3(displayOrientedPose.Tx(), displayOrientedPose.Ty(), displayOrientedPose.Tz()),
                        Orientation = new System.Numerics.Vector4(displayOrientedPose.Qx(), displayOrientedPose.Qy(), displayOrientedPose.Qz(), displayOrientedPose.Qw()),
                        Projection  = new System.Numerics.Matrix4x4(
                            projmtx[0], projmtx[1], projmtx[2], projmtx[3],
                            projmtx[4], projmtx[5], projmtx[6], projmtx[7],
                            projmtx[8], projmtx[9], projmtx[10], projmtx[11],
                            projmtx[12], projmtx[13], projmtx[14], projmtx[15]
                            )
                    };
                    webRtcClient.SendMessage(pose.ToBinary());
                    counter = 0;

                    var textureBuffer = new TextureBufferImpl(targetResolution.Width, targetResolution.Height, VideoFrame.TextureBufferType.Rgb, renderTextureId, new Android.Graphics.Matrix(), null, null, null);
                    var i420Buffer    = yuvConverter.Convert(textureBuffer);
                    VideoFrameAvailable?.Invoke(this, i420Buffer);
                }
            }
            catch (System.Exception ex)
            {
                // Avoid crashing the application due to unhandled exceptions.
                Log.Error(TAG, "Exception on the OpenGL thread", ex);
            }
        }
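
The tap handling above drains queuedSingleTaps, whose producer side is not shown here. Below is a minimal sketch of that side, modeled on the ARCore HelloAR sample: it assumes queuedSingleTaps is a ConcurrentQueue<MotionEvent> field on the same class, that the class is (or has access to) a Context, and that it uses the same glSurfaceView; the SetUpTapDetector and SimpleTapListener names are illustrative, not part of the original code.

        // Sketch only. Assumes: using System; using System.Collections.Concurrent; using Android.Views;
        private readonly ConcurrentQueue<MotionEvent> queuedSingleTaps = new ConcurrentQueue<MotionEvent>();
        private GestureDetector gestureDetector;

        private void SetUpTapDetector()  // hypothetical helper, e.g. called from OnCreate
        {
            gestureDetector = new GestureDetector(this, new SimpleTapListener(e => queuedSingleTaps.Enqueue(e)));

            // Forward touch events from the GL surface to the detector. OnDrawFrame
            // dequeues at most one queued tap per frame on the GL thread.
            glSurfaceView.Touch += (sender, args) => args.Handled = gestureDetector.OnTouchEvent(args.Event);
        }

        private class SimpleTapListener : GestureDetector.SimpleOnGestureListener
        {
            private readonly Action<MotionEvent> onTap;
            public SimpleTapListener(Action<MotionEvent> onTap) { this.onTap = onTap; }

            public override bool OnSingleTapUp(MotionEvent e) { onTap(e); return true; }
            public override bool OnDown(MotionEvent e) { return true; }  // must claim OnDown to receive the tap
        }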
Code example #2
        public void OnSurfaceCreated(IGL10 gl, Javax.Microedition.Khronos.Egl.EGLConfig config)
        {
            GLES20.GlClearColor(0.9f, 0.1f, 0.1f, 1.0f);
            // GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);

            textureSize = arSession.CameraConfig.TextureSize;
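            // First argument is the display rotation (1 == Surface.Rotation90); the geometry
            // is set to the capture resolution rather than the on-screen view size.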
            arSession.SetDisplayGeometry(1, targetResolution.Width, targetResolution.Height);

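            // Create an offscreen framebuffer with an RGBA texture color attachment at the
            // capture resolution; OnDrawFrame renders into it and yuvConverter reads it back.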
            int[] glObjs = new int[1];
            GLES20.GlGenFramebuffers(1, glObjs, 0);
            fboId = glObjs[0];
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
            GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
            GLES20.GlGenTextures(1, glObjs, 0);
            renderTextureId = glObjs[0];
            GLES20.GlBindTexture(GLES20.GlTexture2d, renderTextureId);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlNearest);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlNearest);
            GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlRgba, targetResolution.Width, targetResolution.Height, 0, GLES20.GlRgba, GLES20.GlUnsignedByte, null);

            GLES20.GlBindTexture(GLES20.GlTexture2d, 0);
            GLES20.GlFramebufferTexture2D(GLES20.GlFramebuffer, GLES20.GlColorAttachment0, GLES20.GlTexture2d, renderTextureId, 0);
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);

            GlUtil.CheckNoGLES2Error("Create render texture.");

            // Create the texture and pass it to ARCore session to be filled during update().
            backgroundRenderer.CreateOnGlThread(/*context=*/ this);
            if (arSession != null)
            {
                arSession.SetCameraTextureName(BackgroundRenderer.TextureId);
            }


            // Prepare the other rendering objects.
            try
            {
                virtualObject.CreateOnGlThread(/*context=*/ this, "andy.obj", "andy.png");
                virtualObject.setMaterialProperties(0.0f, 3.5f, 1.0f, 6.0f);

                virtualObjectShadow.CreateOnGlThread(/*context=*/ this,
                                                     "andy_shadow.obj", "andy_shadow.png");
                virtualObjectShadow.SetBlendMode(ObjectRenderer.BlendMode.Shadow);
                virtualObjectShadow.setMaterialProperties(1.0f, 0.0f, 0.0f, 1.0f);
            }
            catch (Java.IO.IOException e)
            {
                Log.Error(TAG, "Failed to read obj file", e);
            }

            try
            {
                planeRenderer.CreateOnGlThread(/*context=*/ this, "trigrid.png");
            }
            catch (Java.IO.IOException e)
            {
                Log.Error(TAG, "Failed to read plane texture", e);
            }
            pointCloudRenderer.CreateOnGlThread(/*context=*/ this);
        }
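
OnSurfaceCreated above attaches the render texture but never verifies that the framebuffer is complete, and some drivers reject particular attachment formats. A short sketch of that check, using only standard GLES20 calls and the logging style already used in this class:

            // Sketch only: validate the offscreen FBO right after GlFramebufferTexture2D.
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
            int status = GLES20.GlCheckFramebufferStatus(GLES20.GlFramebuffer);
            if (status != GLES20.GlFramebufferComplete)
            {
                // Fail fast instead of streaming black frames later.
                Log.Error(TAG, "Offscreen framebuffer incomplete: 0x" + status.ToString("X"));
            }
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);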