private void ImageSource_ImageAvailable(object sender, Image image)
        {
            try
            {
                var  buffer      = new TextureBufferImpl(width, height, VideoFrame.TextureBufferType.Oes, BackgroundRenderer.TextureId, new Android.Graphics.Matrix(), textureHelper.Handler, new YuvConverter(), null);
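                // One DateTime tick is 100 ns, so ticks * 100 gives nanoseconds since startTime.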
                long timestampNs = (DateTime.Now - startTime).Ticks * 100;
                var  i420buffer  = yuvConverter.Convert(buffer);
                var  videoFrame  = new VideoFrame(i420buffer, 0, timestampNs);

                //var frameBuffer = new ImageI420Buffer(image);
                //var videoFrame = new VideoFrame(frameBuffer, 0, timestampNs);
                capturerObserver.OnFrameCaptured(videoFrame);
                videoFrame.Release();
                image?.Close();
            }
            catch (Exception ex)
            {
                // Avoid crashing the capture loop due to unhandled exceptions.
                Android.Util.Log.Error("ImageSource", "Exception while capturing the camera image", ex);
            }
        }
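For context, here is a minimal sketch of the state this handler relies on; everything below is an assumption except the identifiers that appear above. startTime anchors the capture clock, yuvConverter converts the OES texture buffer to I420, and capturerObserver is the WebRTC observer that OnFrameCaptured is invoked on (the names follow the common Xamarin binding of org.webrtc, where CapturerObserver surfaces as ICapturerObserver):

        private SurfaceTextureHelper textureHelper;     // provides the GL thread Handler used above
        private ICapturerObserver    capturerObserver;  // assumed binding of org.webrtc.CapturerObserver
        private YuvConverter         yuvConverter = new YuvConverter();
        private DateTime             startTime;

        // Assumed wiring, modeled on the org.webrtc VideoCapturer pattern: WebRTC supplies
        // the observer that captured frames must be delivered to.
        public void Initialize(SurfaceTextureHelper helper, Android.Content.Context context, ICapturerObserver observer)
        {
            textureHelper    = helper;
            capturerObserver = observer;
            startTime        = DateTime.Now;
        }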
Example #2
        public void OnDrawFrame(IGL10 gl)
        {
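            // Renders the ARCore camera image to the screen, renders it again into an
            // offscreen framebuffer at the capture resolution, and streams the captured
            // texture plus the camera pose over WebRTC.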
            // Clear screen to notify driver it should not load any pixels from previous frame.
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
            GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
            GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

            if (arSession == null)
            {
                return;
            }

            // Notify the ARCore session that the view size changed so that the perspective matrix
            // and the video background can be properly adjusted.
            // displayRotationHelper.UpdateSessionIfNeeded(arSession);

            try
            {
                // Obtain the current frame from ARSession. When the configuration is set to
                // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
                // camera framerate.
                Frame frame = arSession.Update();
                Google.AR.Core.Camera camera = frame.Camera;

                // Draw the camera background to the on-screen surface.
                GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
                backgroundRenderer.Draw(frame);

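                // Render the camera background a second time into the offscreen framebuffer at
                // the capture resolution; that texture is what later gets converted to I420.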
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
                GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
                GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);
                backgroundRenderer.Draw(frame);
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
                GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
                GlUtil.CheckNoGLES2Error("Switch framebuffers.");

                // Handle taps. Handling only one tap per frame, as taps are usually low frequency
                // compared to frame rate.
                queuedSingleTaps.TryDequeue(out MotionEvent tap);

                if (tap != null && camera.TrackingState == TrackingState.Tracking)
                {
                    foreach (var hit in frame.HitTest(tap))
                    {
                        var trackable = hit.Trackable;

                        // Check if any plane was hit, and if it was hit inside the plane polygon.
                        if (trackable is Plane plane && plane.IsPoseInPolygon(hit.HitPose))
                        {
                            // Cap the number of objects created. This avoids overloading both the
                            // rendering system and ARCore.
                            if (anchors.Count >= 16)
                            {
                                anchors[0].Detach();
                                anchors.RemoveAt(0);
                            }
                            // Adding an Anchor tells ARCore that it should track this position in
                            // space. This anchor is created on the Plane to place the 3D model in
                            // the correct position relative to both the world and the plane.
                            anchors.Add(hit.CreateAnchor());

                            // Hits are sorted by depth. Consider only closest hit on a plane.
                            break;
                        }
                    }
                }

                // If not tracking, don't draw 3d objects.
                if (camera.TrackingState == TrackingState.Paused)
                {
                    return;
                }

                // Get projection matrix.
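                // Near and far clip planes are 0.1 m and 100 m; ARCore works in meters.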
                float[] projmtx = new float[16];
                camera.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

                // Get camera matrix and draw.
                float[] viewmtx = new float[16];
                camera.GetViewMatrix(viewmtx, 0);

                // Compute lighting from average intensity of the image.
                var lightIntensity = frame.LightEstimate.PixelIntensity;

                // Visualize tracked points.
                var pointCloud = frame.AcquirePointCloud();
                pointCloudRenderer.Update(pointCloud);

                // The app is responsible for releasing point cloud resources after using it.
                pointCloud.Release();

                var planes = new List<Plane>();
                foreach (var p in arSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
                {
                    var plane = (Plane)p;
                    planes.Add(plane);
                }

                // Check if we detected at least one plane. If so, hide the loading message.
                if (loadingMessageSnackbar != null)
                {
                    foreach (var plane in planes)
                    {
                        if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                            plane.TrackingState == TrackingState.Tracking)
                        {
                            HideLoadingMessage();
                            break;
                        }
                    }
                }

                // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);

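                // Re-bind the offscreen framebuffer so the (currently disabled) model passes
                // would render into the captured texture as well.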
                GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
                GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
                // Restore the depth state for further drawing.
                GLES20.GlDepthMask(true);
                GLES20.GlEnable(GLES20.GlDepthTest);
                // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);
                // DrawModels(projmtx, viewmtx, lightIntensity);


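                // When a frame capture is requested, send the current camera pose and projection
                // matrix through webRtcClient, then convert the offscreen texture to I420 and
                // raise VideoFrameAvailable for the video pipeline.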
                if (doCaptureCameraFrame)
                {
                    var displayOrientedPose = camera.DisplayOrientedPose;
                    var pose = new VirtualStudio.Shared.DTOs.Tracking.Pose
                    {
                        Position    = new System.Numerics.Vector3(displayOrientedPose.Tx(), displayOrientedPose.Ty(), displayOrientedPose.Tz()),
                        Orientation = new System.Numerics.Vector4(displayOrientedPose.Qx(), displayOrientedPose.Qy(), displayOrientedPose.Qz(), displayOrientedPose.Qw()),
                        Projection  = new System.Numerics.Matrix4x4(
                            projmtx[0], projmtx[1], projmtx[2], projmtx[3],
                            projmtx[4], projmtx[5], projmtx[6], projmtx[7],
                            projmtx[8], projmtx[9], projmtx[10], projmtx[11],
                            projmtx[12], projmtx[13], projmtx[14], projmtx[15]
                            )
                    };
                    webRtcClient.SendMessage(pose.ToBinary());
                    counter = 0;

                    var textureBuffer = new TextureBufferImpl(targetResolution.Width, targetResolution.Height, VideoFrame.TextureBufferType.Rgb, renderTextureId, new Android.Graphics.Matrix(), null, null, null);
                    var i420Buffer    = yuvConverter.Convert(textureBuffer);
                    VideoFrameAvailable?.Invoke(this, i420Buffer);
                }
            }
            catch (System.Exception ex)
            {
                // Avoid crashing the application due to unhandled exceptions.
                Log.Error(TAG, "Exception on the OpenGL thread", ex);
            }
        }
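The offscreen target (fboId, renderTextureId) that OnDrawFrame binds is created elsewhere in the class. A plausible setup using plain GLES20 calls is sketched below; the helper name CreateOffscreenBuffer is hypothetical, while the field names match the ones used above:

        private int fboId;
        private int renderTextureId;

        private void CreateOffscreenBuffer()
        {
            var ids = new int[1];

            // Color texture at the capture resolution; this is what backgroundRenderer
            // draws into and what yuvConverter later converts to I420.
            GLES20.GlGenTextures(1, ids, 0);
            renderTextureId = ids[0];
            GLES20.GlBindTexture(GLES20.GlTexture2d, renderTextureId);
            GLES20.GlTexImage2D(GLES20.GlTexture2d, 0, GLES20.GlRgba,
                targetResolution.Width, targetResolution.Height, 0,
                GLES20.GlRgba, GLES20.GlUnsignedByte, null);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMinFilter, GLES20.GlLinear);
            GLES20.GlTexParameteri(GLES20.GlTexture2d, GLES20.GlTextureMagFilter, GLES20.GlLinear);

            // Framebuffer with that texture as its color attachment.
            GLES20.GlGenFramebuffers(1, ids, 0);
            fboId = ids[0];
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
            GLES20.GlFramebufferTexture2D(GLES20.GlFramebuffer, GLES20.GlColorAttachment0,
                GLES20.GlTexture2d, renderTextureId, 0);
            GlUtil.CheckNoGLES2Error("Create offscreen framebuffer.");
            GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
        }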