/// <summary>
/// Draws the camera image as a full-screen quad behind all virtual content.
/// Must be called before any 3D rendering for the frame.
/// </summary>
/// <param name="frame">The current ARCore <c>Frame</c>.</param>
public void Draw(Frame frame)
{
    // If the display geometry changed (rotation or view size), re-query the UV
    // coordinates for the screen rect, as they may have changed as well.
    if (frame.HasDisplayGeometryChanged)
    {
        frame.TransformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
    }

    // No depth test/write: the background quad must never occlude anything and
    // is expected to be drawn first.
    GLES20.GlDisable(GLES20.GlDepthTest);
    GLES20.GlDepthMask(false);

    // The camera image arrives on an external OES texture.
    GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, TextureId);

    GLES20.GlUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.GlVertexAttribPointer(
        mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GlFloat, false, 0, mQuadVertices);

    // Set the (display-transformed) texture coordinates.
    GLES20.GlVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
        GLES20.GlFloat, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.GlEnableVertexAttribArray(mQuadPositionParam);
    GLES20.GlEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);

    // Disable vertex arrays
    GLES20.GlDisableVertexAttribArray(mQuadPositionParam);
    GLES20.GlDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.GlDepthMask(true);
    GLES20.GlEnable(GLES20.GlDepthTest);

    ShaderUtil.CheckGLError(TAG, "Draw");
}
/// <summary>
/// Draws the AR background image. The image will be drawn such that virtual content rendered
/// with the matrices provided by <c>Frame.GetViewMatrix(float[], int)</c> and
/// <c>Session.GetProjectionMatrix(float[], int, float, float)</c> will accurately follow
/// static physical objects. This must be called <b>before</b> drawing virtual content.
/// </summary>
/// <param name="frame">The last <c>Frame</c> returned by <c>Session.Update()</c>.</param>
public void Draw(Frame frame)
{
    // If display rotation changed (also includes view size change), we need to re-query the uv
    // coordinates for the screen rect, as they may have changed as well.
    if (frame.HasDisplayGeometryChanged)
    {
        frame.TransformDisplayUvCoords(mQuadTexCoord, mQuadTexCoordTransformed);
    }

    // No need to test or write depth, the screen quad has arbitrary depth, and is expected
    // to be drawn first.
    GLES20.GlDisable(GLES20.GlDepthTest);
    GLES20.GlDepthMask(false);

    GLES20.GlBindTexture(mTextureTarget, TextureId);

    // Re-applied every frame: clamp to edge so no wrap artifacts at screen borders,
    // nearest filtering since the quad maps ~1:1 to screen pixels.
    GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureWrapS, GLES20.GlClampToEdge);
    GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureWrapT, GLES20.GlClampToEdge);
    GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureMinFilter, GLES20.GlNearest);
    GLES20.GlTexParameteri(mTextureTarget, GLES20.GlTextureMagFilter, GLES20.GlNearest);

    GLES20.GlUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.GlVertexAttribPointer(
        mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GlFloat, false, 0, mQuadVertices);

    // Set the texture coordinates.
    GLES20.GlVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
        GLES20.GlFloat, false, 0, mQuadTexCoordTransformed);

    // Enable vertex arrays
    GLES20.GlEnableVertexAttribArray(mQuadPositionParam);
    GLES20.GlEnableVertexAttribArray(mQuadTexCoordParam);

    GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);

    // Disable vertex arrays
    GLES20.GlDisableVertexAttribArray(mQuadPositionParam);
    GLES20.GlDisableVertexAttribArray(mQuadTexCoordParam);

    // Restore the depth state for further drawing.
    GLES20.GlDepthMask(true);
    GLES20.GlEnable(GLES20.GlDepthTest);

    ShaderUtil.CheckGLError(TAG, "Draw");
}
/// <summary>
/// Draws one textured label per plane, using alpha blending so the labels
/// composite over the camera image. Depth writes are disabled for the duration.
/// </summary>
/// <param name="sortedPlanes">Planes to draw, in the order supplied by the caller.</param>
/// <param name="cameraViews">Camera view matrix, passed through to <c>DrawLabel</c>.</param>
/// <param name="cameraProjection">Camera projection matrix, passed through to <c>DrawLabel</c>.</param>
private void DrawSortedPlans(List<ARPlane> sortedPlanes, float[] cameraViews, float[] cameraProjection)
{
    ShaderUtil.CheckGlError(TAG, "Draw sorted plans start.");

    // Labels must not occlude later 3D content, so depth writes stay off.
    GLES20.GlDepthMask(false);
    GLES20.GlEnable(GLES20.GlBlend);
    GLES20.GlBlendFuncSeparate(
        GLES20.GlDstAlpha, GLES20.GlOne, GLES20.GlZero, GLES20.GlOneMinusSrcAlpha);
    GLES20.GlUseProgram(mProgram);
    GLES20.GlEnableVertexAttribArray(glPositionParameter);

    // Hoisted loop invariants: the matrix buffer is fully overwritten by
    // ToMatrix each iteration, and the UV scales do not depend on the plane.
    float[] planeMatrix = new float[MATRIX_SIZE];
    float scaleU = 1.0f / LABEL_WIDTH;
    float scaleV = 1.0f / LABEL_HEIGHT;

    foreach (ARPlane plane in sortedPlanes)
    {
        plane.CenterPose.ToMatrix(planeMatrix, 0);
        Array.Copy(planeMatrix, modelMatrix, MATRIX_SIZE);

        // Diagonal 2x2 UV matrix mapping label texture space onto the plane.
        planeAngleUvMatrix[0] = scaleU;
        planeAngleUvMatrix[1] = 0.0f;
        planeAngleUvMatrix[2] = 0.0f;
        planeAngleUvMatrix[3] = scaleV;

        int idx = plane.Label.Ordinal();
        Log.Debug(TAG, "Plane getLabel:" + idx);
        // System.Math.Abs is equivalent to Java.Lang.Math.Abs here and avoids
        // a JNI round-trip per plane per frame.
        idx = Math.Abs(idx);

        // Label ordinal selects both the texture unit and the texture object.
        GLES20.GlActiveTexture(GLES20.GlTexture0 + idx);
        GLES20.GlBindTexture(GLES20.GlTexture2d, textures[idx]);
        GLES20.GlUniform1i(glTexture, idx);
        GLES20.GlUniformMatrix2fv(glPlaneUvMatrix, 1, false, planeAngleUvMatrix, 0);

        DrawLabel(cameraViews, cameraProjection);
    }

    // Restore GL state set above.
    GLES20.GlDisableVertexAttribArray(glPositionParameter);
    GLES20.GlBindTexture(GLES20.GlTexture2d, 0);
    GLES20.GlDisable(GLES20.GlBlend);
    GLES20.GlDepthMask(true);
    ShaderUtil.CheckGlError(TAG, "Draw sorted plans end.");
}
/// <summary>
/// Render each frame.
/// This method is called when Android.Opengl.GLSurfaceView.IRenderer's OnDrawFrame.
/// </summary>
/// <param name="frame">ARFrame; ignored (no-op) when null.</param>
public void OnDrawFrame(ARFrame frame)
{
    ShaderUtil.CheckGlError(TAG, "On draw frame start.");
    if (frame == null)
    {
        return;
    }

    // Re-query UV coordinates when the display rotation or view size changed.
    if (frame.HasDisplayGeometryChanged)
    {
        frame.TransformDisplayUvCoords(mTexBuffer, mTexTransformedBuffer);
    }
    Clear();

    // Background draws behind everything: no depth test or depth writes.
    GLES20.GlDisable(GLES20.GlDepthTest);
    GLES20.GlDepthMask(false);

    GLES20.GlUseProgram(mProgram);

    // Set the texture ID (camera image arrives on an external OES texture).
    GLES20.GlBindTexture(GLES11Ext.GlTextureExternalOes, mExternalTextureId);

    // Set the projection matrix.
    GLES20.GlUniformMatrix4fv(mMatrix, 1, false, mProjectionMatrix, 0);
    GLES20.GlUniformMatrix4fv(mCoordMatrix, 1, false, coordMatrixs, 0);

    // Set the vertex.
    GLES20.GlEnableVertexAttribArray(mPosition);
    GLES20.GlVertexAttribPointer(mPosition, 2, GLES20.GlFloat, false, 0, mVerBuffer);

    // Set the texture coordinates.
    GLES20.GlEnableVertexAttribArray(mCoord);
    GLES20.GlVertexAttribPointer(mCoord, 2, GLES20.GlFloat, false, 0, mTexTransformedBuffer);

    // Number of vertices.
    GLES20.GlDrawArrays(GLES20.GlTriangleStrip, 0, 4);
    GLES20.GlDisableVertexAttribArray(mPosition);
    GLES20.GlDisableVertexAttribArray(mCoord);

    // Restore depth state for subsequent 3D rendering.
    GLES20.GlDepthMask(true);
    GLES20.GlEnable(GLES20.GlDepthTest);
    ShaderUtil.CheckGlError(TAG, "On draw frame end.");
}
/// <summary>
/// Draws the model.
/// </summary>
/// <param name="cameraView">A 4x4 view matrix, in column-major order.</param>
/// <param name="cameraPerspective">A 4x4 projection matrix, in column-major order.</param>
/// <param name="lightIntensity">Illumination intensity. Combined with diffuse and specular
/// material properties.</param>
public void Draw(float[] cameraView, float[] cameraPerspective, float lightIntensity)
{
    ShaderUtil.CheckGLError(TAG, "Before draw");

    // Build the ModelView and ModelViewProjection matrices
    // for calculating object position and light.
    Android.Opengl.Matrix.MultiplyMM(mModelViewMatrix, 0, cameraView, 0, mModelMatrix, 0);
    Android.Opengl.Matrix.MultiplyMM(mModelViewProjectionMatrix, 0, cameraPerspective, 0, mModelViewMatrix, 0);

    GLES20.GlUseProgram(mProgram);

    // Set the lighting environment properties: light direction transformed into view space,
    // then normalized before being handed to the shader.
    Android.Opengl.Matrix.MultiplyMV(mViewLightDirection, 0, mModelViewMatrix, 0, LIGHT_DIRECTION, 0);
    normalizeVec3(mViewLightDirection);
    GLES20.GlUniform4f(mLightingParametersUniform,
        mViewLightDirection[0], mViewLightDirection[1], mViewLightDirection[2], lightIntensity);

    // Set the object material properties.
    GLES20.GlUniform4f(mMaterialParametersUniform, mAmbient, mDiffuse, mSpecular, mSpecularPower);

    // Attach the object texture.
    GLES20.GlActiveTexture(GLES20.GlTexture0);
    GLES20.GlBindTexture(GLES20.GlTexture2d, mTextures[0]);
    GLES20.GlUniform1i(mTextureUniform, 0);

    // Set the vertex attributes. Positions, normals and texcoords live at fixed
    // byte offsets inside one shared vertex buffer object.
    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, mVertexBufferId);

    GLES20.GlVertexAttribPointer(
        mPositionAttribute, COORDS_PER_VERTEX, GLES20.GlFloat, false, 0, mVerticesBaseAddress);
    GLES20.GlVertexAttribPointer(
        mNormalAttribute, 3, GLES20.GlFloat, false, 0, mNormalsBaseAddress);
    GLES20.GlVertexAttribPointer(
        mTexCoordAttribute, 2, GLES20.GlFloat, false, 0, mTexCoordsBaseAddress);

    GLES20.GlBindBuffer(GLES20.GlArrayBuffer, 0);

    // Set the ModelViewProjection matrix in the shader.
    GLES20.GlUniformMatrix4fv(
        mModelViewUniform, 1, false, mModelViewMatrix, 0);
    GLES20.GlUniformMatrix4fv(
        mModelViewProjectionUniform, 1, false, mModelViewProjectionMatrix, 0);

    // Enable vertex arrays
    GLES20.GlEnableVertexAttribArray(mPositionAttribute);
    GLES20.GlEnableVertexAttribArray(mNormalAttribute);
    GLES20.GlEnableVertexAttribArray(mTexCoordAttribute);

    // Blending (shadow/grid) disables depth writes for the duration of the draw.
    if (mBlendMode != BlendMode.Null)
    {
        GLES20.GlDepthMask(false);
        GLES20.GlEnable(GLES20.GlBlend);
        switch (mBlendMode)
        {
            case BlendMode.Shadow:
                // Multiplicative blending function for Shadow.
                GLES20.GlBlendFunc(GLES20.GlZero, GLES20.GlOneMinusSrcAlpha);
                break;
            case BlendMode.Grid:
                // Grid, additive blending function.
                GLES20.GlBlendFunc(GLES20.GlSrcAlpha, GLES20.GlOneMinusSrcAlpha);
                break;
        }
    }

    GLES20.GlBindBuffer(GLES20.GlElementArrayBuffer, mIndexBufferId);
    GLES20.GlDrawElements(GLES20.GlTriangles, mIndexCount, GLES20.GlUnsignedShort, 0);
    GLES20.GlBindBuffer(GLES20.GlElementArrayBuffer, 0);

    if (mBlendMode != BlendMode.Null)
    {
        GLES20.GlDisable(GLES20.GlBlend);
        GLES20.GlDepthMask(true);
    }

    // Disable vertex arrays
    GLES20.GlDisableVertexAttribArray(mPositionAttribute);
    GLES20.GlDisableVertexAttribArray(mNormalAttribute);
    GLES20.GlDisableVertexAttribArray(mTexCoordAttribute);

    GLES20.GlBindTexture(GLES20.GlTexture2d, 0);

    ShaderUtil.CheckGLError(TAG, "After draw");
}
/// <summary>
/// Draws the collection of tracked planes, with closer planes hiding more distant ones.
/// </summary>
/// <param name="allPlanes">The collection of planes to draw.</param>
/// <param name="cameraPose">The pose of the camera, as returned by <c>Frame.GetPose()</c>.</param>
/// <param name="cameraPerspective">The projection matrix, as returned by
/// <c>Session.GetProjectionMatrix(float[], int, float, float)</c>.</param>
public void DrawPlanes(IEnumerable<Plane> allPlanes, Pose cameraPose, float[] cameraPerspective)
{
    // Planes must be sorted by distance from camera so that we draw closer planes first, and
    // they occlude the farther planes.
    List<SortablePlane> sortedPlanes = new List<SortablePlane>();
    float[] normal = new float[3];
    float cameraX = cameraPose.Tx();
    float cameraY = cameraPose.Ty();
    float cameraZ = cameraPose.Tz();
    foreach (var plane in allPlanes)
    {
        // Only tracked, horizontal-upward-facing planes are rendered.
        if (plane.GetType() != Plane.Type.HorizontalUpwardFacing ||
            plane.GetTrackingState() != Plane.TrackingState.Tracking)
        {
            continue;
        }

        var center = plane.CenterPose;
        // Get transformed Y axis of plane's coordinate system.
        center.GetTransformedAxis(1, 1.0f, normal, 0);
        // Compute dot product of plane's normal with vector from camera to plane center.
        float distance = (cameraX - center.Tx()) * normal[0] +
            (cameraY - center.Ty()) * normal[1] + (cameraZ - center.Tz()) * normal[2];
        if (distance < 0)
        {
            // Plane is back-facing.
            continue;
        }

        sortedPlanes.Add(new SortablePlane(distance, plane));
    }
    sortedPlanes.Sort((x, y) => x.Distance.CompareTo(y.Distance));

    var cameraView = new float[16];
    cameraPose.Inverse().ToMatrix(cameraView, 0);

    // Planes are drawn with additive blending, masked by the alpha channel for occlusion.
    // Start by clearing the alpha channel of the color buffer to 1.0.
    GLES20.GlClearColor(1, 1, 1, 1);
    GLES20.GlColorMask(false, false, false, true);
    GLES20.GlClear(GLES20.GlColorBufferBit);
    GLES20.GlColorMask(true, true, true, true);

    // Disable depth write.
    GLES20.GlDepthMask(false);

    // Additive blending, masked by alpha channel, clearing alpha channel.
    GLES20.GlEnable(GLES20.GlBlend);
    GLES20.GlBlendFuncSeparate(
        GLES20.GlDstAlpha, GLES20.GlOne,            // RGB (src, dest)
        GLES20.GlZero, GLES20.GlOneMinusSrcAlpha);  // ALPHA (src, dest)

    // Set up the shader.
    GLES20.GlUseProgram(mPlaneProgram);

    // Attach the texture.
    GLES20.GlActiveTexture(GLES20.GlTexture0);
    GLES20.GlBindTexture(GLES20.GlTexture2d, mTextures[0]);
    GLES20.GlUniform1i(mTextureUniform, 0);

    // Shared fragment uniforms.
    GLES20.GlUniform4fv(mGridControlUniform, 1, GRID_CONTROL, 0);

    // Enable vertex arrays
    GLES20.GlEnableVertexAttribArray(mPlaneXZPositionAlphaAttribute);

    ShaderUtil.CheckGLError(TAG, "Setting up to draw planes");

    // Hoisted loop invariants: the matrix buffer is fully overwritten by ToMatrix
    // each iteration, and the UV scales do not depend on the plane.
    float[] planeMatrix = new float[16];
    float uScale = DOTS_PER_METER;
    float vScale = DOTS_PER_METER * EQUILATERAL_TRIANGLE_SCALE;

    foreach (var sortedPlane in sortedPlanes)
    {
        var plane = sortedPlane.Plane;
        plane.CenterPose.ToMatrix(planeMatrix, 0);
        updatePlaneParameters(planeMatrix, plane.ExtentX, plane.ExtentZ, plane.PlanePolygon);

        // Get plane index. Keep a map to assign same indices to same planes.
        // (Plain int assignment replaces the original's pointless round-trip
        // through Java.Lang.Integer.ValueOf(...).IntValue() — identical value,
        // no JNI boxing.)
        int planeIndex;
        if (!mPlaneIndexMap.TryGetValue(plane, out planeIndex))
        {
            planeIndex = mPlaneIndexMap.Count;
            mPlaneIndexMap.Add(plane, planeIndex);
        }

        // Set plane color. Computed deterministically from the Plane index.
        int colorIndex = planeIndex % PLANE_COLORS_RGBA.Length;
        colorRgbaToFloat(mPlaneColor, PLANE_COLORS_RGBA[colorIndex]);
        GLES20.GlUniform4fv(mLineColorUniform, 1, mPlaneColor, 0);
        GLES20.GlUniform4fv(mDotColorUniform, 1, mPlaneColor, 0);

        // Each plane will have its own angle offset from others, to make them easier to
        // distinguish. Compute a 2x2 rotation matrix from the angle.
        float angleRadians = planeIndex * 0.144f;
        mPlaneAngleUvMatrix[0] = +(float)Math.Cos(angleRadians) * uScale;
        mPlaneAngleUvMatrix[1] = -(float)Math.Sin(angleRadians) * uScale;
        mPlaneAngleUvMatrix[2] = +(float)Math.Sin(angleRadians) * vScale;
        mPlaneAngleUvMatrix[3] = +(float)Math.Cos(angleRadians) * vScale;
        GLES20.GlUniformMatrix2fv(mPlaneUvMatrixUniform, 1, false, mPlaneAngleUvMatrix, 0);

        Draw(cameraView, cameraPerspective);
    }

    // Clean up the state we set
    GLES20.GlDisableVertexAttribArray(mPlaneXZPositionAlphaAttribute);
    GLES20.GlBindTexture(GLES20.GlTexture2d, 0);
    GLES20.GlDisable(GLES20.GlBlend);
    GLES20.GlDepthMask(true);

    ShaderUtil.CheckGLError(TAG, "Cleaning up after drawing planes");
}
/// <summary>
/// GLSurfaceView.IRenderer frame callback: updates the ARCore session, renders the camera
/// background to both the screen and an offscreen FBO, handles queued screen taps by
/// creating anchors, and (when capture is enabled) streams the camera pose and the FBO
/// texture out over WebRTC.
/// </summary>
/// <param name="gl">Legacy GL10 handle required by the interface; not used (GLES20 calls only).</param>
public void OnDrawFrame(IGL10 gl)
{
    // Clear screen to notify driver it should not load any pixels from previous frame.
    GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
    GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
    GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);

    if (arSession == null)
    {
        return;
    }

    // Notify ARCore session that the view size changed so that the perspective matrix and the video background
    // can be properly adjusted
    // NOTE(review): this call is intentionally disabled here — confirm rotation updates happen elsewhere.
    // displayRotationHelper.UpdateSessionIfNeeded(arSession);

    try
    {
        // Obtain the current frame from ARSession. When the configuration is set to
        // UpdateMode.BLOCKING (it is by default), this will throttle the rendering to the
        // camera framerate.
        Frame frame = arSession.Update();
        Google.AR.Core.Camera camera = frame.Camera;

        // Draw background to the default framebuffer (the screen)...
        GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
        backgroundRenderer.Draw(frame);

        // ...and again into the offscreen FBO at the capture resolution.
        GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
        GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);
        GLES20.GlClear(GLES20.GlColorBufferBit | GLES20.GlDepthBufferBit);
        backgroundRenderer.Draw(frame);
        GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, 0);
        GLES20.GlViewport(0, 0, glSurfaceView.Width, glSurfaceView.Height);
        GlUtil.CheckNoGLES2Error("Switch framebuffers.");

        // Handle taps. Handling only one tap per frame, as taps are usually low frequency
        // compared to frame rate.
        MotionEvent tap = null;
        queuedSingleTaps.TryDequeue(out tap);
        if (tap != null && camera.TrackingState == TrackingState.Tracking)
        {
            foreach (var hit in frame.HitTest(tap))
            {
                var trackable = hit.Trackable;

                // Check if any plane was hit, and if it was hit inside the plane polygon.
                if (trackable is Plane && ((Plane)trackable).IsPoseInPolygon(hit.HitPose))
                {
                    // Cap the number of objects created. This avoids overloading both the
                    // rendering system and ARCore.
                    if (anchors.Count >= 16)
                    {
                        anchors[0].Detach();
                        anchors.RemoveAt(0);
                    }

                    // Adding an Anchor tells ARCore that it should track this position in
                    // space. This anchor is created on the Plane to place the 3d model
                    // in the correct position relative to both the world and to the plane
                    anchors.Add(hit.CreateAnchor());

                    // Hits are sorted by depth. Consider only closest hit on a plane.
                    break;
                }
            }
        }

        // If not tracking, don't draw 3d objects.
        if (camera.TrackingState == TrackingState.Paused)
        {
            return;
        }

        // Get projection matrix.
        float[] projmtx = new float[16];
        camera.GetProjectionMatrix(projmtx, 0, 0.1f, 100.0f);

        // Get camera matrix and draw.
        float[] viewmtx = new float[16];
        camera.GetViewMatrix(viewmtx, 0);

        // Compute lighting from average intensity of the image.
        var lightIntensity = frame.LightEstimate.PixelIntensity;

        // Visualize tracked points.
        var pointCloud = frame.AcquirePointCloud();
        pointCloudRenderer.Update(pointCloud);

        // App is repsonsible for releasing point cloud resources after using it
        pointCloud.Release();

        var planes = new List <Plane>();
        foreach (var p in arSession.GetAllTrackables(Java.Lang.Class.FromType(typeof(Plane))))
        {
            var plane = (Plane)p;
            planes.Add(plane);
        }

        // Check if we detected at least one plane. If so, hide the loading message.
        if (loadingMessageSnackbar != null)
        {
            foreach (var plane in planes)
            {
                if (plane.GetType() == Plane.Type.HorizontalUpwardFacing &&
                    plane.TrackingState == TrackingState.Tracking)
                {
                    HideLoadingMessage();
                    break;
                }
            }
        }

        // NOTE(review): virtual-content drawing is intentionally disabled here;
        // only the camera background reaches the capture FBO below.
        // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);

        // Switch to the capture FBO for the (currently disabled) 3D passes.
        GLES20.GlBindFramebuffer(GLES20.GlFramebuffer, fboId);
        GLES20.GlViewport(0, 0, targetResolution.Width, targetResolution.Height);

        // Restore the depth state for further drawing.
        GLES20.GlDepthMask(true);
        GLES20.GlEnable(GLES20.GlDepthTest);

        // Draw(frame, camera, projmtx, viewmtx, lightIntensity, planes);
        // DrawModels(projmtx, viewmtx, lightIntensity);

        if (doCaptureCameraFrame)
        {
            // Package the display-oriented camera pose + projection and send it to the
            // remote peer, then hand the FBO texture to the YUV converter for streaming.
            var displayOrientedPose = camera.DisplayOrientedPose;
            var pose = new VirtualStudio.Shared.DTOs.Tracking.Pose
            {
                Position = new System.Numerics.Vector3(displayOrientedPose.Tx(), displayOrientedPose.Ty(), displayOrientedPose.Tz()),
                Orientation = new System.Numerics.Vector4(displayOrientedPose.Qx(), displayOrientedPose.Qy(), displayOrientedPose.Qz(), displayOrientedPose.Qw()),
                Projection = new System.Numerics.Matrix4x4(
                    projmtx[0], projmtx[1], projmtx[2], projmtx[3],
                    projmtx[4], projmtx[5], projmtx[6], projmtx[7],
                    projmtx[8], projmtx[9], projmtx[10], projmtx[11],
                    projmtx[12], projmtx[13], projmtx[14], projmtx[15]
                )
            };
            webRtcClient.SendMessage(pose.ToBinary());
            counter = 0;
            var textureBuffer = new TextureBufferImpl(targetResolution.Width, targetResolution.Height, VideoFrame.TextureBufferType.Rgb,
                renderTextureId, new Android.Graphics.Matrix(), null, null, null);
            var i420Buffer = yuvConverter.Convert(textureBuffer);
            VideoFrameAvailable?.Invoke(this, i420Buffer);
        }
    }
    catch (System.Exception ex)
    {
        // Avoid crashing the application due to unhandled exceptions.
        Log.Error(TAG, "Exception on the OpenGL thread", ex);
    }
}