// Reverse the row order of a 2D point matrix in place.
public static void ReverseOrder(MatOfPoint2f points)
{
    int count = points.rows();
    for (int i = 0; i < count / 2; i++) {
        // Swap point i with its mirror point counted from the end.
        Vector2 vec2 = points.ReadVector2(i);
        int i2 = count - i - 1;
        points.WriteVector2(points.ReadVector2(i2), i);
        points.WriteVector2(vec2, i2);
    }
}
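// The ReadVector2/ReadVector3/WriteVector2 calls used here are extension methods, not part of OpenCV for
// Unity's Mat API. Below is a minimal sketch of what they are assumed to do, wrapping Mat.get/Mat.put on a
// single row of a CV_32FC2 / CV_32FC3 matrix (the class name and the buffer reuse are assumptions):
//
// using UnityEngine;
// using OpenCVForUnity.CoreModule; // Mat, MatOfPoint2f (namespace used by recent OpenCV for Unity versions).
public static class MatVectorExtensionsSketch
{
    static readonly float[] s_float2 = new float[2];
    static readonly float[] s_float3 = new float[3];

    public static Vector2 ReadVector2(this Mat mat, int row)
    {
        mat.get(row, 0, s_float2); // Copy the two channel values (x, y) out of the row.
        return new Vector2(s_float2[0], s_float2[1]);
    }

    public static Vector3 ReadVector3(this Mat mat, int row)
    {
        mat.get(row, 0, s_float3); // Copy (x, y, z) out of the row.
        return new Vector3(s_float3[0], s_float3[1], s_float3[2]);
    }

    public static void WriteVector2(this Mat mat, Vector2 point, int row)
    {
        s_float2[0] = point.x;
        s_float2[1] = point.y;
        mat.put(row, 0, s_float2); // Write (x, y) back into the row.
    }
}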
void UpdateCirclePatternInProjectorImage()
{
    // We will be projecting points through the projector camera. In the editor, the aspect can change
    // depending on the main game view, so we have to ensure that it matches the projector render texture.
    _projectorCamera.targetTexture = _arTexture;

    // Use the circle pattern transform from the last update frame, because it is more likely to match the detected reality.
    TrackingToolsHelper.UpdateWorldSpacePatternPoints(_circlePatternSize, _circlePatternToWorldPrevFrame, TrackingToolsHelper.PatternType.AsymmetricCircleGrid, _circlePatternBorderSizeUV, ref _circlePointsRenderedWorldMat);

    for (int p = 0; p < _circlePatternPointCount; p++) {
        Vector3 worldPoint = _circlePointsRenderedWorldMat.ReadVector3(p);
        Vector3 viewportPoint = _projectorCamera.WorldToViewportPoint(worldPoint);
        // Viewport space has zero at bottom-left, image space (OpenCV) has zero at top-left, so flip y.
        Vector2 imagePoint = new Vector2(viewportPoint.x * _cameraTexture.width, (1 - viewportPoint.y) * _cameraTexture.height);
        _circlePointsProjectorRenderImageMat.WriteVector2(imagePoint, p);
    }

    //TrackingToolsHelper.DrawFoundPattern( _camTexGrayUndistortMat, circlesPatternSize, _circlePointsProjectorRenderImageMat ); // Testing

    // Reset render target.
    _projectorCamera.targetTexture = null;
}
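// A hedged sketch of a step these points are typically used for afterwards (not shown in the original
// methods): estimating a homography that maps pattern points detected in the camera image onto the
// corresponding projector-image points written above. It assumes a MatOfPoint2f member named
// _circlePointsDetectedImageMat holds the pattern detected in the camera image, that
// _circlePointsProjectorRenderImageMat is also a MatOfPoint2f, and that both hold points in the same
// row order (apply ReverseOrder above first if the grid was detected rotated 180 degrees).
//
// using OpenCVForUnity.Calib3dModule; // Calib3d.findHomography.
Mat EstimateCameraToProjectorHomographySketch()
{
    // RANSAC makes the estimate robust to a few badly detected circle centers.
    return Calib3d.findHomography(_circlePointsDetectedImageMat, _circlePointsProjectorRenderImageMat, Calib3d.RANSAC);
}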