public void CollinearTest()
{
    // Three points stacked along the z-axis lie on one line.
    {
        Point3 a = new Point3(0, 0, 0);
        Point3 b = new Point3(0, 0, 1);
        Point3 c = new Point3(0, 0, 2);

        Assert.AreEqual(true, Point3.Collinear(a, b, c));
    }

    // A clearly non-degenerate triangle is not collinear.
    {
        Point3 a = new Point3(1, 0, 0);
        Point3 b = new Point3(0, 2, 1);
        Point3 c = new Point3(0, 0, 2);

        Assert.AreEqual(false, Point3.Collinear(a, b, c));
    }

    // Larger coordinates that are close to, but not exactly, collinear.
    {
        Point3 a = new Point3(134, 268, 402);
        Point3 b = new Point3(329, 658, 98);
        Point3 c = new Point3(125, 250, 375);

        Assert.AreEqual(false, Point3.Collinear(a, b, c));
    }
}
/// <summary>
///   Projects a 3D point onto the 2D plane as seen from the given camera position.
/// </summary>
/// <param name="point">The point to project. Its Z must be nonzero (it divides the scale factor).</param>
/// <param name="camera">The camera position; default is the origin (0,0,0).</param>
/// <returns>The projected 2D point.</returns>
public static PointF Project(this Point3 point, Point3 camera = default(Point3))
{
    // Depth of the point relative to the camera plane.
    var depth = point.Z - camera.Z;
    // Perspective scale applied to the camera-relative offsets.
    var scale = depth / point.Z;

    return new PointF
    {
        X = ((point.X - camera.X) * scale) + camera.X,
        Y = ((point.Y - camera.Y) * scale) + camera.Y
    };
}
public void FromPointsTest()
{
    // Build a plane through three known points and check the resulting normal.
    Point3 a = new Point3(0, 1, -7);
    Point3 b = new Point3(3, 1, -9);
    Point3 c = new Point3(0, -5, -8);

    Plane plane = Plane.FromPoints(a, b, c);

    Assert.AreEqual(new Vector3(-12, 3, -18), plane.Normal);
}
public void FromPointsTest2()
{
    // Build a plane through three known points and compare the full equation.
    Point3 a = new Point3(1, 2, -2);
    Point3 b = new Point3(3, -2, 1);
    Point3 c = new Point3(5, 1, -4);

    Plane computed = Plane.FromPoints(a, b, c);

    Assert.AreEqual(new Plane(11, 16, 14, -15), computed);
}
/// <summary>
/// Produces a copy of <paramref name="last"/> translated along its normal:
/// the plane offset is reduced by <paramref name="Dist"/>, the normal is unchanged.
/// </summary>
/// <param name="last">The plane to translate.</param>
/// <param name="next">Receives the translated plane, with empty center/point/vertex data.</param>
/// <param name="Dist">Translation distance subtracted from the plane offset.</param>
public static void Translate(MyPlane last, out MyPlane next, float Dist)
{
    // Same normal, shifted offset.
    Plane shiftedEquation = new Plane(last.planeEquation.Normal, last.planeEquation.Offset - Dist);

    // The translated plane carries no geometry yet — fresh, empty containers.
    next = new MyPlane(shiftedEquation,
                       new MyVector3(),
                       new List<Accord.Math.Point3>(),
                       new List<MyVector3>());
}
/// <summary>
/// Parses the node's inner text as a flat comma-separated list of floats
/// (x1,y1,z1,x2,y2,z2,...) and stores the result as the bounding polygon.
/// If the value count is not a multiple of three, the polygon is left empty.
/// </summary>
/// <param name="xmlNode">Node whose InnerText holds the serialized points.</param>
public override void readFromXml(XmlNode xmlNode)
{
    String parameters = xmlNode.InnerText;
    String[] parts = parameters.Split(',');
    List<Point3> points = new List<Point3>();
    if (parts.Length % 3 == 0)
    {
        for (int i = 0; i < parts.Length / 3; i++)
        {
            // Parse with the invariant culture: the file format uses '.' as the
            // decimal separator, and culture-sensitive Parse would fail (or
            // misread) on comma-decimal locales.
            Point3 p = new Point3(
                float.Parse(parts[3 * i].Trim(), System.Globalization.CultureInfo.InvariantCulture),
                float.Parse(parts[3 * i + 1].Trim(), System.Globalization.CultureInfo.InvariantCulture),
                float.Parse(parts[3 * i + 2].Trim(), System.Globalization.CultureInfo.InvariantCulture));
            points.Add(p);
        }
    }
    boundingPolygon = points;
}
/// <summary>
/// Reads the cube's center (3 comma-separated floats) and orientation
/// quaternion (4 comma-separated floats) from the given XML node.
/// Fields are only assigned when the component count matches.
/// </summary>
/// <param name="xmlNode">Node containing CENTER and QUATERNIONS child nodes.</param>
public override void readFromXml(XmlNode xmlNode)
{
    XmlNode centerNode = xmlNode.SelectSingleNode(CENTER);
    String[] parts = centerNode.InnerText.Split(',');
    if (parts.Length == 3)
    {
        // Invariant culture: the serialized format uses '.' decimals and ','
        // separators, which a comma-decimal locale would otherwise misparse.
        center = new Point3(
            float.Parse(parts[0], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(parts[1], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(parts[2], System.Globalization.CultureInfo.InvariantCulture));
    }

    XmlNode quaternionsNode = xmlNode.SelectSingleNode(QUATERNIONS);
    parts = quaternionsNode.InnerText.Split(',');
    if (parts.Length == 4)
    {
        quaternion = new Quaternions(
            float.Parse(parts[0], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(parts[1], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(parts[2], System.Globalization.CultureInfo.InvariantCulture),
            float.Parse(parts[3], System.Globalization.CultureInfo.InvariantCulture));
    }
}
/// <summary>
///   Estimates a plane through a set of 3D points using RANSAC.
/// </summary>
///
/// <param name="points">The observed points; at least three are required.</param>
///
/// <returns>The plane fitted to the RANSAC inliers, or null when no inliers were found.</returns>
///
public Plane Estimate(Point3[] points)
{
    if (points.Length < 3)
        throw new ArgumentException("At least three points are required to fit a plane");

    // Reset per-call state consumed during the RANSAC evaluation.
    this.d2 = new double[points.Length];
    this.points = points;

    // Run RANSAC to find the consensus (inlier) set.
    ransac.Compute(points.Length, out inliers);

    // No consensus: estimation failed.
    if (inliers.Length == 0)
        return null;

    // Refit the plane using only the inlier points.
    return fitting(points.Submatrix(inliers));
}
/// <summary>
/// Returns a copy of this glyph-box mark with every bounding polygon
/// scaled and translated; glyph size and faces are carried over unchanged.
/// </summary>
public override LocationMark3D getScaledLocationMark(float scale, Point3 translation)
{
    var rescaledPolygons = boundingPolygons
        .Select(polygon => polygon.scaleBound(scale, translation))
        .ToList();

    return new GlyphBoxLocationMark3D(frameNo, this.glyphSize, rescaledPolygons, faces);
}
/// <summary>
/// Converts a <see cref="Point3"/> to a <see cref="System.Numerics.Vector3"/>,
/// copying X, Y and Z component-wise.
/// </summary>
public static System.Numerics.Vector3 ToSystemVector3(this Point3 v)
{
    var converted = new System.Numerics.Vector3(v.X, v.Y, v.Z);
    return converted;
}
/// <summary>
/// Back-projects a depth pixel into camera space.
/// </summary>
/// <param name="depthPixel">depthPixel.X = pixel column; depthPixel.Y = pixel row; depthPixel.Z = depth in millimeters.</param>
/// <returns>Camera-space point with coordinates in meters.</returns>
public static Point3 projectDepthPixelToCameraSpacePoint(Point3 depthPixel)
{
    // Hard-coded pinhole parameters (focal lengths and principal point) —
    // presumably calibrated for this specific Kinect sensor; confirm before reuse.
    const float focalX = 367.187f;
    const float focalY = -369f;
    const float principalX = 252.7343f;
    const float principalY = 203.6235f;

    float z = depthPixel.Z / 1000;                       // mm -> m
    float x = (depthPixel.X - principalX) * z / focalX;
    float y = (depthPixel.Y - principalY) * z / focalY;
    return new Point3(x, y, z);
}
public void RansacPlaneConstructorTest()
{
    // Fixed seed so the RANSAC sampling is reproducible.
    Accord.Math.Tools.SetupGenerator(0);

    // Case 1: points spread over the z = 0 plane; expect normal (0, 0, -1), offset 0.
    {
        Point3[] points = new Point3[300];

        int c = 0;
        for (int i = 0; i < points.Length / 3; i++)
        {
            points[c++] = new Point3((float)i, (float)0, (float)0);
            points[c++] = new Point3((float)0, (float)i, (float)0);
            points[c++] = new Point3((float)i, (float)i, (float)0);
        }

        RansacPlane target = new RansacPlane(0.80, 0.9);

        Plane expected = Plane.FromPoints(points[3], points[4], points[5]);
        Plane actual = target.Estimate(points);

        Assert.AreEqual(actual.A, 0);
        Assert.AreEqual(actual.B, 0);
        Assert.AreEqual(actual.C, -1);
        Assert.AreEqual(actual.Offset, 0);
        Assert.IsTrue(expected.Equals(actual, 1e-3));
    }

    // Case 2: the same layout lifted to z = 50; expect normal (0, 0, -1), offset 50.
    {
        Point3[] points = new Point3[300];

        int c = 0;
        for (int i = 0; i < points.Length / 3; i++)
        {
            points[c++] = new Point3((float)i, (float)0, (float)50);
            points[c++] = new Point3((float)0, (float)i, (float)50);
            points[c++] = new Point3((float)i, (float)i, (float)50);
        }

        RansacPlane target = new RansacPlane(0.80, 0.9);

        Plane expected = Plane.FromPoints(points[6], points[7], points[8]);
        expected.Normalize();

        Plane actual = target.Estimate(points);

        Assert.AreEqual(actual.A, 0);
        Assert.AreEqual(actual.B, 0, 1e-5);
        Assert.AreEqual(actual.C, -1, 1e-5);
        Assert.AreEqual(actual.Offset, 50, 1e-4);
        Assert.IsTrue(expected.Equals(actual, 1e-3));
    }

    // Case 3: noisy samples satisfying z = x + y - 1, stored as (x, z, y),
    // so the fitted normal — rescaled by its largest component — is (+1, -1, +1).
    {
        Point3[] points = new Point3[10 * 10];

        int c = 0;
        for (int i = 0; i < 10; i++)
        {
            for (int j = 0; j < 10; j++)
            {
                double x = i + 5 * Accord.Math.Tools.Random.NextDouble();
                double y = j + 5 * Accord.Math.Tools.Random.NextDouble();
                double z = x + y - 1;
                points[c++] = new Point3((float)x, (float)z, (float)y);
            }
        }

        RansacPlane target = new RansacPlane(0.80, 0.9);

        Plane actual = target.Estimate(points);
        var normal = actual.Normal / actual.Normal.Max;

        Assert.AreEqual(normal.X, +1, 1e-5);
        Assert.AreEqual(normal.Y, -1, 1e-5);
        Assert.AreEqual(normal.Z, +1, 1e-5);
    }
}
/// <summary>
/// Averages every known neighbor lying exactly on the square ring of radius
/// <paramref name="size"/> around (x, y) in <paramref name="tempo"/>.
/// Returns the sentinel value <c>initiate</c> when no neighbor is known.
/// </summary>
private static Point3 getLinearInterpolateOnBoundary(int colorWidth, int colorHeight, Point3[,] tempo, int x, int y, int size)
{
    float sumX = 0.0f, sumY = 0.0f, sumZ = 0.0f;
    var neighborCount = 0;

    for (int dx = -size; dx <= size; dx++)
        for (int dy = -size; dy <= size; dy++)
        {
            // Only cells on the ring itself, not in its interior.
            bool onRing = dx == -size || dx == size || dy == size || dy == -size;
            if (!onRing)
                continue;

            int nx = x + dx;
            int ny = y + dy;
            if (nx < 0 || nx >= colorWidth || ny < 0 || ny >= colorHeight)
                continue;

            // Skip cells that still hold the "no data" sentinel.
            if (tempo[nx, ny].Equals(initiate))
                continue;

            sumX += tempo[nx, ny].X;
            sumY += tempo[nx, ny].Y;
            sumZ += tempo[nx, ny].Z;
            neighborCount += 1;
        }

    if (neighborCount == 0)
        return initiate;

    return new Point3 { X = sumX / neighborCount, Y = sumY / neighborCount, Z = sumZ / neighborCount };
}
/// <summary>
/// Maps each depth pixel into color-image coordinates and records, per color
/// pixel, the camera-space point closest to the camera. Cells receiving no
/// depth sample keep the sentinel value <c>initiate</c>.
/// </summary>
public Point3[,] projectDepthImageToColor(ushort[] depthImage, int depthWidth, int depthHeight, int colorWidth, int colorHeight)
{
    Point3[,] mapped = new Point3[colorWidth, colorHeight];

    // Start every color cell at the "no data" sentinel.
    for (int cx = 0; cx < colorWidth; cx++)
        for (int cy = 0; cy < colorHeight; cy++)
        {
            mapped[cx, cy] = initiate;
        }

    for (int dx = 0; dx < depthWidth; dx++)
        for (int dy = 0; dy < depthHeight; dy++)
        {
            ushort depth = depthImage[dy * depthWidth + dx];

            // Lift the depth pixel to camera space and project it onto the color image.
            Point3 depthPixel = new Point3 { X = dx, Y = dy, Z = depth };
            Point3 cameraPoint = KinectUtils.projectDepthPixelToCameraSpacePoint(depthPixel);
            System.Drawing.PointF colorPos = projectedPoint(depthPixel);

            int cx = (int)colorPos.X;
            int cy = (int)colorPos.Y;
            if (cx < 0 || cx >= colorWidth || cy < 0 || cy >= colorHeight)
                continue;

            // When several depth pixels land on one color pixel, keep the
            // point closest to the camera (smallest Z).
            if (mapped[cx, cy].Equals(initiate) || mapped[cx, cy].Z > cameraPoint.Z)
            {
                mapped[cx, cy] = cameraPoint;
            }
        }

    return mapped;
}
/// <summary>
/// Creates a location mark describing a cube at the given frame.
/// </summary>
/// <param name="frameNo">Frame index the mark belongs to (forwarded to the base class).</param>
/// <param name="center">Cube center position.</param>
/// <param name="quaternion">Cube orientation.</param>
public CubeLocationMark(int frameNo, Point3 center, Quaternions quaternion) : base(frameNo)
{
    this.center = center;
    this.quaternion = quaternion;
}
/// <summary>
/// Determines whether the specified <see cref="Point3"/> is equal to this
/// instance, comparing each coordinate within the given tolerance.
/// </summary>
///
/// <param name="other">The <see cref="Point3"/> to compare with this instance.</param>
/// <param name="tolerance">The acceptance tolerance threshold to consider the instances equal.</param>
///
/// <returns>
///   <c>true</c> if every coordinate difference is below <paramref name="tolerance"/>; otherwise, <c>false</c>.
/// </returns>
///
public bool Equals(Point3 other, double tolerance)
{
    double dx = Math.Abs(coordinates.X - other.coordinates.X);
    double dy = Math.Abs(coordinates.Y - other.coordinates.Y);
    double dz = Math.Abs(coordinates.Z - other.coordinates.Z);
    return dx < tolerance && dy < tolerance && dz < tolerance;
}
/// <summary>
/// Determines whether the specified <see cref="Point3"/> is equal to this instance.
/// </summary>
///
/// <param name="other">The <see cref="Point3"/> to compare with this instance.</param>
///
/// <returns>
///   <c>true</c> if the specified <see cref="Point3"/> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
///
public bool Equals(Point3 other)
{
    // Exact component-wise comparison; use Equals(Point3, double) for a tolerant check.
    return(coordinates == other.coordinates);
}
/// <summary>
/// Converts a <see cref="Point3"/> to a <see cref="UnityEngine.Vector3"/>,
/// copying X, Y and Z component-wise.
/// </summary>
public static UnityEngine.Vector3 ToUnityVector3(this Point3 v)
{
    var converted = new UnityEngine.Vector3(v.X, v.Y, v.Z);
    return converted;
}
/// <summary>
/// Looks up the camera-space point for a color pixel, probing the pixel itself
/// and then nearby offsets (up to ±2 in each axis, nearest first) until a
/// non-sentinel value is found. Returns the sentinel when nothing is found.
/// </summary>
private Point3 getCameraSpacePoint(Point3[,] colorSpaceToCameraSpacePoint, PointF p)
{
    int px = (int)p.X;
    int py = (int)p.Y;

    Point3 found = nullCameraSpacePoint;

    // Probe order: exact pixel first, then increasing distance.
    foreach (var dx in new int[] { 0, -1, 1, -2, 2 })
        foreach (var dy in new int[] { 0, -1, 1, -2, 2 })
        {
            bool insideFrame =
                px + dx >= 0 && px + dx < session.getVideo(videoFile).frameWidth &&
                py + dy >= 0 && py + dy < session.getVideo(videoFile).frameHeight;
            if (!insideFrame)
                continue;

            found = colorSpaceToCameraSpacePoint[px + dx, py + dy];
            if (!found.Equals(nullCameraSpacePoint))
            {
                return found;
            }
        }

    return found;
}
/// <summary>
/// Generates 3D location marks for this object by projecting its 2D boundary
/// (rectangle corners or polygon vertices) into camera space using the depth
/// stream. Only runs for manually-annotated objects and asks the user for
/// confirmation first. For "Flat" VoxML types, missing corners are
/// extrapolated from a retrievable anchor corner.
/// </summary>
/// <param name="depthReader">Source of per-frame depth images; aborts if null.</param>
/// <param name="mappingHelper">Maps depth pixels into color/camera space; aborts if null.</param>
/// <returns>True if the object was 3d-generated; false otherwise.</returns>
internal virtual bool generate3d(BaseDepthReader depthReader, DepthCoordinateMappingReader mappingHelper)
{
    if (depthReader == null)
    {
        Console.WriteLine("depthReader is null ");
        return false;
    }
    if (mappingHelper == null)
    {
        Console.WriteLine("mappingHelper is null ");
        return false;
    }
    switch (this.genType)
    {
        case GenType.MANUAL:
            var voxMLReader = VoxMLReader.getDefaultVoxMLReader();
            var voxMLType = voxMLReader.getVoxMLType(this.semanticType);

            string inform = "";
            if (voxMLType.HasValue)
            {
                inform = "Object 3d will be generated based on VoxML semantic type " + voxMLType.Value.pred;
            }
            else
            {
                inform = "There is no corresponding VoxML semantic type. The boundary of objects will be projected directly onto 3d. Do you want to continue?";
            }

            var result = System.Windows.Forms.MessageBox.Show(inform, "Generate 3D", System.Windows.Forms.MessageBoxButtons.YesNo);

            if (result == System.Windows.Forms.DialogResult.Yes)
            {
                foreach (var entry in objectMarks)
                {
                    int frameNo = entry.Key;

                    // Mapping depth image
                    // At this point we use video frameNo
                    // It's actually just an approximation for the depth frameNo
                    Point3[,] colorSpaceToCameraSpacePoint = mappingHelper.projectDepthImageToColor(
                        depthReader.readFrame(frameNo),
                        depthReader.getWidth(),
                        depthReader.getHeight(),
                        session.getVideo(videoFile).frameWidth,
                        session.getVideo(videoFile).frameHeight);

                    LocationMark objectMark = entry.Value;

                    List<PointF> boundary = new List<PointF>();
                    List<Point3> boundary3d = new List<Point3>();

                    // Collect the 2D boundary corners depending on the mark shape.
                    if (this is RectangleObject)
                    {
                        var boundingBox = ((RectangleLocationMark)objectMark).boundingBox;
                        boundary.Add(new PointF(boundingBox.X, boundingBox.Y));
                        boundary.Add(new PointF(boundingBox.X + boundingBox.Width, boundingBox.Y));
                        boundary.Add(new PointF(boundingBox.X, boundingBox.Y + boundingBox.Height));
                        boundary.Add(new PointF(boundingBox.X + boundingBox.Width, boundingBox.Y + boundingBox.Height));
                    }
                    else if (this is PolygonObject)
                    {
                        boundary.AddRange(((PolygonLocationMark2D)objectMark).boundingPolygon);
                    }

                    // Using flat information if possible
                    if (voxMLType.HasValue && voxMLType.Value.concavity == "Flat")
                    {
                        // First algorithm : has completed
                        // Using the majority of points to infer the exact locations
                        // of corner points
                        // Divide the diagonal between any two corners
                        // into noOfInner + 1 sub-segments
                        // int noOfInner = 2;

                        // Create a list of inner points for the surface
                        // by linear combination of corners
                        //List<PointF> innerPoints = new List<PointF>();
                        //for (int i = 0; i < boundary.Count; i++)
                        //    for (int j = i + 1; j < boundary.Count; j++) {
                        //        var start = boundary[i];
                        //        var end = boundary[j];
                        //        for ( int k = 0; k < noOfInner; k ++ )
                        //        {
                        //            var p = new PointF((start.X * (k + 1) + end.X * (noOfInner - k)) / (noOfInner + 1),
                        //                (start.Y * (k + 1) + end.Y * (noOfInner - k)) / (noOfInner + 1));
                        //            innerPoints.Add(p);
                        //        }
                        //    }
                        //// Add the original corner points
                        //innerPoints.AddRange(boundary);

                        int tryDivide = 10;

                        // Second algorithm, work only if one corner point is retrievable
                        PointF anchorCorner = new PointF();
                        Point3 anchorPoint = nullCameraSpacePoint;
                        foreach (PointF p in boundary)
                        {
                            Point3 cameraSpacePoint = getCameraSpacePoint(colorSpaceToCameraSpacePoint, p);
                            if (cameraSpacePoint != null && !cameraSpacePoint.Equals(nullCameraSpacePoint))
                            {
                                anchorCorner = p;
                                anchorPoint = cameraSpacePoint;
                                break;
                            }
                        }

                        if (!anchorPoint.Equals(nullCameraSpacePoint))
                        {
                            foreach (PointF corner in boundary)
                            {
                                Point3 cameraSpacePoint = getCameraSpacePoint(colorSpaceToCameraSpacePoint, corner);

                                // If point p is out of the depth view
                                if (cameraSpacePoint.Equals(nullCameraSpacePoint))
                                {
                                    var start = anchorCorner;
                                    var end = corner;
                                    var added = false;

                                    // For each value of k, try to get the point
                                    // between anchorCorner and corner
                                    // that divides the segment into 1 and k - 1 parts
                                    for (int k = 2; k < tryDivide; k++)
                                    {
                                        var p = new PointF((start.X + end.X * (k - 1)) / k, (start.Y + end.Y * (k - 1)) / k);
                                        Point3 middleCameraSpacePoint = getCameraSpacePoint(colorSpaceToCameraSpacePoint, p);
                                        if (middleCameraSpacePoint != null && !middleCameraSpacePoint.Equals(nullCameraSpacePoint))
                                        {
                                            // Extrapolate the missing corner from the anchor
                                            // through the retrieved intermediate point.
                                            var inferred_p = new Point3()
                                            {
                                                X = anchorPoint.X + (middleCameraSpacePoint.X - anchorPoint.X) * k,
                                                Y = anchorPoint.Y + (middleCameraSpacePoint.Y - anchorPoint.Y) * k,
                                                Z = anchorPoint.Z + (middleCameraSpacePoint.Z - anchorPoint.Z) * k,
                                            };
                                            boundary3d.Add(inferred_p);
                                            added = true;
                                            break;
                                        }
                                    }

                                    // If that doesn't work
                                    if (!added)
                                    {
                                        boundary3d.Add(nullCameraSpacePoint);
                                    }
                                }
                                else
                                {
                                    boundary3d.Add(cameraSpacePoint);
                                }
                            }
                        }
                    }
                    else
                    {
                        // Just mapping to 3d points
                        foreach (PointF p in boundary)
                        {
                            Point3 cameraSpacePoint = getCameraSpacePoint(colorSpaceToCameraSpacePoint, p);
                            if (cameraSpacePoint != null && !cameraSpacePoint.Equals(nullCameraSpacePoint))
                            {
                                boundary3d.Add(cameraSpacePoint);
                            }
                            else
                            {
                                boundary3d.Add(nullCameraSpacePoint);
                            }
                        }
                    }

                    set3DBounding(frameNo, new PolygonLocationMark3D(frameNo, boundary3d));
                }
            }
            break;
        default:
            return false;
    }

    this.objectType = ObjectType._3D;
    return true;
}
/// <summary>
/// Least-squares plane fit through a point cloud. With exactly three points
/// the plane is constructed directly; with more, the plane coefficients are
/// taken from the singular vector of the smallest singular value of the
/// constraint matrix, then normalized to a unit normal.
/// </summary>
/// <param name="points">Points to fit; returns null when fewer than three are given.</param>
static Plane fitting(Point3[] points)
{
    // Set up constraint equations of the form AB = 0,
    // where B is a column vector of the plane coefficients
    // in the form b(1)*X + b(2)*Y + b(3)*Z + b(4) = 0.
    //
    // A = [XYZ' ones(npts,1)]; % Build constraint matrix

    if (points.Length < 3)
        return null;

    // Three points determine the plane exactly — no least squares needed.
    if (points.Length == 3)
        return Plane.FromPoints(points[0], points[1], points[2]);

    float[,] A = new float[points.Length, 4];
    for (int i = 0; i < points.Length; i++)
    {
        A[i, 0] = points[i].X;
        A[i, 1] = points[i].Y;
        A[i, 2] = points[i].Z;
        A[i, 3] = -1;
    }

    // The right singular vector paired with the smallest singular value
    // (last column of V) is the least-squares solution of A * B = 0.
    SingularValueDecompositionF svd = new SingularValueDecompositionF(A,
        computeLeftSingularVectors: false, computeRightSingularVectors: true,
        autoTranspose: true, inPlace: true);

    float[,] v = svd.RightSingularVectors;

    float a = v[0, 3];
    float b = v[1, 3];
    float c = v[2, 3];
    float d = v[3, 3];

    // Scale so (a, b, c) becomes a unit normal.
    float norm = (float)Math.Sqrt(a * a + b * b + c * c);

    a /= norm;
    b /= norm;
    c /= norm;
    d /= norm;

    return new Plane(a, b, c, -d);
}
/// <summary>
/// Distance from the given vector (interpreted as a point) to this plane,
/// delegated to the underlying plane equation.
/// </summary>
public double DistanceToPoint(MyVector3 a)
{
    // Convert to the Accord point type expected by the plane equation.
    var accordPoint = new Accord.Math.Point3((float)a.x, (float)a.y, (float)a.z);
    return this.planeEquation.DistanceToPoint(accordPoint);
}
/// <summary>
/// Map from a depth point to a color point.
/// </summary>
/// <param name="p">
/// p.X = depth pixel on x axis,
/// p.Y = depth pixel on y axis,
/// p.Z = depth in meters
/// </param>
/// <returns>Color space point in pixels; a default (0,0) point when the pixel is outside the 512x424 depth frame.</returns>
public System.Drawing.PointF projectedPoint(Point3 p)
{
    int px = (int)p.X;
    int py = (int)p.Y;

    // Outside the depth frame: nothing to map.
    if (px < 0 || px >= 512)
        return new System.Drawing.PointF();
    if (py < 0 || py >= 424)
        return new System.Drawing.PointF();

    var near = shortRangeMap[px, py];
    var far = longRangeMap[px, py];

    // Model each axis as color = a / depth + b, fitted through the
    // pre-computed short-range and long-range calibration samples.
    var bX = (far.X * longRange - near.X * shortRange) / (longRange - shortRange);
    var aX = (near.X - bX) * shortRange;
    var colorX = aX / p.Z + bX;

    var bY = (far.Y * longRange - near.Y * shortRange) / (longRange - shortRange);
    var aY = (near.Y - bY) * shortRange;
    var colorY = aY / p.Z + bY;

    return new System.Drawing.PointF(colorX, colorY);
}
/// <summary>
/// Determines whether the specified <see cref="Point3"/> is equal to this
/// instance within a per-coordinate tolerance.
/// </summary>
///
/// <param name="other">The <see cref="Point3"/> to compare with this instance.</param>
/// <param name="tolerance">The acceptance tolerance threshold to consider the instances equal.</param>
///
/// <returns>
///   <c>true</c> if every coordinate difference is below the tolerance; otherwise, <c>false</c>.
/// </returns>
///
public bool Equals(Point3 other, double tolerance)
{
    bool xClose = Math.Abs(coordinates.X - other.coordinates.X) < tolerance;
    bool yClose = Math.Abs(coordinates.Y - other.coordinates.Y) < tolerance;
    bool zClose = Math.Abs(coordinates.Z - other.coordinates.Z) < tolerance;
    return xClose && yClose && zClose;
}
/// <summary>
/// Projects a full depth frame into camera space, resampled onto the color
/// image grid: each color pixel receives the camera-space point of the
/// closest depth sample landing on it, then small gaps are filled by linear
/// interpolation over 1- and 2-pixel boundary rings. Unfilled cells get
/// (-1, -1, -1). When DEBUG is set, coverage masks are saved to 0.png/1.png.
/// NOTE(review): the initialization loop indexes result as x * colorHeight + y
/// while all later writes use y * colorWidth + x — both cover the full array,
/// but the orderings disagree; confirm the intended layout with the caller.
/// </summary>
/// <param name="depthImage">Row-major depth values, depthWidth x depthHeight, in the units consumed by projectDepthPixelToCameraSpacePoint.</param>
/// <param name="result">Output buffer of colorWidth * colorHeight camera-space points; written in place.</param>
public void projectDepthImageToCameraSpacePoint(ushort[] depthImage, int depthWidth, int depthHeight, int colorWidth, int colorHeight, CameraSpacePoint[] result)
{
    // For debug purpose
    Bitmap bm = null;
    byte[] depthValuesToByte = null;
    BitmapData bmapdata = null;
    IntPtr? ptr = null;
    if (DEBUG)
    {
        bm = new Bitmap(colorWidth, colorHeight, PixelFormat.Format32bppRgb);
        bmapdata = bm.LockBits(
            new Rectangle(0, 0, colorWidth, colorHeight),
            ImageLockMode.WriteOnly,
            bm.PixelFormat);
        ptr = bmapdata.Scan0;
        depthValuesToByte = new byte[colorWidth * colorHeight * 4];
        for (int i = 0; i < colorWidth * colorHeight; i++)
        {
            depthValuesToByte[4 * i] = depthValuesToByte[4 * i + 1] = depthValuesToByte[4 * i + 2] = 0;
        }
    }

    // Working grid in color-image coordinates; sentinel = no depth sample yet.
    Point3[,] tempo = new Point3[colorWidth, colorHeight];
    for (int x = 0; x < colorWidth; x++)
        for (int y = 0; y < colorHeight; y++)
        {
            tempo[x, y] = initiate;
        }

    // Bounding box of color pixels actually touched by depth samples.
    int xsmallest = colorWidth, ysmallest = colorHeight, xlargest = -1, ylargest = -1;

    for (int x = 0; x < depthWidth; x++)
    {
        for (int y = 0; y < depthHeight; y++)
        {
            ushort depth = depthImage[y * depthWidth + x];

            // Project this depth pixel coordinate to camera space point
            Point3 depthPixel = new Point3 { X = x, Y = y, Z = depth };
            Point3 cameraSpacePoint = KinectUtils.projectDepthPixelToCameraSpacePoint(depthPixel);
            System.Drawing.PointF colorPixelPoint = projectedPoint(depthPixel);

            int color_X = (int)colorPixelPoint.X;
            int color_Y = (int)colorPixelPoint.Y;

            if (color_X >= 0 && color_X < colorWidth && color_Y >= 0 && color_Y < colorHeight)
            {
                // Replace smallest and largest X and Y
                if (color_X < xsmallest)
                {
                    //Console.WriteLine("XSmallest; depth = " + depth + " x = " + x + " y = " + y + " cameraSpacePoint = " + cameraSpacePoint.ToSString() + " colorPixelPoint = " + colorPixelPoint);
                    xsmallest = color_X;
                }
                if (color_X > xlargest)
                {
                    //Console.WriteLine("XLargest; depth = " + depth + " x = " + x + " y = " + y + " cameraSpacePoint = " + cameraSpacePoint.ToSString() + " colorPixelPoint = " + colorPixelPoint);
                    xlargest = color_X;
                }
                if (color_Y < ysmallest)
                {
                    //Console.WriteLine("YSmallest; depth = " + depth + " x = " + x + " y = " + y + " cameraSpacePoint = " + cameraSpacePoint.ToSString() + " colorPixelPoint = " + colorPixelPoint);
                    ysmallest = color_Y;
                }
                if (color_Y > ylargest)
                {
                    //Console.WriteLine("YLargest; depth = " + depth + " x = " + x + " y = " + y + " cameraSpacePoint = " + cameraSpacePoint.ToSString() + " colorPixelPoint = " + colorPixelPoint);
                    ylargest = color_Y;
                }

                if (!tempo[color_X, color_Y].Equals(initiate))
                {
                    // Replace the current one with the one closer to the camera
                    if (tempo[color_X, color_Y].Z > cameraSpacePoint.Z)
                    {
                        tempo[color_X, color_Y] = cameraSpacePoint;
                    }
                }
                else
                {
                    tempo[color_X, color_Y] = cameraSpacePoint;
                }
            }
        }
    }

    // Initialize the whole output to the "no data" marker (-1, -1, -1).
    for (int x = 0; x < colorWidth; x++)
        for (int y = 0; y < colorHeight; y++)
        {
            result[x * colorHeight + y] = new CameraSpacePoint { X = -1, Y = -1, Z = -1 };
        }

    //Console.WriteLine("xsmallest " + xsmallest);
    //Console.WriteLine("xlargest " + xlargest);
    //Console.WriteLine("ysmallest " + ysmallest);
    //Console.WriteLine("ylargest " + ylargest);

    // First pass: copy direct samples inside the touched bounding box.
    for (int x = xsmallest; x < xlargest; x++)
    {
        for (int y = ysmallest; y < ylargest; y++)
        {
            var index = y * colorWidth + x;

            // If tempo[x,y] has value
            if (!tempo[x, y].Equals(initiate))
            {
                result[index] = new CameraSpacePoint { X = tempo[x, y].X, Y = tempo[x, y].Y, Z = tempo[x, y].Z };
                //writetext.Write("1");
                if (DEBUG)
                    depthValuesToByte[4 * index] = depthValuesToByte[4 * index + 1] = depthValuesToByte[4 * index + 2] = 255;
            }
            else
            {
                //writetext.Write("0");
                if (DEBUG)
                    depthValuesToByte[4 * index] = depthValuesToByte[4 * index + 1] = depthValuesToByte[4 * index + 2] = 0;
            }
        }
        //writetext.WriteLine();
    }

    if (DEBUG)
    {
        Marshal.Copy(depthValuesToByte, 0, ptr.Value, colorWidth * colorHeight * 4);
        bm.UnlockBits(bmapdata);
        bm.Save("0.png");
        bmapdata = bm.LockBits(
            new Rectangle(0, 0, colorWidth, colorHeight),
            ImageLockMode.WriteOnly,
            bm.PixelFormat);
    }

    // Linear interpolate
    // Second pass: fill holes from the ring average at radius 1, then 2.
    for (int x = xsmallest; x < xlargest; x++)
        for (int y = ysmallest; y < ylargest; y++)
        {
            var index = y * colorWidth + x;

            // If tempo[x,y] has value
            if (!tempo[x, y].Equals(initiate))
            {
                result[index] = new CameraSpacePoint { X = tempo[x, y].X, Y = tempo[x, y].Y, Z = tempo[x, y].Z };
                //writetext.Write("1");
                if (DEBUG)
                    depthValuesToByte[4 * index] = depthValuesToByte[4 * index + 1] = depthValuesToByte[4 * index + 2] = 255;
                continue;
            }

            Point3 interpolatedOnBoundary = initiate;
            for (int boundarySize = 1; boundarySize <= 2; boundarySize++)
            {
                interpolatedOnBoundary = getLinearInterpolateOnBoundary(colorWidth, colorHeight, tempo, x, y, boundarySize);
                if (!interpolatedOnBoundary.Equals(initiate))
                {
                    result[index] = new CameraSpacePoint { X = interpolatedOnBoundary.X, Y = interpolatedOnBoundary.Y, Z = interpolatedOnBoundary.Z };
                    //writetext.Write("1");
                    if (DEBUG)
                        depthValuesToByte[4 * index] = depthValuesToByte[4 * index + 1] = depthValuesToByte[4 * index + 2] = 255;
                    break;
                }
            }

            //writetext.Write("0");
            if (DEBUG)
            {
                if (interpolatedOnBoundary.Equals(initiate))
                    depthValuesToByte[4 * index] = depthValuesToByte[4 * index + 1] = depthValuesToByte[4 * index + 2] = 0;
            }
        }

    if (DEBUG)
    {
        Marshal.Copy(depthValuesToByte, 0, ptr.Value, colorWidth * colorHeight * 4);
        bm.UnlockBits(bmapdata);
        bm.Save("1.png");
        bm.Dispose();
    }
}
/// <summary>
/// Exploratory calibration routine: prints the SDK CoordinateMapper's
/// projections for a set of reference camera-space and depth-space points,
/// samples a short-range (z=500) and long-range (z=8000) depth-to-color map
/// over the 512x424 grid, persists it to COORDINATE_MAPPING if absent, and
/// then compares the custom projectedPoint estimate against the SDK mapping
/// for a batch of test points.
/// NOTE(review): outputFilename is never used; the loops stop at X &lt; 511 /
/// Y &lt; 423, so the last row and column of the maps stay unfilled — confirm
/// whether that is intentional.
/// </summary>
/// <param name="coordinateMapper">Kinect SDK mapper used as ground truth.</param>
/// <param name="outputFilename">Unused.</param>
public static void calculateProject(CoordinateMapper coordinateMapper, String outputFilename)
{
    // Reference camera-space points (all on or near the z = 1m plane).
    CameraSpacePoint[] spacePointBasics = new CameraSpacePoint[]
    {
        new CameraSpacePoint { X = -0.1f, Y = 0.0f, Z = 1.0f },
        new CameraSpacePoint { X = -0.7f, Y = 0.0f, Z = 1.0f },
        new CameraSpacePoint { X = 0.0f, Y = -0.1f, Z = 1.0f },
        new CameraSpacePoint { X = 0.0f, Y = -0.7f, Z = 1.0f },
        new CameraSpacePoint { X = 0.7f, Y = 0.0f, Z = 1.0f },
        new CameraSpacePoint { X = 0.35f, Y = 0.0f, Z = 1.0f },
        new CameraSpacePoint { X = 0.0f, Y = 0.3f, Z = 1.0f },
        new CameraSpacePoint { X = 0.0f, Y = 0.0f, Z = 1.0f },
        //Skeleton_Joint_Locations 0.51399,0.163888,1.20627,0.494376,0.362051,1.19127
        new CameraSpacePoint { X = 0.51399f, Y = 0.163888f, Z = 1.20627f },
        new CameraSpacePoint { X = 0.494376f, Y = 0.362051f, Z = 1.19127f },
        //Skeleton_Joint_Locations_Orig 0.534418,-0.159339,1.38631,0.523629,0.0243074,1.30818
        new CameraSpacePoint { X = 0.534418f, Y = -0.159339f, Z = 1.38631f },
        new CameraSpacePoint { X = 0.523629f, Y = 0.0243074f, Z = 1.30818f },
    };

    DepthSpacePoint[] spaceBasicToDepth = new DepthSpacePoint[spacePointBasics.Count()];
    coordinateMapper.MapCameraPointsToDepthSpace(spacePointBasics, spaceBasicToDepth);

    ColorSpacePoint[] spaceBasicToColor = new ColorSpacePoint[spacePointBasics.Count()];
    coordinateMapper.MapCameraPointsToColorSpace(spacePointBasics, spaceBasicToColor);

    ColorSpacePoint[] spaceBasicToDepthToColor = new ColorSpacePoint[spacePointBasics.Count()];
    coordinateMapper.MapDepthPointsToColorSpace(spaceBasicToDepth, Enumerable.Repeat((ushort)1000, spacePointBasics.Count()).ToArray(), spaceBasicToDepthToColor);

    Console.WriteLine("Camera space points to depth space points");
    foreach (var t in spaceBasicToDepth)
    {
        Console.WriteLine(t.ToSString());
    }
    Console.WriteLine("Camera space points to color space points");
    foreach (var t in spaceBasicToColor)
    {
        Console.WriteLine(t.ToSString());
    }
    Console.WriteLine("Camera space points to depth space points then to color space points");
    foreach (var t in spaceBasicToDepthToColor)
    {
        Console.WriteLine(t.ToSString());
    }

    // Reference depth-space pixels near the frame corners and the optical center.
    DepthSpacePoint[] depthBasics = new DepthSpacePoint[]
    {
        new DepthSpacePoint() { X = 0.0f, Y = 30.0f },
        new DepthSpacePoint() { X = 0.0f, Y = 380.0f },
        new DepthSpacePoint() { X = 511.0f, Y = 30.0f },
        new DepthSpacePoint() { X = 511.0f, Y = 380.0f },
        new DepthSpacePoint() { X = 262.7343f, Y = 203.6235f }, // Center of origin
    };

    int noOfPoints = depthBasics.Count();

    ColorSpacePoint[] depthBasicToColor = new ColorSpacePoint[noOfPoints];
    CameraSpacePoint[] depthBasicToCamera = new CameraSpacePoint[noOfPoints];

    // Depth used for depth field are the same as z-axis
    // Not the distance from IR sensor to the point
    ushort[] distances = new ushort[] { 1, 10, 100, 1000, 2000, 40000 };

    foreach (var distance in distances)
    {
        coordinateMapper.MapDepthPointsToColorSpace(depthBasics, Enumerable.Repeat(distance, noOfPoints).ToArray(), depthBasicToColor);
        Console.WriteLine("Projection from depth to color ");
        Console.WriteLine("z-axis = " + distance);
        foreach (var point in depthBasicToColor)
        {
            Console.WriteLine(point.ToSString());
        }

        coordinateMapper.MapDepthPointsToCameraSpace(depthBasics, Enumerable.Repeat(distance, noOfPoints).ToArray(), depthBasicToCamera);
        Console.WriteLine("Projection from depth to camera ");
        Console.WriteLine("z-axis = " + distance);
        foreach (var point in depthBasicToCamera)
        {
            Console.WriteLine(point.ToSString());
        }

        // Compare with the hand-rolled back-projection for the same pixels.
        for (int i = 0; i < noOfPoints; i++)
        {
            Console.WriteLine(projectDepthPixelToCameraSpacePoint(new Point3(depthBasics[i].X, depthBasics[i].Y, distance)).ToSString());
        }
    }

    // Let X_ir(P) be P.X in the depth image, X_rgb(P) be P.X in RGB.
    // The calculation here is just an estimation, doesn't take into account camera calibration
    // Solve the problem on the z-plane of 1 meter

    // Center of depth field has the same X, Y as the zero point of coordinate space
    System.Drawing.PointF centerDepthField_ir = new System.Drawing.PointF(spaceBasicToDepth[4].X, spaceBasicToDepth[4].Y);
    System.Drawing.PointF centerDepthField_rgb = new System.Drawing.PointF(spaceBasicToColor[4].X, spaceBasicToColor[4].Y);

    // It is difficult to get the center point of rgb field directly, so let's assume it is the center of rgb field
    System.Drawing.PointF centerRgbField_rgb = new System.Drawing.PointF(964.5f, 544.5f);

    // Vector from centerRgbFieldInRgb to centerDepthFieldInRgb
    System.Drawing.PointF translation = new System.Drawing.PointF(centerDepthField_rgb.X - centerRgbField_rgb.X, centerDepthField_rgb.Y - centerRgbField_rgb.Y);

    // Ratio of depth unit / rgb unit
    float ratioX = (spaceBasicToDepth[2].X - centerDepthField_ir.X) / (spaceBasicToColor[2].X - centerDepthField_rgb.X);
    float ratioY = (spaceBasicToDepth[3].Y - centerDepthField_ir.Y) / (spaceBasicToColor[3].Y - centerDepthField_rgb.Y);

    // Sample the SDK mapping at a near plane (z=500) and a far plane (z=8000).
    ColorSpacePoint[,] shortRange = new ColorSpacePoint[512, 424];
    ColorSpacePoint[,] longRange = new ColorSpacePoint[512, 424];
    for (int X = 0; X < 511; X++)
        for (int Y = 0; Y < 423; Y++)
        {
            Point3 p1 = new Point3 { X = X, Y = Y, Z = 500 };
            Point3 p2 = new Point3 { X = X, Y = Y, Z = 8000 };
            var p1Projected = coordinateMapper.MapDepthPointToColorSpace(new DepthSpacePoint { X = p1.X, Y = p1.Y }, (ushort)p1.Z);
            var p2Projected = coordinateMapper.MapDepthPointToColorSpace(new DepthSpacePoint { X = p2.X, Y = p2.Y }, (ushort)p2.Z);
            shortRange[X, Y] = p1Projected;
            longRange[X, Y] = p2Projected;
        }

    // Write these projected points into a file
    if (!File.Exists(COORDINATE_MAPPING))
    {
        var coordinateWriter = new DepthCoordinateMappingWriter(COORDINATE_MAPPING);
        coordinateWriter.write(512, 424, 500, 8000, shortRange, longRange);
    }

    // Let's get a random space point with a depth
    Point3[] PointAs = new Point3[]
    {
        new Point3 { X = 300f, Y = 300f, Z = 500 },
        new Point3 { X = 400f, Y = 400f, Z = 500 },
        new Point3 { X = 300f, Y = 300f, Z = 1000 },
        new Point3 { X = 400f, Y = 400f, Z = 1000 },
        new Point3 { X = 300f, Y = 300f, Z = 2000 },
        new Point3 { X = 400f, Y = 400f, Z = 2000 },
        new Point3 { X = 100f, Y = 100f, Z = 1000 },
        new Point3 { X = 100f, Y = 100f, Z = 2000 },
        new Point3 { X = 0f, Y = 100f, Z = 1000 },
        new Point3 { X = 0f, Y = 100f, Z = 2000 },
        new Point3 { X = 50f, Y = 100f, Z = 1000 },
        new Point3 { X = 50f, Y = 100f, Z = 2000 },
        new Point3 { X = 50f, Y = 50f, Z = 1000 },
        new Point3 { X = 50f, Y = 50f, Z = 2000 },
        new Point3 { X = 500f, Y = 50f, Z = 1000 },
        new Point3 { X = 500f, Y = 50f, Z = 2000 },
        new Point3 { X = 10f, Y = 10f, Z = 1000 },
        new Point3 { X = 10f, Y = 10f, Z = 2000 },
        new Point3 { X = 500f, Y = 200f, Z = 1000 },
        new Point3 { X = 500f, Y = 200f, Z = 2000 },
        new Point3 { X = depthBasics[4].X, Y = depthBasics[4].Y, Z = 2000 },
        new Point3 { X = depthBasics[4].X, Y = depthBasics[4].Y, Z = 1000 },
        new Point3 { X = depthBasics[4].X, Y = depthBasics[4].Y, Z = 4000 },
        new Point3 { X = depthBasics[4].X, Y = depthBasics[4].Y, Z = 500 },
        new Point3 { X = spaceBasicToDepth[2].X, Y = spaceBasicToDepth[2].Y, Z = 1000 },
        new Point3 { X = spaceBasicToDepth[2].X, Y = spaceBasicToDepth[2].Y, Z = 2000 },
    };

    foreach (Point3 PointA in PointAs)
    {
        // Project randomDepthPoint using ray from IR camera on the 1-z plane
        // It still has X_ir = 300, y_ir = 300
        System.Drawing.PointF projectedA_ir = new System.Drawing.PointF(PointA.X, PointA.Y);

        // Let's find projectedA_rgb
        System.Drawing.PointF projectedA_rgb = new System.Drawing.PointF(
            translation.X * 1000 / PointA.Z + (projectedA_ir.X - centerDepthField_ir.X) / ratioX + centerRgbField_rgb.X,
            translation.Y * 1000 / PointA.Z + (projectedA_ir.Y - centerDepthField_ir.Y) / ratioY + centerRgbField_rgb.Y);

        // Test: print all three estimates next to each other for comparison.
        Console.WriteLine("======Test=====");
        Console.WriteLine(PointA.X + ", " + PointA.Y + ", " + PointA.Z);
        Console.WriteLine(projectedPoint(shortRange, longRange, 500, 8000, PointA));
        Console.WriteLine(projectedA_rgb);
        Console.WriteLine(coordinateMapper.MapDepthPointToColorSpace(new DepthSpacePoint { X = PointA.X, Y = PointA.Y }, (ushort)PointA.Z).ToSString());
    }
}
/// <summary>
/// Transforms a 2D point with a 3x3 matrix and projects the result back to 2D
/// with respect to the camera position.
/// </summary>
/// <param name="point">Point to transform.</param>
/// <param name="transformationMat">Transformation matrix (3x3).</param>
/// <param name="camera">Camera position. Default is (0,0,0).</param>
/// <returns>Transformed point.</returns>
public static PointF Transform(this PointF point, float[,] transformationMat, Point3 camera = default(Point3))
{
    // Lift to homogeneous coordinates (z = 1), apply the matrix, then project.
    var homogeneous = new Point3(point.X, point.Y, 1);
    var transformed = homogeneous.Transform(transformationMat);
    return transformed.Project(camera);
}
/// <summary>
/// Stores a 3D glyph-box mark for the given frame, first undoing the supplied
/// scale/translation so bounds are kept in the unscaled coordinate space.
/// </summary>
public void set3DBounding(int frameNumber, int glyphSize, List<List<Point3>> glyphBounds, List<GlyphFace> faces, float scale = 1, Point3 translation = new Point3())
{
    // Inverse transform: divide by scale and subtract the (scaled) translation.
    List<List<Point3>> unscaledBounds = glyphBounds
        .Select(bound => bound.scaleBound(1 / scale,
            new Point3(-translation.X / scale, -translation.Y / scale, -translation.Z / scale)))
        .ToList();

    object3DMarks[frameNumber] = new GlyphBoxLocationMark3D(frameNumber, glyphSize, unscaledBounds, faces);
}
/// <summary>
/// Projects a camera-space point back onto the depth image (the inverse of
/// <c>projectDepthPixelToCameraSpacePoint</c>, using the same constants).
/// </summary>
/// <param name="csp">Point in 3D camera coordinates.</param>
/// <returns>X = pixel column; Y = pixel row; Z = depth in millimeters.</returns>
public static Point3 projectCameraSpacePointToDepthPixel(Point3 csp)
{
    const float focalX = 367.187f;
    const float focalY = -369f;
    const float principalX = 252.7343f;
    const float principalY = 203.6235f;

    float pixelX = csp.X * focalX / csp.Z + principalX;
    float pixelY = csp.Y * focalY / csp.Z + principalY;
    return new Point3(pixelX, pixelY, csp.Z * 1000);   // m -> mm
}
/// <summary>
/// Reads the glyph box from XML: the glyph size attribute, then for each FACE
/// node an optional bounding polygon (comma-separated x,y,z floats) and an
/// optional glyph code (glyphSize*glyphSize comma-separated 0/1 values).
/// </summary>
/// <param name="xmlNode">Node carrying the GLYPH_SIZE attribute and FACE children.</param>
public override void readFromXml(XmlNode xmlNode)
{
    // Invariant culture throughout: the serialized format uses '.' decimals
    // and ',' separators, which culture-sensitive parsing would break on
    // comma-decimal locales.
    glyphSize = int.Parse(xmlNode.Attributes[GLYPH_SIZE].Value, System.Globalization.CultureInfo.InvariantCulture);
    foreach (XmlNode faceNode in xmlNode.SelectNodes(FACE))
    {
        // Get boundings
        var boundingNode = faceNode.SelectSingleNode(BOUNDING);
        if (boundingNode != null)
        {
            String[] parts = boundingNode.InnerText.Split(',');
            var points = new List<Point3>();
            if (parts.Length % 3 == 0)
            {
                for (int i = 0; i < parts.Length / 3; i++)
                {
                    Point3 p = new Point3(
                        float.Parse(parts[3 * i].Trim(), System.Globalization.CultureInfo.InvariantCulture),
                        float.Parse(parts[3 * i + 1].Trim(), System.Globalization.CultureInfo.InvariantCulture),
                        float.Parse(parts[3 * i + 2].Trim(), System.Globalization.CultureInfo.InvariantCulture));
                    points.Add(p);
                }
            }
            boundingPolygons.Add(points);
        }

        // Get face
        var glyphNode = faceNode.SelectSingleNode(CODE);
        if (glyphNode != null)
        {
            var parts = glyphNode.InnerText.Split(',');
            bool[,] glyphValues = new bool[glyphSize, glyphSize];
            if (parts.Length == glyphSize * glyphSize)
            {
                for (int i = 0; i < glyphSize; i++)
                    for (int j = 0; j < glyphSize; j++)
                    {
                        glyphValues[i, j] = int.Parse(parts[i * glyphSize + j].Trim(), System.Globalization.CultureInfo.InvariantCulture) == 1;
                    }
            }
            GlyphFace gf = new GlyphFace(glyphValues, glyphSize);
            faces.Add(gf);
        }
    }
}
/// <summary>
///   Gets whether three points lie on the same line.
/// </summary>
///
/// <param name="p1">The first point.</param>
/// <param name="p2">The second point.</param>
/// <param name="p3">The third point.</param>
///
/// <returns>True if there is a line passing through all
///   three points; false otherwise.</returns>
///
public static bool Collinear(Point3 p1, Point3 p2, Point3 p3)
{
    // Direction vectors from p1 towards p2 and p3.
    float ux = p2.X - p1.X;
    float uy = p2.Y - p1.Y;
    float uz = p2.Z - p1.Z;

    float vx = p3.X - p1.X;
    float vy = p3.Y - p1.Y;
    float vz = p3.Z - p1.Z;

    // Cross product u x v: the zero vector iff the directions are parallel.
    float cx = uy * vz - uz * vy;
    float cy = uz * vx - ux * vz;
    float cz = ux * vy - uy * vx;

    // Squared magnitude compared against the float epsilon threshold.
    float squaredNorm = cx * cx + cy * cy + cz * cz;
    return squaredNorm < Constants.SingleEpsilon;
}
/// <summary>
/// Converts a <see cref="Point3"/> to a Juniper serializable vector,
/// copying X, Y and Z component-wise.
/// </summary>
public static Juniper.Mathematics.Vector3Serializable ToJuniperVector3Serializable(this Point3 v)
{
    var converted = new Juniper.Mathematics.Vector3Serializable(v.X, v.Y, v.Z);
    return converted;
}
/// <summary>
/// Determines whether the specified <see cref="Point3"/> is equal to this instance.
/// </summary>
///
/// <param name="other">The <see cref="Point3"/> to compare with this instance.</param>
///
/// <returns>
///   <c>true</c> if the specified <see cref="Point3"/> is equal to this instance; otherwise, <c>false</c>.
/// </returns>
///
public bool Equals(Point3 other)
{
    // Exact component-wise comparison; use Equals(Point3, double) for a tolerant check.
    return coordinates == other.coordinates;
}
/// <summary>
/// Returns a scaled/translated copy of this location mark. This base
/// implementation carries no 3D data and returns null; subclasses with real
/// bounds (e.g. glyph-box marks) override it.
/// </summary>
/// <param name="scale">Scale factor to apply.</param>
/// <param name="translation">Translation to apply after scaling.</param>
public virtual LocationMark3D getScaledLocationMark(float scale, Point3 translation)
{
    return null;
}