public void MeshScanComplete(CVMesh mesh)
{
    this.Show();
    currentMesh = mesh;
    btnUltraSoundScan.IsEnabled = true;
    //btnConvertAndSave.IsEnabled = true;
}
public List<URPose> DEBUG_FINDBOXPATH(CVMesh cameraMesh)
{
    CVMesh roboMesh = calibrator.ConvertToRobospace(cameraMesh);
    List<URPose> path = creator.GenerateBoxPath(roboMesh);
    return path;
}
/// <summary>
/// Computes the list of poses the robot arm should run through to cover a surface-type mesh.
/// Laplacian smoothing is applied to correct extreme normals on the mesh.
/// </summary>
/// <param name="cameraMesh">A mesh output from a 3D camera, not yet transformed into robot space</param>
/// <param name="smoothing">The number of Laplacian smoothing passes to apply to the mesh</param>
/// <returns>A list of poses necessary to perform an automatic ultrasound scan</returns>
public List<URPose> FindPath(CVMesh cameraMesh, int smoothing = 4)
{
    CVMesh roboMesh = calibrator.ConvertToRobospace(cameraMesh);
    Vector3[] vertices = roboMesh.Vertices.ToArray();
    for (int i = 0; i < smoothing; i++)
    {
        vertices = smoother.LaplacianFilter(vertices, roboMesh.TriangleIndeces.ToArray());
    }
    roboMesh.Vertices = vertices.ToList();
    float min = calibrator.TransformVertex(new Vector3 { X = -0.25f, Y = 0, Z = 0 }, calibrator.TransformMatrix).Y;
    float max = calibrator.TransformVertex(new Vector3 { X = 0.25f, Y = 0, Z = 0 }, calibrator.TransformMatrix).Y;
    List<VertexIndex> path = creator.CreatePath(roboMesh.Vertices, min, max);
    path = Extensions.PruneVertices(path);
    //Filter out vertices with a zeroed coordinate
    List<VertexIndex> prunedAgain = new List<VertexIndex>();
    foreach (var v in path)
    {
        if (v.Vector.X != 0 && v.Vector.Y != 0 && v.Vector.Z != 0)
        {
            prunedAgain.Add(v);
        }
    }
    List<URPose> poses = poser.ToURPath(prunedAgain, roboMesh);
    return poses;
}
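//A minimal usage sketch of FindPath, assuming a PLY scan from the 3D camera is
//available on disk (the file path and the 'finder' host instance are illustrative,
//not part of the API above):
public void EXAMPLE_FindPathUsage()
{
    CVMesh cameraMesh = PLYHandler.ReadMesh(@"C:\scans\example.ply");
    List<URPose> scanPath = FindPath(cameraMesh, smoothing: 4);
    //Each URPose carries a tool position (X, Y, Z) in meters and a rotation
    //vector (RX, RY, RZ) that is later fed to the UR controller.
}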
public void RetrieveMesh(CVMesh m)
{
    LastMesh = m;
    ((IDisposable)fusionizer).Dispose();
    fusionizer = null;
    MeshFinished?.Invoke(this, EventArgs.Empty);
}
public void RemoveNonSurfaceFaces_InsertBox_BoxTopRemoved()
{
    CVMesh mesh = PLYHandler.ReadMesh(testModelLocation + @"\box.ply");
    Slicer.xMin = double.MinValue;
    Slicer.xMax = double.MaxValue;
    CVMesh meshOut = uut.Slice(mesh, double.MinValue, double.MaxValue, false);
    //Smoke check: the slice should at least produce a mesh
    Assert.IsNotNull(meshOut);
}
public void LaplacianFilter_BoxInserted_BoxSmoothed()
{
    string location = testModelLocation + "\\box.ply";
    CVMesh mesh = PLYHandler.ReadMesh(location);
    List<Vector3> vertices = mesh.Vertices;
    List<Vector3> newVertices = uut.LaplacianFilter(vertices.ToArray(), mesh.TriangleIndeces.ToArray()).ToList();
    mesh.Vertices = newVertices;
    //Smoothing moves vertices but must not add or remove any
    Assert.AreEqual(vertices.Count, newVertices.Count);
}
public void ReceiveMesh(object sender, EventArgs e)
{
    var mesh = master.LastMesh;
    currentMesh = master.slicer.Slice(mesh, y_min, y_max);
    btnOK.IsEnabled = true;
    Initialize3DDrawing();
    SetUpKinect();
}
/// <summary>
/// Find the normal of a face
/// </summary>
/// <param name="i">Index of the face in mesh.Faces</param>
/// <param name="m">Mesh containing the face</param>
/// <returns>A direction vector indicating the normal of the face</returns>
public Vector3 GetFaceNormal(int i, CVMesh m)
{
    Vector3 v1 = m.Vertices[m.Faces[i].index1];
    Vector3 v2 = m.Vertices[m.Faces[i].index2];
    Vector3 v3 = m.Vertices[m.Faces[i].index3];
    Vector3 normal = Extensions.FaceNormal(v1, v2, v3);
    //Clamp downward-pointing normals to the horizontal plane before normalizing
    normal.Z = normal.Z >= 0 ? normal.Z : 0;
    normal = Extensions.Normalize(normal);
    return normal;
}
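//A sketch of the cross-product form Extensions.FaceNormal presumably takes (its
//actual implementation is not shown here): the normal of triangle (v1, v2, v3)
//is the cross product of its two edge vectors.
public static Vector3 FaceNormalSketch(Vector3 v1, Vector3 v2, Vector3 v3)
{
    Vector3 e1 = new Vector3 { X = v2.X - v1.X, Y = v2.Y - v1.Y, Z = v2.Z - v1.Z };
    Vector3 e2 = new Vector3 { X = v3.X - v1.X, Y = v3.Y - v1.Y, Z = v3.Z - v1.Z };
    return new Vector3
    {
        X = e1.Y * e2.Z - e1.Z * e2.Y,
        Y = e1.Z * e2.X - e1.X * e2.Z,
        Z = e1.X * e2.Y - e1.Y * e2.X
    };
}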
public void IdentifyPath_InsertPlane_CorrectPathOutPut()
{
    string location = testModelLocation + "\\plane.ply";
    CVMesh mesh = PLYHandler.ReadMesh(location);
    BoundingBox b = Extensions.FindBoundingBox(mesh.Vertices);
    uut.distance_length = -(b.x_max - b.x_min) / 9f;
    uut.distance_width = -(b.y_max - b.y_min) / 9f;
    List<VertexIndex> path = uut.CreatePath(mesh.Vertices);
    path = Extensions.PruneVertices(path);
    //Assert.AreEqual expects (expected, actual)
    Assert.AreEqual(mesh.Vertices.Count, path.Count);
}
//Assumes the following pipeline:
// 1: ColorMesh output from the Kinect API
// 2: ColorMesh turned into a CVMesh by KinectFusionizer
// 3: No further edits have been made
public CVMesh Slice(CVMesh meshIn, double y_min, double y_max, bool inverted = true)
{
    if (y_min >= y_max)
    {
        throw new Exception("Minimum value must be lower than maximum value");
    }
    if (meshIn == null)
    {
        throw new Exception("Mesh is null");
    }
    if (meshIn.Vertices.Count == 0)
    {
        throw new Exception("Mesh has no vertices");
    }
    CVMesh meshOut = CVMesh.Clone(meshIn);
    //The depth image is flipped in comparison to the output image,
    //so swap and negate the limits
    if (inverted)
    {
        double yMinTemp = y_min;
        y_min = y_max;
        y_max = yMinTemp;
        y_min *= -1;
        y_max *= -1;
    }
    yMin = y_min;
    yMax = y_max;
    meshOut.Faces = Extensions.ToFaces(meshOut.TriangleIndeces);
    meshOut = RemoveNonSurfaceFaces(meshOut);
    //Only add a vector if it is unique: the vector itself is the key,
    //the value is the index the vector had originally
    Dictionary<Vector3, int> uniques = new Dictionary<Vector3, int>();
    Dictionary<int, int> oldToNewIndices = new Dictionary<int, int>();
    //We can only delete something once, but multiple deleted vertices can
    //refer to one unique vertex, so: key = deleted, value = unique vertex
    Dictionary<int, int> deleted = new Dictionary<int, int>();
    SplitVertices(ref uniques, ref deleted, ref oldToNewIndices, meshOut.Vertices, meshOut);
    meshOut.Faces = AssignFacesNewIndices(deleted, oldToNewIndices, meshOut.Faces);
    meshOut.Vertices = new List<Vector3>();
    foreach (var unique in uniques)
    {
        meshOut.Vertices.Add(unique.Key);
    }
    meshOut.TriangleIndeces = Extensions.ToTriangleIndices(meshOut.Faces);
    string location = Environment.CurrentDirectory;
    PLYHandler.Output(meshOut, ref location, false);
    return meshOut;
}
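//A minimal usage sketch of Slice (the file path and limits are illustrative):
//keep only the band between -0.25 m and 0.25 m on the y-axis of a Kinect scan.
public void EXAMPLE_SliceUsage()
{
    CVMesh scan = PLYHandler.ReadMesh(@"C:\scans\bed.ply"); //hypothetical path
    CVMesh bandOnly = Slice(scan, -0.25, 0.25);             //inverted defaults to true for Kinect output
}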
/// <summary>
/// Translates, rotates and scales a mesh from the 3D camera's view to the robot arm's view
/// </summary>
/// <param name="mesh">The mesh we wish to transform</param>
/// <returns>The same mesh expressed in robot space</returns>
public CVMesh ConvertToRobospace(CVMesh mesh)
{
    CVMesh converted = CVMesh.Clone(mesh);
    List<Vector3> vertices = converted.Vertices;
    List<Vector3> transformedVertices = new List<Vector3>();
    foreach (var v in vertices)
    {
        Vector3 transformed = TransformVertex(v, TransformMatrix);
        transformedVertices.Add(transformed);
    }
    converted.Vertices = transformedVertices;
    return converted;
}
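//A sketch of what TransformVertex presumably does (the calibrator's actual
//implementation and matrix type are not shown; a double[4,4] homogeneous
//transform is assumed here): translation, rotation and scale in one multiply.
public static Vector3 TransformVertexSketch(Vector3 v, double[,] m)
{
    return new Vector3
    {
        X = (float)(m[0, 0] * v.X + m[0, 1] * v.Y + m[0, 2] * v.Z + m[0, 3]),
        Y = (float)(m[1, 0] * v.X + m[1, 1] * v.Y + m[1, 2] * v.Z + m[1, 3]),
        Z = (float)(m[2, 0] * v.X + m[2, 1] * v.Y + m[2, 2] * v.Z + m[2, 3])
    };
}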
private void CalculateMesh()
{
    try
    {
        ColorMesh m = volume.CalculateMesh(2);
        currentMesh = m;
        CVMesh mesh = CVMesh.ConvertToMesh(m);
        master.RetrieveMesh(mesh);
        reader = null;
        sensor.Close();
        CaptureCurrent = false;
    }
    catch (Exception ex)
    {
        Debug.Write(ex.Message);
    }
}
//A 3D camera mounted to a ceiling should only be able to detect faces pointing up
public CVMesh RemoveNonSurfaceFaces(CVMesh mesh)
{
    List<Face> toSave = new List<Face>();
    foreach (var meshFace in mesh.Faces)
    {
        Vector3 v1 = mesh.Vertices[meshFace.index1];
        Vector3 v2 = mesh.Vertices[meshFace.index2];
        Vector3 v3 = mesh.Vertices[meshFace.index3];
        Vector3 normal = Extensions.FaceNormal(v1, v2, v3);
        //The mesh output from a 3D camera is 'rotated' 180 degrees, so upward-facing
        //faces have negative Z normals; keep only those pointing clearly up
        if (normal.Z <= -0.5)
        {
            toSave.Add(meshFace);
        }
    }
    mesh.Faces = toSave;
    return mesh;
}
private void ConvertToGeometry(Model3DGroup model_group)
{
    CVMesh m = currentMesh;
    MeshGeometry3D mg3D = new MeshGeometry3D();
    foreach (var mFace in m.Faces)
    {
        Point3D a = ToPoint3D(m.Vertices[mFace.index1]);
        Point3D b = ToPoint3D(m.Vertices[mFace.index2]);
        Point3D c = ToPoint3D(m.Vertices[mFace.index3]);
        AddTriangle(mg3D, a, b, c);
    }
    DiffuseMaterial surface_material = new DiffuseMaterial(Brushes.LightGray);
    GeometryModel3D surface_model = new GeometryModel3D(mg3D, surface_material);
    surface_model.BackMaterial = surface_material;
    model_group.Children.Add(surface_model);
}
public void Slice_InsertFourFaces_2Returned()
{
    //The mesh has 10 vertices, of which 8 are unique, and 4 faces.
    //We slice halfway through the third face, so we should end up with 2 faces and 5 vertices.
    string location = testModelLocation + @"\fourTriangles.ply";
    CVMesh mesh = PLYHandler.ReadMesh(location);
    float lowerLimit = 58.5f;
    float upperLimit = 1000f;
    Slicer.xMin = float.MinValue;
    Slicer.xMax = float.MaxValue;
    CVMesh sliced = uut.Slice(mesh, lowerLimit, upperLimit, false);
    Assert.AreEqual(5, sliced.Vertices.Count);
    Assert.AreEqual(0, sliced.Faces[0].index1);
    Assert.AreEqual(1, sliced.Faces[0].index2);
    Assert.AreEqual(2, sliced.Faces[0].index3);
    Assert.AreEqual(3, sliced.Faces[1].index1);
    Assert.AreEqual(2, sliced.Faces[1].index2);
    Assert.AreEqual(4, sliced.Faces[1].index3);
}
public void Slice_Inserted45VerticesOutOfBound_0Returned()
{
    CVMesh mesh = new CVMesh();
    List<Vector3> vertices = new List<Vector3>();
    int lowerLimit = 10;
    int upperLimit = 20;
    Slicer.xMin = lowerLimit;
    Slicer.xMax = upperLimit;
    for (int i = 0; i < 45; i++)
    {
        int x = RandomOutSideOfRange(lowerLimit, upperLimit);
        int y = RandomOutSideOfRange(lowerLimit, upperLimit);
        vertices.Add(new Vector3 { X = x, Y = y, Z = 0 });
    }
    mesh.Vertices = vertices;
    CVMesh sliced = uut.Slice(mesh, lowerLimit, upperLimit, false);
    Assert.AreEqual(0, sliced.Vertices.Count);
}
[ExcludeFromCodeCoverage] //only used for calibration
public List<URPose> GenerateBoxPath(CVMesh m)
{
    var verticePoints = m.Vertices;
    var box = Extensions.FindBoundingBox(verticePoints);
    //The four corners of the top of the bounding box
    List<Vector3> upperBox = new List<Vector3>();
    upperBox.Add(new Vector3 { X = box.x_max, Y = box.y_max, Z = box.z_max });
    upperBox.Add(new Vector3 { X = box.x_min, Y = box.y_max, Z = box.z_max });
    upperBox.Add(new Vector3 { X = box.x_min, Y = box.y_min, Z = box.z_max });
    upperBox.Add(new Vector3 { X = box.x_max, Y = box.y_min, Z = box.z_max });
    List<URPose> poses = new List<URPose>();
    foreach (var p in upperBox)
    {
        float X = p.X;
        float Y = p.Y;
        float Z = p.Z + 0.20f;   //hover 20 cm above the top of the box
        double RXpose = Math.PI; //tool pointing straight down
        double RYpose = 0;
        double RZpose = 0;
        poses.Add(new URPose(X, Y, Z, RXpose, RYpose, RZpose));
    }
    return poses;
}
/// <summary>
/// Find the direction vector for a vertex by taking the average direction vector of the faces the vertex belongs to
/// </summary>
/// <param name="vertex">Vertex in a mesh</param>
/// <param name="mesh">Mesh the vertex is a part of</param>
/// <returns>Normal/direction vector for the vertex</returns>
public Vector3 FindVertexNormal(VertexIndex vertex, CVMesh mesh)
{
    mesh.Faces = Extensions.ToFaces(mesh.TriangleIndeces);
    int index = vertex.Index;
    List<Vector3> faceNormals = new List<Vector3>();
    for (int i = 0; i < mesh.Faces.Count; i++)
    {
        Face f = mesh.Faces[i];
        if (f.index1 == index || f.index2 == index || f.index3 == index)
        {
            faceNormals.Add(GetFaceNormal(i, mesh));
        }
    }
    //A vertex belonging to no face defaults to pointing straight up
    if (faceNormals.Count == 0)
    {
        return new Vector3 { X = 0, Y = 0, Z = 1 };
    }
    return Extensions.Normalize(Extensions.AvgVector(faceNormals));
}
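//A sketch of the averaging step, assuming Extensions.AvgVector is a plain
//component-wise mean (its actual implementation is not shown here):
public static Vector3 AvgVectorSketch(List<Vector3> vectors)
{
    float x = 0, y = 0, z = 0;
    foreach (var v in vectors)
    {
        x += v.X;
        y += v.Y;
        z += v.Z;
    }
    int n = vectors.Count;
    return new Vector3 { X = x / n, Y = y / n, Z = z / n };
}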
public static float UR_PROBE_OFFSET = 0.10f; //The length of whatever is attached to the tool center point, in meters

/// <summary>
/// Finds robot arm tool center point poses for a list of vertices
/// </summary>
/// <param name="vectorPath">A list of vertices in a mesh. Vertices must exist in the mesh and each be part of at least one face</param>
/// <param name="mesh">The mesh that the vectorPath belongs to</param>
/// <returns>A list of URPoses used to feed the robot arm later</returns>
public List<URPose> ToURPath(List<VertexIndex> vectorPath, CVMesh mesh)
{
    List<URPose> poses = new List<URPose>();
    foreach (var v in vectorPath)
    {
        Vector3 vertexNormal = FindVertexNormal(v, mesh);
        //vertexNormal.X = 0.00001f;
        vertexNormal = Extensions.Normalize(vertexNormal);
        //Back the tool center point off the surface along the normal
        Vector3 urPos = FindURPosition(v.Vector, vertexNormal);
        //Vector3 smoothedNormal = Extensions.Normalize(Extensions.AvgVector(new List<Vector3> {vertexNormal, new Vector3 {X=0, Y=0, Z=1}}));
        //The tool should point against the surface normal
        Vector3 rotationNormal = Extensions.Multiply(vertexNormal, -1);
        //Vector3 urRot = ToRotationVector(rotationNormal);
        URPose pose = new URPose(urPos.X, urPos.Y, urPos.Z, rotationNormal.X, rotationNormal.Y, rotationNormal.Z);
        poses.Add(pose);
    }
    //Trim 12% of the poses off each end of the path
    int percent = (int)Math.Floor(poses.Count * 0.12);
    var snippet = poses.GetRange(percent, poses.Count - percent * 2);
    PLYHandler.Output(snippet, Environment.CurrentDirectory);
    ConvertDirectionVectors(snippet);
    return snippet;
}
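//ConvertDirectionVectors is assumed to turn each pose's stored direction vector
//into the axis-angle rotation vector a UR controller expects. A minimal sketch of
//that conversion for a single direction d = (dx, dy, dz), mapping the tool's +Z
//axis onto d (the function name and return shape are illustrative):
public static double[] ToRotationVectorSketch(double dx, double dy, double dz)
{
    double len = Math.Sqrt(dx * dx + dy * dy + dz * dz);
    double angle = Math.Acos(dz / len);   //angle between +Z and d
    double ax = -dy, ay = dx;             //rotation axis = cross((0,0,1), d) = (-dy, dx, 0)
    double axisLen = Math.Sqrt(ax * ax + ay * ay);
    if (axisLen < 1e-9)
    {
        //d is parallel to Z: either no rotation, or a half-turn about any horizontal axis
        return dz >= 0 ? new double[] { 0, 0, 0 } : new double[] { Math.PI, 0, 0 };
    }
    //Axis-angle (rotation vector) = normalized axis * angle
    return new double[] { ax / axisLen * angle, ay / axisLen * angle, 0 };
}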
public void FindPath_InsertFlatMeshFromCamera_OutputURPath()
{
    string location = testModelLocation + "\\cameraBoxScan.ply";
    CVMesh mesh = PLYHandler.ReadMesh(location);
    List<URPose> poses = uut.FindPath(mesh, 2);
    //Smoke check: a valid scan should yield at least one pose
    Assert.IsTrue(poses.Count > 0);
}
/// <summary>
/// Splits vertices into two dictionaries: the unique ones, and the duplicates that should be deleted
/// </summary>
/// <param name="uniques">Unique vertices</param>
/// <param name="deleted">Vertices marked for deletion</param>
/// <param name="oldToNewIndices">Maps original vertex indices to their new, compacted indices</param>
private void SplitVertices(ref Dictionary<Vector3, int> uniques, ref Dictionary<int, int> deleted, ref Dictionary<int, int> oldToNewIndices, List<Vector3> vertices, CVMesh mesh)
{
    //Given vertices 01234567 where 01, 24 and 67 are pairwise identical,
    //the uniques are 0, 2, 3, 5, 6, but in the end they need to have the
    //indices 0, 1, 2, 3, 4, hence the uniqueCounter
    int uniqueCounter = 0;
    for (int i = 0; i < vertices.Count; i++)
    {
        var v = vertices[i];
        if (!uniques.ContainsKey(v) && WithInBounds(v) && BelongsToAFace(i, mesh.Faces))
        {
            uniques.Add(v, uniqueCounter);
            oldToNewIndices.Add(i, uniqueCounter);
            uniqueCounter++;
        }
        else
        {
            //The face needs to be deleted if uniqueIndex stays -1
            int uniqueIndex = -1;
            if (!WithInBounds(v) || !uniques.TryGetValue(v, out uniqueIndex))
            {
                uniqueIndex = -1; //TryGetValue zeroes the out parameter on a miss
            }
            deleted.Add(i, uniqueIndex);
        }
    }
}
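//A small worked example of the split (all vertices assumed within bounds and on faces):
//vertices = [a, b, a, c], where a appears twice
//  i=0: a is new  -> uniques {a:0},            oldToNewIndices {0:0}
//  i=1: b is new  -> uniques {a:0, b:1},       oldToNewIndices {0:0, 1:1}
//  i=2: a is seen -> deleted {2:0} (the duplicate maps back to unique index 0)
//  i=3: c is new  -> uniques {a:0, b:1, c:2},  oldToNewIndices {0:0, 1:1, 3:2}
//AssignFacesNewIndices can then rewrite every face index through these maps.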
public void RequestCurrentImageAsMesh()
{
    LastMesh = null;
    fusionizer.CaptureMeshNow();
}
public void CaptureMeshNow()
{
    CVMesh m = new CVMesh();
    master.RetrieveMesh(m);
}