// ---- Example n. 1 ----
        /// <summary>
        /// Builds a scan path for a mesh: a square wave is generated above the mesh's
        /// bounding box and pressed down onto the surface, yielding the scan-relevant
        /// points in visiting order. The optional X bounds clamp out-of-range vertices
        /// that the Kinect API occasionally produces.
        /// </summary>
        /// <param name="vertices">The vertices of a mesh</param>
        /// <param name="x_absolute_min">Hard lower X bound; defaults to "no bound"</param>
        /// <param name="x_absolute_max">Hard upper X bound; defaults to "no bound"</param>
        /// <returns>The ordered list of path vertices with their mesh indices</returns>
        public List <VertexIndex> CreatePath(List <Vector3> vertices, float x_absolute_min = float.MaxValue, float x_absolute_max = float.MinValue)
        {
            // Bound the mesh first, then lay a square wave over that box.
            BoundingBox bounds = Extensions.FindBoundingBox(vertices, x_absolute_min, x_absolute_max);
            List <Vector3> squareWave = GenerateSquareWave(bounds);

            // Dump the raw wave for offline inspection before projecting it.
            PLYHandler.Output(squareWave, Environment.CurrentDirectory, "WAVE");
            List <VertexIndex> orderedPath = IdentifyPath(squareWave, vertices);
            return orderedPath;
        }
        public void RemoveNonSurfaceFaces_InsertBox_BoxTopRemoved()
        {
            // Arrange: load the box model and open the X window fully so only the
            // non-surface-face removal inside Slice affects the result.
            CVMesh mesh = PLYHandler.ReadMesh(testModelLocation + @"\box.ply");

            Slicer.xMin = double.MinValue;
            Slicer.xMax = double.MaxValue;

            // Act
            CVMesh meshOut = uut.Slice(mesh, double.MinValue, double.MaxValue, false);

            // Assert: the original test asserted nothing, so it could never fail.
            // A successful slice of a non-empty mesh must yield a mesh with vertices.
            Assert.IsNotNull(meshOut);
            Assert.IsTrue(meshOut.Vertices.Count > 0);
        }
        public void LaplacianFilter_BoxInserted_BoxSmoothed()
        {
            // Arrange: read the box mesh whose vertices will be smoothed.
            string         location = testModelLocation + "\\box.ply";
            CVMesh         mesh     = PLYHandler.ReadMesh(location);
            List <Vector3> vertices = mesh.Vertices;

            // Act
            List <Vector3> newVertices = uut.LaplacianFilter(vertices.ToArray(), mesh.TriangleIndeces.ToArray()).ToList();

            mesh.Vertices = newVertices;

            // Assert: the original test asserted nothing. Laplacian smoothing
            // relocates vertices but must not add or drop any — TODO confirm this
            // contract against the filter implementation.
            Assert.IsNotNull(newVertices);
            Assert.AreEqual(vertices.Count, newVertices.Count);
        }
        public void IdentifyPath_InsertPlane_CorrectPathOutPut()
        {
            // Arrange: load a flat plane and size the path grid relative to its
            // bounding box (9 steps along each axis; sign flip matches CreatePath's
            // traversal direction).
            string      location = testModelLocation + "\\plane.ply";
            CVMesh      mesh     = PLYHandler.ReadMesh(location);
            BoundingBox b        = Extensions.FindBoundingBox(mesh.Vertices);

            uut.distance_length = -(b.x_max - b.x_min) / 9f;
            uut.distance_width  = -(b.y_max - b.y_min) / 9f;

            // Act
            List <VertexIndex> path = uut.CreatePath(mesh.Vertices);

            path = Extensions.PruneVertices(path);

            // Assert: every mesh vertex should appear in the pruned path exactly once.
            // Fixed argument order — Assert.AreEqual takes (expected, actual).
            Assert.AreEqual(mesh.Vertices.Count, path.Count);
        }
// ---- Example n. 5 ----
        //Assumes the following pipeline:
        // 1: ColorMesh outputted from Kinect API
        // 2: ColorMesh turned into CVMesh by KinectFusionizer
        // 3: No further edits have been made
        /// <summary>
        /// Slices a mesh to the y window [y_min, y_max]: removes non-surface faces,
        /// deduplicates vertices, re-indexes the faces accordingly, and writes the
        /// result to a PLY file in the current directory.
        /// </summary>
        /// <param name="meshIn">Mesh to slice; must be non-null and contain vertices. Not modified.</param>
        /// <param name="y_min">Lower y bound of the slice window</param>
        /// <param name="y_max">Upper y bound of the slice window; must exceed y_min</param>
        /// <param name="inverted">When true, the window is mirrored (negated and swapped)
        /// because the depth image is flipped relative to the output image</param>
        /// <returns>A new, sliced mesh</returns>
        /// <exception cref="ArgumentNullException">meshIn is null</exception>
        /// <exception cref="ArgumentException">y_min >= y_max, or the mesh has no vertices</exception>
        public CVMesh Slice(CVMesh meshIn, double y_min, double y_max, bool inverted = true)
        {
            // Validate the reference first, then the value arguments. Specific
            // exception types replace the former bare Exception; callers catching
            // Exception still catch these.
            if (meshIn == null)
            {
                throw new ArgumentNullException(nameof(meshIn), "Mesh is null");
            }
            if (y_min >= y_max)
            {
                throw new ArgumentException("Minimum value must be lower than maximum value");
            }
            if (meshIn.Vertices.Count == 0)
            {
                throw new ArgumentException("Mesh has no vertices");
            }
            CVMesh meshOut = CVMesh.Clone(meshIn);

            //the depth image is flipped in comparison to output image,
            //so mirror the window: [y_min, y_max] -> [-y_max, -y_min]
            if (inverted)
            {
                double mirroredMin = -y_max;
                double mirroredMax = -y_min;
                y_min = mirroredMin;
                y_max = mirroredMax;
            }
            yMin = y_min;
            yMax = y_max;
            //only add the same vector if it is unique. the vector itself is the key.
            //the value is the original index the vector had
            meshOut.Faces = Extensions.ToFaces(meshOut.TriangleIndeces);
            meshOut       = RemoveNonSurfaceFaces(meshOut);
            Dictionary <Vector3, int> uniques         = new Dictionary <Vector3, int>();
            Dictionary <int, int>     oldToNewIndices = new Dictionary <int, int>();
            //We can only delete something once, but multiple deleted vertices can
            //refer to one unique vertice, so:
            //Key = deleted, value = unique vertice
            Dictionary <int, int> deleted = new Dictionary <int, int>();

            SplitVertices(ref uniques, ref deleted, ref oldToNewIndices, meshOut.Vertices, meshOut);
            meshOut.Faces = AssignFacesNewIndices(deleted, oldToNewIndices, meshOut.Faces);
            // Rebuild the vertex list from the deduplicated set (keys preserve the
            // insertion order produced by SplitVertices).
            meshOut.Vertices        = new List <Vector3>(uniques.Keys);
            meshOut.TriangleIndeces = Extensions.ToTriangleIndices(meshOut.Faces);

            // Persist the sliced mesh for inspection, then hand it back.
            string location = Environment.CurrentDirectory;
            PLYHandler.Output(meshOut, ref location, false);
            return meshOut;
        }
        public void Slice_InsertFourFaces_2Returned()
        {
            //Mesh has 10 vertices, of which 8 are unique. It has 4 faces.
            //We're slicing halfway through the third face, so we should end up with 2 faces and 5 vertices.
            // Arrange: open the X window fully so only the y slice applies.
            string location   = testModelLocation + @"\fourTriangles.ply";
            CVMesh mesh       = PLYHandler.ReadMesh(location);
            float  lowerLimit = 58.5f;
            float  upperLimit = 1000f;

            Slicer.xMin = float.MinValue;
            Slicer.xMax = float.MaxValue;

            // Act
            CVMesh result = uut.Slice(mesh, lowerLimit, upperLimit, false);

            // Assert: 5 deduplicated vertices remain and the two surviving faces
            // are re-indexed against the compacted vertex list.
            Assert.AreEqual(5, result.Vertices.Count);
            Assert.AreEqual(0, result.Faces[0].index1);
            Assert.AreEqual(1, result.Faces[0].index2);
            Assert.AreEqual(2, result.Faces[0].index3);
            Assert.AreEqual(3, result.Faces[1].index1);
            Assert.AreEqual(2, result.Faces[1].index2);
            Assert.AreEqual(4, result.Faces[1].index3);
        }
// ---- Example n. 7 ----
        // NOTE(review): public and mutable — presumably adjusted when a different
        // probe/tool is mounted; confirm before making this const or readonly.
        public static float UR_PROBE_OFFSET = 0.10f; //The length of whatever is attached to the tool center point in meters.
        /// <summary>
        /// Finds robot arm tool center point poses for a list of vertices. Each pose
        /// sits offset from its vertex along the vertex normal, with the tool pointing
        /// back toward the surface. A fixed fraction of poses is trimmed from both ends
        /// of the path before conversion and output.
        /// </summary>
        /// <param name="vectorPath">A list of vertices in a mesh. Vertices must exist in mesh and be part of at least one face each</param>
        /// <param name="mesh">The mesh that the vectorPath belongs to</param>
        /// <returns>A list of URPoses used to feed the robot arm later</returns>
        public List <URPose> ToURPath(List <VertexIndex> vectorPath, CVMesh mesh)
        {
            // Fraction of poses dropped at each end of the path — presumably the
            // edges of the scan are noisy; TODO confirm 12% is intentional.
            const double TrimFraction = 0.12;

            List <URPose> poses = new List <URPose>();

            foreach (var v in vectorPath)
            {
                // Unit normal at the vertex; the pose position is offset along it,
                // and the tool orientation is its negation (pointing at the surface).
                Vector3 vertexNormal = Extensions.Normalize(FindVertexNormal(v, mesh));
                Vector3 urPos = FindURPosition(v.Vector, vertexNormal);
                Vector3 rotationNormal = Extensions.Multiply(vertexNormal, -1);

                URPose pose = new URPose(urPos.X, urPos.Y, urPos.Z, rotationNormal.X, rotationNormal.Y, rotationNormal.Z);
                poses.Add(pose);
            }
            // Trim both ends, dump the remaining path for inspection, then convert
            // the direction vectors to robot rotation vectors in place.
            int trimCount = (int)Math.Floor(poses.Count * TrimFraction);
            var snippet   = poses.GetRange(trimCount, poses.Count - trimCount * 2);

            PLYHandler.Output(snippet, Environment.CurrentDirectory);
            ConvertDirectionVectors(snippet);
            return snippet;
        }
 public void FindPath_InsertFlatMeshFromCamera_OutputURPath()
 {
     // Arrange/Act: load a real camera scan and request a path from it.
     string        location = testModelLocation + "\\cameraBoxScan.ply";
     CVMesh        mesh     = PLYHandler.ReadMesh(location);
     List <URPose> poses    = uut.FindPath(mesh, 2);

     // Assert: the original test asserted nothing, so it could never fail.
     // A path over a non-empty scan must yield at least one pose.
     Assert.IsNotNull(poses);
     Assert.IsTrue(poses.Count > 0);
 }