public IMImageCell(ICP pos, List<IRD> rasterData)
{
    CellPosition = pos;
    RasterData = rasterData;

    // Auto populate image data
    ImageData = new bool[pos.XSize, pos.YSize];
    int concatCounter = 0;
    byte[] concatData = rasterData.SelectMany(d => d.Data).ToArray();
    for (int y = 0; y <= ImageData.GetUpperBound(1); y++)
    {
        for (int x = 0; x < ImageData.GetUpperBound(0); x += 8)
        {
            byte curByte = concatData[concatCounter++];
            for (int b = 0; b < 8; b++)
            {
                ImageData[x + b, y] = (curByte & (1 << (7 - b))) > 0;
            }
        }
    }
}
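// Illustrative sketch (not part of the original type): the same MSB-first, 1-bit-per-pixel
// unpacking as the constructor above, isolated into a hypothetical helper. It assumes the
// row width is a multiple of 8 bits, as the byte-aligned raster data above implies.
public static bool[,] UnpackPackedBits(byte[] packed, int width, int height)
{
    var result = new bool[width, height];
    int byteIndex = 0;
    for (int y = 0; y < height; y++)
    {
        for (int x = 0; x < width; x += 8)
        {
            byte curByte = packed[byteIndex++];
            for (int b = 0; b < 8; b++)
            {
                // Bit 7 of the byte is the leftmost pixel of this group of eight
                result[x + b, y] = (curByte & (1 << (7 - b))) != 0;
            }
        }
    }
    return result;
}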
/*
 * Point cloud colors:
 *   AntiqueWhite -> reference
 *   YellowGreen  -> pending
 *   Blue         -> established
 */
private void TransformPC(Point3DCollection source, Point3DCollection reference)
{
    // As far as I know, source[i] maps to reference[i]

    // Compute the transformation from source to reference, seeded with the initial transformation
    _icpData = _icp.ComputeICP(
        Parser3DPoint.FromPoint3DToDataPoints(source),
        Parser3DPoint.FromPoint3DToDataPoints(reference),
        _initialTransformation);
    _icpData.transform = _initialTransformation;

    var p = ICP.ApplyTransformation(_icpData.transform, Parser3DPoint.FromPoint3DToDataPoints(_displayPointCloud));
    _displayPointCloud = Parser3DPoint.FromDataPointsToPoint3DCollection(p);

    _renderer.CreatePointCloud(_displayPointCloud, Brushes.BlueViolet);
}
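// Minimal sketch of applying a rigid transform to a single point, assuming the usual
// rotate-then-translate convention for pointmatcher.net's EuclideanTransform (an assumption,
// not confirmed by the code above); ICP.ApplyTransformation does this over whole point sets.
private static System.Numerics.Vector3 ApplyRigid(pointmatcher.net.EuclideanTransform t, System.Numerics.Vector3 p)
{
    return System.Numerics.Vector3.Transform(p, t.rotation) + t.translation;
}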
private void Init()
{
    InitializeComponent();
    Log.InitLog(textBox, label_Cycle);

    // Initialize images
    _processingStage = new ProcessingStage(
        label_Status,
        new BitmapImage(new Uri("pack://application:,,,/3DReconstructionWPF;component/Assets/Images/icons8-crossmark.png")),
        new BitmapImage(new Uri("pack://application:,,,/3DReconstructionWPF;component/Assets/Images/icons8-checkmark.png")),
        image_trackedFeature,
        image_rgbColor,
        image_depth);

    _renderer = new Renderer(group);
    _pcv = new PointCloudView(_renderer);
    _sensor = KinectSensor.GetDefault();

    _initialTransformation = new pointmatcher.net.EuclideanTransform
    {
        translation = System.Numerics.Vector3.Zero,
        rotation = System.Numerics.Quaternion.Normalize(
            System.Numerics.Quaternion.CreateFromRotationMatrix(new System.Numerics.Matrix4x4(
                1, 0, 0, 0,
                0, 1, 0, 0,
                0, 0, 1, 0,
                0, 0, 0, 1)))
    };

    _icpData = new ICP.ICPData(null, _initialTransformation);
    _icp = new ICP();
    label_Cycle.Content = "cycle: " + _cycleRuns;

    if (_sensor != null)
    {
        if (_sensor.IsOpen && _sensor.IsAvailable)
        {
            Log.WriteLog("Kinect capture data available!");
        }
    }

    // Init filters
    FilterGroup.InitFilters();
}
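// Equivalent, shorter construction of the initial transform used above, in a hypothetical
// helper: creating a quaternion from the identity matrix and normalizing it simply yields
// Quaternion.Identity.
private static pointmatcher.net.EuclideanTransform IdentityTransform()
{
    return new pointmatcher.net.EuclideanTransform
    {
        translation = System.Numerics.Vector3.Zero,
        rotation = System.Numerics.Quaternion.Identity
    };
}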
private static void ConstructTestCase(out EuclideanTransform t, out ErrorElements errorElements)
{
    // pick some random points
    var points = new List<DataPoint>();
    for (int i = 0; i < 10000; i++)
    {
        var n = RandomVector();
        points.Add(new DataPoint
        {
            point = 100.0f * RandomVector() - new Vector3(50.0f),
            normal = Vector3.Normalize(n),
        });
    }

    var dataPoints = new DataPoints
    {
        points = points.ToArray(),
        contiansNormals = true,
    };

    t = new EuclideanTransform();
    t.translation = RandomVector() * 50.0f;
    //t.translation = new Vector3(0f);
    var axis = Vector3.Normalize(RandomVector());
    t.rotation = Quaternion.CreateFromAxisAngle(axis, (float)(r.NextDouble() * Math.PI * 2));
    t.rotation = Quaternion.Normalize(t.rotation);
    //t.rotation = Quaternion.Identity;

    var transformedPoints = ICP.ApplyTransformation(dataPoints, t.Inverse());

    errorElements = new ErrorElements
    {
        reference = dataPoints,
        reading = transformedPoints,
        weights = Enumerable.Repeat(1.0f, points.Count).ToArray()
    };
}
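// Hypothetical round-trip check (not in the original tests): since the reading was produced
// by applying t.Inverse() to the reference, applying t again should recover the reference
// points up to floating-point error. Helper name and tolerance are assumptions.
private static void CheckRoundTrip()
{
    ConstructTestCase(out EuclideanTransform t, out ErrorElements err);
    DataPoints roundTrip = ICP.ApplyTransformation(err.reading, t);
    for (int i = 0; i < err.reference.points.Length; i++)
    {
        float d = Vector3.Distance(roundTrip.points[i].point, err.reference.points[i].point);
        System.Diagnostics.Debug.Assert(d < 1e-2f, "round-trip should reproduce the reference point");
    }
}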
/// <summary>
/// Attach the ICP instance.
/// </summary>
public void AttachICP(ICP icp)
{
    this.icp = icp;
}
public List<Vector3d> ICPCalculation(List<Vector3d> pointsToTransform, List<Vector3d> pointsOrig)
{
    int j = 0;
    int m = 0;

    // Copy the points to transform into a 3 x N array for the MATLAB ICP routine
    double[,] map_points = new double[3, pointsToTransform.Count];
    for (int i = 0; i < map_points.Length / 3; i++, j++)
    {
        map_points.SetValue(pointsToTransform[i].X, 0, j);
        map_points.SetValue(pointsToTransform[i].Y, 1, j);
        map_points.SetValue(pointsToTransform[i].Z, 2, j);
    }

    double[,] map_pointsToTransform = ThinPointCloud(pointsToTransform, 0, 50);
    m += 10;

    double[,] map_pointsOrig = new double[3, pointsOrig.Count()];
    for (int i = 0; i < pointsOrig.Count; i++)
    {
        map_pointsOrig.SetValue(pointsOrig[i].X, 0, i);
        map_pointsOrig.SetValue(pointsOrig[i].Y, 1, i);
        map_pointsOrig.SetValue(pointsOrig[i].Z, 2, i);
    }

    //IterativeClosestPointClass icp = new IterativeClosestPointClass();
    ICP icpTest = new ICP();
    MWCellArray outMapedPointsNew = null;
    MWCellArray outMapedPointsOld = null;

    Console.WriteLine("Start of ICP algorithm");
    Stopwatch stopWatch = new Stopwatch();
    stopWatch.Start();

    outMapedPointsNew = (MWCellArray)icpTest.getIterativeClosestPoints(
        (MWNumericArray)map_pointsOrig,
        (MWNumericArray)map_pointsToTransform,
        (MWNumericArray)map_points,
        (MWNumericArray)20);

    for (int i = 0; i < 5; i++)
    {
        m += 5;

        // Pull the mapped points of the previous run back into managed vectors (MATLAB arrays are 1-based)
        MWNumericArray newoutmappedPoints = (MWNumericArray)outMapedPointsNew[1];
        List<Vector3d> newPoints = new List<Vector3d>();
        double newx, newy, newz;
        Vector3d newact_point;
        for (int k = 1; k <= newoutmappedPoints.Dimensions[1]; k++)
        {
            newx = (double)newoutmappedPoints[1, k];
            newy = (double)newoutmappedPoints[2, k];
            newz = (double)newoutmappedPoints[3, k];
            newact_point = new Vector3d(newx, newy, newz);
            newPoints.Add(newact_point);
        }

        map_pointsToTransform = ThinPointCloud(newPoints, m, 50);
        outMapedPointsOld = outMapedPointsNew;

        // Call getIterativeClosestPoints, which returns a new array of map_points with rotation and translation
        //outMapedPoints = (MWCellArray)icp.getIterativeClosestPoints((MWNumericArray)map_pointsOrig, (MWNumericArray)map_points, (MWNumericArray)map_pointsToTransform, (MWNumericArray)15);
        outMapedPointsNew = (MWCellArray)icpTest.getIterativeClosestPoints(
            (MWNumericArray)map_pointsOrig,
            (MWNumericArray)map_pointsToTransform,
            outMapedPointsOld[1],
            (MWNumericArray)30);
    }

    stopWatch.Stop();

    // Get the elapsed time as a TimeSpan value
    TimeSpan ts = stopWatch.Elapsed;
    Console.WriteLine("End of ICP algorithm, it took: {0} minutes and {1} seconds!", ts.Minutes, ts.Seconds);

    MWNumericArray mappedPoints = (MWNumericArray)outMapedPointsNew[1];
    MWNumericArray rotation_vec = (MWNumericArray)outMapedPointsNew[2];
    MWNumericArray translation_vec = (MWNumericArray)outMapedPointsNew[3];

    // Convert the mapped points back into a managed list
    Vector3d act_point;
    List<Vector3d> points = new List<Vector3d>();
    double x, y, z;
    for (int i = 1; i <= mappedPoints.Dimensions[1]; i++)
    {
        x = (double)mappedPoints[1, i];
        y = (double)mappedPoints[2, i];
        z = (double)mappedPoints[3, i];
        act_point = new Vector3d(x, y, z);
        points.Add(act_point);
    }

    // Build the homogeneous 4x4 transformation matrix, one Vector4d per row:
    // [ R | t ] for the first three rows, [0 0 0 1] as the last row
    List<Vector4d> matrix = new List<Vector4d>();
    Vector4d row;
    for (int i = 1; i < 4; i++)
    {
        row = new Vector4d(
            (double)rotation_vec[i, 1],
            (double)rotation_vec[i, 2],
            (double)rotation_vec[i, 3],
            (double)translation_vec[i, 1]);
        matrix.Add(row);
    }
    row = new Vector4d(0, 0, 0, 1);
    matrix.Add(row);

    //WriteTransformationMatrix(matrix, "..\\..\\..\\..\\..\\03Daten\\registratedData\\MeshTomo_to_SimulatedTransformation.txt");
    transMatrix = matrix;
    //GetNearestPoints(points, pointsOrig);

    return points;
}
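// Illustrative sketch (an assumption about the layout built above): transMatrix holds the
// four rows of a homogeneous 4x4 transform, [R | t] above [0 0 0 1], so applying it to a
// point is a row-wise dot product plus the translation component.
private static Vector3d ApplyTransformationMatrix(List<Vector4d> rows, Vector3d p)
{
    double x = rows[0].X * p.X + rows[0].Y * p.Y + rows[0].Z * p.Z + rows[0].W;
    double y = rows[1].X * p.X + rows[1].Y * p.Y + rows[1].Z * p.Z + rows[1].W;
    double z = rows[2].X * p.X + rows[2].Y * p.Y + rows[2].Z * p.Z + rows[2].W;
    return new Vector3d(x, y, z);
}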
private void ParseFontAndImageData()
{
    // FONT RASTER PATTERNS
    Dictionary<Container, IReadOnlyDictionary<FNI.Info, bool[,]>> rasterPatterns = new Dictionary<Container, IReadOnlyDictionary<FNI.Info, bool[,]>>();
    foreach (Container c in Resources.Where(r => r.ResourceType == Resource.eResourceType.FontCharacterSet && r.IsLoaded)
                                     .Select(r => r.Fields[0].LowestLevelContainer))
    {
        // If we have a pattern map, gather raster data
        FNM patternsMap = c.GetStructure<FNM>();
        if (patternsMap != null)
        {
            FNI firstFNI = c.GetStructure<FNI>();
            Dictionary<FNI.Info, bool[,]> patternsDictionary = new Dictionary<FNI.Info, bool[,]>();
            byte[] allFNGData = c.GetStructures<FNG>().SelectMany(f => f.Data).ToArray();
            int indexCounter = 0;

            for (int i = 0; i < patternsMap.AllPatternData.Count; i++)
            {
                // Subtract this offset from the next one (or from the data length if at the end) to find out how many bytes to take
                int bytesToTake = (int)((i < patternsMap.AllPatternData.Count - 1
                    ? patternsMap.AllPatternData[i + 1].DataOffset
                    : (uint)allFNGData.Length) - patternsMap.AllPatternData[i].DataOffset);

                // Create an empty array of bools from our box width and height.
                // The array sizes are the number of bits in the minimum number of bytes required to support the bit size
                int numBitsWide = (int)Math.Ceiling((patternsMap.AllPatternData[i].BoxMaxWidthIndex + 1) / 8.0) * 8;
                int numRows = bytesToTake / (numBitsWide / 8);
                bool[,] curPattern = new bool[numBitsWide, numRows];
                for (int y = 0; y < numRows; y++)
                {
                    for (int x = 0; x < numBitsWide; x += 8)
                    {
                        byte curByte = allFNGData[indexCounter++];
                        for (int b = 0; b < 8; b++)
                        {
                            curPattern[x + b, y] = (curByte & (1 << (7 - b))) > 0;
                        }
                    }
                }

                // Look up the GCGID from the first FNI for this pattern
                patternsDictionary.Add(firstFNI.InfoList.First(fni => fni.FNMIndex == i), curPattern);
            }

            rasterPatterns.Add(c, patternsDictionary);
        }
    }
    ParsedFontPatterns = rasterPatterns;

    // IM IMAGES
    Dictionary<Container, IReadOnlyList<IMImageCell>> imImages = new Dictionary<Container, IReadOnlyList<IMImageCell>>();
    foreach (Container c in Resources
        .Where(r => r.IsLoaded && (r.ResourceType == Resource.eResourceType.IMImage
            || (r.ResourceType == Resource.eResourceType.PageSegment && r.Fields[1] is BII)))
        .Select(r => r.ResourceType == Resource.eResourceType.PageSegment
            ? r.Fields[1].LowestLevelContainer
            : r.Fields[0].LowestLevelContainer))
    {
        IID imageDescriptor = c.GetStructure<IID>();
        List<IMImageCell> cellList = new List<IMImageCell>();

        if (c.GetStructure<ICP>() == null)
        {
            // Since there are no cells, create one
            ICP newCellPos = new ICP(imageDescriptor.XSize, imageDescriptor.YSize);
            IMImageCell newCell = new IMImageCell(newCellPos, c.GetStructures<IRD>());
            cellList.Add(newCell);
        }
        else
        {
            // Manually parse a list of cells since they don't have their own container
            for (int i = 0; i < c.Structures.Count; i++)
            {
                if (c.Structures[i].GetType() != typeof(ICP))
                {
                    continue;
                }

                // Get the list of IRDs up to the next ICP or the end of the structures
                List<IRD> rasterData = new List<IRD>();
                for (int r = i + 1; r < c.Structures.Count; r++)
                {
                    if (c.Structures[r].GetType() != typeof(IRD))
                    {
                        break;
                    }
                    rasterData.Add((IRD)c.Structures[r]);
                }
                cellList.Add(new IMImageCell((ICP)c.Structures[i], rasterData));
            }
        }

        imImages.Add(c, cellList);
    }
    ParsedIMImages = imImages;

    // IOCA IMAGES
    Dictionary<Container, IReadOnlyList<ImageInfo>> iocaImages = new Dictionary<Container, IReadOnlyList<ImageInfo>>();
    foreach (Container c in Resources
        .Where(r => r.IsLoaded && (r.ResourceType == Resource.eResourceType.IOCAImage
            || (r.ResourceType == Resource.eResourceType.PageSegment && r.Fields[1] is BIM)))
        .Select(r => r.ResourceType == Resource.eResourceType.PageSegment
            ? r.Fields[1].LowestLevelContainer
            : r.Fields[0].LowestLevelContainer))
    {
        // Combine all self defining fields from zero or more IPD fields
        byte[] allIPDData = c.GetStructures<IPD>().SelectMany(f => f.Data).ToArray();
        List<ImageSelfDefiningField> SDFs = ImageSelfDefiningField.GetAllSDFs(allIPDData);

        // Get all images in our self defining field list
        foreach (Container sc in SDFs.OfType<BeginImageContent>().Select(s => s.LowestLevelContainer))
        {
            List<Container> allContainers = new List<Container>() { sc };
            List<ImageInfo> infoList = new List<ImageInfo>();

            // Along with ourself, add any tiles to the list of containers
            if (sc.Structures.Any(s => s.GetType() == typeof(BeginTile)))
            {
                allContainers.AddRange(sc.Structures.OfType<BeginTile>().Select(s => s.LowestLevelContainer));
            }

            // For each container, get image and transparency bytes
            foreach (Container tc in allContainers)
            {
                ImageInfo info = new ImageInfo();
                info.Data = tc.DirectGetStructures<ImageData>().SelectMany(s => s.Data).ToArray();

                // If there are tiles, store offset information
                if (tc.Structures[0].GetType() == typeof(BeginTile))
                {
                    TilePosition tp = tc.GetStructure<TilePosition>();
                    info.XOffset = tp.XOffset;
                    info.YOffset = tp.YOffset;
                }

                // Add transparency data if needed
                ImageSelfDefiningField BTM = tc.GetStructure<BeginTransparencyMask>();
                if (BTM != null)
                {
                    info.TransparencyMask = BTM.LowestLevelContainer.GetStructures<ImageData>().SelectMany(s => s.Data).ToArray();
                }

                infoList.Add(info);
            }

            iocaImages.Add(c, infoList);
        }
    }
    ParsedImages = iocaImages;
}
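// Debugging aid (illustrative, not part of the parser): render one of the parsed bool[,]
// raster patterns as ASCII art, one text row per pixel row, '#' for set bits.
private static string PatternToText(bool[,] pattern)
{
    var sb = new System.Text.StringBuilder();
    for (int y = 0; y <= pattern.GetUpperBound(1); y++)
    {
        for (int x = 0; x <= pattern.GetUpperBound(0); x++)
        {
            sb.Append(pattern[x, y] ? '#' : '.');
        }
        sb.AppendLine();
    }
    return sb.ToString();
}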
private void ScanReading_Click(object sender, RoutedEventArgs e)
{
    // for intersection testing
    //_displayPointCloud = _renderer.ReadData();
    //_rgbv._bvh = BVH.InitBVH(_displayPointCloud);

    var point1 = new Point3D(-0.8f, 0, 0);
    var point2 = new Point3D(-1f, 0, 0);
    var point3 = new Point3D(-0.6f, 0.5f, 0);
    var point4 = new Point3D(0.8f, -0.5f, 0);

    var pcReference = new Point3DCollection
    {
        point1,
        point2,
        point3
    };

    Log.WriteLog("--------------------");

    /*
     * var rotationAngle = 0.707106781187f;
     *
     * Matrix3D m = new Matrix3D(
     *     rotationAngle, 0, rotationAngle, 0,
     *     0, 1, 0, 0,
     *     -rotationAngle, 0, rotationAngle, 0,
     *     1, 0, 0, 1);
     *
     * // Transform the thumb according to m
     * _thumbReading = m.Transform(_readingFeatures[0]);
     * point1 = m.Transform(point1);
     * point2 = m.Transform(point2);
     * point3 = m.Transform(point3);
     * point4 = m.Transform(point4);
     *
     * var pcReading = new Point3DCollection
     * {
     *     point1,
     *     point2,
     *     point3
     * };
     */

    var depthData = _pcv.GetDepthDataFromLatestFrame();
    _reading = depthData.Item1;          // all points
    _readingFeatures = depthData.Item2;  // only feature points [4]

    _initialTransformation = Util.ComputeInitialTransformation(_readingFeatures, _referenceFeatures);
    _reading = Parser3DPoint.FromDataPointsToPoint3DCollection(
        ICP.ApplyTransformation(_initialTransformation, Parser3DPoint.FromPoint3DToDataPoints(_reading)));

    ComputeRMSE(_referenceFeatures, _readingFeatures, _initialTransformation);

    _renderer.CreatePointCloud(_referenceFeatures, Brushes.Pink, false, 0.0125f);

    // Transform readingFeatures
    _renderer.CreatePointCloud(_readingFeatures, Brushes.YellowGreen, false, 0.0125f);
    _readingFeatures = Parser3DPoint.FromDataPointsToPoint3DCollection(
        ICP.ApplyTransformation(_initialTransformation, Parser3DPoint.FromPoint3DToDataPoints(_readingFeatures)));
    _renderer.CreatePointCloud(_readingFeatures, Brushes.Violet, false, 0.0125f);
    _renderer.CreatePointCloud(_reading, Brushes.BlueViolet, false, 0.0025f);
    // _renderer.CreatePointCloud(_reading, Brushes.Violet, false);

    _cycleRuns++;
    label_Cycle.Content = "cycle: " + _cycleRuns;
}
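// Hypothetical sketch of what a ComputeRMSE like the call above might do (the actual
// implementation is not shown here): transform the reading features with the initial
// transformation and average the squared distances to the corresponding reference features,
// assuming reading[i] corresponds to reference[i].
private static double RmseSketch(Point3DCollection reference, Point3DCollection reading,
                                 pointmatcher.net.EuclideanTransform t)
{
    var transformed = Parser3DPoint.FromDataPointsToPoint3DCollection(
        ICP.ApplyTransformation(t, Parser3DPoint.FromPoint3DToDataPoints(reading)));

    double sumSquared = 0;
    for (int i = 0; i < reference.Count; i++)
    {
        var diff = transformed[i] - reference[i]; // Point3D difference yields a Vector3D
        sumSquared += diff.LengthSquared;
    }
    return Math.Sqrt(sumSquared / reference.Count);
}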