/// <summary>
/// Calibrates camera 2 from a recorded XEF file, exports the world-transformed
/// point cloud, and separately exports the centers of the four strongest
/// Aruco markers as a red XYZRGB cloud.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
public static void Run(string[] args)
{
    var xefPath = @"C:\XEF\cam2_cal.xef";
    var xef = new Xef(xefPath);

    // Load the computer-vision (CV) color frame and preview detected Aruco markers.
    var colorFrame = xef.LoadCvColorFrame(0);
    colorFrame.DrawAruco().ShowNoWait();

    var camSpace = xef.LoadCVCameraSpace(2);
    var (transform, detectedMarkers) = Calibrator.Calibrate(colorFrame, camSpace);

    // Camera-space -> world pose as a matrix.
    var worldPose = transform.CameraSpaceToWorldTx.ToMat();
    var worldPoints = camSpace.Transform(worldPose).ToCamSpacePoints();

    // Save as XYZRGB file (open in MeshLab to view).
    XYZRGB.Export(worldPoints, colorFrame.GetBRGABytes(), @"C:\XEF\cam2_cal.txt");

    // Keep only the four markers with the strongest mask response.
    detectedMarkers = detectedMarkers.OrderByDescending(m => m.MaskSum.Val0).Take(4).ToList();
    var markerCenters = new CvCameraSpace();
    detectedMarkers.ForEach(m => markerCenters.Add(m.KxCenter));

    // Export the marker centers in world space, colored red.
    var worldMarkerCenters = markerCenters.Transform(worldPose);
    XYZRGB.Export(worldMarkerCenters, new Scalar(255, 0, 0), @"C:\XEF\cam2_cal_markers.txt");
}
/// <summary>
/// Points this listener at a new XEF recording and primes the first
/// depth and color frames so consumers have data immediately.
/// </summary>
/// <param name="xefFilePath">Path to the .xef recording to read from.</param>
public void SetXefFile(string xefFilePath)
{
    _xef = new Xef(xefFilePath);

    // Use the coordinate mapper that was embedded in the recording itself.
    CoordinateMapper = _xef.GetEmbeddedCoordinateMapper();

    // Pre-load the first frame pair.
    CopyNextDepth();
    CopyNextColor();
}
/// <summary>
/// Entry point: runs the bundled examples, then performs an ad-hoc check on
/// cam1's calibration XEF — measuring the 3D distance between the first two
/// detected Aruco markers — and displays the last validity mask.
/// </summary>
static void Main(string[] args)
{
    // Run the example pipelines first.
    RemoteKinectXEFExample.Run(null);
    RGBPointCloudExampleWithMarkerHightlights.Run(null);
    MulticameraFusionExample.Run(null);

    var xefPath = @"C:\XEF\cam1_cal.xef";
    var xef = new Xef(xefPath);

    // Load the computer-vision (CV) color frame and detect Aruco markers.
    var colorFrame = xef.LoadCvColorFrame(0);
    var cube = CoordinateDefinition.Microcube();     // NOTE(review): currently unused
    var arucoMarkers = colorFrame.FindAruco();
    var colorBytes = colorFrame.GetBRGABytes();      // NOTE(review): only used by the commented-out export below

    var camSpace = xef.LoadCVCameraSpace(2);

    // 3D center of the first marker (mean over its valid-depth mask).
    var firstRegion = camSpace.SubMat(Cv2.BoundingRect(arucoMarkers.First().Points));
    var validMask = firstRegion.GetRealMask();
    var firstCenter = firstRegion.Mean(validMask);

    // 3D center of the second marker.
    var secondRegion = camSpace.SubMat(Cv2.BoundingRect(arucoMarkers.Skip(1).First().Points));
    validMask = secondRegion.GetRealMask();
    var secondCenter = secondRegion.Mean(validMask);

    // Distance between the two marker centers (scale sanity check).
    var centerDistance = secondCenter.DistanceTo(firstCenter);  // NOTE(review): computed but unused

    var indeterminateMask = firstRegion.GetIndeterminteMask();  // NOTE(review): currently unused

    // Show the second marker's valid-depth mask and wait for user input.
    validMask.ShowNoWait();
    Cv2.WaitKey(0);
    Console.Read();

    // Save as XYZRGB file (open in MeshLab to view)
    // XYZRGB.Export(cameraSpace, colorBytes, @"C:\XEF\cam1_cal.txt");
}
/// <summary>
/// Loads the first depth and color frames from the sample cube XEF, wraps
/// them in their CV types, and displays the color frame until input.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
public static void Run(string[] args)
{
    var xef = new Xef(@"../../../Resources/cube.xef");

    var depthFrame = xef.LoadDepthFrame(0);
    var colorFrame = xef.LoadColorFrame(0);

    // Wrap the raw frames in their CV counterparts.
    var cvColor = new CvColor(colorFrame);
    var cvDepth = new CvDepth(depthFrame);  // NOTE(review): created but never displayed

    // Render Kinect color to UI (using KinectX.Extensions;) and block until input.
    cvColor.Show();
    Console.Read();
}
/// <summary>
/// Calibrates camera 1 from its XEF recording, transforms the camera-space
/// cloud into world coordinates, and exports it as an XYZRGB point cloud.
/// </summary>
public static void Run()
{
    var xefPath = @"C:\XEF\cam1_cal.xef";
    var xef = new Xef(xefPath);

    // Load the computer-vision (CV) color frame and its camera-space frame.
    var colorFrame = xef.LoadCvColorFrame(0);
    var camSpace = xef.LoadCVCameraSpace(0);

    // Solve the camera-to-world pose from the calibration pattern.
    var worldPose = Calibrator.Calibrate(colorFrame, camSpace)
        .Transform
        .CameraSpaceToWorldTx
        .ToMat();

    // Transform the cloud into world coordinates.
    camSpace.Transform(worldPose);

    // Save as XYZRGB file (open in MeshLab to view).
    XYZRGB.Export(camSpace.ToCamSpacePoints(), colorFrame.GetBRGABytes(), @"C:\XEF\cam1_cal.txt");
}
/// <summary>
/// Computes the Kinect pose for a recording by locating the known Aruco
/// cube registration pattern in its first color frame.
/// </summary>
/// <param name="xefPath">Path to the .xef recording.</param>
/// <returns>The transform recovered from the cube registration pattern.</returns>
public static KxTransform GetPoseFromXef(string xefPath)
{
    // The registration pattern we expect to see - in this case a cube.
    var cube = CoordinateDefinition.Microcube();

    // Load the recording and pull the first color frame.
    var xef = new Xef(xefPath);
    var colorFrame = xef.LoadCvColorFrame(0);

    // Find markers (uncomment the draw call to visually verify detection).
    var markers = Vision.FindAruco(colorFrame);
    //Vision.DrawAruco(colorCv).Show();

    // Calculate the pose from the 3D camera-space frame.
    var cameraSpaceImage = xef.LoadCVCameraSpace(5);
    return Vision.GetPoseFromImage(cube, cameraSpaceImage, markers);
}
/// <summary>
/// End-to-end fusion example: finds the cube registration pose in a XEF,
/// initializes a Kinect Fusion volume at that pose, scans the recording,
/// renders the reconstruction, and exports the mesh to cube.ply.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
public static void Run(string[] args)
{
    // Create a defined registration pattern - in this case a cube.
    var cube = CoordinateDefinition.Microcube();

    // Find registration markers in the first color frame.
    var xef = new Xef(@"../../../Resources/cube.xef");
    var colorFrame = xef.LoadCvColorFrame(0);

    // Find and draw (make sure it can be found).
    var markers = Vision.FindAruco(colorFrame);
    //Vision.DrawAruco(colorCv).Show();

    // Calculate the camera pose from the 3D camera-space image.
    var cameraSpaceImage = xef.LoadCVCameraSpace(5);
    var kxTransform = Vision.GetPoseFromImage(cube, cameraSpaceImage, markers);
    var pose = kxTransform.FusionCameraPose.ToMatrix4();

    // Configure the fusion volume resolution before initialization.
    var fusion = new Engine();
    FusionVolume.VoxelsPerMeter = 128;
    FusionVolume.VoxelsX = 384;
    FusionVolume.VoxelsY = 384;
    FusionVolume.VoxelsZ = 384;

    // Start the fusion volume at the first pose.
    fusion.InitializeFusionVolume(pose);
    VolumeResetter.TranslateResetPoseByMinDepthThreshold = false;
    fusion.DataIntegrator.CaptureColor = false;

    // This is where you would set your scan XEF.
    var listener = fusion.StartFrameListener<XefFrameListener>();
    listener.SetXefFile(@"../../../Resources/cube.xef");

    // You need to set world-to-camera BEFORE scanning (if more than one XEF).
    fusion.FusionVolume.WorldToCameraTransform = pose;
    fusion.Scanner.Scan(3, false);

    // Render the reconstruction.
    fusion.RenderController.RenderReconstructionAsMat();
    fusion.FusionVolume.Renderer.RenderReconstruction();

    // Export your model in world space (it is transformed already).
    fusion.MeshExporter.ExportVolume(@"cube.ply");
    Console.Read();
}
/// <summary>
/// Records a Kinect XEF stream of the given duration to the given path,
/// deleting any existing file at that path first. Blocks the calling thread
/// for the recording duration plus a two-second margin.
/// </summary>
/// <param name="duration">How long to record.</param>
/// <param name="path">Destination file path for the recording.</param>
/// <returns>True when the recording completed; false on any failure (logged).</returns>
public bool RecordXef(TimeSpan duration, string path)
{
    try
    {
        _logger.Log(LogLevel.Info, $"Requested xef recording - {duration.TotalMilliseconds} ms.");

        // Clear any previous recording at the target path.
        if (File.Exists(path))
        {
            File.Delete(path);
        }

        Xef.Record(path, duration);

        // Wait out the recording plus a small margin before reporting success.
        Thread.Sleep((int)duration.Add(TimeSpan.FromSeconds(2)).TotalMilliseconds);

        // Fixed message typo ("Bytes save to" -> "Bytes saved to").
        _logger.Log(LogLevel.Info, $"Recording successful. Bytes saved to - {path}");
        return true;
    }
    catch (Exception e)
    {
        _logger.Log(LogLevel.Error, e.ToString());
        return false;
    }
}
/// <summary>
/// Records a Kinect XEF stream of the given duration to "record.xef" beside
/// the executing assembly, remembering that path in _lastFilePath. Blocks the
/// calling thread for the recording duration plus a two-second margin.
/// </summary>
/// <param name="duration">How long to record.</param>
/// <returns>True when the recording completed; false on any failure (logged).</returns>
public bool RecordXef(TimeSpan duration)
{
    try
    {
        _logger.Log(LogLevel.Info, $"Requested xef recording - {duration.TotalMilliseconds} ms.");

        // Record next to the executing assembly, replacing any prior run.
        var assemblyDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
        _lastFilePath = Path.Combine(assemblyDir, "record.xef");
        if (File.Exists(_lastFilePath))
        {
            File.Delete(_lastFilePath);
        }

        Xef.Record(_lastFilePath, duration);

        // Wait out the recording plus a small margin before reporting success.
        Thread.Sleep((int)duration.Add(TimeSpan.FromSeconds(2)).TotalMilliseconds);

        _logger.Log(LogLevel.Info, $"Recording successful. Sending bytes from xef recording - {_lastFilePath}");
        return true;
    }
    catch (Exception e)
    {
        _logger.Log(LogLevel.Error, e.ToString());
        return false;
    }
}