/// <summary>
/// Builds a per-pixel lookup table of unit view-direction vectors by undistorting
/// each pixel with the inverse Brown-Conrady model (radial terms K1/K2 only).
/// </summary>
/// <param name="intrinsics">Camera intrinsics: focal lengths (Fx, Fy), principal point (Cx, Cy), radial coefficients (K1, K2).</param>
/// <param name="width">Image width in pixels.</param>
/// <param name="height">Image height in pixels.</param>
/// <returns>A <see cref="Point3fImage"/> where each entry is a normalized direction vector for that pixel.</returns>
private Point3fImage CalcDirections(InverseBrownConradyParams intrinsics, int width, int height)
{
    var result = new Point3fImage(width, height);
    for (int v = 0; v < height; v++)
    {
        // Normalized image-plane y coordinate for this row (constant across the row).
        float ny = (intrinsics.Cy - v) / intrinsics.Fy;
        float nySquared = ny * ny;
        for (int u = 0; u < width; u++)
        {
            // Normalized image-plane x coordinate for this column.
            float nx = (intrinsics.Cx - u) / intrinsics.Fx;

            // Radial distortion correction: scale = 1 + K1*r^2 + K2*r^4.
            float rSquared = nx * nx + nySquared;
            float scale = 1 + intrinsics.K1 * rSquared + intrinsics.K2 * (rSquared * rSquared);

            // Undistorted direction on the normalized image plane (z = 1).
            float ux = nx * scale;
            float uy = ny * scale;

            // Normalize to unit length; z is 1 on the normalized image plane.
            float invLen = (float)(1 / Math.Sqrt(ux * ux + uy * uy + 1));
            result[v, u] = new Point3f(-ux * invLen, -uy * invLen, invLen);
        }
    }
    return result;
}
/// <summary>
/// Background receive loop. Each iteration syncs the protocol, receives one JSON
/// frame, lazily initializes the direction lookup table and cached JSON image
/// offsets on the first frame, and signals <c>_frameAvailable</c>. After more than
/// <c>NumFrameRetries</c> consecutive failures the loop records the error and shuts down.
/// </summary>
private void UpdateLoop()
{
    int consecutiveFailCounter = 0;
    while (!_cancelUpdateThreadSource.Token.IsCancellationRequested)
    {
        try
        {
            // Frame layout in the received JSON array:
            // 0 - CameraModel
            // 1 - distance Image
            // 2 - intensity Image
            SyncCola();
            uint jsonSize = ReceiveDataSize();
            _backJsonData = ReceiveJsonString(jsonSize);

            // One-time initialization on the first successfully received frame.
            if (null == _directions)
            {
                string json = new string(_backJsonData);
                var frameData = JsonConvert.DeserializeObject<List<CameraObject>>(json);
                _imageWidth = frameData[1].Data.Data.Width;
                _imageHeight = frameData[1].Data.Data.Height;
                InverseBrownConradyParams intrinsics = ParseIntrinsics(frameData[0]);
                _directions = CalcDirections(intrinsics, _imageWidth, _imageHeight);

                // Determine the offsets of intensity and distance image, which are
                // stable over time. Ordinal comparison: this is machine-readable
                // JSON, not user text (avoids culture-sensitive IndexOf, CA1310).
                string needle = "\"data\":\"";
                int start_first_img = json.IndexOf(needle, 0, StringComparison.Ordinal) + needle.Length;
                int end_first_img = json.IndexOf("\"", start_first_img, StringComparison.Ordinal);
                _distanceJsonOffset = start_first_img;
                _distanceJsonSize = end_first_img - start_first_img;
                int start_second_img = json.IndexOf(needle, end_first_img, StringComparison.Ordinal) + needle.Length;
                int end_second_img = json.IndexOf("\"", start_second_img, StringComparison.Ordinal);
                _intensityJsonOffset = start_second_img;
                _intensityJsonSize = end_second_img - start_second_img;

                // Both image blobs must be little-endian uint16; fail fast otherwise.
                if ("uint16" != frameData[1].Data.Data.ImageType || "uint16" != frameData[2].Data.Data.ImageType)
                {
                    string format = frameData[1].Data.Data.ImageType != "uint16"
                        ? frameData[1].Data.Data.ImageType
                        : frameData[2].Data.Data.ImageType;
                    string msg = $"{Name}: Frame data has unexpected format: '{format}', expected: 'uint16'";
                    log.Error(msg);
                    throw new ImageAcquisitionFailedException(msg);
                }
                if ("little" != frameData[1].Data.Data.Pixels.endian || "little" != frameData[2].Data.Data.Pixels.endian)
                {
                    string endian = frameData[1].Data.Data.Pixels.endian != "little"
                        ? frameData[1].Data.Data.Pixels.endian
                        : frameData[2].Data.Data.Pixels.endian;
                    string msg = $"{Name}: Frame data has unexpected endian: '{endian}', expected: 'little'";
                    log.Error(msg);
                    throw new ImageAcquisitionFailedException(msg);
                }
            }
            _frameAvailable.Set();

            // Reset only after a fully successful fetch. BUG FIX: this reset used to
            // live at the end of the while body, outside the try/catch, so it also
            // ran after a failed iteration — the counter never exceeded 1 and the
            // retry limit below could never trigger.
            consecutiveFailCounter = 0;
        }
        catch (Exception e)
        {
            consecutiveFailCounter++;
            if (consecutiveFailCounter > NumFrameRetries)
            {
                string msg = $"{Name}: Receive failed more than {NumFrameRetries} times in a row. Shutting down update loop.";
                log.Error(msg);
                log.Error(e.Message);
                _updateThreadError = msg;
                _updateThreadException = e;
                // Wake any waiter so it can observe the recorded error state.
                _frameAvailable.Set();
                break;
            }
        }
    }
}