public void SetData(cv.Mat input)
{
    // Convert the incoming depth map to a single-channel float matrix.
    zMap = new cv.Mat(new cv.Size(input.Width, input.Height), cv.MatType.CV_32FC1);
    input.ConvertTo(zMap, cv.MatType.CV_32FC1);

    Init(zMap.Rows, zMap.Cols);

    // Fill the mesh data series from the depth map.
    Parallel.For(0, xSize, x =>
    {
        for (int z = 0; z < zSize; ++z)
        {
            MeshDataSeries[z, x] = zMap.At<float>(x, z);
        }
    });

    // Compute the value range once; updating yMax inside the parallel loop would be a data race.
    zMap.MinMaxLoc(out double min, out double max);
    if (yMax < max)
    {
        yMax = (float)max;
    }

    surfaceMeshRenderableSeries.Maximum = max;
    surfaceMeshRenderableSeries.Minimum = min;
}
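// SetData above references members that are not part of this excerpt (zMap, yMax, xSize, zSize,
// MeshDataSeries, surfaceMeshRenderableSeries), an Init helper, and a "cv" alias for OpenCvSharp.
// The sketch below is only an illustration of what those members might look like, assuming a
// SciChart WPF 3D surface mesh; the exact types and constructor signatures depend on the charting
// library actually used and are not taken from the original code.

// using cv = OpenCvSharp;  // alias assumed by SetData

private cv.Mat zMap;
private float yMax;                                                 // assumed: running maximum of the depth values
private int xSize = -1, zSize = -1;

private UniformGridDataSeries3D<double> MeshDataSeries;             // assumed SciChart 3D grid series
private SurfaceMeshRenderableSeries3D surfaceMeshRenderableSeries;  // assumed SciChart mesh series

private void Init(int rows, int cols)
{
    xSize = rows;
    zSize = cols;
    // Allocate a grid matching the depth map; SetData writes it as MeshDataSeries[z, x].
    MeshDataSeries = new UniformGridDataSeries3D<double>(xSize, zSize);
    surfaceMeshRenderableSeries.DataSeries = MeshDataSeries;
}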
static void Main(string[] args)
{
    if (args.Length != 3)
    {
        Console.WriteLine("Usage: ");
        Console.WriteLine("    ZED_SVO_Export A B C ");
        Console.WriteLine("Please use the following parameters from the command line:");
        Console.WriteLine(" A - SVO file path (input) : \"path/to/file.svo\"");
        Console.WriteLine(" B - AVI file path (output) or image sequence folder (output) : \"path/to/output/file.avi\" or \"path/to/output/folder\"");
        Console.WriteLine(" C - Export mode:  0=Export LEFT+RIGHT AVI.");
        Console.WriteLine("                   1=Export LEFT+DEPTH_VIEW AVI.");
        Console.WriteLine("                   2=Export LEFT+RIGHT image sequence.");
        Console.WriteLine("                   3=Export LEFT+DEPTH_VIEW image sequence.");
        Console.WriteLine("                   4=Export LEFT+DEPTH_16Bit image sequence.");
        Console.WriteLine(" A and B need to end with '/' or '\\'\n\n");
        Console.WriteLine("Examples: \n");
        Console.WriteLine("  (AVI LEFT+RIGHT)            ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/file.avi\" 0");
        Console.WriteLine("  (AVI LEFT+DEPTH)            ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/file.avi\" 1");
        Console.WriteLine("  (SEQUENCE LEFT+RIGHT)       ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 2");
        Console.WriteLine("  (SEQUENCE LEFT+DEPTH)       ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 3");
        Console.WriteLine("  (SEQUENCE LEFT+DEPTH_16Bit) ZED_SVO_Export \"path/to/file.svo\" \"path/to/output/folder\" 4");
        Environment.Exit(-1);
    }

    string svoInputPath = args[0];
    string outputPath = args[1];
    bool outputAsVideo = true;
    APP_TYPE appType = APP_TYPE.LEFT_AND_RIGHT;

    if (args[2].Equals("1") || args[2].Equals("3"))
    {
        appType = APP_TYPE.LEFT_AND_DEPTH;
    }
    if (args[2].Equals("4"))
    {
        appType = APP_TYPE.LEFT_AND_DEPTH_16;
    }

    // Check if exporting to AVI or an image sequence
    if (!args[2].Equals("0") && !args[2].Equals("1"))
    {
        outputAsVideo = false;
    }

    if (!outputAsVideo && !Directory.Exists(outputPath))
    {
        Console.WriteLine("Output directory doesn't exist. Check permissions or create it. " + outputPath);
        Environment.Exit(-1);
    }

    if (!outputAsVideo && outputPath.Substring(outputPath.Length - 1) != "/" && outputPath.Substring(outputPath.Length - 1) != "\\")
    {
        Console.WriteLine("Error: output folder needs to end with '/' or '\\'. " + outputPath);
        Environment.Exit(-1);
    }

    // Create the ZED camera
    Camera zed = new Camera(0);

    // Specify SVO path parameters
    InitParameters initParameters = new InitParameters()
    {
        inputType = INPUT_TYPE.SVO,
        pathSVO = svoInputPath,
        svoRealTimeMode = true,
        coordinateUnits = UNIT.MILLIMETER
    };

    ERROR_CODE zedOpenState = zed.Open(ref initParameters);
    if (zedOpenState != ERROR_CODE.SUCCESS)
    {
        Environment.Exit(-1);
    }

    Resolution imageSize = zed.GetCalibrationParameters().leftCam.resolution;

    sl.Mat leftImage = new sl.Mat();
    leftImage.Create(imageSize, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat leftImageOCV = SLMat2CVMat(ref leftImage, MAT_TYPE.MAT_8U_C4);

    sl.Mat rightImage = new sl.Mat();
    rightImage.Create(imageSize, MAT_TYPE.MAT_8U_C4);
    OpenCvSharp.Mat rightImageOCV = SLMat2CVMat(ref rightImage, MAT_TYPE.MAT_8U_C4);

    sl.Mat depthImage = new sl.Mat();
    depthImage.Create(imageSize, MAT_TYPE.MAT_32F_C1);
    // Wrap the 32-bit float depth measure with a matching OpenCV type,
    // so the later ConvertTo(CV_16UC1) operates on float data.
    OpenCvSharp.Mat depthImageOCV = SLMat2CVMat(ref depthImage, MAT_TYPE.MAT_32F_C1);

    OpenCvSharp.Mat imageSideBySide = new OpenCvSharp.Mat();
    if (outputAsVideo)
    {
        imageSideBySide = new OpenCvSharp.Mat((int)imageSize.height, (int)imageSize.width * 2, OpenCvSharp.MatType.CV_8UC3);
    }

    // Create the video writer
    OpenCvSharp.VideoWriter videoWriter = new OpenCvSharp.VideoWriter();
    if (outputAsVideo)
    {
        int fourcc = OpenCvSharp.VideoWriter.FourCC('M', '4', 'S', '2'); // MPEG-4 part 2 codec
        int frameRate = Math.Max(zed.GetInitParameters().cameraFPS, 25); // Minimum write rate in OpenCV is 25
        Console.WriteLine(outputPath);
        videoWriter.Open(outputPath, fourcc, frameRate, new OpenCvSharp.Size((int)imageSize.width * 2, (int)imageSize.height));

        if (!videoWriter.IsOpened())
        {
            Console.WriteLine("Error: OpenCV video writer cannot be opened. Please check the .avi file path and write permissions.");
            zed.Close();
            Environment.Exit(-1);
        }
    }

    RuntimeParameters rtParams = new RuntimeParameters();
    rtParams.sensingMode = SENSING_MODE.FILL;

    // Start SVO conversion to AVI/SEQUENCE
    Console.WriteLine("Converting SVO... press Q to interrupt conversion");

    int nbFrames = zed.GetSVONumberOfFrames();
    int svoPosition = 0;
    zed.SetSVOPosition(svoPosition);

    while (!exit_app)
    {
        exit_app = System.Windows.Input.Keyboard.IsKeyDown(System.Windows.Input.Key.Q);

        ERROR_CODE err = zed.Grab(ref rtParams);
        if (err == ERROR_CODE.SUCCESS)
        {
            svoPosition = zed.GetSVOPosition();

            // Retrieve SVO images
            zed.RetrieveImage(leftImage, VIEW.LEFT);
            switch (appType)
            {
                case APP_TYPE.LEFT_AND_RIGHT:
                    zed.RetrieveImage(rightImage, VIEW.RIGHT);
                    break;
                case APP_TYPE.LEFT_AND_DEPTH:
                    zed.RetrieveImage(rightImage, VIEW.DEPTH);
                    break;
                case APP_TYPE.LEFT_AND_DEPTH_16:
                    zed.RetrieveMeasure(depthImage, MEASURE.DEPTH);
                    break;
                default:
                    break;
            }

            if (outputAsVideo)
            {
                // Convert the SVO images from RGBA to RGB and copy them side by side
                Cv2.CvtColor(leftImageOCV, imageSideBySide[new OpenCvSharp.Rect(0, 0, (int)imageSize.width, (int)imageSize.height)], ColorConversionCodes.BGRA2BGR);
                Cv2.CvtColor(rightImageOCV, imageSideBySide[new OpenCvSharp.Rect((int)imageSize.width, 0, (int)imageSize.width, (int)imageSize.height)], ColorConversionCodes.BGRA2BGR);

                // Write the RGB image to the video
                videoWriter.Write(imageSideBySide);
            }
            else
            {
                // Generate file names
                string filename1 = outputPath + "/left" + svoPosition + ".png";
                string filename2 = outputPath + (appType == APP_TYPE.LEFT_AND_RIGHT ? "/right" : "/depth") + svoPosition + ".png";

                // Save the left image
                Cv2.ImWrite(filename1, leftImageOCV);

                // Save the right image or the depth
                if (appType != APP_TYPE.LEFT_AND_DEPTH_16)
                {
                    Cv2.ImWrite(filename2, rightImageOCV);
                }
                else
                {
                    // Convert the 32-bit float depth to 16 bit before saving
                    OpenCvSharp.Mat depth16 = new OpenCvSharp.Mat();
                    depthImageOCV.ConvertTo(depth16, MatType.CV_16UC1);
                    Cv2.ImWrite(filename2, depth16);
                }
            }

            // Display progress
            ProgressBar((float)svoPosition / (float)nbFrames, 30);
        }
        else if (zed.GetSVOPosition() >= nbFrames - (zed.GetInitParameters().svoRealTimeMode ? 2 : 1))
        {
            // Leave the loop instead of calling Environment.Exit so the writer and camera are released below
            Console.WriteLine("SVO end has been reached. Exiting now.");
            exit_app = true;
        }
        else
        {
            Console.WriteLine("Grab Error: " + err);
            exit_app = true;
        }
    }

    if (outputAsVideo)
    {
        // Close the video writer
        videoWriter.Release();
    }

    zed.Close();
}
public void Run()
{
    Mat img = Cv2.ImRead(FilePath.Image.Lenna, ImreadModes.GrayScale);

    // Expand the input image to the optimal DFT size,
    // adding zero values on the border
    Mat padded = new Mat();
    int m = Cv2.GetOptimalDFTSize(img.Rows);
    int n = Cv2.GetOptimalDFTSize(img.Cols);
    Cv2.CopyMakeBorder(img, padded, 0, m - img.Rows, 0, n - img.Cols, BorderTypes.Constant, Scalar.All(0));

    // Add another plane of zeros to the expanded image;
    // this way the result may fit in the source matrix
    Mat paddedF32 = new Mat();
    padded.ConvertTo(paddedF32, MatType.CV_32F);
    Mat[] planes = { paddedF32, Mat.Zeros(padded.Size(), MatType.CV_32F) };
    Mat complex = new Mat();
    Cv2.Merge(planes, complex);

    Mat dft = new Mat();
    Cv2.Dft(complex, dft);

    // Compute the magnitude and switch to a logarithmic scale
    // => log(1 + sqrt(Re(DFT(I))^2 + Im(DFT(I))^2))
    Mat[] dftPlanes;
    Cv2.Split(dft, out dftPlanes); // dftPlanes[0] = Re(DFT(I)), dftPlanes[1] = Im(DFT(I))

    Mat magnitude = new Mat();
    Cv2.Magnitude(dftPlanes[0], dftPlanes[1], magnitude);

    magnitude += Scalar.All(1); // switch to logarithmic scale
    Cv2.Log(magnitude, magnitude);

    // Crop the spectrum if it has an odd number of rows or columns
    Mat spectrum = magnitude[new Rect(0, 0, magnitude.Cols & -2, magnitude.Rows & -2)];

    // Rearrange the quadrants of the Fourier image so that the origin is at the image center
    int cx = spectrum.Cols / 2;
    int cy = spectrum.Rows / 2;
    Mat q0 = new Mat(spectrum, new Rect(0, 0, cx, cy));   // Top-Left - create a ROI per quadrant
    Mat q1 = new Mat(spectrum, new Rect(cx, 0, cx, cy));  // Top-Right
    Mat q2 = new Mat(spectrum, new Rect(0, cy, cx, cy));  // Bottom-Left
    Mat q3 = new Mat(spectrum, new Rect(cx, cy, cx, cy)); // Bottom-Right

    // Swap quadrants (Top-Left with Bottom-Right)
    Mat tmp = new Mat();
    q0.CopyTo(tmp);
    q3.CopyTo(q0);
    tmp.CopyTo(q3);

    // Swap quadrants (Top-Right with Bottom-Left)
    q1.CopyTo(tmp);
    q2.CopyTo(q1);
    tmp.CopyTo(q2);

    // Transform the matrix with float values into a viewable image form (floats between 0 and 1)
    Cv2.Normalize(spectrum, spectrum, 0, 1, NormTypes.MinMax);

    // Show the result
    Cv2.ImShow("Input Image", img);
    Cv2.ImShow("Spectrum Magnitude", spectrum);

    // Calculate the inverse DFT to reconstruct the image
    Mat inverseTransform = new Mat();
    Cv2.Dft(dft, inverseTransform, DftFlags.Inverse | DftFlags.RealOutput);
    Cv2.Normalize(inverseTransform, inverseTransform, 0, 1, NormTypes.MinMax);
    Cv2.ImShow("Reconstructed by Inverse DFT", inverseTransform);

    Cv2.WaitKey();
}
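// The snippets above are method-level excerpts and omit their using directives. To compile them
// outside their original projects, the following directives would be needed; this is a hedged
// list inferred from the APIs referenced above, not copied from the original files.
//
// using System;                    // Console, Math, Environment
// using System.IO;                 // Directory
// using System.Threading.Tasks;    // Parallel.For
// using OpenCvSharp;               // Mat, Cv2, MatType, Rect, Scalar, VideoWriter, ...
// using sl;                        // ZED SDK types: Camera, InitParameters, RuntimeParameters, ...
// using cv = OpenCvSharp;          // alias used by SetData
//
// The Q-key check in Main also requires a reference to the WPF assemblies providing
// System.Windows.Input.Keyboard.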