static void Main(string[] args) { try { if (args.Length < 2) { Console.WriteLine("Usage: DicomSeriesReader <input_directory> <output_file>"); return; } Console.WriteLine("Reading Dicom directory: " + args[0]); ImageSeriesReader reader = new ImageSeriesReader(); VectorString dicom_names = ImageSeriesReader.GetGDCMSeriesFileNames(args[0]); reader.SetFileNames(dicom_names); Image image = reader.Execute(); VectorUInt32 size = image.GetSize(); Console.WriteLine("Image size: " + size[0] + " " + size[1] + " " + size[2]); Console.WriteLine("Writing image: " + args[1]); ImageFileWriter writer = new ImageFileWriter(); writer.SetFileName(args[1]); writer.Execute(image); if (Environment.GetEnvironmentVariable("SITK_NOSHOW") == null) { SimpleITK.Show(image, "Dicom Series"); } } catch (Exception ex) { Console.WriteLine("Usage: DicomSeriesReader <input_directory> <output_file>"); Console.WriteLine(ex); } }
private static int[] AsPixelArray(Image image) {
    // Cast to a known pixel type so the buffer layout is 32-bit signed integers.
    var copy = SimpleITK.Cast(image, PixelIDValueEnum.sitkInt32);
    var length = Convert.ToInt32(copy.GetNumberOfPixels());
    var buffer = copy.GetConstBufferAsInt32();
    // One array element per pixel; Marshal.Copy counts elements, not bytes.
    var bufferAsArray = new int[length];
    Marshal.Copy(buffer, bufferAsArray, 0, length);
    return(bufferAsArray);
}
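A small, hedged usage sketch for the helper above; it assumes the usual using itk.simple;, using System; and using System.Linq; directives are in scope, and the 3x2 test image and pixel value are arbitrary illustration values.

private static void AsPixelArrayDemo() {
    // Build a tiny 8-bit test image, set one pixel, then sum the values returned by AsPixelArray.
    Image small = new Image(3, 2, PixelIDValueEnum.sitkUInt8);
    small.SetPixelAsUInt8(new VectorUInt32(new uint[] { 1, 0 }), 7);
    int[] pixels = AsPixelArray(small);
    Console.WriteLine("Number of pixels: {0}, sum: {1}", pixels.Length, pixels.Sum());
}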
/// <summary>
/// Convert a DICOM directory to NIfTI format
/// </summary>
/// <param name="dicomPath">DICOM directory</param>
/// <returns>Path of the .nii.gz file produced by the conversion</returns>
private string ConvetDicom2NiFTI(string dicomPath)
{
    // TODO: to be completed by Wu Changlin.
    // Sketch of the conversion: read the DICOM series from the directory and write it
    // out as a compressed NIfTI file. The output file name below is a placeholder.
    VectorString dicomNames = ImageSeriesReader.GetGDCMSeriesFileNames(dicomPath);
    ImageSeriesReader reader = new ImageSeriesReader();
    reader.SetFileNames(dicomNames);
    Image image = reader.Execute();

    string niFTIFilePath = dicomPath + "/converted.nii.gz"; // placeholder output path
    ImageFileWriter writer = new ImageFileWriter();
    writer.SetFileName(niFTIFilePath);
    writer.Execute(image);
    return(niFTIFilePath);
}
static void Main(string[] args) { try { // Create an image PixelIDValueEnum pixelType = PixelIDValueEnum.sitkUInt8; VectorUInt32 imageSize = new VectorUInt32(new uint[] { 128, 128 }); Image image = new Image(imageSize, pixelType); // Create a face image VectorDouble faceSize = new VectorDouble(new double[] { 64, 64 }); VectorDouble faceCenter = new VectorDouble(new double[] { 64, 64 }); Image face = SimpleITK.GaussianSource(pixelType, imageSize, faceSize, faceCenter); // Create eye images VectorDouble eyeSize = new VectorDouble(new double[] { 5, 5 }); VectorDouble eye1Center = new VectorDouble(new double[] { 48, 48 }); VectorDouble eye2Center = new VectorDouble(new double[] { 80, 48 }); Image eye1 = SimpleITK.GaussianSource(pixelType, imageSize, eyeSize, eye1Center, 150); Image eye2 = SimpleITK.GaussianSource(pixelType, imageSize, eyeSize, eye2Center, 150); // Apply the eyes to the face face = SimpleITK.Subtract(face, eye1); face = SimpleITK.Subtract(face, eye2); face = SimpleITK.BinaryThreshold(face, 200, 255, 255); // Create the mouth VectorDouble mouthRadii = new VectorDouble(new double[] { 30, 20 }); VectorDouble mouthCenter = new VectorDouble(new double[] { 64, 76 }); Image mouth = SimpleITK.GaussianSource(pixelType, imageSize, mouthRadii, mouthCenter); mouth = SimpleITK.BinaryThreshold(mouth, 200, 255, 255); mouth = SimpleITK.Subtract(255, mouth); // Paste the mouth onto the face VectorUInt32 mouthSize = new VectorUInt32(new uint[] { 64, 18 }); VectorInt32 mouthLoc = new VectorInt32(new int[] { 32, 76 }); face = SimpleITK.Paste(face, mouth, mouthSize, mouthLoc, mouthLoc); // Apply the face to the original image image = SimpleITK.Add(image, face); // Display the results if (Environment.GetEnvironmentVariable("SITK_NOSHOW") == null) { SimpleITK.Show(image, "Hello World: CSharp", true); } } catch (Exception ex) { Console.WriteLine(ex); } }
static void Main(string[] args) {
    if (args.Length < 3) {
        Console.WriteLine("Usage: {0} <fixedImageFile> <movingImageFile> <outputTransformFile>\n", "ImageRegistrationMethod2");
        return;
    }

    ImageFileReader reader = new ImageFileReader();
    reader.SetOutputPixelType(PixelIDValueEnum.sitkFloat32);

    reader.SetFileName(args[0]);
    Image fixedImage = reader.Execute();
    fixedImage = SimpleITK.Normalize(fixedImage);
    // Assign the result back; DiscreteGaussian returns a new image rather than smoothing in place.
    fixedImage = SimpleITK.DiscreteGaussian(fixedImage, 2.0);

    reader.SetFileName(args[1]);
    Image movingImage = reader.Execute();
    movingImage = SimpleITK.Normalize(movingImage);
    movingImage = SimpleITK.DiscreteGaussian(movingImage, 2.0);

    ImageRegistrationMethod R = new ImageRegistrationMethod();
    R.SetMetricAsJointHistogramMutualInformation();

    double learningRate = 1;
    uint numberOfIterations = 200;
    double convergenceMinimumValue = 1e-4;
    uint convergenceWindowSize = 5;
    R.SetOptimizerAsGradientDescentLineSearch(learningRate, numberOfIterations, convergenceMinimumValue, convergenceWindowSize);

    R.SetInitialTransform(new TranslationTransform(fixedImage.GetDimension()));
    R.SetInterpolator(InterpolatorEnum.sitkLinear);

    IterationUpdate cmd = new IterationUpdate(R);
    R.AddCommand(EventEnum.sitkIterationEvent, cmd);

    Transform outTx = R.Execute(fixedImage, movingImage);
    outTx.WriteTransform(args[2]);
}
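The IterationUpdate command attached above is defined elsewhere; a minimal sketch in the style of the SimpleITK C# registration examples (assumed here, not necessarily this project's exact class) could look like this.

class IterationUpdate : Command {
    private ImageRegistrationMethod m_Method;

    public IterationUpdate(ImageRegistrationMethod m) {
        m_Method = m;
    }

    // Called on every sitkIterationEvent; prints the optimizer progress.
    public override void Execute() {
        Console.WriteLine("{0} = {1}",
                          m_Method.GetOptimizerIteration(),
                          m_Method.GetMetricValue());
    }
}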
public Bitmap konwertujObraz(Image obraz1, int przekroj = 0) {
    uint r = obraz1.GetWidth();
    // VectorUInt32 w = new VectorUInt32(new[] { r, 512, 4 + 1 });
    VectorInt32 start = new VectorInt32(new[] { 0, 0, 0 });
    VectorInt32 size1 = new VectorInt32(new[] { 512, 512, 1 });
    obraz1 = WybierzPrzekroj(obraz1, przekroj);

    // Window the intensities into the displayable 0..255 range.
    IntensityWindowingImageFilter normalize = new IntensityWindowingImageFilter();
    normalize.SetOutputMinimum(0);
    normalize.SetOutputMaximum(255);
    obraz1 = normalize.Execute(obraz1);

    PixelIDValueEnum u = PixelIDValueEnum.sitkFloat32;
    int len = 1;
    Image input = SimpleITK.Cast(obraz1, u);
    VectorUInt32 size = input.GetSize();
    for (int dim = 0; dim < input.GetDimension(); dim++) {
        len *= (int)size[dim];
    }

    // Copy the pixel buffer into a managed array.
    IntPtr buffer = input.GetBufferAsFloat();
    float[] bufferAsArray = new float[len];
    float[,] newData = new float[size[0], size[1]];
    Marshal.Copy(buffer, bufferAsArray, 0, len);

    obrazBitmap = new Bitmap(Convert.ToInt32(size[0]), Convert.ToInt32(size[1]));
    for (int j = 0; j < size[1]; j++) {
        for (int i = 0; i < size[0]; i++) {
            // Row-major layout: the row stride is the image width, size[0].
            var bur = bufferAsArray[j * size[0] + i];
            System.Drawing.Color newColor = System.Drawing.Color.FromArgb((int)bur, 0, 0, 0);
            obrazBitmap.SetPixel(j, i, newColor);
        }
    }
    return(obrazBitmap);
}
private Color[] LoadNIFTIFile() { Color[] colors; Image input = SimpleITK.ReadImage("/home/josher/Dropbox/UnityResources/2016.11.29.Brain.Tumor.in.Unity3D/Data/MRI_After_Surgery/AX_T1_3D_MPRAGE_NAVIGATION.nii.gz"); MinimumMaximumImageFilter filter = new MinimumMaximumImageFilter(); filter.Execute(input); uint max = (uint)filter.GetMaximum(); uint min = (uint)filter.GetMinimum(); size[0] = input.GetWidth(); size[1] = input.GetHeight(); size[2] = input.GetDepth(); size2p [0] = ClosestPowOfTwo(size [0]); size2p [1] = ClosestPowOfTwo(size [1]); size2p [2] = ClosestPowOfTwo(size [2]); //data = new float[size [0], size [1], size [2]]; colors = new Color[size2p[0] * size2p[1] * size2p[2]]; Color color = Color.black; for (uint x = 0; x < size2p[0]; x++) { for (uint y = 0; y < size2p[1]; y++) { for (uint z = 0; z < size2p[2]; z++) { float temp = (float)input.GetPixelAsInt16(new VectorUInt32(new uint[] { x *size[0] / size2p[0], y *size[1] / size2p[1], z *size[2] / size2p[2] })); // if(temp ==-1.0f) { // temp=1100.0f; // } //data [x, y, z] = (temp-min)/(max-min); color.a = (temp - min) / (max - min); colors[x + y * size2p[0] + z * size2p[0] * size2p[1]] = color; } } } return(colors); }
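The ClosestPowOfTwo helper called above is not shown; below is a minimal sketch, assuming it rounds a texture dimension up to the next power of two (which matches how size2p is used to index into the padded color array).

// Assumed helper: round a texture dimension up to the next power of two.
// (Unity's Mathf.NextPowerOfTwo does the same for int values.)
private uint ClosestPowOfTwo(uint n) {
    uint p = 1;
    while (p < n) {
        p <<= 1; // double until we reach or exceed n
    }
    return p;
}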
public static int[] fGetDatasetLength() {
    Class.manageImg managerView = HttpContext.Current.Session["managerView"] as Class.manageImg;
    String sPath = managerView.sCurrentImageFolder[0];
    String[] sFilenames;
    if (sPath == null) {
        throw new ArgumentException("path variable in manageImg is not specified!");
    } else {
        sFilenames = (System.IO.Directory.EnumerateFiles(sPath, "*.*", SearchOption.AllDirectories).Where(s => Constant.lsExtensions.Any(e => s.EndsWith(e)))).ToArray();
        int[] iSize = new int[] { 0, 0, 0 };
        // get dataset length and set hiddenfield
        itk.simple.Image itkImage = SimpleITK.ReadImage(sFilenames[0]); // Only the first list element is read out?
        VectorUInt32 UInt32Size = itkImage.GetSize();
        iSize[0] = unchecked ((int)UInt32Size[0]);
        iSize[1] = unchecked ((int)UInt32Size[1]);
        iSize[2] = unchecked ((int)UInt32Size[2]);
        return(iSize);
    }
}
static void Main(string[] args) {
    if (args.Length < 1) {
        Console.WriteLine("Usage: SimpleGaussian <input>");
        return;
    }
    // Read input image
    itk.simple.Image input = SimpleITK.ReadImage(args[0]);

    // Cast so we know the pixel type
    input = SimpleITK.Cast(input, PixelId.sitkFloat32);

    // calculate the number of pixels
    VectorUInt32 size = input.GetSize();
    int len = 1;
    for (int dim = 0; dim < input.GetDimension(); dim++) {
        len *= (int)size[dim];
    }

    IntPtr buffer = input.GetBufferAsFloat();

    // Note: C# also has GetConstBufferAs... methods which do not
    // implicitly call MakeUnique.

    // There are two ways to access the buffer:

    // (1) Access the underlying buffer as a pointer in an "unsafe" block
    // (note that in C# "unsafe" simply means that the compiler can not
    // perform full type checking), and requires the -unsafe compiler flag
    // unsafe {
    //     float* bufferPtr = (float*)buffer.ToPointer();
    //     // Now the byte pointer can be accessed as per Brad's email
    //     // (of course this example is only a 2d single channel image):
    //     // This is a 1-D array but can be accessed as 3-D. Given an
    //     // image of size [xS,yS,zS], you can access the image at
    //     // index [x,y,z] as you wish by image[x+y*xS+z*xS*yS],
    //     // so x is the fastest axis and z is the slowest.
    //     for (int j = 0; j < size[1]; j++) {
    //         for (int i = 0; i < size[0]; i++) {
    //             float pixel = bufferPtr[i + j*size[0]];
    //             // Do something with pixel here
    //         }
    //     }
    // }

    // (2) Copy the buffer to a "safe" array (i.e. a fully typed array)
    // (note that this means memory is duplicated)
    float[] bufferAsArray = new float[len]; // Allocates new memory the size of input
    Marshal.Copy(buffer, bufferAsArray, 0, len);
    double total = 0.0;
    for (int j = 0; j < size[1]; j++) {
        for (int i = 0; i < size[0]; i++) {
            // Row stride is the image width, size[0].
            float pixel = bufferAsArray[i + j * size[0]];
            total += pixel;
        }
    }
    Console.WriteLine("Pixel value total: {0}", total);
}
// Use this for initialization void Start() { Image input = SimpleITK.ReadImage("/home/josher/UnityProjects/Read_Niftis/Assets/Ax_SWI.nii.gz"); }
/*! Loads a single file and creates an array of colors from the pixels. * The array of colors can later be used to generate a texture, see getTexture2D() */ private void loadImageData(int slice) { VectorString fileNames = seriesInfo.filenames; // Read the DICOM image: Image image = SimpleITK.ReadImage(fileNames[slice]); origTexWidth = (int)image.GetWidth(); origTexHeight = (int)image.GetHeight(); //int origTexDepth = (int)image.GetDepth (); texWidth = Mathf.NextPowerOfTwo((int)image.GetWidth()); texHeight = Mathf.NextPowerOfTwo((int)image.GetHeight()); texDepth = 1; colors = new Color32[texWidth * texHeight]; int intercept = 0; int slope = 1; try { intercept = Int32.Parse(image.GetMetaData("0028|1052")); slope = Int32.Parse(image.GetMetaData("0028|1053")); } catch { } if (image.GetDimension() != 2 && image.GetDimension() != 3) { throw(new System.Exception("Only 2D and 3D images are currently supported. Dimensions of image: " + image.GetDimension())); } Int64 min = int.MaxValue; Int64 max = int.MinValue; // Copy the image into a colors array: IntPtr bufferPtr; UInt32 numberOfPixels = image.GetWidth() * image.GetHeight(); if (image.GetPixelID() == PixelIDValueEnum.sitkUInt16) { bufferPtr = image.GetBufferAsUInt16(); Int16[] colorsTmp = new Int16[numberOfPixels]; Marshal.Copy(bufferPtr, colorsTmp, 0, (int)numberOfPixels); int index = 0; //for (UInt32 z = 0; z < texDepth; z++) { for (UInt32 y = 0; y < texHeight; y++) { for (UInt32 x = 0; x < texWidth; x++) { if (x < origTexWidth && y < origTexHeight) // && z < origTexDepth ) { UInt16 pixelValue = (UInt16)((colorsTmp [index] - intercept) / slope); colors [x + y * texWidth] = F2C(pixelValue); if (pixelValue > max) { max = pixelValue; } if (pixelValue < min) { min = pixelValue; } index++; } } } } else if (image.GetPixelID() == PixelIDValueEnum.sitkInt16) { bufferPtr = image.GetBufferAsInt16(); Int16[] colorsTmp = new Int16[numberOfPixels]; Marshal.Copy(bufferPtr, colorsTmp, 0, (int)numberOfPixels); int index = 0; //for (UInt32 z = 0; z < texDepth; z++) { for (UInt32 y = 0; y < texHeight; y++) { for (UInt32 x = 0; x < texWidth; x++) { if (x < origTexWidth && y < origTexHeight) // && z < origTexDepth ) { UInt16 pixelValue = (UInt16)((colorsTmp [index] - intercept) / slope); colors [x + y * texWidth] = F2C(pixelValue); if (pixelValue > max) { max = pixelValue; } if (pixelValue < min) { min = pixelValue; } index++; } } } } else if (image.GetPixelID() == PixelIDValueEnum.sitkInt32) { bufferPtr = image.GetBufferAsInt32(); Int32[] colorsTmp = new Int32[numberOfPixels]; Marshal.Copy(bufferPtr, colorsTmp, 0, (int)numberOfPixels); int index = 0; //for (UInt32 z = 0; z < texDepth; z++) { for (UInt32 y = 0; y < texHeight; y++) { for (UInt32 x = 0; x < texWidth; x++) { if (x < origTexWidth && y < origTexHeight) // && z < origTexDepth ) { UInt32 pixelValue = (UInt32)((colorsTmp [index] - intercept) / slope); colors [x + y * texWidth] = F2C(pixelValue); if (pixelValue > max) { max = (Int64)pixelValue; } if (pixelValue < min) { min = (Int64)pixelValue; } index++; } } } } else { throw(new System.Exception("Unsupported pixel format: " + image.GetPixelID())); } // If the DICOM header did not contain info about the minimum/maximum values and no one // has manually set them yet, set the min/max values found for this slice: if (!seriesInfo.foundMinMaxPixelValues) { seriesInfo.setMinMaxPixelValues((int)min, (int)max); } // Make the loaded image accessable from elsewhere: this.image = image; }
static void Main(string[] args) { if (args.Length < 9) { Console.WriteLine("Missing Parameters "); Console.WriteLine("Usage: " + System.AppDomain.CurrentDomain.FriendlyName + " inputImage outputImage seedX seedY " + " Sigma SigmoidAlpha SigmoidBeta TimeThreshold"); return; } string inputFilename = args[0]; string outputFilename = args[1]; uint[] seedPosition = { Convert.ToUInt32(args[2]), Convert.ToUInt32(args[3]), 0 }; double sigma = double.Parse(args[4], CultureInfo.InvariantCulture); double alpha = double.Parse(args[5], CultureInfo.InvariantCulture);; double beta = double.Parse(args[6], CultureInfo.InvariantCulture); double timeThreshold = double.Parse(args[7], CultureInfo.InvariantCulture); double stoppingTime = double.Parse(args[8], CultureInfo.InvariantCulture); // Read input image SitkImage inputImage = SimpleITK.ReadImage(inputFilename, PixelIDValueEnum.sitkFloat32); // The input image will be processed with a few iterations of // feature-preserving diffusion. We create a filter and set the // appropriate parameters. CurvatureAnisotropicDiffusionImageFilter smoothing = new CurvatureAnisotropicDiffusionImageFilter(); smoothing.SetTimeStep(0.125); smoothing.SetNumberOfIterations(5); smoothing.SetConductanceParameter(9.0); SitkImage smoothingOutput = smoothing.Execute(inputImage); SitkImage gradientMagnitudeOutput = SimpleITK.GradientMagnitudeRecursiveGaussian(smoothingOutput, sigma); SitkImage sigmoidOutput = SimpleITK.Sigmoid(gradientMagnitudeOutput, alpha, beta, 1.0, 0.0); FastMarchingImageFilter fastMarching = new FastMarchingImageFilter(); //VectorUIntList trialPoints; Add trialPoints into list if using multiple seeds. Here we only use one seedpoint VectorUInt32 trialPoint = new VectorUInt32(3); trialPoint.Add(seedPosition[0]); trialPoint.Add(seedPosition[1]); trialPoint.Add(seedPosition[2]); fastMarching.AddTrialPoint(trialPoint); // Since the front representing the contour will propagate continuously // over time, it is desirable to stop the process once a certain time has // been reached. This allows us to save computation time under the // assumption that the region of interest has already been computed. The // value for stopping the process is defined with the method // SetStoppingValue(). In principle, the stopping value should be a // little bit higher than the threshold value. fastMarching.SetStoppingValue(stoppingTime); SitkImage fastmarchingOutput = fastMarching.Execute(sigmoidOutput); BinaryThresholdImageFilter thresholder = new BinaryThresholdImageFilter(); thresholder.SetLowerThreshold(0.0); thresholder.SetUpperThreshold(timeThreshold); thresholder.SetOutsideValue(0); thresholder.SetInsideValue(255); SitkImage result = thresholder.Execute(fastmarchingOutput); SimpleITK.WriteImage(result, outputFilename); }
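The comment above mentions that multiple seeds could be added as trial points; below is a hedged sketch of how extra seedX/seedY pairs appended to the command line might be wired in. The argument layout beyond index 8 is an assumption, not part of the original example.

// Hypothetical extension: treat any extra argument pairs as additional 2D seed points.
for (int i = 9; i + 1 < args.Length; i += 2) {
    VectorUInt32 extraSeed = new VectorUInt32(new uint[] {
        Convert.ToUInt32(args[i]), Convert.ToUInt32(args[i + 1]), 0 });
    fastMarching.AddTrialPoint(extraSeed);
}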
static void Main(string[] args) { try { if (args.Length < 2) { Console.WriteLine("Usage: N4BiasFieldCorrection inputImage outputImage" + " [shrinkFactor] [maskImage] [numberOfIterations]" + " [numberOfFittingLevels]\n"); return; } // Read input image Image inputImage = sitk.ReadImage(args[0], PixelIDValueEnum.sitkFloat32); Image image = inputImage; Image maskImage; if (args.Length > 3) { maskImage = sitk.ReadImage(args[3], PixelIDValueEnum.sitkUInt8); } else { maskImage = sitk.OtsuThreshold(image, 0, 1, 200); } if (args.Length > 2) { uint s = UInt32.Parse(args[2]); uint[] s_array = new uint[image.GetDimension()]; for (uint i = 0; i < image.GetDimension(); i++) { s_array[i] = s; } VectorUInt32 shrink = new VectorUInt32(s_array); image = sitk.Shrink(inputImage, shrink); maskImage = sitk.Shrink(maskImage, shrink); } N4BiasFieldCorrectionImageFilter corrector = new N4BiasFieldCorrectionImageFilter(); uint numFittingLevels = 4; if (args.Length > 5) { numFittingLevels = UInt32.Parse(args[5]); } if (args.Length > 4) { uint it = UInt32.Parse(args[4]); uint[] it_array = new uint[numFittingLevels]; for (uint i = 0; i < numFittingLevels; i++) { it_array[i] = it; } VectorUInt32 iterations = new VectorUInt32(it_array); corrector.SetMaximumNumberOfIterations(iterations); } Image corrected_image = corrector.Execute(image, maskImage); Image log_bias_field = corrector.GetLogBiasFieldAsImage(inputImage); Image bias_field = sitk.Divide(inputImage, sitk.Exp(log_bias_field)); sitk.WriteImage(corrected_image, args[1]); if (Environment.GetEnvironmentVariable("SITK_NOSHOW") == null) { SimpleITK.Show(corrected_image, "N4 Corrected"); } } catch (Exception ex) { Console.WriteLine(ex); } }
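A note on the Divide/Exp step above: since N4 itself runs on the (optionally shrunk) image, dividing the original input by the exponential of the log bias field is what recovers a corrected image at full resolution; the bias field itself is just the exponential of the log field. A hedged sketch with assumed variable names, reusing inputImage and log_bias_field from the snippet above:

// Assumed names for illustration only.
Image biasField = sitk.Exp(log_bias_field);                          // multiplicative bias field
Image correctedFullResolution = sitk.Divide(inputImage, biasField);  // corrected image at full resolution
// sitk.WriteImage(correctedFullResolution, "corrected_full_res.nii.gz"); // hypothetical output path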
public static string getListOfPackageImage(int index) { Class.manageImg managerView = HttpContext.Current.Session["managerView"] as Class.manageImg; managerView.currentPageIndex = index; HttpContext.Current.Session["managerView"] = managerView; ViewImage_ that = HttpContext.Current.Session["viewImage"] as ViewImage_; that.updateReference(index); // set default values managerView.allImages[index][0].getUnlabelled = new int[] { -1, -1 }; managerView.allImages[index][0].getPercentage = "-"; if (managerView.tc.ActiveLearning) // when active learning is enabled, check if threshold is reached { if (DataAccess.DataAccessTestCase.checkThreshold(managerView)) { return("alert('Active learning procedure has been executed, page index will be reset'); setTimeout(function(){ Reset=true; }, 3000);"); } } if (index >= 0 && index <= managerView.iNGroups) { managerView.sCurrentImageFolder = new List <String>(managerView.allImages[index].Count); for (var i = 0; i < managerView.allImages[index].Count; i++) { managerView.sCurrentImageFolder.Add(null); managerView.sCurrentImageFolder[i] = managerView.tc.dbPath + "\\" + managerView.allImages[index][i].Path.Replace('/', '\\'); //- Martin } // get the labelscales for (int i = 0; i < managerView.allImages[index].Count; i++) { if (managerView.tc.DiskreteScale) { managerView.allImages[index][i].LableDiscrete = DataAccessTestCase.GetDiscreteLabel(managerView.IDUser, managerView.allImages[index][i].IdGroupImage);//pcw } else { managerView.allImages[index][i].LableContinuous = DataAccessTestCase.GetLableContinuous(managerView.allImages[index][i].IdGroupImage, managerView.IDUser, managerView.allImages[index][i].TypeScaleContinuous);//pcw } } List <String> sPath = managerView.sCurrentImageFolder; if (sPath == null) { throw new ArgumentException("path variable in manageImg is not specified!"); } else { // get paths and dimension of every individual image for (int i = 0; i < managerView.allImages[index].Count; i++) { managerView.allImages[index][i].imagePaths = (System.IO.Directory.EnumerateFiles(sPath[i], "*.*", SearchOption.AllDirectories).Where(s => Constant.lsExtensions.Any(e => s.EndsWith(e)))).ToArray(); Array.Sort(managerView.allImages[index][i].imagePaths); int[] iSize = new int[] { 0, 0, 0 }; string pfad = managerView.allImages[index][i].imagePaths[0]; itk.simple.Image itkImage = SimpleITK.ReadImage(pfad); // bei DICOM werden nur die Dimensionen des ersten Bildes ausgelesen, diese sind bei den anderen Schichten identisch VectorUInt32 UInt32Size = itkImage.GetSize(); iSize[0] = unchecked ((int)UInt32Size[0]); iSize[1] = unchecked ((int)UInt32Size[1]); if (managerView.allImages[index][i].imagePaths.Length > 1)// for DICOM files { iSize[2] = managerView.allImages[index][i].imagePaths.Length; } else if (managerView.allImages[index][i].imagePaths.Length == 1)// { iSize[2] = unchecked ((int)UInt32Size[2]); iSize[2]--; } managerView.allImages[index][i].imageDimensions = iSize; } } // get index of the next/previous page with not completely labelled images try { managerView.allImages[index][0].getNumOfPages = managerView.allImages.Count(); managerView.allImages[index][0].getUnlabelled = getUnlabelled(index); managerView.allImages[index][0].getPercentage = getCurrPercentage(); }catch (ArgumentOutOfRangeException ex) { return("alert('An error occurred while trying to load the image package, please contact the administrator');"); } //Obfuscate image paths for (int c = 0; c < managerView.allImages[index].Count; c++) { for (int z = 0; z < 
managerView.allImages[index][c].imagePaths.Length; z++) { managerView.allImages[index][c].imagePaths[z] = obfuscate(managerView.allImages[index][c].imagePaths[z]); } } // put the array in the JSON format var json = new JavaScriptSerializer().Serialize(managerView.allImages[index]); return(json); //return managerView.allImages[index]; } else { return(null); } }
/*! Constructor, fills most of the attributes of the DICOMSeries class. * \note This does some heavy file/directory parsing to determine the files which are part of * this series and their order. This is why the DICOMSeries should be constructed in a * background thread and then passed to the main thread. */ public DICOMSeries(string directory, string seriesUID) { // Get the file names for the series: filenames = ImageSeriesReader.GetGDCMSeriesFileNames(directory, seriesUID); if (filenames.Count <= 0) { throw(new System.Exception("No files found for series " + seriesUID + ".")); } this.seriesUID = seriesUID; // Load the first slice in volume to get meta information: firstSlice = SimpleITK.ReadImage(filenames[0]); VectorDouble o1 = firstSlice.GetOrigin(); if (o1.Count < 3) { throw(new System.Exception("Invalid origins found in first image.")); } origin = new Vector3((float)o1 [0], (float)o1 [1], (float)o1 [2]); numberOfSlices = filenames.Count; // Offset between two adjacent slices. If only one slice is present, // this defaults to zero. Vector3 sliceOffset = Vector3.zero; // If we have more than one slice, also load the last slice to be able to determine the slice spacing: if (filenames.Count > 1) { lastSlice = SimpleITK.ReadImage(filenames[filenames.Count - 1]); // Get the origins of the two images: VectorDouble o2 = lastSlice.GetOrigin(); if (o2.Count < 3) { throw(new System.Exception("Invalid origins found in last image.")); } Vector3 lastOrigin = new Vector3((float)o2 [0], (float)o2 [1], (float)o2 [2]); // Calculate offset between two adjacent slices (assuming all neighbours are the same distance apart): // Note: I expect sliceOffset.x and sliceOffset.y to be zero most of the time. // Using a Vector just for completeness. sliceOffset = (lastOrigin - origin) / (filenames.Count - 1); } // Load the direction cosines: // ITK stores the direction cosines in a matrix with row-major-ordering. The weird indexing is because // we need the first and second column (0,3,6 for X and 1,4,7 for Y) VectorDouble direction = firstSlice.GetDirection(); if (direction.Count < 6) { throw(new System.Exception("Invalid direction cosines found in images.")); } directionCosineX = new Vector3((float)direction [0], (float)direction [3], (float)direction [6]); directionCosineY = new Vector3((float)direction [1], (float)direction [4], (float)direction [7]); sliceNormal = Vector3.Cross(directionCosineX, directionCosineY); // Calculate the which direction the normal is facing to determine the orienation (Transverse, // Coronal or Saggital). float absX = Mathf.Abs(sliceNormal.x); float absY = Mathf.Abs(sliceNormal.y); float absZ = Mathf.Abs(sliceNormal.z); if (absX > absY && absX > absZ) { sliceOrientation = SliceOrientation.Saggital; } else if (absY > absX && absY > absZ) { sliceOrientation = SliceOrientation.Coronal; } else if (absZ > absX && absZ > absY) { sliceOrientation = SliceOrientation.Transverse; } else { sliceOrientation = SliceOrientation.Unknown; } // Load the direction cosines: // NOTE: It seems that the the first value is the spacing between rows (i.e. y direction), // the second value is the spacing between columns (i.e. x direction). // I was not able to verify this so far, since all test dicoms we had have the same spacing in // x and y direction... 
VectorDouble spacing = firstSlice.GetSpacing();
if (spacing.Count < 2) {
    throw(new System.Exception("Invalid pixel spacing found in images."));
}
pixelSpacing = new Vector2((float)spacing [1], (float)spacing [0]);

// Set up the transformation matrix:
Matrix4x4 transformMatrix = new Matrix4x4();
// Column 1:
transformMatrix [0, 0] = directionCosineX.x * pixelSpacing.x;
transformMatrix [1, 0] = directionCosineX.y * pixelSpacing.x;
transformMatrix [2, 0] = directionCosineX.z * pixelSpacing.x;
transformMatrix [3, 0] = 0f;
// Column 2:
transformMatrix [0, 1] = directionCosineY.x * pixelSpacing.y;
transformMatrix [1, 1] = directionCosineY.y * pixelSpacing.y;
transformMatrix [2, 1] = directionCosineY.z * pixelSpacing.y;
transformMatrix [3, 1] = 0f;
// Column 3:
transformMatrix [0, 2] = sliceOffset.x;
transformMatrix [1, 2] = sliceOffset.y;
transformMatrix [2, 2] = sliceOffset.z;
transformMatrix [3, 2] = 0f;
// Column 4:
transformMatrix [0, 3] = origin.x;
transformMatrix [1, 3] = origin.y;
transformMatrix [2, 3] = origin.z;
transformMatrix [3, 3] = 1f;

// Convert to the left-handed coordinate system which Unity uses:
Matrix4x4 rightHandToLeftHand = new Matrix4x4();
rightHandToLeftHand [0, 0] = 1f;
rightHandToLeftHand [1, 1] = 1f;
rightHandToLeftHand [2, 2] = -1f;
rightHandToLeftHand [3, 3] = 1f;
pixelToPatient = rightHandToLeftHand * transformMatrix;

// Inverse transformation:
patientToPixel = pixelToPatient.inverse;

// Read the minimum and maximum values which are stored in this image:
minPixelValue = UInt16.MinValue;
maxPixelValue = UInt16.MaxValue;
foundMinMaxPixelValues = false;
try {
    minPixelValue = Int32.Parse(firstSlice.GetMetaData("0028|0106"));
    maxPixelValue = Int32.Parse(firstSlice.GetMetaData("0028|0107"));
    foundMinMaxPixelValues = true;
} catch { }
}
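Hedged usage sketch for the matrices set up above: hypothetical helpers (not part of the original class) that map a pixel index on a given slice into the patient coordinate system and back, using Unity's Matrix4x4.MultiplyPoint.

// Hypothetical helpers for illustration only.
public Vector3 PixelToPatientPosition(float pixelX, float pixelY, float sliceIndex) {
    // Homogeneous point transform; MultiplyPoint applies the full 4x4 including translation.
    return pixelToPatient.MultiplyPoint(new Vector3(pixelX, pixelY, sliceIndex));
}

public Vector3 PatientToPixelPosition(Vector3 patientPos) {
    return patientToPixel.MultiplyPoint(patientPos);
}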
static void Main(string[] args) { if (args.Length < 6) { Console.WriteLine("Missing Parameters "); Console.WriteLine("Usage: " + System.AppDomain.CurrentDomain.FriendlyName + " inputImage outputImage" + " lowerThreshold upperThreshold seedX seedY [seed2X seed2Y ... ]"); return; } string inputFilename = args[0]; string outputFilename = args[1]; double lowerThreshold = double.Parse(args[2], CultureInfo.InvariantCulture); double upperThreshold = double.Parse(args[3], CultureInfo.InvariantCulture); // // Read the image // SitkImage inputImage = SimpleITK.ReadImage(inputFilename, PixelIDValueEnum.sitkFloat32); // // Blur using CurvatureFlowImageFilter // CurvatureFlowImageFilter blurFilter = new sitk.CurvatureFlowImageFilter(); blurFilter.SetNumberOfIterations(5); blurFilter.SetTimeStep(0.125); inputImage = blurFilter.Execute(inputImage); // // Set up ConnectedThresholdImageFilter for segmentation // ConnectedThresholdImageFilter segmentationFilter = new sitk.ConnectedThresholdImageFilter(); segmentationFilter.SetLower(lowerThreshold); segmentationFilter.SetUpper(upperThreshold); segmentationFilter.SetReplaceValue(255); for (uint i = 4; i + 1 < args.Length; i += 2) { VectorUInt32 seed = new VectorUInt32(new uint[] { Convert.ToUInt32(args[i]), Convert.ToUInt32(args[i + 1]), 0 }); segmentationFilter.AddSeed(seed); Console.WriteLine("Adding a seed at: "); for (int j = 0; j + 1 < seed.Count; j++) { Console.WriteLine(seed[j] + " "); } } SitkImage outImage = segmentationFilter.Execute(inputImage); // // Write out the resulting file // SimpleITK.WriteImage(outImage, outputFilename); return; }
static void Main(string[] args) {
    if (args.Length < 2) {
        Console.WriteLine("Usage: inputImage outputImage");
        return;
    }
    string inputFilename = args[0];
    string outputFilename = args[1];

    // Read input image
    SitkImage input = SimpleITK.ReadImage(inputFilename);

    // Cast so we know the pixel type
    input = SimpleITK.Cast(input, PixelId.sitkFloat32);

    // calculate the number of pixels
    VectorUInt32 size = input.GetSize();
    int len = 1;
    for (int dim = 0; dim < input.GetDimension(); dim++) {
        len *= (int)size[dim];
    }

    IntPtr buffer = input.GetBufferAsFloat();

    // There are two ways to access the buffer:

    // (1) Access the underlying buffer as a pointer in an "unsafe" block
    // (note that in C# "unsafe" simply means that the compiler can not
    // perform full type checking), and requires the -unsafe compiler flag
    // unsafe {
    //     float* bufferPtr = (float*)buffer.ToPointer();
    //     // Now the byte pointer can be accessed as per Brad's email
    //     // (of course this example is only a 2d single channel image):
    //     // This is a 1-D array but can be accessed as 3-D. Given an
    //     // image of size [xS,yS,zS], you can access the image at
    //     // index [x,y,z] as you wish by image[x+y*xS+z*xS*yS],
    //     // so x is the fastest axis and z is the slowest.
    //     for (int j = 0; j < size[1]; j++) {
    //         for (int i = 0; i < size[0]; i++) {
    //             float pixel = bufferPtr[i + j*size[0]];
    //             // Do something with pixel here
    //         }
    //     }
    // }

    // (2) Copy the buffer to a "safe" array (i.e. a fully typed array)
    // (note that this means memory is duplicated)
    float[] bufferAsArray = new float[len]; // Allocates new memory the size of input
    Marshal.Copy(buffer, bufferAsArray, 0, len);
    double total = 0.0;
    for (int j = 0; j < size[1]; j++) {
        for (int i = 0; i < size[0]; i++) {
            // Row stride is the image width, size[0].
            float pixel = bufferAsArray[i + j * size[0]];
            total += pixel;
        }
    }
    Console.WriteLine("Pixel value total: {0}", total);

    // Set buffer of new SimpleITK Image from managed array.
    // bufferAsArray could also have come from a bmp, png, etc...
    uint width = input.GetWidth();
    uint height = input.GetHeight();
    SitkImage outImage = new SitkImage(width, height, PixelId.sitkFloat32);
    IntPtr outImageBuffer = outImage.GetBufferAsFloat();
    Marshal.Copy(bufferAsArray, 0, outImageBuffer, (int)(size[0] * size[1]));

    //
    // Write out the resulting file
    //
    outImage = SimpleITK.RescaleIntensity(outImage, 0, 255);
    outImage = SimpleITK.Cast(outImage, PixelId.sitkUInt8);
    SimpleITK.WriteImage(outImage, outputFilename);
}
public void ProcessRequest(HttpContext context) { // image ID, brightness and contrast value Int32 iID, iBVal, iRot, iNoFiles; Double dCVal; String sPath; // response type context.Response.ContentType = "image/png"; // get and convert image parameters if (context.Request.QueryString["NqC3ke"] != null) // ID { iID = Convert.ToInt32(context.Request.QueryString["NqC3ke"]); } else { throw new ArgumentException("No image id specified"); } if (context.Request.QueryString["tXt9X3"] != null) // brightness { iBVal = Convert.ToInt32(context.Request.QueryString["tXt9X3"]); } else { throw new ArgumentException("No image brightness specified"); } if (context.Request.QueryString["XwjRGm"] != null) // contrast { dCVal = Convert.ToDouble(context.Request.QueryString["XwjRGm"]); } else { throw new ArgumentException("No image contrast specified"); } if (context.Request.QueryString["WkYTCe"] != null) // path { string obscuredPath = Convert.ToString(context.Request.QueryString["WkYTCe"]); string unobscuredPath = ""; // old obfuscation //for (int z = 0; z < obscuredPath.Length; z++) //{ // int add = Constant.ceasarOdd; // if (z % 2 == 0) // { // add = Constant.ceasarEven; // } // char c = (char)(obscuredPath[z] - add); // unobscuredPath += c.ToString(); //} // new obfuscation string allowedChars = Constant.allowedCharacters; for (int p = 0; p < obscuredPath.Length; p++) { for (int c = 0; c < allowedChars.Length; c++) { if (obscuredPath[p] == allowedChars[c]) { // character is allowed int add = Constant.ceasarOdd; if (p % 2 == 0) { add = Constant.ceasarEven; } // remove unneccessary loops add = add % (allowedChars.Length - 1); int shift = c - add; if (shift > (allowedChars.Length - 1)) { shift = shift - (allowedChars.Length - 1); } else if ((shift) < 0) { shift = shift + (allowedChars.Length - 1); } char character = allowedChars[shift]; unobscuredPath += character.ToString(); break; } } } //Debug.Print("deobfuscation\t\t" + obscuredPath + " -> " + unobscuredPath); sPath = unobscuredPath; } else { sPath = ""; } if (context.Request.QueryString["Hsfke2"] != null) // rotate { iRot = Convert.ToInt32(context.Request.QueryString["Hsfke2"]); } else { iRot = 0; } if (context.Request.QueryString["yAR8st"] != null) // length { iNoFiles = Convert.ToInt32(context.Request.QueryString["yAR8st"]); } else { iNoFiles = 1; } // load image in byte array "bmVal" //var bOld = false; byte[] bmVal; int iWidth; int iHeight; int iDepth; itk.simple.Image itkImage; if (!File.Exists(sPath)) { // file was not found } try { itkImage = SimpleITK.ReadImage(sPath, PixelIDValueEnum.sitkFloat32); } catch (System.StackOverflowException) { itk.simple.ImageFileReader reader = new itk.simple.ImageFileReader(); reader.SetFileName(sPath); itkImage = reader.Execute(); } // get image direction var iDir = itkImage.GetDirection(); // get spacing VectorDouble dSpacing = itkImage.GetSpacing(); // get size and number of dimension VectorUInt32 iSize = itkImage.GetSize(); int len = 1; for (int iDim = 0; iDim < itkImage.GetDimension(); iDim++) { len *= (int)iSize[iDim]; } iWidth = unchecked ((int)iSize[0]); iHeight = unchecked ((int)iSize[1]); iDepth = unchecked ((int)iSize[2]); // convert mm->dpi float fResolutionX = System.Convert.ToSingle(System.Convert.ToDouble(iWidth) * 25.4 / dSpacing[0]); float fResolutionY = System.Convert.ToSingle(System.Convert.ToDouble(iHeight) * 25.4 / dSpacing[1]); // 2D image size int iLength = iWidth * iHeight; // copy buffer to new array IntPtr ipBuffer = itkImage.GetBufferAsFloat(); float[] fArray = new float[iLength]; // no 
negative indices if (iID < 0) { iID = 0; } // multiple DICOM files int iOffset = 0; if (iNoFiles > 1) { if (iID >= (iNoFiles - 1)) { iID = iNoFiles - 1; } iOffset = 0; } else if (iNoFiles == 1) { // one DICOM file if (iDepth == 1) { iID = 1; iOffset = 0; } // one 3D MHD file else if (iDepth > 1) { if (iID > (iDepth - 1)) { iID = iDepth - 1; } iOffset = (iID - 1) * iLength; } } // new pointer to data (due to offset) Marshal.Copy(new IntPtr(ipBuffer.ToInt64() + sizeof(float) * iOffset), fArray, 0, iLength); // convert floating point to 8 bit integer values - range:0 - 255 float fMax = fArray.Max(); float fMin = fArray.Min(); bmVal = new byte[iWidth * iHeight]; for (int iI = 0; iI < iLength; iI++) { var tmp = (float)(((fArray[iI] - fMin) / (fMax - fMin) * 255) + iBVal) * (float)dCVal; if (tmp > 255) { tmp = 255; } if (tmp < 0) { tmp = 0; } bmVal[iI] = (byte)tmp; } Bitmap bmOutput = fCreateBitmap(bmVal, new int[] { iWidth, iHeight }, new float[] { fResolutionX, fResolutionY }); // rotate image and flip in X direction due to different image orientation convention // corrects *.mhd orientation List <String> lsExtensions = new List <String> { ".mhd", ".MHD" }; foreach (var ext in lsExtensions) { if (sPath.EndsWith(ext)) { // *.mhd files have a different orientation (compared to *.ima) bmOutput.RotateFlip(RotateFlipType.Rotate90FlipNone); } } switch (iRot) { case 0: bmOutput.RotateFlip(RotateFlipType.RotateNoneFlipX); break; case 90: bmOutput.RotateFlip(RotateFlipType.Rotate90FlipX); break; case 180: bmOutput.RotateFlip(RotateFlipType.Rotate180FlipX); break; case 270: bmOutput.RotateFlip(RotateFlipType.Rotate270FlipX); break; default: break; } // save new image to response stream bmOutput.Save(context.Response.OutputStream, ImageFormat.Png); bmOutput.Dispose(); }
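The obfuscate() helper applied to the image paths elsewhere in this collection is not shown; below is a hedged sketch of a forward obfuscation written as the inverse of the deobfuscation loop above, reusing Constant.allowedCharacters and Constant.ceasarOdd / ceasarEven. The wrapping arithmetic simply mirrors the loop above and is an assumption, not the project's actual implementation.

// Hypothetical forward obfuscation: shift each allowed character by an alternating
// Caesar offset, wrapping the same way the deobfuscation loop above unwraps.
private static string obfuscate(string path) {
    string allowedChars = Constant.allowedCharacters;
    string result = "";
    for (int p = 0; p < path.Length; p++) {
        int c = allowedChars.IndexOf(path[p]);
        if (c < 0) { result += path[p]; continue; } // characters outside the alphabet pass through unchanged
        int add = (p % 2 == 0) ? Constant.ceasarEven : Constant.ceasarOdd;
        add = add % (allowedChars.Length - 1);
        int shift = c + add;
        if (shift > (allowedChars.Length - 1)) {
            shift = shift - (allowedChars.Length - 1);
        }
        result += allowedChars[shift];
    }
    return result;
}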
public DICOM2D(DICOMSeries seriesInfo, int slice) : base(seriesInfo) { dimensions = 2; slice = Mathf.Clamp(slice, 0, seriesInfo.filenames.Count - 1); this.slice = slice; VectorString fileNames = seriesInfo.filenames; // Read the DICOM image: image = SimpleITK.ReadImage(fileNames[slice]); loadImageData(image); VectorDouble o1 = image.GetOrigin(); if (o1.Count < 3) { throw(new System.Exception("Invalid origins found in first image.")); } origin = new Vector3((float)o1 [0], (float)o1 [1], (float)o1 [2]); // Load the direction cosines: // ITK stores the direction cosines in a matrix with row-major-ordering. The weird indexing is because // we need the first and second column (0,3,6 for X and 1,4,7 for Y) VectorDouble direction = image.GetDirection(); if (direction.Count < 6) { throw(new System.Exception("Invalid direction cosines found in images.")); } directionCosineX = new Vector3((float)direction [0], (float)direction [3], (float)direction [6]); directionCosineY = new Vector3((float)direction [1], (float)direction [4], (float)direction [7]); sliceNormal = Vector3.Cross(directionCosineX, directionCosineY); // Calculate which direction the normal is facing to determine the orienation (Transverse, // Coronal or Saggital). float absX = Mathf.Abs(sliceNormal.x); float absY = Mathf.Abs(sliceNormal.y); float absZ = Mathf.Abs(sliceNormal.z); if (absX > absY && absX > absZ) { sliceOrientation = SliceOrientation.Saggital; } else if (absY > absX && absY > absZ) { sliceOrientation = SliceOrientation.Coronal; } else if (absZ > absX && absZ > absY) { sliceOrientation = SliceOrientation.Transverse; } else { sliceOrientation = SliceOrientation.Unknown; } // Load the pixel spacing: // NOTE: It seems that the the first value is the spacing between rows (i.e. y direction), // the second value is the spacing between columns (i.e. x direction). // I was not able to verify this so far, since all test dicoms we had have the same spacing in // x and y direction... VectorDouble spacing = image.GetSpacing(); if (spacing.Count < 2) { throw(new System.Exception("Invalid pixel spacing found in images.")); } pixelSpacing = new Vector2((float)spacing [1], (float)spacing [0]); // Generate the transformation matrices which can later be used to translate pixels to // 3D positions and vice versa. setupTransformationMatrices(); }
static void Main(string[] args) { if (args.Length < 8) { Console.WriteLine("Missing Parameters "); Console.WriteLine("Usage: " + System.AppDomain.CurrentDomain.FriendlyName + "inputImage initialModel outputImage cannyThreshold " + "cannyVariance advectionWeight initialModelIsovalue maximumIterations "); return; } string inputFilename = args[0]; string initialModelFilename = args[1]; string outputFilename = args[2]; double cannyThreshold = double.Parse(args[3], CultureInfo.InvariantCulture); double cannyVariance = double.Parse(args[4], CultureInfo.InvariantCulture); double advectionWeight = double.Parse(args[5], CultureInfo.InvariantCulture); double intialModelIsovalue = double.Parse(args[6], CultureInfo.InvariantCulture); uint maxIterations = uint.Parse(args[7], CultureInfo.InvariantCulture); // Read input image SitkImage inputImage = SimpleITK.ReadImage(inputFilename, PixelId.sitkFloat32); SitkImage initialModel = SimpleITK.ReadImage(initialModelFilename, PixelId.sitkFloat32); // The input image will be processed with a few iterations of // feature-preserving diffusion. We create a filter and set the // appropriate parameters. GradientAnisotropicDiffusionImageFilter diffusion = new GradientAnisotropicDiffusionImageFilter(); diffusion.SetConductanceParameter(1.0); diffusion.SetTimeStep(0.125); diffusion.SetNumberOfIterations(5); SitkImage diffusedImage = diffusion.Execute(inputImage); // As with the other ITK level set segmentation filters, the terms of the // CannySegmentationLevelSetImageFilter level set equation can be // weighted by scalars. For this application we will modify the relative // weight of the advection term. The propagation and curvature term weights // are set to their defaults of 0 and 1, respectively. CannySegmentationLevelSetImageFilter cannySegmentation = new CannySegmentationLevelSetImageFilter(); cannySegmentation.SetAdvectionScaling(advectionWeight); cannySegmentation.SetCurvatureScaling(1.0); cannySegmentation.SetPropagationScaling(0.0); // The maximum number of iterations is specified from the command line. // It may not be desirable in some applications to run the filter to // convergence. Only a few iterations may be required. cannySegmentation.SetMaximumRMSError(0.01); cannySegmentation.SetNumberOfIterations(maxIterations); // There are two important parameters in the // CannySegmentationLevelSetImageFilter to control the behavior of the // Canny edge detection. The variance parameter controls the // amount of Gaussian smoothing on the input image. The threshold // parameter indicates the lowest allowed value in the output image. // Thresholding is used to suppress Canny edges whose gradient magnitudes // fall below a certain value. cannySegmentation.SetThreshold(cannyThreshold); cannySegmentation.SetVariance(cannyVariance); // Finally, it is very important to specify the isovalue of the surface in // the initial model input image. In a binary image, for example, the // isosurface is found midway between the foreground and background values. 
cannySegmentation.SetIsoSurfaceValue(intialModelIsovalue);
SitkImage output = cannySegmentation.Execute(initialModel, diffusedImage);

BinaryThresholdImageFilter thresholder = new BinaryThresholdImageFilter();
thresholder.SetUpperThreshold(10.0);
thresholder.SetLowerThreshold(0.0);
thresholder.SetOutsideValue(0);
thresholder.SetInsideValue(255);
output = thresholder.Execute(output);

output = SimpleITK.Cast(output, PixelIDValueEnum.sitkUInt8);
SimpleITK.WriteImage(output, outputFilename);

// Print out some useful information
Console.WriteLine("");
Console.WriteLine("Max. no. iterations: {0}", cannySegmentation.GetNumberOfIterations());
Console.WriteLine("Max. RMS error: {0}", cannySegmentation.GetMaximumRMSError());
Console.WriteLine("");
Console.WriteLine("No. elapsed iterations: {0}", cannySegmentation.GetElapsedIterations());
Console.WriteLine("RMS change: {0}", cannySegmentation.GetRMSChange());
}
/*! Constructor, fills most of the attributes of the DICOMSeries class. * \note This does some heavy file/directory parsing to determine the files which are part of * this series and their order. This is why the DICOMSeries should be constructed in a * background thread and then passed to the main thread. */ public DICOMSeries(string directory, string seriesUID) { Debug.Log("Loading Meta Data for Series: " + seriesUID); // Get the file names for the series: filenames = ImageSeriesReader.GetGDCMSeriesFileNames(directory, seriesUID); if (filenames.Count <= 0) { throw(new System.Exception("No files found for series " + seriesUID + ".")); } this.seriesUID = seriesUID; // Load the first slice in volume to get meta information: firstSlice = SimpleITK.ReadImage(filenames[0]); lastSlice = SimpleITK.ReadImage(filenames[Math.Max(filenames.Count - 1, 0)]); numberOfSlices = filenames.Count; // Load the direction cosines: // ITK stores the direction cosines in a matrix with row-major-ordering. The weird indexing is because // we need the first and second column (0,3,6 for X and 1,4,7 for Y) VectorDouble direction = firstSlice.GetDirection(); if (direction.Count < 6) { throw(new System.Exception("Invalid direction cosines found in images.")); } directionCosineX = new Vector3((float)direction [0], (float)direction [3], (float)direction [6]); directionCosineY = new Vector3((float)direction [1], (float)direction [4], (float)direction [7]); sliceNormal = Vector3.Cross(directionCosineX, directionCosineY); if (lastSlice != null) { // Get the origins of the two images: VectorDouble o1 = firstSlice.GetOrigin(); if (o1.Count < 3) { throw(new System.Exception("Invalid origins found in first image.")); } Vector3 origin = new Vector3((float)o1 [0], (float)o1 [1], (float)o1 [2]); VectorDouble o2 = lastSlice.GetOrigin(); if (o2.Count < 3) { throw(new System.Exception("Invalid origins found in last image.")); } Vector3 lastOrigin = new Vector3((float)o2 [0], (float)o2 [1], (float)o2 [2]); // Calculate offset between two adjacent slices (assuming all neighbours are the same distance apart): // Note: I expect sliceOffset.x and sliceOffset.y to be zero most of the time. // Using a Vector just for completeness. sliceOffset = (lastOrigin - origin) / (numberOfSlices - 1); } if (lastSlice != null && numberOfSlices > 1) { // Load the direction cosines: // ITK stores the direction cosines in a matrix with row-major-ordering. The weird indexing is because // we need the first and second column (0,3,6 for X and 1,4,7 for Y) VectorDouble directionLast = lastSlice.GetDirection(); if (directionLast.Count < 6) { throw(new System.Exception("Invalid direction cosines found in images.")); } Vector3 directionCosineXLast = new Vector3((float)directionLast [0], (float)directionLast [3], (float)directionLast [6]); Vector3 directionCosineYLast = new Vector3((float)directionLast [1], (float)directionLast [4], (float)directionLast [7]); Vector3 sliceNormalLast = Vector3.Cross(directionCosineXLast, directionCosineYLast); // If the first and last slice have the same orientation, then consider this series to be a volume. // TODO: Better check? if ((sliceNormal == sliceNormalLast)) { isConsecutiveVolume = true; } else { Debug.LogWarning("First and last slice of the series do not have the same orientation. 
This will not be considered a volume.\n" + "\tNormal first slice, Normal last slice: " + sliceNormal + " " + sliceNormalLast); } } else { isConsecutiveVolume = false; } if (isConsecutiveVolume) { // Calculate which direction the normal is facing to determine the orienation (Transverse, // Coronal or Saggital). float absX = Mathf.Abs(sliceNormal.x); float absY = Mathf.Abs(sliceNormal.y); float absZ = Mathf.Abs(sliceNormal.z); if (absX > absY && absX > absZ) { sliceOrientation = SliceOrientation.Saggital; } else if (absY > absX && absY > absZ) { sliceOrientation = SliceOrientation.Coronal; } else if (absZ > absX && absZ > absY) { sliceOrientation = SliceOrientation.Transverse; } else { sliceOrientation = SliceOrientation.Unknown; } } else { sliceOrientation = SliceOrientation.Unknown; // Can't know what the orientation is if the first and last slice have different normals } // Read the minimum and maximum values which are stored in this image: minPixelValue = UInt16.MinValue; maxPixelValue = UInt16.MaxValue; foundMinMaxPixelValues = false; try { minPixelValue = UInt32.Parse(firstSlice.GetMetaData("0028|0106")); maxPixelValue = UInt32.Parse(firstSlice.GetMetaData("0028|0107")); foundMinMaxPixelValues = true; } catch { } }
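For the "TODO: Better check?" above, one hedged alternative is to compare the slice normals with an explicit angular tolerance rather than Unity's Vector3 equality operator; the one-degree threshold below is an arbitrary assumption.

// Hypothetical helper: treat two slices as having the same orientation if their
// normals differ by less than a small angle.
private static bool HaveSameOrientation(Vector3 normalFirst, Vector3 normalLast) {
    return Vector3.Angle(normalFirst, normalLast) < 1.0f;
}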