public void run()
{
    GC.Collect();

    // Fetch the instruction in the current mailbox and decode its opcode.
    int opCode = int.Parse(Format.splitMailbox(LMC.mailboxes[LMC.programCounter].ToString(), false));
    LMC.checkOp(opCode);

    Format.consolePrinter();
    updateTextBoxes();
    LMC.checkZF();
    updateForm();

    // Branch-type operations leave the program counter on their target,
    // so step back one here to cancel out the unconditional increment below.
    if (LMC.operation == "Branch"
        || LMC.operation == "Branch if Zero (True)"
        || LMC.operation == "Branch if Positive (True)"
        || LMC.operation == "Interrupt Ended")
    {
        LMC.programCounter--;
    }

    if (LMC.operation != "Halt")
    {
        LMC.programCounter++;
    }

    if (LMC.operation == "Halt")
    {
        MessageBox.Show("LMC execution has ended.");
    }
}
private void BttnInterrupt_Click(object sender, EventArgs e)
{
    LMC.interruptFlag = true;
    LMC.programCounter = LMC.interruptHandler();
    LMC.calculator = 0;

    // Step through the handler until the interrupt flag is cleared.
    while (LMC.interruptFlag)
    {
        run();
    }

    LMC.interruptFlag = true; // Turned on just to see it flick to green.
    updateForm();
    LMC.interruptFlag = false; // Get it back to red.
}
public void TestCount()
{
    // Test with no nodes added.
    Assert.AreEqual(0, this.nodeCollection.Count());

    // Test with one node added.
    var lmc = new LMC();
    this.nodeCollection.Add(lmc);
    Assert.AreEqual(1, this.nodeCollection.Count());

    // Test with two nodes added.
    var milkyWay = new MilkyWay();
    this.nodeCollection.Add(milkyWay);
    Assert.AreEqual(2, this.nodeCollection.Count());

    // Adding a follower should not increase the collection count.
    var solarSystem = new SolarSystem();
    milkyWay.Precedes(solarSystem);
    Assert.AreEqual(2, this.nodeCollection.Count());
}
public static void RunSpeedLarge()
{
    var folder = @"C:\dev\GitHub\p9-data\large\fits\meerkat_tiny\";
    var data = LMC.Load(folder);

    int gridSize = 3072;
    int subgridsize = 32;
    int kernelSize = 16;
    int max_nr_timesteps = 1024;
    double cellSize = 1.5 / 3600.0 * PI / 180.0;
    int wLayerCount = 24;

    // Find the maximum absolute w coordinate and convert it to wavelengths.
    var maxW = 0.0;
    for (int i = 0; i < data.uvw.GetLength(0); i++)
    {
        for (int j = 0; j < data.uvw.GetLength(1); j++)
        {
            maxW = Math.Max(maxW, Math.Abs(data.uvw[i, j, 2]));
        }
    }
    maxW = Partitioner.MetersToLambda(maxW, data.frequencies[data.frequencies.Length - 1]);

    // Count the unflagged visibilities.
    var visCount2 = 0;
    for (int i = 0; i < data.flags.GetLength(0); i++)
    {
        for (int j = 0; j < data.flags.GetLength(1); j++)
        {
            for (int k = 0; k < data.flags.GetLength(2); k++)
            {
                if (!data.flags[i, j, k])
                {
                    visCount2++;
                }
            }
        }
    }

    double wStep = maxW / (wLayerCount);
    data.c = new GriddingConstants(data.visibilitiesCount, gridSize, subgridsize, kernelSize, max_nr_timesteps, (float)cellSize, wLayerCount, wStep);
    data.metadata = Partitioner.CreatePartition(data.c, data.uvw, data.frequencies);

    // Unit visibilities for the PSF: 1 where the data is unflagged, 0 otherwise.
    var psfVis = new Complex[data.uvw.GetLength(0), data.uvw.GetLength(1), data.frequencies.Length];
    for (int i = 0; i < data.visibilities.GetLength(0); i++)
    {
        for (int j = 0; j < data.visibilities.GetLength(1); j++)
        {
            for (int k = 0; k < data.visibilities.GetLength(2); k++)
            {
                if (!data.flags[i, j, k])
                {
                    psfVis[i, j, k] = new Complex(1.0, 0);
                }
                else
                {
                    psfVis[i, j, k] = new Complex(0, 0);
                }
            }
        }
    }

    Console.WriteLine("gridding psf");
    var psfGrid = IDG.GridW(data.c, data.metadata, psfVis, data.uvw, data.frequencies);
    var psf = FFT.WStackIFFTFloat(psfGrid, data.c.VisibilitiesCount);
    FFT.Shift(psf);

    Directory.CreateDirectory("PSFSpeedExperimentApproxDeconv");
    FitsIO.Write(psf, "psfFull.fits");

    // Tryout with simply cutting the PSF.
    ReconstructionInfo experimentInfo = null;
    var psfCuts = new int[] { 2, 8, 16, 32, 64 };
    var outFolder = "PSFSpeedExperimentApproxDeconv";
    outFolder += @"\";
    var fileHeader = "cycle;lambda;sidelobe;maxPixel;dataPenalty;regPenalty;currentRegPenalty;converged;iterCount;ElapsedTime";
    /*
    foreach (var cut in psfCuts)
    {
        using (var writer = new StreamWriter(outFolder + cut + "Psf.txt", false))
        {
            writer.WriteLine(fileHeader);
            experimentInfo = ReconstructSimple(data, psf, outFolder, cut, 8, cut + "dirty", cut + "x", writer, 0.0, 1e-5f, false);
            File.WriteAllText(outFolder + cut + "PsfTotal.txt", experimentInfo.totalDeconv.Elapsed.ToString());
        }
    }
    */

    Directory.CreateDirectory("PSFSpeedExperimentApproxPSF");
    outFolder = "PSFSpeedExperimentApproxPSF";
    outFolder += @"\";
    foreach (var cut in psfCuts)
    {
        using (var writer = new StreamWriter(outFolder + cut + "Psf.txt", false))
        {
            writer.WriteLine(fileHeader);
            experimentInfo = ReconstructSimple(data, psf, outFolder, cut, 8, cut + "dirty", cut + "x", writer, 0.0, 1e-5f, true);
            File.WriteAllText(outFolder + cut + "PsfTotal.txt", experimentInfo.totalDeconv.Elapsed.ToString());
        }
    }
}
public static void RunApproximationMethods()
{
    var folder = @"C:\dev\GitHub\p9-data\large\fits\meerkat_tiny\";
    var data = LMC.Load(folder);
    var rootFolder = Directory.GetCurrentDirectory();

    // Find the maximum absolute w coordinate and convert it to wavelengths.
    var maxW = 0.0;
    for (int i = 0; i < data.uvw.GetLength(0); i++)
    {
        for (int j = 0; j < data.uvw.GetLength(1); j++)
        {
            maxW = Math.Max(maxW, Math.Abs(data.uvw[i, j, 2]));
        }
    }
    maxW = Partitioner.MetersToLambda(maxW, data.frequencies[data.frequencies.Length - 1]);

    // Count the unflagged visibilities.
    var visCount2 = 0;
    for (int i = 0; i < data.flags.GetLength(0); i++)
    {
        for (int j = 0; j < data.flags.GetLength(1); j++)
        {
            for (int k = 0; k < data.flags.GetLength(2); k++)
            {
                if (!data.flags[i, j, k])
                {
                    visCount2++;
                }
            }
        }
    }
    var visibilitiesCount = visCount2;

    int gridSize = 3072;
    int subgridsize = 32;
    int kernelSize = 16;
    int max_nr_timesteps = 1024;
    double cellSize = 1.5 / 3600.0 * PI / 180.0;
    int wLayerCount = 24;
    double wStep = maxW / (wLayerCount);

    data.c = new GriddingConstants(visibilitiesCount, gridSize, subgridsize, kernelSize, max_nr_timesteps, (float)cellSize, wLayerCount, wStep);
    data.metadata = Partitioner.CreatePartition(data.c, data.uvw, data.frequencies);
    data.visibilitiesCount = visibilitiesCount;

    // Unit visibilities for the PSF: 1 where the data is unflagged, 0 otherwise.
    var psfVis = new Complex[data.uvw.GetLength(0), data.uvw.GetLength(1), data.frequencies.Length];
    for (int i = 0; i < data.visibilities.GetLength(0); i++)
    {
        for (int j = 0; j < data.visibilities.GetLength(1); j++)
        {
            for (int k = 0; k < data.visibilities.GetLength(2); k++)
            {
                if (!data.flags[i, j, k])
                {
                    psfVis[i, j, k] = new Complex(1.0, 0);
                }
                else
                {
                    psfVis[i, j, k] = new Complex(0, 0);
                }
            }
        }
    }

    Console.WriteLine("gridding psf");
    var psfGrid = IDG.GridW(data.c, data.metadata, psfVis, data.uvw, data.frequencies);
    var psf = FFT.WStackIFFTFloat(psfGrid, data.c.VisibilitiesCount);
    FFT.Shift(psf);

    Directory.CreateDirectory("PSFSizeExperimentLarge");
    Directory.SetCurrentDirectory("PSFSizeExperimentLarge");
    FitsIO.Write(psf, "psfFull.fits");
    Console.WriteLine(PSF.CalcMaxLipschitz(psf));
    Console.WriteLine(visCount2);

    // Reconstruct with the full PSF and find the reference objective value.
    var fileHeader = "cycle;lambda;sidelobe;maxPixel;dataPenalty;regPenalty;currentRegPenalty;converged;iterCount;ElapsedTime";
    var objectiveCutoff = REFERENCE_L2_PENALTY + REFERENCE_ELASTIC_PENALTY;
    var recalculateFullPSF = true;
    if (recalculateFullPSF)
    {
        ReconstructionInfo referenceInfo = null;
        using (var writer = new StreamWriter("1Psf.txt", false))
        {
            writer.WriteLine(fileHeader);
            referenceInfo = ReconstructSimple(data, psf, "", 1, 12, "dirtyReference", "xReference", writer, 0.0, 1e-5f, false);
            File.WriteAllText("1PsfTotal.txt", referenceInfo.totalDeconv.Elapsed.ToString());
        }
        objectiveCutoff = referenceInfo.lastDataPenalty + referenceInfo.lastRegPenalty;
    }

    // Tryout with simply cutting the PSF.
    ReconstructionInfo experimentInfo = null;
    var psfCuts = new int[] { 16, 32 };
    var outFolder = "cutPsf";
    Directory.CreateDirectory(outFolder);
    outFolder += @"\";
    foreach (var cut in psfCuts)
    {
        using (var writer = new StreamWriter(outFolder + cut + "Psf.txt", false))
        {
            writer.WriteLine(fileHeader);
            experimentInfo = ReconstructSimple(data, psf, outFolder, cut, 12, cut + "dirty", cut + "x", writer, 0.0, 1e-5f, false);
            File.WriteAllText(outFolder + cut + "PsfTotal.txt", experimentInfo.totalDeconv.Elapsed.ToString());
        }
    }

    // Tryout with cutting the PSF, but starting from the true bMap.
    outFolder = "cutPsf2";
    Directory.CreateDirectory(outFolder);
    outFolder += @"\";
    foreach (var cut in psfCuts)
    {
        using (var writer = new StreamWriter(outFolder + cut + "Psf.txt", false))
        {
            writer.WriteLine(fileHeader);
            experimentInfo = ReconstructSimple(data, psf, outFolder, cut, 12, cut + "dirty", cut + "x", writer, 0.0, 1e-5f, true);
            File.WriteAllText(outFolder + cut + "PsfTotal.txt", experimentInfo.totalDeconv.Elapsed.ToString());
        }
    }

    // Combined, final solution: cut the PSF in half, optimize until convergence,
    // and then do one more major cycle with the second method.
    outFolder = "properSolution";
    Directory.CreateDirectory(outFolder);
    outFolder += @"\";
    foreach (var cut in psfCuts)
    {
        using (var writer = new StreamWriter(outFolder + cut + "Psf.txt", false))
        {
            writer.WriteLine(fileHeader);
            experimentInfo = ReconstructGradientApprox(data, psf, outFolder, cut, 12, cut + "dirty", cut + "x", writer, 0.0, 1e-5f);
            File.WriteAllText(outFolder + cut + "PsfTotal.txt", experimentInfo.totalDeconv.Elapsed.ToString());
        }
    }

    Directory.SetCurrentDirectory(rootFolder);
}
protected override void Init()
{
    LMC.BuildCachedMessages();
    RectConfig.Popup = new Rect(ScreenSize.x / 2 + 10, ScreenSize.y / 2 - 260, 360, 480);
    RectConfig.Body = new Rect(20, 30, 320, 420);
}
public static void Run()
{
    var folder = @"C:\dev\GitHub\p9-data\large\fits\meerkat_tiny\";
    var data = LMC.Load(folder);
    var rootFolder = Directory.GetCurrentDirectory();

    // Find the maximum absolute w coordinate and convert it to wavelengths.
    var maxW = 0.0;
    for (int i = 0; i < data.uvw.GetLength(0); i++)
    {
        for (int j = 0; j < data.uvw.GetLength(1); j++)
        {
            maxW = Math.Max(maxW, Math.Abs(data.uvw[i, j, 2]));
        }
    }
    maxW = Partitioner.MetersToLambda(maxW, data.frequencies[data.frequencies.Length - 1]);

    // Count the unflagged visibilities.
    var visCount2 = 0;
    for (int i = 0; i < data.flags.GetLength(0); i++)
    {
        for (int j = 0; j < data.flags.GetLength(1); j++)
        {
            for (int k = 0; k < data.flags.GetLength(2); k++)
            {
                if (!data.flags[i, j, k])
                {
                    visCount2++;
                }
            }
        }
    }
    var visibilitiesCount = visCount2;

    int gridSize = 2048;
    int subgridsize = 32;
    int kernelSize = 16;
    int max_nr_timesteps = 1024;
    double cellSize = 2.0 / 3600.0 * PI / 180.0;
    int wLayerCount = 32;
    double wStep = maxW / (wLayerCount);

    data.c = new GriddingConstants(visibilitiesCount, gridSize, subgridsize, kernelSize, max_nr_timesteps, (float)cellSize, wLayerCount, wStep);
    data.metadata = Partitioner.CreatePartition(data.c, data.uvw, data.frequencies);
    data.visibilitiesCount = visibilitiesCount;

    // Unit visibilities for the PSF: 1 where the data is unflagged, 0 otherwise.
    var psfVis = new Complex[data.uvw.GetLength(0), data.uvw.GetLength(1), data.frequencies.Length];
    for (int i = 0; i < data.visibilities.GetLength(0); i++)
    {
        for (int j = 0; j < data.visibilities.GetLength(1); j++)
        {
            for (int k = 0; k < data.visibilities.GetLength(2); k++)
            {
                if (!data.flags[i, j, k])
                {
                    psfVis[i, j, k] = new Complex(1.0, 0);
                }
                else
                {
                    psfVis[i, j, k] = new Complex(0, 0);
                }
            }
        }
    }

    Console.WriteLine("gridding psf");
    var psfGrid = IDG.GridW(data.c, data.metadata, psfVis, data.uvw, data.frequencies);
    var psf = FFT.WStackIFFTFloat(psfGrid, data.c.VisibilitiesCount);
    FFT.Shift(psf);

    var objectiveCutoff = REFERENCE_L2_PENALTY + REFERENCE_ELASTIC_PENALTY;
    var actualLipschitz = (float)PSF.CalcMaxLipschitz(psf);

    Console.WriteLine("Calc Histogram");
    var histPsf = GetHistogram(psf, 256, 0.05f);
    var experiments = new float[] { 0.5f, /*0.4f, 0.2f, 0.1f, 0.05f*/ };
    Console.WriteLine("Done Histogram");

    Directory.CreateDirectory("PSFMask");
    Directory.SetCurrentDirectory("PSFMask");
    FitsIO.Write(psf, "psfFull.fits");

    // reconstruct with full psf and find reference objective value
    ReconstructionInfo experimentInfo = null;
    var outFolder = "";
    var fileHeader = "cycle;lambda;sidelobe;dataPenalty;regPenalty;currentRegPenalty;converged;iterCount;ElapsedTime";
    foreach (var maskPercent in experiments)
    {
        using (var writer = new StreamWriter(outFolder + maskPercent + "Psf.txt", false))
        {
            var maskedPSF = Common.Copy(psf);
            var maskedPixels = MaskPSF(maskedPSF, histPsf, maskPercent);
            writer.WriteLine(maskedPixels + ";" + maskedPixels / (double)maskedPSF.Length);
            FitsIO.Write(maskedPSF, outFolder + maskPercent + "Psf.fits");
            writer.WriteLine(fileHeader);
            writer.Flush();
            experimentInfo = Reconstruct(data, actualLipschitz, maskedPSF, outFolder, 1, 10, maskPercent + "dirty", maskPercent + "x", writer, objectiveCutoff, 1e-5f, false);
            File.WriteAllText(outFolder + maskPercent + "PsfTotal.txt", experimentInfo.totalDeconv.Elapsed.ToString());
        }
    }

    Directory.SetCurrentDirectory(rootFolder);
}