/// <summary>
/// Dumps the reduction factors to a debug shape file named "&lt;Name&gt;_factors",
/// one feature per catchment present in both Reduction and Catchments.
/// </summary>
/// <param name="Directory">Directory the shape file is written to.</param>
/// <param name="Catchments">Catchment geometries, keyed by catchment ID.</param>
public override void DebugPrint(string Directory, Dictionary<int, Catchment> Catchments)
{
    // Nothing to write when no reduction factors exist.
    if (Reduction.Count == 0)
        return;

    var attributes = new DataTable();
    attributes.Columns.Add("ID15", typeof(int));
    attributes.Columns.Add("RedFactor", typeof(double));

    using (var writer = new ShapeWriter(Path.Combine(Directory, Name + "_factors")) { Projection = MainModel.projection })
    {
        foreach (var entry in Reduction)
        {
            Catchment catchment;
            // Skip factors for catchments we have no geometry for.
            if (!Catchments.TryGetValue(entry.Key, out catchment))
                continue;

            var row = attributes.NewRow();
            row[0] = entry.Key;
            row["RedFactor"] = entry.Value;
            writer.Write(new GeoRefData() { Geometry = catchment.Geometry, Data = row });
        }
    }
}
/// <summary>
/// Writes the reduction variables to a shape file next to the output file,
/// using the coordinate system read from the "Default.prj" shipped with the assembly.
/// </summary>
/// <param name="AllCatchments">Catchment geometries, keyed by the "ID" column of each row.</param>
/// <param name="FileNameAttach">Suffix appended to the output file's base name.</param>
private void Print(Dictionary<int, Catchment> AllCatchments, string FileNameAttach)
{
    // Load the output coordinate system from Default.prj in the assembly directory.
    ProjNet.CoordinateSystems.ICoordinateSystem projection;
    string prjFile = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "Default.prj");
    using (System.IO.StreamReader sr = new System.IO.StreamReader(prjFile))
    {
        var factory = new ProjNet.CoordinateSystems.CoordinateSystemFactory();
        projection = factory.CreateFromWkt(sr.ReadToEnd());
    }

    string shapeFile = Path.Combine(Path.GetDirectoryName(OutputFile.FileName),
        Path.GetFileNameWithoutExtension(OutputFile.FileName) + FileNameAttach);

    using (ShapeWriter sw = new ShapeWriter(shapeFile) { Projection = projection })
    {
        // One feature per row; geometry comes from the catchment referenced by "ID".
        foreach (DataRow row in ReductionVariables.Rows)
        {
            sw.Write(new GeoRefData()
            {
                Data = row,
                Geometry = AllCatchments[(int)row["ID"]].Geometry
            });
        }
    }
}
/// <summary>
/// Round-trip test: writes three points with a one-column attribute table,
/// then reads the file back and echoes each attribute and coordinate pair.
/// </summary>
public void WritePointShapeTest()
{
    string File = @"..\..\..\TestData\WriteTest.Shp";

    // Write three points plus a matching attribute table.
    ShapeWriter writer = new ShapeWriter(File);
    writer.WritePointShape(10, 20);
    writer.WritePointShape(20, 30);
    writer.WritePointShape(30, 40);

    DataTable attributes = new DataTable();
    attributes.Columns.Add("Name", typeof(string));
    foreach (string label in new[] { "point1", "point2", "point3" })
        attributes.Rows.Add(new object[] { label });
    writer.Data.WriteDate(attributes);
    writer.Dispose();

    // Read the file back and echo attribute + coordinates for each feature.
    ShapeReader reader = new ShapeReader(File);
    DataTable readBack = reader.Data.Read();
    foreach (DataRow dr in readBack.Rows)
    {
        Console.WriteLine(dr[0].ToString());
        IXYPoint p = (IXYPoint)reader.ReadNext();
        Console.WriteLine(p.X.ToString() + " " + p.Y.ToString());
    }
}
/// <summary>
/// Computes min/max/average and the mean of the yearly minima for every point
/// in the Res11 file within [StartTime, EndTime] and writes them as a point shape.
/// </summary>
/// <param name="filename">Destination shape file name.</param>
private void SaveAndCalc(string filename)
{
    using (ShapeWriter sw = new ShapeWriter(filename))
    {
        DataTable dt = new DataTable();
        dt.Columns.Add("Branch", typeof(string));
        dt.Columns.Add("TopoID", typeof(string));
        dt.Columns.Add("Chainage", typeof(double));
        dt.Columns.Add("DataType", typeof(string));
        dt.Columns.Add("Min", typeof(double));
        dt.Columns.Add("Max", typeof(double));
        dt.Columns.Add("Average", typeof(double));
        dt.Columns.Add("MedianMin", typeof(double));

        // Hoisted: the original re-evaluated GetTimeStep(EndTime) on every loop iteration.
        int firstStep = res11file.GetTimeStep(StartTime);
        int lastStep = res11file.GetTimeStep(EndTime);

        foreach (var p in res11file.Points)
        {
            double min = double.MaxValue; // (original had a stray empty statement ";;" here)
            double max = double.MinValue;
            double sum = 0;
            // One entry per calendar year; each holds the minimum value seen in that year.
            List<double> yearlyMins = new List<double>();
            int currentYear = 0;

            for (int i = firstStep; i < lastStep; i++)
            {
                // Open a new yearly bucket when the time step enters a new year.
                if (currentYear != res11file.TimeSteps[i].Year)
                {
                    currentYear = res11file.TimeSteps[i].Year;
                    yearlyMins.Add(double.MaxValue);
                }
                double d = p.GetData(i);
                min = Math.Min(min, d);
                max = Math.Max(max, d);
                sum += d;
                yearlyMins[yearlyMins.Count - 1] = Math.Min(yearlyMins.Last(), d);
            }

            var drow = dt.NewRow();
            drow[0] = p.BranchName;
            drow[1] = p.TopoID;
            drow[2] = p.Chainage;
            drow[3] = p.pointType.ToString();
            drow[4] = min;
            drow[5] = max;
            drow[6] = sum / (lastStep - firstStep);
            // NOTE(review): despite the column name "MedianMin" this is the MEAN of the
            // yearly minima (sum / count), matching the original computation.
            drow[7] = yearlyMins.Sum() / yearlyMins.Count;

            GeoRefData grf = new GeoRefData();
            grf.Geometry = p;
            grf.Data = drow;
            sw.Write(grf);
        }
    }
}
/// <summary>
/// Writes a point shape with one entry per intake in the list, using each
/// intake's attached DataRow as the attribute record.
/// </summary>
/// <param name="FileName">Destination shape file name.</param>
/// <param name="Intakes">The intakes to write; each supplies well coordinates and a data row.</param>
public static void WriteShapeFromDataRow(string FileName, IEnumerable<JupiterIntake> Intakes)
{
    // "using" guarantees the writer is disposed even if a write throws
    // (the original only called Dispose() on the success path).
    using (ShapeWriter PSW = new ShapeWriter(FileName))
    {
        foreach (JupiterIntake JI in Intakes)
        {
            PSW.WritePointShape(JI.well.X, JI.well.Y);
            PSW.Data.WriteData(JI.Data);
        }
    }
}
/// <summary>
/// Joins leaching values from a semicolon-separated text file onto the soil-grid
/// point shape (matched on GRIDID) and writes the result to a debug shape file.
/// </summary>
/// <param name="outputpath">Directory for the debug shape file.</param>
/// <param name="FileName">Semicolon-separated leach file; GRIDID in the first column, leach value second-to-last.</param>
/// <param name="SoilGridCodesFileName">Point shape holding the grid geometry and GRIDID attributes.</param>
public void DebugPrint(string outputpath, string FileName, string SoilGridCodesFileName)
{
    // Read every point (and the projection) from the soil grid shape.
    List<GeoRefData> Allpoints;
    ProjNet.CoordinateSystems.ICoordinateSystem proj;
    using (ShapeReader shr = new ShapeReader(SoilGridCodesFileName))
    {
        proj = shr.Projection;
        Allpoints = shr.GeoData.ToList();
    }

    // Index the points by their GRIDID attribute for the join below.
    Dictionary<int, GeoRefData> pointsByGridId = new Dictionary<int, GeoRefData>();
    foreach (var p in Allpoints)
        pointsByGridId.Add((int)p.Data["GRIDID"], p);

    // New column named "Y_" + the last four characters of the input file name.
    string name = Path.GetFileNameWithoutExtension(FileName);
    name = "Y_" + name.Substring(name.Length - 4);
    var table = Allpoints.First().Data.Table;
    table.Columns.Add(name, typeof(double));

    List<GeoRefData> joined = new List<GeoRefData>();
    using (ShapeWriter sw = new ShapeWriter(Path.Combine(outputpath, "Leach_" + name + "_debug.shp")) { Projection = proj })
    {
        using (StreamReader sr = new StreamReader(FileName))
        {
            while (!sr.EndOfStream)
            {
                var fields = sr.ReadLine().Split(new string[] { ";" }, StringSplitOptions.RemoveEmptyEntries);
                // Leach value sits in the second-to-last column, GRIDID in the first.
                double leach = double.Parse(fields[fields.Length - 2]);
                var point = pointsByGridId[int.Parse(fields[0])];
                point.Data[name] = leach;
                joined.Add(point);
            }
        }
        foreach (var v in joined)
            sw.Write(v);
    }
}
/// <summary>
/// Saves the network links as a polyline shape file and, when branches exist,
/// writes a second "_branches.shp" file with one polyline per branch.
/// </summary>
/// <param name="ShapeFileName">Destination file for the link layer; the branch layer derives its name from it.</param>
public void SaveToShape(string ShapeFileName)
{
    // One straight line per link, from upstream node to downstream node.
    using (ShapeWriter linkWriter = new ShapeWriter(ShapeFileName))
    {
        DataTable linkTable = new DataTable();
        linkTable.Columns.Add("LinkID", typeof(string));
        linkTable.Columns.Add("FromNode", typeof(string));
        linkTable.Columns.Add("ToNode", typeof(string));
        linkTable.Columns.Add("SpecifiedLength", typeof(double));

        foreach (var link in Links.Values)
        {
            var geometry = new XYPolyline();
            geometry.Points.Add(new XYPoint(link.UpstreamNode.pfsnode.X, link.UpstreamNode.pfsnode.Y));
            geometry.Points.Add(new XYPoint(link.DownstreamNode.pfsnode.X, link.DownstreamNode.pfsnode.Y));

            var row = linkTable.NewRow();
            row[0] = link.pfslink.LinkID;
            row[1] = link.pfslink.FromNode;
            row[2] = link.pfslink.ToNode;
            row[3] = link.pfslink.SpecifiedLength;

            linkWriter.Write(new GeoRefData() { Geometry = geometry, Data = row });
        }
    }

    if (Branches != null && Branches.Count > 0)
    {
        string branchFile = Path.Combine(Path.GetDirectoryName(ShapeFileName),
            Path.GetFileNameWithoutExtension(ShapeFileName) + "_branches.shp");

        using (ShapeWriter branchWriter = new ShapeWriter(branchFile))
        {
            DataTable branchTable = new DataTable();
            branchTable.Columns.Add("Name", typeof(string));
            branchTable.Columns.Add("Length", typeof(double));

            foreach (var branch in Branches)
            {
                // The branch line runs through every link's upstream node,
                // then ends at the last link's downstream node.
                var line = new XYPolyline();
                line.Points.AddRange(branch.Links.Select(p => p.UpstreamNode.Location));
                line.Points.Add(branch.Links.Last().DownstreamNode.Location);

                var row = branchTable.NewRow();
                row[0] = branch.Name;
                row[1] = line.GetLength();
                branchWriter.Write(new GeoRefData() { Geometry = line, Data = row });
            }
        }
    }
}
/// <summary>
/// Writes the intakes to a point shape and calculates statistics on the head
/// observations within the period from <paramref name="Start"/> to <paramref name="End"/>.
/// </summary>
/// <param name="FileName">Destination shape file name.</param>
/// <param name="Intakes">The intakes whose wells and observations are written.</param>
/// <param name="Start">Start of the observation period.</param>
/// <param name="End">End of the observation period.</param>
public static void WriteSimpleShape(string FileName, IEnumerable<IIntake> Intakes, DateTime Start, DateTime End)
{
    // "using" guarantees disposal even if a write throws (original disposed manually).
    using (ShapeWriter PSW = new ShapeWriter(FileName))
    {
        OutputTables.PejlingerOutputDataTable PDT = new OutputTables.PejlingerOutputDataTable();
        foreach (Intake I in Intakes)
        {
            // Materialize once; the sequence is enumerated several times below.
            var SelectedObs = I.HeadObservations.ItemsInPeriod(Start, End).ToList();

            PSW.WritePointShape(I.well.X, I.well.Y);

            OutputTables.PejlingerOutputRow PR = PDT.NewPejlingerOutputRow();
            PR.NOVANAID = I.ToString();
            PR.LOCATION = I.well.Description;
            PR.XUTM = I.well.X;
            PR.YUTM = I.well.Y;
            PR.JUPKOTE = I.well.Terrain;

            if (I.Screens.Count > 0)
            {
                PR.INTAKETOP = I.Screens.Min(s => s.DepthToTop);
                PR.INTAKEBOT = I.Screens.Max(s => s.DepthToBottom);
            }

            PR.NUMBEROFOB = SelectedObs.Count;
            if (SelectedObs.Count > 0)
            {
                PR.STARTDATO = SelectedObs.Min(x => x.Time);
                PR.ENDDATO = SelectedObs.Max(x => x.Time);
                PR.MAXOBS = SelectedObs.Max(o => o.Value);
                PR.MINOBS = SelectedObs.Min(o => o.Value);
                PR.MEANOBS = SelectedObs.Average(o => o.Value);
            }
            PDT.Rows.Add(PR);
        }
        // NOTE(review): "WriteDate" looks like a typo for "WriteData" in the ShapeWriter
        // API itself (other call sites use both spellings) — confirm against the library.
        PSW.Data.WriteDate(PDT);
    }
}
/// <summary>
/// When extra output is enabled, writes every data row whose ID matches a known
/// catchment to a debug shape file named "&lt;Name&gt;_debug".
/// </summary>
/// <param name="Directory">Directory the debug shape file is written to.</param>
/// <param name="Catchments">Catchment geometries, keyed by the ID in each row's first column.</param>
public override void DebugPrint(string Directory, Dictionary<int, Catchment> Catchments)
{
    if (!ExtraOutput)
        return;

    using (ShapeWriter sw = new ShapeWriter(System.IO.Path.Combine(Directory, Name + "_debug")))
    {
        foreach (System.Data.DataRow row in Data.Rows)
        {
            Catchment catchment;
            // Rows without a matching catchment geometry are skipped.
            if (Catchments.TryGetValue((int)row[0], out catchment))
            {
                sw.Write(new Geometry.GeoRefData() { Geometry = catchment.Geometry, Data = row });
            }
        }
    }
}
/// <summary>
/// Builds a station shape file with an added ID15 column by joining station
/// numbers against the dmu-to-ID15 mapping found in maol.txt.
/// </summary>
public void TestMethod1()
{
    // Read the dmu-number -> ID15 mapping (tab separated, one header line skipped).
    Dictionary<int, int> dmuTOId15 = new Dictionary<int, int>();
    using (StreamReader sr = new StreamReader(@"D:\DK_information\Overfladevand\stationer\maol.txt"))
    {
        sr.ReadLine();
        while (!sr.EndOfStream)
        {
            var fields = sr.ReadLine().Split(new string[] { "\t" }, StringSplitOptions.None);
            dmuTOId15.Add(int.Parse(fields[1]), int.Parse(fields[3]));
        }
    }

    using (ShapeWriter sw = new ShapeWriter(@"D:\DK_information\Overfladevand\stationer\stationer2.shp"))
    using (ShapeReader sh = new ShapeReader(@"D:\DK_information\Overfladevand\stationer\stationer.shp"))
    {
        var dt = sh.Data.Read();
        dt.Columns.Add("ID15", typeof(int));
        for (int i = 0; i < dt.Rows.Count; i++)
        {
            // Rows without a mapping keep a null ID15.
            int id15;
            if (dmuTOId15.TryGetValue(int.Parse(dt.Rows[i][0].ToString()), out id15))
                dt.Rows[i]["ID15"] = id15;

            sw.Write(new Geometry.GeoRefData() { Geometry = sh.ReadNext(i), Data = dt.Rows[i] });
        }
    }
}
/// <summary>
/// When extra output is enabled and the model updates, writes one shape feature
/// per catchment with a big lake (average nitrate reduction/concentration and
/// flushing ratio), plus a per-catchment CSV with the underlying time series.
/// </summary>
/// <param name="Directory">Directory the debug shape/CSV files are written to.</param>
/// <param name="Catchments">All catchments, keyed by ID; only those with a BigLake are written.</param>
public override void DebugPrint(string Directory, Dictionary<int, Catchment> Catchments)
{
    // Short-circuit && replaces the original non-short-circuit & (identical result
    // for plain bools, but idiomatic and skips the second read when the first is false).
    if (ExtraOutput && Update)
    {
        using (ShapeWriter sw = new ShapeWriter(Path.Combine(Directory, Name + "_debug.shp")))
        {
            System.Data.DataTable dt = new System.Data.DataTable();
            dt.Columns.Add("ID15", typeof(int));
            dt.Columns.Add("LakeName", typeof(string));
            dt.Columns.Add("NitrateReduction", typeof(double));
            dt.Columns.Add("NitrateConcentration", typeof(double));
            dt.Columns.Add("FlushingRatio", typeof(double));

            foreach (var c in Catchments.Values.Where(c => c.BigLake != null))
            {
                var row = dt.NewRow();
                row[0] = c.ID;
                row[1] = c.BigLake.Name;
                row[2] = c.BigLake.NitrateReduction.Average;
                row[3] = c.BigLake.NitrateConcentration.Average;
                row[4] = c.BigLake.FlushingRatio.Average;
                sw.Write(new GeoRefData() { Geometry = c.Geometry, Data = row });

                // Companion CSV with the full time series behind the averages above.
                // NOTE(review): "NitrateContration" in the header is a typo, but it is a
                // runtime string that downstream parsers may rely on — left unchanged.
                using (StreamWriter st = new StreamWriter(Path.Combine(Directory, Name) + "_" + c.ID + "_debug.csv"))
                {
                    st.WriteLine("Time;NitrateReduction;NitrateContration;FlushingRatio");
                    for (int i = 0; i < c.BigLake.NitrateReduction.Items.Count; i++)
                    {
                        st.WriteLine(c.BigLake.NitrateReduction.Items[i].Time.ToString() + ";"
                            + c.BigLake.NitrateReduction.Items[i].Value + ";"
                            + c.BigLake.NitrateConcentration.Items[i].Value + ";"
                            + c.BigLake.FlushingRatio.Items[i].Value);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Exports the selected plants' intakes to a shape file; afterwards offers to
/// write plants that have no intakes to a second shape file.
/// </summary>
private void button3_Click(object sender, EventArgs e)
{
    if (saveFileDialog1.ShowDialog() != DialogResult.OK)
        return;

    IEnumerable<Plant> plants = listBoxAnlaeg.Items.Cast<Plant>();
    IEnumerable<JupiterIntake> intakes = JupiterReader.AddDataForNovanaExtraction(plants, dateTimeStartExt.Value, dateTimeEndExt.Value);
    HeadObservations.WriteShapeFromDataRow(saveFileDialog1.FileName, intakes);

    // Offer a separate export for plants without any pumping intakes.
    IEnumerable<Plant> plantsWithoutIntakes = plants.Where(p => p.PumpingIntakes.Count == 0);
    if (plantsWithoutIntakes.Count() > 0)
    {
        if (DialogResult.Yes == MessageBox.Show("The list contains plants with no intakes attached. Should these be written to a new shape-file?", "Plants without intakes!", MessageBoxButtons.YesNo))
        {
            if (saveFileDialog1.ShowDialog() == DialogResult.OK)
            {
                NovanaTables.IndvindingerDataTable dt = JupiterReader.FillPlantData(plantsWithoutIntakes, dateTimeStartExt.Value, dateTimeEndExt.Value);
                ShapeWriter writer = new ShapeWriter(saveFileDialog1.FileName);
                writer.WritePointShape(dt, dt.ANLUTMXColumn.ColumnName, dt.ANLUTMYColumn.ColumnName);
                writer.Dispose();
            }
        }
    }
}
/// <summary>
/// Writes a three-point polyline with a single text attribute to a shape file.
/// </summary>
public void WritePolyLineTest()
{
    string File = @"..\..\..\TestData\PolyLineTest.Shp";

    // Build the test geometry: (0,0) -> (2,2) -> (4,5).
    XYPolyline line = new XYPolyline();
    foreach (var point in new[] { new XYPoint(0, 0), new XYPoint(2, 2), new XYPoint(4, 5) })
        line.Points.Add(point);

    DataTable dt = new DataTable();
    dt.Columns.Add("tekst", typeof(string));

    GeoRefData feature = new GeoRefData();
    feature.Geometry = line;
    feature.Data = dt.NewRow();
    feature.Data[0] = "Her er værdien";

    ShapeWriter sp = new ShapeWriter(File);
    sp.Write(feature);
    sp.Dispose();
}
/// <summary>
/// Writes the network to shape files: the Q/H computational points
/// ("&lt;name&gt;_QHPoints"), the branch polylines (base name) and the cross
/// sections ("&lt;name&gt;_CrossSections").
/// </summary>
/// <param name="shapefilename">Base file name; per-layer suffixes are appended.</param>
public void WriteToShape(string shapefilename)
{
    // Q/H computational points from the network editor setup.
    using (ShapeWriter swc = new ShapeWriter(shapefilename + "_QHPoints"))
    {
        DataTable dat = new DataTable();
        dat.Columns.Add("BranchName", typeof(string));
        dat.Columns.Add("Chainage", typeof(double));
        dat.Columns.Add("Type", typeof(string));

        foreach (var b in nwkfile.MIKE_11_Network_editor.COMPUTATIONAL_SETUP.branchs)
        {
            foreach (var p in b.points.points)
            {
                GeoRefData gd = new GeoRefData();
                gd.Data = dat.NewRow();
                gd.Data["BranchName"] = b.name;
                gd.Data["Chainage"] = p.Par1;
                // Par3 == 0 marks an h-point; anything else a q-point.
                gd.Data["Type"] = p.Par3 == 0 ? "h" : "q";

                // Points on branches we cannot locate are silently skipped (as before).
                var bran = Branches.FirstOrDefault(br => br.Name == b.name);
                if (bran != null)
                {
                    gd.Geometry = bran.GetPointAtChainage(p.Par1);
                    swc.Write(gd);
                }
            }
        }
    }

    // Branch lines and cross sections. "using" replaces the original manual
    // Dispose() calls so the files are closed even if a write throws.
    using (ShapeWriter sw = new ShapeWriter(shapefilename))
    using (ShapeWriter swCsc = new ShapeWriter(shapefilename + "_CrossSections"))
    {
        DataTable dtCsc = new DataTable();
        dtCsc.Columns.Add("Name", typeof(string));
        dtCsc.Columns.Add("TopoID", typeof(string));
        dtCsc.Columns.Add("Chainage", typeof(double));

        DataTable dt = new DataTable();
        dt.Columns.Add("Name", typeof(string));
        dt.Columns.Add("TopoID", typeof(string));
        dt.Columns.Add("ChainageStart", typeof(double));
        dt.Columns.Add("ChainageEnd", typeof(double));

        foreach (var b in branches)
        {
            GeoRefData grf = new GeoRefData();
            grf.Geometry = b.Line;
            grf.Data = dt.NewRow();
            grf.Data[0] = b.Name;
            grf.Data[1] = b.TopoID;
            grf.Data[2] = b.ChainageStart;
            grf.Data[3] = b.ChainageEnd;
            sw.Write(grf);

            foreach (var Csc in b.CrossSections)
            {
                GeoRefData cscData = new GeoRefData();
                cscData.Geometry = Csc.Line;
                cscData.Data = dtCsc.NewRow();
                cscData.Data[0] = Csc.BranchName;
                cscData.Data[1] = Csc.TopoID;
                cscData.Data[2] = Csc.Chainage;
                swCsc.Write(cscData);
            }
        }
    }
}
/// <summary>
/// Writes a DefineFont3 tag for the given font, followed — when the font has the
/// corresponding data — by DefineFontAlignZones and DefineFontName tags.
/// </summary>
/// <param name="font">The font to serialize.</param>
/// <param name="fid">The character ID assigned to the font.</param>
private void WriteFont(SWFFont font, int fid)
{
    WriteBuffer fontTag = this.OpenTag(Tag.DefineFont3, font.Name + "; id=" + fid);

    char[] codes = font.CodePoints;

    /* Tag.DefineFont3 */
    {
        fontTag.WriteUI16((uint)fid);

        // Font flag bits; written in this exact order per the DefineFont3 layout.
        fontTag.WriteBit(font.HasLayout);
        fontTag.WriteBit(false); /* ISSUE 50: ShiftJIS support */
        fontTag.WriteBit(font.IsSmall);
        fontTag.WriteBit(false); /* ISSUE 51: ANSI support, though I think this might never be false. */
        fontTag.WriteBit(true); /* ISSUE 52: We always write wide offsets. This is because we're too lazy to measure our table. */
        fontTag.WriteBit(true); /* Spec says must be true. */
        fontTag.WriteBit(font.IsItalic);
        fontTag.WriteBit(font.IsBold);

        fontTag.WriteUI8((uint)font.LanguageCode);
        fontTag.WriteString(font.Name, true);
        fontTag.WriteUI16((uint)font.GlyphCount);

        // Serialize every glyph shape up-front so the offset table can be computed
        // before any glyph data is emitted.
        byte[][] shapeData = new byte[font.GlyphCount][];
        int totalShapeBytes = 0;
        for (int i = 0; i < font.GlyphCount; i++)
        {
            Tag format;
            shapeData[i] = ShapeWriter.ShapeToBytes(font.GetGlyphShape(codes[i]), out format);
            if (format != Tag.DefineFont3)
            {
                throw new SWFModellerException(SWFModellerError.Internal, "Can't write non-font shapes as glyphs");
            }
            totalShapeBytes += shapeData[i].Length;
        }

        int startOffset = font.GlyphCount * 4 + 4; /* 4 bytes per offset (wide offsets) + 4 for the code table offset */

        // Offset table: one wide (32-bit) offset per glyph, relative to the table start...
        int nextOffset = startOffset;
        foreach (byte[] shapeBytes in shapeData)
        {
            fontTag.WriteUI32((uint)nextOffset);
            nextOffset += shapeBytes.Length;
        }

        // ...then the code-table offset, followed by the glyph shape data itself.
        fontTag.WriteUI32((uint)(startOffset + totalShapeBytes));
        foreach (byte[] shapeBytes in shapeData)
        {
            fontTag.WriteBytes(shapeBytes);
        }

        // Code table: one character code per glyph, in glyph order.
        foreach (char code in codes)
        {
            fontTag.WriteUI16((uint)code);
        }

        if (font.HasLayout)
        {
            fontTag.WriteSI16(font.Ascent.Value);
            fontTag.WriteSI16(font.Descent.Value);
            fontTag.WriteSI16(font.Leading.Value);

            // Advance values first, collecting the bounds for the table that follows.
            Rect[] bounds = new Rect[font.GlyphCount];
            int boundsPos = 0;
            foreach (char c in codes)
            {
                GlyphLayout gl = font.GetLayout(c);
                fontTag.WriteSI16(gl.Advance);
                bounds[boundsPos++] = gl.Bounds;
            }

            foreach (Rect bound in bounds)
            {
                fontTag.WriteRect(bound);
                fontTag.Align8();
            }

            fontTag.WriteUI16((uint)font.KerningTable.Length);
            foreach (KerningPair kern in font.KerningTable)
            {
                fontTag.WriteUI16(kern.LeftChar);
                fontTag.WriteUI16(kern.RightChar);
                fontTag.WriteSI16(kern.Adjustment);
            }
        }
    }

    this.CloseTag();

    // Optional DefineFontAlignZones tag: two alignment zones per glyph.
    if (font.HasPixelAlignment)
    {
        WriteBuffer zonesTag = this.OpenTag(Tag.DefineFontAlignZones, font.Name + "; id=" + fid);

        zonesTag.WriteUI16((uint)fid);

        if (font.ThicknessHint == null)
        {
            throw new SWFModellerException(SWFModellerError.Internal, "Can't have pixel aligmnent without a font thickness hint.");
        }

        zonesTag.WriteUBits((uint)font.ThicknessHint, 2);
        zonesTag.WriteUBits(0, 6); /* Reserved */

        foreach (char c in codes)
        {
            PixelAlignment pa = font.GetPixelAligment(c);

            if (pa.ZoneInfo.Length != 2)
            {
                throw new SWFModellerException(SWFModellerError.Internal, "Pixel aligment should always have 2 zones.");
            }

            zonesTag.WriteUI8((uint)pa.ZoneInfo.Length);

            foreach (PixelAlignment.ZoneData zi in pa.ZoneInfo)
            {
                /* These int values are just unparsed 16-bit floats. */
                zonesTag.WriteUI16((uint)zi.AlignmentCoord);
                zonesTag.WriteUI16((uint)zi.Range);
            }

            zonesTag.WriteUBits(0, 6); /* Reserved */
            zonesTag.WriteBit(pa.HasY);
            zonesTag.WriteBit(pa.HasX);
        }

        this.CloseTag();
    }

    // Optional DefineFontName tag carrying full name and copyright.
    if (font.HasExtraNameInfo)
    {
        WriteBuffer nameTag = this.OpenTag(Tag.DefineFontName, font.FullName + "; id=" + fid);

        nameTag.WriteUI16((uint)fid);
        nameTag.WriteString(font.FullName);
        nameTag.WriteString(font.Copyright);

        this.CloseTag();
    }
}
/// <summary>
/// Writes a single character definition (shape, sprite, edit text or static text),
/// first writing any font and image dependencies the character requires.
/// </summary>
/// <param name="ch">The character to write; null is ignored.</param>
/// <param name="unboundClasses">Collects timelines whose class still needs binding after writing.</param>
private void WriteCharacter(ICharacter ch, ListSet<Timeline> unboundClasses)
{
    int cid;

    if (ch == null)
    {
        return;
    }

    // Each character is marshalled at most once.
    if (this.characterMarshal.HasMarshalled(ch))
    {
        return;
    }

    int fontID = -1;

    // Fonts used by the character must be written before the character itself;
    // unmarshalled fonts are written here, already-written ones just resolve their ID.
    if (ch is IFontUserProcessor)
    {
        IFontUserProcessor fup = (IFontUserProcessor)ch;
        fup.FontUserProc(delegate(IFontUser fu)
        {
            if (fu.HasFont && !characterMarshal.HasMarshalled(fu.Font))
            {
                fontID = characterMarshal.GetIDFor(fu.Font);
                this.WriteFont(fu.Font, fontID);
            }
            else
            {
                fontID = characterMarshal.GetExistingIDFor(fu.Font);
            }
        });
    }

    if (ch is IShape)
    {
        // Bitmap fills must be defined before the shape that references them.
        IImage[] images = ((IShape)ch).GetImages();
        if (images != null)
        {
            foreach (IImage image in images)
            {
                this.WriteImage(image);
            }
        }

        Tag format;
        byte[] shapeBytes = ShapeWriter.ShapeToBytes((IShape)ch, out format);

        WriteBuffer shapeTag = this.OpenTag(format);
        cid = this.characterMarshal.GetIDFor(ch);
        shapeTag.WriteUI16((uint)cid);
        shapeTag.WriteBytes(shapeBytes);
#if DEBUG
        this.LogMessage("char id=" + cid);
#endif
        this.CloseTag();
    }
    else if (ch is Sprite)
    {
        this.WriteSprite((Sprite)ch, unboundClasses);
    }
    else if (ch is EditText)
    {
        this.WriteEditText((EditText)ch, fontID);
    }
    else if (ch is StaticText)
    {
        this.WriteStaticText((StaticText)ch);
    }
    else
    {
        /* ISSUE 73 */
        throw new SWFModellerException(
            SWFModellerError.UnimplementedFeature,
            "Character of type " + ch.GetType().ToString() + " not currently supported in writer");
    }

    // Remember timelines carrying a (non-Adobe) class so it can be bound later.
    if (ch is Timeline)
    {
        Timeline tl = (Timeline)ch;
        if (tl.HasClass && !(tl.Class is AdobeClass) && !unboundClasses.Contains(tl))
        {
            unboundClasses.Add(tl);
        }
    }
}
/// <summary>
/// Exports the current feature set to one shape file per geometry type present,
/// suffixing the file names when more than one type is exported.
/// </summary>
private void btnSaveToShape_Click(object sender, EventArgs e)
{
    if (this.saveFileDialogShp.ShowDialog() != DialogResult.OK)
        return;

    try
    {
        // Collect the distinct shape types present in the current feature set.
        List<ShapeType> shapeTypes = new List<ShapeType>();
        foreach (Feature feature in this._currentFeatureSet)
        {
            ShapeType shapeType = feature.Geometry.ToShapeType();
            if (!shapeTypes.Contains(shapeType))
                shapeTypes.Add(shapeType);
        }

        // Build the attribute schema from the first feature's attributes.
        List<DataColumn> columns = new List<DataColumn>();
        foreach (string attributeName in this._currentFeatureSet[0].Attributes.Keys)
            columns.Add(new DataColumn(attributeName, this._currentFeatureSet[0].Attributes[attributeName].GetType()));

        foreach (ShapeType currentType in shapeTypes)
        {
            // Only suffix the file name when several geometry types are written.
            string suffix = "";
            if (shapeTypes.Count > 1)
            {
                switch (currentType)
                {
                    case ShapeType.Point:
                        suffix = "_point";
                        break;
                    case ShapeType.MultiPoint:
                        suffix = "_points";
                        break;
                    case ShapeType.PolyLine:
                        suffix = "_lines";
                        break;
                    case ShapeType.Polygon:
                        suffix = "_regions";
                        break;
                }
            }

            string target = Path.Combine(Path.GetDirectoryName(this.saveFileDialogShp.FileName),
                Path.GetFileNameWithoutExtension(this.saveFileDialogShp.FileName) + suffix + ".shp");

            ShapeWriter writer = new ShapeWriter(target, currentType, columns.ToArray());
            writer.Open();
            foreach (Feature feature in this._currentFeatureSet)
            {
                if (feature.Geometry.ToShapeType() == currentType)
                    writer.WriteFeature(feature);
            }
            writer.Close();
        }
    }
    catch (Exception exception)
    {
        MessageBox.Show("Failed to export result:\r\n" + exception.Message);
    }
}
/// <summary>
/// Calibrates reduction factors so the simulated nitrate mass balance matches
/// observations. First pass: computes per-catchment ratios (GWRatio, IntRatio,
/// MainRatio) from the model's state variables. Second pass: inserts three
/// ConceptualSourceReducer instances (groundwater, internal, main stream) and
/// iteratively adjusts their reduction factors for every catchment with
/// observations until the error converges or MaxNoOfIterations is hit.
/// Finally writes debug output and a "CalibrationResult" shape file.
/// </summary>
/// <param name="MW">The model to calibrate; its reduction-model lists and state table are modified in place.</param>
/// <param name="CStart">Start of the calibration period (stepped monthly).</param>
/// <param name="CEnd">End of the calibration period.</param>
public void Calibrate(MainModel MW, DateTime CStart, DateTime CEnd)
{
    // Result table: one row per catchment, keyed on ID15.
    dt.Columns.Add("ID15", typeof(int));
    dt.Columns.Add("No_iterations", typeof(int));
    dt.Columns.Add("LastError", typeof(double));
    dt.Columns.Add("GWRatio", typeof(double));
    dt.Columns.Add("IntRatio", typeof(double));
    dt.Columns.Add("MainRatio", typeof(double));
    dt.Columns.Add("RedFactor", typeof(double));
    dt.PrimaryKey = new DataColumn[] { dt.Columns[0] };

    DateTime CurrentTime = CStart;
    // Name of the (single) groundwater source model; used to tell its state
    // columns apart from the other sources below.
    string gwsourcename = MW.SourceModels.Single(s => s.GetType() == typeof(GroundWaterSource)).Name;

    // Pass 1: accumulate monthly mass-balance terms per catchment and derive ratios.
    foreach (var item in MW.AllCatchments.Values)
    {
        var row = dt.NewRow();
        row[0] = item.ID;
        CurrentTime = CStart;
        double gwleach = 0;    // leached mass
        double gwsourec = 0;   // groundwater source mass
        double gwConcDeg = 0;  // groundwater mass removed by conceptual reducers
        double intsource = 0;  // mass from non-groundwater sources
        double intred = 0;     // mass removed by internal (non-conceptual) reducers
        double upstream = 0;   // net mass received from upstream
        double mainred = 0;    // mass removed by main-stream reducers
        while (CurrentTime < CEnd)
        {
            // IntMass tracks the running mass balance for this month; the terms
            // added/subtracted mirror the accumulators above.
            double IntMass = 0;
            var CurrentState = MW.StateVariables.Rows.Find(new object[] { item.ID, CurrentTime });
            gwleach += (double)CurrentState["Leaching"];
            gwsourec += (double)CurrentState[gwsourcename];
            IntMass = (double)CurrentState[gwsourcename];
            // Conceptual reducers acting on the groundwater source.
            foreach (var conc in MW.InternalReductionModels.Where(s => s.GetType() == typeof(ConceptualSourceReducer) && ((ConceptualSourceReducer)s).SourceModelName == gwsourcename))
            {
                gwConcDeg += (double)CurrentState[conc.Name];
                IntMass -= (double)CurrentState[conc.Name];
            }
            // All other sources.
            foreach (var intsou in MW.SourceModels.Where(s => s.Name != gwsourcename))
            {
                intsource += (double)CurrentState[intsou.Name];
                IntMass += (double)CurrentState[intsou.Name];
            }
            // Internal reducers that are not conceptual source reducers.
            foreach (var conc in MW.InternalReductionModels.Where(s => s.GetType() != typeof(ConceptualSourceReducer)))
            {
                intred += (double)CurrentState[conc.Name];
                IntMass -= (double)CurrentState[conc.Name];
            }
            foreach (var mainr in MW.MainStreamRecutionModels)
            {
                if (!CurrentState.IsNull(mainr.Name))
                {
                    mainred += (double)CurrentState[mainr.Name];
                    IntMass -= (double)CurrentState[mainr.Name];
                }
            }
            // Anything left in DownStreamOutput beyond local mass must have come from upstream.
            if (!CurrentState.IsNull("DownStreamOutput"))
            {
                IntMass = (double)CurrentState["DownStreamOutput"] - IntMass;
                upstream += IntMass;
            }
            CurrentTime = CurrentTime.AddMonths(1);
        }
        // Guard against division by zero when nothing leached.
        if (gwleach == 0)
        {
            row["GWRatio"] = 1;
        }
        else
        {
            row["GWRatio"] = (gwleach - gwsourec + gwConcDeg) / gwleach;
        }
        row["IntRatio"] = intred / (gwsourec - gwConcDeg + intsource);
        row["MainRatio"] = mainred / (gwsourec - gwConcDeg + intsource - intred + upstream);
        dt.Rows.Add(row);
    }

    CurrentTime = CStart;
    this.MW = MW;
    List<Catchment> SortedCatchments = new List<Catchment>();

    // Calibration reducer for the groundwater source; inserted directly after the
    // last existing conceptual reducer (or first if there is none) so it runs in order.
    ConceptualSourceReducer GWCor = new ConceptualSourceReducer();
    GWCor.Name = "Calibrator";
    GWCor.SourceModelName = "GroundWater";
    var LastConceptual = MW.InternalReductionModels.LastOrDefault(c => c.GetType() == typeof(ConceptualSourceReducer));
    if (LastConceptual == null)
    {
        MW.InternalReductionModels.Insert(0, GWCor);
    }
    else
    {
        MW.InternalReductionModels.Insert(MW.InternalReductionModels.IndexOf(LastConceptual) + 1, GWCor);
    }
    if (!MW.StateVariables.Columns.Contains(GWCor.Name))
    {
        MW.StateVariables.Columns.Add(GWCor.Name, typeof(double));
    }

    // Calibration reducer for internal sources.
    ConceptualSourceReducer IntCor = new ConceptualSourceReducer();
    IntCor.Name = "Calib_Int";
    MW.InternalReductionModels.Add(IntCor);
    if (!MW.StateVariables.Columns.Contains(IntCor.Name))
    {
        MW.StateVariables.Columns.Add(IntCor.Name, typeof(double));
    }

    // Calibration reducer for the main stream.
    ConceptualSourceReducer MainCor = new ConceptualSourceReducer();
    MainCor.Name = "Calib_Main";
    MW.MainStreamRecutionModels.Add(MainCor);
    if (!MW.StateVariables.Columns.Contains(MainCor.Name))
    {
        MW.StateVariables.Columns.Add(MainCor.Name, typeof(double));
    }

    // Collect catchments with observations, ordered upstream-first by recursion.
    foreach (var item in MW.EndCatchments)
    {
        GetCatchmentsWithObs(item, SortedCatchments);
    }
    // Start all reduction factors at zero.
    foreach (var item in MW.AllCatchments.Values)
    {
        GWCor.Reduction.Add(item.ID, 0);
        IntCor.Reduction.Add(item.ID, 0);
        MainCor.Reduction.Add(item.ID, 0);
    }

    int totaliter = 0;
    // Pass 2: iterate each observed catchment until the error converges.
    foreach (var v in SortedCatchments)
    {
        List<double> Errors = new List<double>();
        double localdamp = DampingFactor;
        double currentreducer = 0;
        double Error = double.MaxValue;
        int itercount = 0;
        var row = dt.Rows.Find(v.ID);
        NewMessage("Calibrating " + v.ID);
        // NOTE(review): non-short-circuit & — both operands are evaluated; harmless for
        // plain bool/int comparisons but && would be idiomatic.
        while (Math.Abs(Error) > AbsoluteConvergence & itercount < MaxNoOfIterations)
        {
            v.ObsNitrate = null;
            v.SimNitrate = null;
            double accgws = 0;
            double accs = 0;
            double accsink = 0;
            double accmainsink = 0;
            double obssum = 0;
            CurrentTime = CStart;
            // Run the model month by month, accumulating upstream sources/sinks
            // only for months that actually have an observation.
            while (CurrentTime < CEnd)
            {
                v.MoveInTime(CurrentTime);
                double obsn = v.Measurements.Nitrate.GetValue(CurrentTime, InterpolationMethods.DeleteValue);
                if (obsn != v.Measurements.Nitrate.DeleteValue)
                {
                    obssum += obsn;
                    accgws += AccumulateUpstream(GWCor.SourceModelName, v, CurrentTime);
                    foreach (var s in MW.InternalReductionModels)
                    {
                        accsink += AccumulateUpstream(s.Name, v, CurrentTime);
                    }
                    foreach (var s in MW.SourceModels.Where(ss => ss.Name != GWCor.SourceModelName))
                    {
                        accs += AccumulateUpstream(s.Name, v, CurrentTime);
                    }
                    foreach (var s in MW.MainStreamRecutionModels)
                    {
                        accmainsink += AccumulateUpstream(s.Name, v, CurrentTime);
                    }
                }
                CurrentTime = CurrentTime.AddMonths(1);
            }
            double[] sim;
            double[] obs;
            v.ObsNitrate.AlignRemoveDeletevalues(v.SimNitrate, out obs, out sim);
            // NOTE(review): simerror is computed but never used — confirm whether it
            // was meant to replace the mass-balance Error below.
            double simerror = obs.Sum() - sim.Sum();
            Error = (accs + accgws - accsink - accmainsink) - obssum;
            if (itercount == 0 & double.IsNaN(Error))
            {
                NewMessage("Initial error is NAN. Could not calibrate " + v.ID);
                break;
            }
            currentreducer = Error / accgws * localdamp;
            Errors.Add(Error);
            NewMessage(Error.ToString());
            // Diverging (NaN, or error worse than the worst of the last three):
            // undo the previous adjustment, halve the damping and restart from the
            // first error.
            if (double.IsNaN(Error) || (itercount > 2 && Math.Abs(Error) > Errors.Skip(itercount - 3).Take(3).Select(e => Math.Abs(e)).Max()))
            {
                SendReducUpstream(v, GWCor.Reduction, currentreducer, "GWRatio", true);
                SendReducUpstream(v, IntCor.Reduction, InternalRatio * currentreducer, "IntRatio", true);
                SendReducUpstream(v, MainCor.Reduction, MainRatio * currentreducer, "MainRatio", true);
                NewMessage("Reduce damping and resetting reducer to first value");
                localdamp *= 0.5;
                currentreducer = Errors.First() / accgws * localdamp;
                Error = 2 * AbsoluteConvergence; //To make sure we do not NAN for testing in the next iteration.
            }
            // Push the (possibly adjusted) reduction upstream for the next iteration.
            SendReducUpstream(v, GWCor.Reduction, currentreducer, "GWRatio", false);
            SendReducUpstream(v, IntCor.Reduction, InternalRatio * currentreducer, "IntRatio", false);
            SendReducUpstream(v, MainCor.Reduction, MainRatio * currentreducer, "MainRatio", false);
            itercount++;
        }
        totaliter += itercount;
        row[0] = v.ID;
        row[1] = itercount;
        row[2] = Error;
        row["RedFactor"] = GWCor.Reduction[v.ID];
        NewMessage(v.ID + " calibrated in " + itercount + " iterations. Final error: " + Error + ". ReductionFactor: " + GWCor.Reduction[v.ID]);
    }
    NewMessage("Total number of model calls: " + totaliter);

    // Debug output: per-reducer factor shapes plus the combined result table.
    var outdir = Path.GetDirectoryName(MW.AlldataFile.FileName);
    GWCor.DebugPrint(outdir, MW.AllCatchments);
    IntCor.DebugPrint(outdir, MW.AllCatchments);
    MainCor.DebugPrint(outdir, MW.AllCatchments);

    using (ShapeWriter sw = new ShapeWriter(Path.Combine(outdir, "CalibrationResult")) { Projection = MainModel.projection })
    {
        for (int i = 0; i < dt.Rows.Count; i++)
        {
            GeoRefData gd = new GeoRefData() { Geometry = MW.AllCatchments[(int)dt.Rows[i][0]].Geometry };
            gd.Data = dt.Rows[i];
            sw.Write(gd);
        }
    }
}
/// <summary>
/// Computes particle breakthrough curves and particle statistics per catchment
/// (while the particles are still in memory), then writes a tab-separated
/// "BC.csv" and a "&lt;Name&gt;_debug.shp" with the results.
/// </summary>
/// <param name="Directory">Directory the CSV and shape files are written to.</param>
public void DebugPrint(string Directory)
{
    //We need to process data for extra output while we have the particles
    {
        foreach (var c in Catchments.Where(ca => ca.EndParticles.Count >= 1))
        {
            // Percentile-based breakthrough curves need at least 20 particles.
            if (c.EndParticles.Count >= 20)
            {
                c.ParticleBreakthroughCurves = new List<Tuple<double, double>>();
                MathNet.Numerics.Statistics.Percentile p = new MathNet.Numerics.Statistics.Percentile(c.EndParticles.Select(pa => pa.TravelTime));
                // NOTE(review): i / np relies on np being a floating-point field; if np
                // were an int this would truncate to 0 for all i < np — confirm np's type.
                for (int i = 1; i < np; i++)
                {
                    c.ParticleBreakthroughCurves.Add(new Tuple<double, double>(i / np * 100.0, p.Compute(i / np)));
                }
                //Also do oxidized breakthrough curves
                if (c.EndParticles.Count(pp => pp.Registration != 1) >= 20)
                {
                    c.ParticleBreakthroughCurvesOxidized = new List<Tuple<double, double>>();
                    p = new MathNet.Numerics.Statistics.Percentile(c.EndParticles.Where(pp => pp.Registration != 1).Select(pa => pa.TravelTime));
                    for (int i = 1; i < np; i++)
                    {
                        c.ParticleBreakthroughCurvesOxidized.Add(new Tuple<double, double>(i / np * 100.0, p.Compute(i / np)));
                    }
                }
            }
            // Find or create this catchment's row in the debug table (keyed on ID).
            DataRow row = DebugData.Rows.Find(c.ID);
            if (row == null)
            {
                row = DebugData.NewRow();
                row[0] = c.ID;
                DebugData.Rows.Add(row);
            }
            row["PartCount"] = c.EndParticles.Count;
            row["RedoxCount"] = c.EndParticles.Count(pp => pp.Registration == 1);
            row["RedoxRatio"] = c.EndParticles.Count(pp => pp.Registration == 1) / (double)c.EndParticles.Count;
            // Fraction of particles per sink type.
            if (c.EndParticles.Count > 0)
            {
                row["Drain_to_River"] = c.EndParticles.Count(pa => pa.SinkType == SinkType.Drain_to_River) / (double)c.EndParticles.Count;
                row["Drain_to_Boundary"] = c.EndParticles.Count(pa => pa.SinkType == SinkType.Drain_to_Boundary) / (double)c.EndParticles.Count;
                row["Unsaturated_zone"] = c.EndParticles.Count(pa => pa.SinkType == SinkType.Unsaturated_zone) / (double)c.EndParticles.Count;
                row["River"] = c.EndParticles.Count(pa => pa.SinkType == SinkType.River) / (double)c.EndParticles.Count;
            }
            row["PartCount_start"] = c.StartParticles.Count;
        }
    }
    NewMessage("Writing breakthrough curves");
    // Deferred LINQ query — re-evaluated at each enumeration below.
    var selectedCatchments = Catchments.Where(cc => cc.ParticleBreakthroughCurves != null);
    using (System.IO.StreamWriter sw = new System.IO.StreamWriter(Path.Combine(Directory, "BC.csv")))
    {
        StringBuilder headline = new StringBuilder();
        headline.Append("ID\tNumber of Particles");
        // NOTE(review): the literal "\t + " writes a stray "+ " into every header
        // cell — looks like a typo for "\t", but it is a runtime string; confirm
        // with consumers of BC.csv before changing.
        for (int i = 1; i < np; i++)
        {
            headline.Append("\t + " + (i / np * 100.0));
        }
        sw.WriteLine(headline);
        foreach (var c in selectedCatchments.Where(cc => cc.ParticleBreakthroughCurves != null))
        {
            StringBuilder line = new StringBuilder();
            line.Append(c.ID + "\t" + c.EndParticles.Count);
            foreach (var pe in c.ParticleBreakthroughCurves)
            {
                line.Append("\t" + pe.Item2);
            }
            sw.WriteLine(line);
        }
    }
    if (selectedCatchments.Count() > 0)
    {
        using (ShapeWriter sw = new ShapeWriter(Path.Combine(Directory, Name + "_debug.shp")) { Projection = MainModel.projection })
        {
            // Add one column per percentile (plus an "Ox" variant), named after the percentage.
            foreach (var bc in selectedCatchments.First().ParticleBreakthroughCurves)
            {
                DebugData.Columns.Add(((int)bc.Item1).ToString(), typeof(double));
            }
            foreach (var bc in selectedCatchments.First().ParticleBreakthroughCurves)
            {
                DebugData.Columns.Add(((int)bc.Item1).ToString() + "Ox", typeof(double));
            }
            foreach (var c in selectedCatchments)
            {
                GeoRefData gd = new GeoRefData() { Geometry = c.Geometry };
                var row = DebugData.Rows.Find(c.ID);
                if (c.ParticleBreakthroughCurves != null)
                {
                    foreach (var bc in c.ParticleBreakthroughCurves)
                    {
                        row[((int)bc.Item1).ToString()] = bc.Item2;
                    }
                }
                if (c.ParticleBreakthroughCurvesOxidized != null)
                {
                    foreach (var bc in c.ParticleBreakthroughCurvesOxidized)
                    {
                        row[((int)bc.Item1).ToString() + "Ox"] = bc.Item2;
                    }
                }
                gd.Data = row;
                sw.Write(gd);
            }
        }
    }
    //selectedCatchments = Catchments.Where(cc => cc.EndParticles.Count > 0).ToList();
    //foreach (var c in selectedCatchments)
    //{
    //  DataTable dt = new DataTable();
    //  dt.Columns.Add("Part_Id", typeof(int));
    //  dt.Columns.Add("Sink", typeof(string));
    //  dt.Columns.Add("Reg", typeof(int));
    //  using (ShapeWriter sw = new ShapeWriter(Path.Combine(Directory, c.ID + "_particles.shp")) { Projection = MainModel.projection })
    //  {
    //    foreach (var p in c.EndParticles)
    //    {
    //      var row = dt.NewRow();
    //      row["Part_Id"] = p.ID;
    //      row["Sink"] = p.SinkType.ToString();
    //      row["Reg"] = p.Registration;
    //      sw.Write(new GeoRefData() { Geometry = new XYLine(p.XStart, p.YStart, p.X, p.Y), Data = row });
    //    }
    //  }
    //}
}