Example 1
    /// <summary>
    /// Calculates the percentiles [0;1] of an item. MaxEntriesInMemory is used to reduce memory consumption by sweeping over the data multiple times; it is given in MB of memory.
    /// </summary>
    public void Percentile(int Item, int[] TSteps, DFSBase df, double[] Percentiles, int MaxEntriesInMemory)
    {
      //One output array per requested percentile
      float[][] OutData = new float[Percentiles.Count()][];
      for (int i = 0; i < Percentiles.Count(); i++)
        OutData[i] = new float[dfsdata.Count()];

      List<int> steps = new List<int>();
      steps.Add(0);

      //Get the delete values
      float delete = DfsDLLWrapper.dfsGetDeleteValFloat(_headerPointer);

      //Read the first time step and create a list with the indices of non-delete values
      ReadItemTimeStep(0, Item);
      List<int> NonDeleteEntries = new List<int>();
      for (int i = 0; i < dfsdata.Length; i++)
        if (dfsdata[i] != delete)
          NonDeleteEntries.Add(i);

      //Find out how many sweeps are necessary to not exceed max memory
      double TotalData = (double)NonDeleteEntries.Count * (double)TSteps.Count();
      if (TotalData > (MaxEntriesInMemory*40000))
      {
        int nsteps = (int) Math.Max( TotalData / (MaxEntriesInMemory*40000),1);
        int StepLength = NonDeleteEntries.Count() / nsteps;

        for (int i = 0; i < nsteps; i++)
          steps.Add(steps.Last() + StepLength);
      }

      steps.Add(NonDeleteEntries.Count);

      //Now start the loop
      for (int m = 0; m < steps.Count-1; m++)
      {
        int dfscount = steps[m + 1] - steps[m];

        //First index runs over the dfsdata cells in this chunk, the second over the time steps
        float[][] Data = new float[dfscount][];

        for (int i = 0; i < Data.Count(); i++)
          Data[i] = new float[TSteps.Count()];

 
        //Collect all data
        for (int i = 0; i < TSteps.Count(); i++)
        {
          var data = ReadItemTimeStep(TSteps[i], Item);
          int local = 0;
          for (int k = steps[m]; k < steps[m + 1]; k++)
          {
            Data[local][i] = (dfsdata[NonDeleteEntries[k]]);
            local++;
          }
        }

        int local2 = 0;
        
        for (int k = steps[m]; k < steps[m + 1]; k++)
        {
          //Convert to doubles from float
          double[] ddata = new double[TSteps.Count()];
          for (int n = 0; n < TSteps.Count(); n++)
            ddata[n]=Data[local2][n];

          //Calculate the percentile
          MathNet.Numerics.Statistics.Percentile pCalc = new MathNet.Numerics.Statistics.Percentile(ddata);
          pCalc.Method = MathNet.Numerics.Statistics.PercentileMethod.Excel;
          var p = pCalc.Compute(Percentiles);
          
          for (int l = 0; l < Percentiles.Count(); l++)
            OutData[l][NonDeleteEntries[k]] = (float)p[l];
          local2++;
        }
      }

      //Insert delete values in the output data
      for (int i = 0; i < dfsdata.Length; i++)
      {
        if (!NonDeleteEntries.Contains(i))
        {
          for (int l = 0; l < Percentiles.Count(); l++)
            OutData[l][i] = delete;
        }
      }
      
      //Set item info
      for (int i = 0; i < Percentiles.Count(); i++)
      {
        df.Items[i].EumItem = Items[Item - 1].EumItem;
        df.Items[i].EumUnit = Items[Item - 1].EumUnit;
        df.Items[i].Name = Percentiles[i].ToString() + " percentile";
      }
      for (int i = 0; i < Percentiles.Count(); i++)
      {
        df.WriteItemTimeStep(0, i + 1, OutData[i]);
      }
    }
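
A note on the sweeping logic above: the method splits the non-delete cells into consecutive chunks so that no single sweep holds more than MaxEntriesInMemory * 40000 values at once. The stand-alone sketch below reproduces only that chunking step so it can be inspected in isolation; the class name, the BuildSteps helper and the sizes used in Main are made up for illustration, while the 40000 entries-per-MB factor is the one used in the code above.

using System;
using System.Collections.Generic;
using System.Linq;

class SweepPartitionDemo
{
    // Mirrors the chunking in Percentile: split cellCount non-delete cells into
    // consecutive ranges [steps[m]; steps[m+1][ so that a single sweep never holds
    // more than maxEntriesInMemory * 40000 float values.
    static List<int> BuildSteps(int cellCount, int timeStepCount, int maxEntriesInMemory)
    {
      List<int> steps = new List<int>();
      steps.Add(0);

      double totalData = (double)cellCount * (double)timeStepCount;
      if (totalData > (maxEntriesInMemory * 40000.0))
      {
        int nsteps = (int)Math.Max(totalData / (maxEntriesInMemory * 40000.0), 1);
        int stepLength = cellCount / nsteps;
        for (int i = 0; i < nsteps; i++)
          steps.Add(steps.Last() + stepLength);
      }
      steps.Add(cellCount);
      return steps;
    }

    static void Main()
    {
      // Hypothetical sizes: 1000 wet cells, 400 time steps and a 2 MB budget give five data-carrying sweeps.
      // Prints: 0, 200, 400, 600, 800, 1000, 1000 (the trailing duplicate marks an empty final chunk).
      Console.WriteLine(string.Join(", ", BuildSteps(1000, 400, 2)));
    }
}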
Example 2
        public void DebugPrint(string Directory)
        {
            //We need to process data for extra output while we have the particles
            {
                foreach (var c in Catchments.Where(ca => ca.EndParticles.Count >= 1))
                {
                    if (c.EndParticles.Count >= 20)
                    {
                        c.ParticleBreakthroughCurves = new List <Tuple <double, double> >();
                        MathNet.Numerics.Statistics.Percentile p = new MathNet.Numerics.Statistics.Percentile(c.EndParticles.Select(pa => pa.TravelTime));
                        for (int i = 1; i < np; i++)
                        {
                            c.ParticleBreakthroughCurves.Add(new Tuple <double, double>(i / np * 100.0, p.Compute(i / np)));
                        }
                        //Also do oxidized breakthrough curves
                        if (c.EndParticles.Count(pp => pp.Registration != 1) >= 20)
                        {
                            c.ParticleBreakthroughCurvesOxidized = new List <Tuple <double, double> >();
                            p = new MathNet.Numerics.Statistics.Percentile(c.EndParticles.Where(pp => pp.Registration != 1).Select(pa => pa.TravelTime));
                            for (int i = 1; i < np; i++)
                            {
                                c.ParticleBreakthroughCurvesOxidized.Add(new Tuple <double, double>(i / np * 100.0, p.Compute(i / np)));
                            }
                        }
                    }
                    DataRow row = DebugData.Rows.Find(c.ID);
                    if (row == null)
                    {
                        row    = DebugData.NewRow();
                        row[0] = c.ID;
                        DebugData.Rows.Add(row);
                    }

                    row["PartCount"]  = c.EndParticles.Count;
                    row["RedoxCount"] = c.EndParticles.Count(pp => pp.Registration == 1);
                    row["RedoxRatio"] = c.EndParticles.Count(pp => pp.Registration == 1) / (double)c.EndParticles.Count;
                    if (c.EndParticles.Count > 0)
                    {
                        row["Drain_to_River"]    = c.EndParticles.Count(pa => pa.SinkType == SinkType.Drain_to_River) / (double)c.EndParticles.Count;
                        row["Drain_to_Boundary"] = c.EndParticles.Count(pa => pa.SinkType == SinkType.Drain_to_Boundary) / (double)c.EndParticles.Count;
                        row["Unsaturated_zone"]  = c.EndParticles.Count(pa => pa.SinkType == SinkType.Unsaturated_zone) / (double)c.EndParticles.Count;
                        row["River"]             = c.EndParticles.Count(pa => pa.SinkType == SinkType.River) / (double)c.EndParticles.Count;
                    }
                    row["PartCount_start"] = c.StartParticles.Count;
                }
            }



            NewMessage("Writing breakthrough curves");

            var selectedCatchments = Catchments.Where(cc => cc.ParticleBreakthroughCurves != null);

            using (System.IO.StreamWriter sw = new System.IO.StreamWriter(Path.Combine(Directory, "BC.csv")))
            {
                StringBuilder headline = new StringBuilder();
                headline.Append("ID\tNumber of Particles");

                for (int i = 1; i < np; i++)
                {
                    headline.Append("\t + " + (i / np * 100.0));
                }
                sw.WriteLine(headline);

                foreach (var c in selectedCatchments.Where(cc => cc.ParticleBreakthroughCurves != null))
                {
                    StringBuilder line = new StringBuilder();
                    line.Append(c.ID + "\t" + c.EndParticles.Count);
                    foreach (var pe in c.ParticleBreakthroughCurves)
                    {
                        line.Append("\t" + pe.Item2);
                    }
                    sw.WriteLine(line);
                }
            }

            if (selectedCatchments.Count() > 0)
            {
                using (ShapeWriter sw = new ShapeWriter(Path.Combine(Directory, Name + "_debug.shp")) { Projection = MainModel.projection })
                {
                    foreach (var bc in selectedCatchments.First().ParticleBreakthroughCurves)
                    {
                        DebugData.Columns.Add(((int)bc.Item1).ToString(), typeof(double));
                    }
                    foreach (var bc in selectedCatchments.First().ParticleBreakthroughCurves)
                    {
                        DebugData.Columns.Add(((int)bc.Item1).ToString() + "Ox", typeof(double));
                    }

                    foreach (var c in selectedCatchments)
                    {
                        GeoRefData gd = new GeoRefData()
                        {
                            Geometry = c.Geometry
                        };
                        var row = DebugData.Rows.Find(c.ID);

                        if (c.ParticleBreakthroughCurves != null)
                        {
                            foreach (var bc in c.ParticleBreakthroughCurves)
                            {
                                row[((int)bc.Item1).ToString()] = bc.Item2;
                            }
                        }

                        if (c.ParticleBreakthroughCurvesOxidized != null)
                        {
                            foreach (var bc in c.ParticleBreakthroughCurvesOxidized)
                            {
                                row[((int)bc.Item1).ToString() + "Ox"] = bc.Item2;
                            }
                        }

                        gd.Data = row;
                        sw.Write(gd);
                    }
                }
            }

            //selectedCatchments = Catchments.Where(cc => cc.EndParticles.Count > 0).ToList();

            //foreach (var c in selectedCatchments)
            //{
            //  DataTable dt = new DataTable();
            //  dt.Columns.Add("Part_Id", typeof(int));
            //  dt.Columns.Add("Sink", typeof(string));
            //  dt.Columns.Add("Reg", typeof(int));

            //  using (ShapeWriter sw = new ShapeWriter(Path.Combine(Directory, c.ID + "_particles.shp")) { Projection = MainModel.projection })
            //  {
            //    foreach (var p in c.EndParticles)
            //    {
            //      var row = dt.NewRow();
            //      row["Part_Id"] = p.ID;
            //      row["Sink"] = p.SinkType.ToString();
            //      row["Reg"] = p.Registration;
            //      sw.Write(new GeoRefData() { Geometry = new XYLine(p.XStart, p.YStart, p.X, p.Y), Data = row });

            //    }
            //  }
            //}
        }
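
For reference, the breakthrough curves built in DebugPrint are simply travel-time percentiles evaluated at the fractions i/np. The self-contained sketch below shows that core step on made-up travel times; the class name and the numbers are illustrative only, np is taken to be a double here so that i / np yields a fraction in ]0;1[, and the MathNet.Numerics.Statistics.Percentile calls are the same ones used above.

using System;
using System.Collections.Generic;
using MathNet.Numerics.Statistics;

class BreakthroughCurveDemo
{
    static void Main()
    {
      // Made-up particle travel times (e.g. in years) for a single catchment.
      var travelTimes = new List<double> { 3.2, 5.1, 7.8, 12.4, 20.0, 33.5, 47.1, 61.0, 80.2, 95.6 };
      double np = 10;  // number of curve points; a double so i / np is a real-valued fraction

      var p = new Percentile(travelTimes);
      var curve = new List<Tuple<double, double>>();
      for (int i = 1; i < np; i++)
      {
        // (percentage of particles, travel time below which that percentage has arrived)
        curve.Add(new Tuple<double, double>(i / np * 100.0, p.Compute(i / np)));
      }

      foreach (var point in curve)
        Console.WriteLine(point.Item1 + "%\t" + point.Item2);
    }
}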