Example #1
        /// <summary>
        /// Calculates future values from already known regression parameters,
        /// or trims the list to the requested year range if no prediction is needed.
        /// </summary>
        /// <param name="yearWithValues">List of known values that gets extended (or trimmed)</param>
        /// <param name="from">Start year</param>
        /// <param name="futureYear">Target year in the future</param>
        /// <param name="parStor">Trained regression parameters (w and b)</param>
        /// <returns>The list of values up to <paramref name="futureYear"/></returns>
        public List <YearWithValue> Predict(List <YearWithValue> yearWithValues, int from, int futureYear, ParameterStorage parStor)
        {
            double j          = yearWithValues.Max(k => k.Year);                                      // last year with known data
            float  valueToDiv = CategoriesWithYearsAndValues.GetValuesFromList(yearWithValues).Max(); // divisor used to denormalize the predictions

            float[]       inputs = CategoriesWithYearsAndValues.GetYearsFromList(yearWithValues);
            List <double> listForTheNormalizedInputs = new List <double>();

            foreach (var item in inputs)
            {
                listForTheNormalizedInputs.Add(item); // copy the years into doubles for standardization
            }
            Input input = StandardizationYears(listForTheNormalizedInputs, futureYear);

            inputs = input.getAlleJahreNormiert();
            if (j < futureYear)
            {
                float inputsMax = inputs.Max();
                while (j < futureYear)
                {
                    j++;
                    // linear model w * x + b on the normalized year, scaled back up with valueToDiv
                    yearWithValues.Add(new YearWithValue(j, new Wert((parStor.W * inputsMax + parStor.b) * valueToDiv)));
                    float[]       inputtemp       = CategoriesWithYearsAndValues.GetYearsFromList(yearWithValues);
                    List <double> normalizedYears = new List <double>();
                    foreach (var item in inputtemp)
                    {
                        normalizedYears.Add(item); // copy the years into doubles for re-standardization
                    }
                    Input input2 = StandardizationYears(normalizedYears, futureYear);
                    inputtemp = input2.getAlleJahreNormiert();
                    inputsMax = inputtemp.Max();
                }
            }
            else // no prediction needed: cut the list down to the range between "from" and "futureYear"
            {
                if (futureYear > from)
                {
                    int indexMax = yearWithValues.FindIndex(a => a.Year == futureYear);    // find the index of the year up to which data is needed
                    yearWithValues.RemoveRange(indexMax, yearWithValues.Count - indexMax); // cut the list from that year to the end

                    int indexMin = yearWithValues.FindIndex(b => b.Year == from);
                    yearWithValues.RemoveRange(0, indexMin);                               // drop everything before the start year
                }
                else
                {
                    var temp = yearWithValues.Where(x => x.Year == from);
                    yearWithValues = temp.ToList();
                }
            }
            return yearWithValues;
        }
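
A minimal usage sketch for Predict (hypothetical numbers; it assumes the call happens inside the hosting class and that YearWithValue, Wert and ParameterStorage are constructed exactly as in the code above):

        // Hypothetical usage sketch: extend a known series up to 2025 with previously trained parameters.
        // The w and b values below are made up; in the project they come from TrainLinearOneOutputAsync.
        var known = new List <YearWithValue>
        {
            new YearWithValue(2018, new Wert(120f)),
            new YearWithValue(2019, new Wert(132f)),
            new YearWithValue(2020, new Wert(145f)),
        };
        var trained  = new ParameterStorage(0.9f, 0.05f);    // assumed constructor order (w, b), as used in example #2
        var extended = Predict(known, 2018, 2025, trained);  // appends predicted values for 2021..2025
        Console.WriteLine(extended.Max(k => k.Year));        // 2025
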
Example #2
        public async Task <List <YearWithValue> > TrainLinearOneOutputAsync(List <YearWithValue> KnownValues, int FutureYear)
        {
            var device = DeviceDescriptor.UseDefaultDevice();
            //// Step 1: define input and output variables
            Variable x = Variable.InputVariable(new NDShape(1, 1), DataType.Float, "input");
            Variable y = Variable.InputVariable(new NDShape(1, 1), DataType.Float, "output");

            //// Step 2: define the training data set
            float[]       inputs = CategoriesWithYearsAndValues.GetYearsFromList(KnownValues);
            List <double> temp   = new List <double>();

            foreach (var item in inputs)
            {
                temp.Add(item); // copy the years into doubles for standardization
            }
            Input input = StandardizationYears(temp, FutureYear);

            inputs = input.getAlleJahreNormiert();
            float[] outputs = CategoriesWithYearsAndValues.GetValuesFromList(KnownValues);
            // Value.CreateBatch(shape(axes, dimension), values, CPU/GPU device)
            float[] outputsnormiert   = new float[outputs.Length];
            float   WertZumDividieren = outputs.Max();

            for (int i = 0; i < outputs.Length; i++)
            {
                outputsnormiert[i] = outputs[i] / WertZumDividieren;
            }
            // keep the values normalized, otherwise training overflows
            var xValues = Value.CreateBatch(new NDShape(1, 1), GetLastNValues(inputs, inputs.Length, input.step), device);
            var yValues = Value.CreateBatch(new NDShape(1, 1), GetLastNValues(outputsnormiert, outputs.Length, input.step), device);
            ////Step 3: create linear regression model
            var lr = createLRModel(x, device);
            ////Network model contains only two parameters b and w, so we query
            ////the model in order to get parameter values
            var paramValues     = lr.Inputs.Where(z => z.IsParameter).ToList();
            var totalParameters = paramValues.Sum(c => c.Shape.TotalSize);
            ////Step 4: create trainer
            var trainer = createTrainer(lr, y);
            //// Step 5: training
            double b = 0, w = 0;
            int    max = 2000; // number of training iterations

            for (int i = 1; i <= max; i++)
            {
                var d = new Dictionary <Variable, Value>();
                d.Add(x, xValues);
                d.Add(y, yValues);
                //
                trainer.TrainMinibatch(d, true, device);
                //
                var loss = trainer.PreviousMinibatchLossAverage();
                var eval = trainer.PreviousMinibatchEvaluationAverage();
                //
                if (i % 200 == 0)
                {
                    Console.WriteLine($"It={i}, Loss={loss}, Eval={eval}");
                }

                if (i == max)
                {
                    //print weights
                    var b0_name = paramValues[0].Name;
                    var b0      = new Value(paramValues[0].GetValue()).GetDenseData <float>(paramValues[0]);
                    var b1_name = paramValues[1].Name;
                    var b1      = new Value(paramValues[1].GetValue()).GetDenseData <float>(paramValues[1]);
                    Console.WriteLine($" ");
                    Console.WriteLine($"Training process finished with the following regression parameters:");
                    Console.WriteLine($"b={b0[0][0]}, w={b1[0][0]}");
                    b = b0[0][0];
                    w = b1[0][0];
                    ParameterStorage ps = new ParameterStorage((float)w, (float)b);
                    int coaid           = await dB.GetCountryByNameAsync(KnownValues.Where(k => k.Name != null).First().Name); // resolve the country id for the series

                    await dB.SaveParameterAsync(ps, coaid, KnownValues.Where(k => k.cat_id != 0).First().cat_id, loss);

                    Console.WriteLine(KnownValues.Min(k => k.Year));
                    KnownValues = Predict(KnownValues, Convert.ToInt32(KnownValues.Min(k => k.Year)), FutureYear, ps);
                }
            }


            return KnownValues;
        }
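
And a minimal call sketch for TrainLinearOneOutputAsync (hypothetical data; it assumes an async context inside the hosting class, and that at least one entry carries a non-null Name and a non-zero cat_id, since those are used for the database save at the end of training):

        // Hypothetical usage sketch: train on a short series and extend it to 2030.
        // dB and the CNTK device are whatever the hosting class already set up;
        // in a real call the entries would also carry Name and cat_id, which SaveParameterAsync needs.
        var series = new List <YearWithValue>
        {
            new YearWithValue(2017, new Wert(50f)),
            new YearWithValue(2018, new Wert(55f)),
            new YearWithValue(2019, new Wert(61f)),
        };
        var withPrediction = await TrainLinearOneOutputAsync(series, 2030);
        foreach (var entry in withPrediction)
        {
            Console.WriteLine(entry.Year); // known years 2017..2019 followed by predicted 2020..2030
        }
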