Esempio n. 1
0
        /// <summary>
        /// Trains the given network with the Adam optimizer and returns the learning curve.
        /// </summary>
        /// <param name="n">Network to train.</param>
        /// <param name="ds">Training set.</param>
        /// <param name="dt">Validation set.</param>
        /// <returns>Learning-curve points produced by the trainer.</returns>
        private static List <double[]> TrainWithAdam(Network n, DataSet ds, DataSet dt)
        {
            Adam trainer = new Adam();

            // All Adam hyper-parameters for this run, bundled in one place.
            AdamParams config = new AdamParams
            {
                network            = n,
                trainingSet        = ds,
                learningRate       = 0.09,
                numberOfEpochs     = 200,
                shuffle            = true,
                debug              = n.Debug,
                regularization     = Regularizations.None,
                regularizationRate = 0.0001,
                validationSet      = dt,
                batchSize          = 7
            };

            return trainer.Train(config);
        }
        /// <summary>
        /// Exports the (record id -> UPC) map to a new Excel workbook: column A gets the
        /// UPC code, column B a hyperlink to the record's "Content Url" field, then saves
        /// the workbook to _fileName and quits Excel.
        /// </summary>
        /// <param name="source">Map from ADAM record id to its UPC code string.</param>
        /// <param name="app">ADAM application used to load each record.</param>
        /// <returns>Always true; interop failures surface as exceptions.</returns>
        public bool Write(Dictionary<Guid,string> source, Adam.Core.Application app)
        {
            Record r = new Record(app);
            Excel.Application EApp;
            Excel.Workbook EWorkbook;
            Excel.Worksheet EWorksheet;
            Excel.Range Rng;
            EApp = new Excel.Application();
            // COM interop convention for "optional parameter not supplied".
            object misValue = System.Reflection.Missing.Value;
            EWorkbook = EApp.Workbooks.Add(misValue);
            EWorksheet = (Excel.Worksheet)EWorkbook.Worksheets.Item[1];
            // Header row.
            EWorksheet.get_Range("A1", misValue).Formula = "UPC code";
            EWorksheet.get_Range("B1", misValue).Formula = "Link";
            // Format the whole UPC column block (one row per entry) as 14-digit zero-padded numbers.
            Rng = EWorksheet.get_Range("A2", misValue).get_Resize(source.Count,misValue);
            Rng.NumberFormat = "00000000000000";
            int row = 2;
            foreach(KeyValuePair<Guid,string> pair in source)
            {
                // Column A: the UPC code text.
                EWorksheet.Cells[row,1] = pair.Value;
                // Load the record by id so its "Content Url" field can be read.
                r.Load(pair.Key);
                Rng = EWorksheet.get_Range("B"+row, misValue);
                // Column B: hyperlink to the record's content URL.
                EWorksheet.Hyperlinks.Add(Rng, r.Fields.GetField<TextField>("Content Url").Value);
                //myExcelWorksheet.Cells[row, 2] = r.Fields.GetField<TextField>("Content Url").Value;
                row++;
            }
            // Size both columns to fit their content.
            ((Excel.Range)EWorksheet.Cells[2, 1]).EntireColumn.AutoFit();
            ((Excel.Range)EWorksheet.Cells[2, 2]).EntireColumn.AutoFit();
            EWorkbook.SaveAs(_fileName, Excel.XlFileFormat.xlWorkbookNormal, misValue, misValue, misValue, misValue,
                Excel.XlSaveAsAccessMode.xlExclusive,
                misValue, misValue, misValue, misValue, misValue);

            EWorkbook.Close(true, misValue, misValue);
            EApp.Quit();
            // NOTE(review): none of the COM objects are released with Marshal.ReleaseComObject,
            // so an Excel process may keep running after Quit() — verify on the target machine.
            return true;
        }
Esempio n. 3
0
        /// <summary>
        /// Finds the perceptron associated with <paramref name="ticker"/> and, when it has
        /// exactly 365 neurons, runs the Adam weight adjustor on it.
        /// </summary>
        /// <param name="ticker">Stock ticker identifying the perceptron.</param>
        /// <returns>The adjustor's result, or false when no complete perceptron exists.</returns>
        public bool TrainNetwork(string ticker)
        {
            Adam adam = new Adam();

            // Last matching perceptron wins, mirroring the scan order of the context.
            int matchedId = -1;
            foreach (Perceptron candidate in context.Perceptrons)
            {
                if (candidate.Stock == ticker)
                {
                    matchedId = candidate.PerceptronId;
                }
            }

            int neuronCount = 0;
            foreach (Neuron neuron in context.Neurons)
            {
                if (neuron.PerceptronId == matchedId)
                {
                    neuronCount++;
                }
            }

            // Only train when a perceptron was found and it carries a full year of neurons.
            if (matchedId == -1 || neuronCount != 365)
            {
                return false;
            }
            return adam.WeightAdjustor(context.Perceptrons.Find(matchedId));
        }
Esempio n. 4
0
        /// <summary>
        /// 反向传播
        /// </summary>
        /// <param name="delta_yi"></param>
        /// <returns></returns>
        /// <summary>
        /// Batch-normalization backward pass: given the upstream gradient delta_yi and the
        /// forward-pass inputs xi, computes dL/dx and updates the scale (Gamma) and shift
        /// (Bata) parameters in place through their Adam optimizers.
        /// </summary>
        /// <param name="delta_yi">Gradient arriving from the next layer (dL/dy).</param>
        /// <param name="xi">Inputs that were normalized in the forward pass.</param>
        /// <returns>Gradient with respect to the inputs (dL/dx).</returns>
        public float[] GetDout(float[] delta_yi, float[] xi)
        {
            float[] dx_hat  = new float[delta_yi.Length];
            float   dsigmab = 0;   // gradient of the batch variance
            float   dub     = 0;   // gradient of the batch mean
            float   dgamma  = 0;   // gradient of the scale parameter
            float   dbata   = 0;   // gradient of the shift parameter

            float[] dx = new float[delta_yi.Length];
            // dL/dx_hat = dL/dy * gamma
            for (int i = 0; i < dx_hat.Length; i++)
            {
                dx_hat[i] = delta_yi[i] * Gamma;
            }
            float dx_hat_sum = 0;

            for (int i = 0; i < delta_yi.Length; i++)
            {
                dx_hat_sum += dx_hat[i] * (xi[i] - Ub);
            }

            // Variance gradient; Adam.E is the epsilon that stabilized the forward sqrt.
            dsigmab = (float)(dx_hat_sum * (-0.5) * Math.Pow((Sigmab + Adam.E), -1.5));

            float dub_temp_1 = 0;

            for (int i = 0; i < dx_hat.Length; i++)
            {
                dub_temp_1 += (float)(dx_hat[i] * ((-1) / Math.Sqrt(Sigmab + Adam.E)));
            }
            float dub_temp_2 = 0;

            for (int i = 0; i < delta_yi.Length; i++)
            {
                dub_temp_2 += (-2) * (xi[i] - Ub);
            }
            dub = dub_temp_1 + dsigmab * dub_temp_2 / delta_yi.Length;

            // Combine the three contribution paths (x_hat, variance, mean) into dL/dx.
            for (int i = 0; i < dx.Length; i++)
            {
                dx[i] = (float)(dx_hat[i] / Math.Sqrt(Sigmab + Adam.E) + dsigmab * 2 * (xi[i] - Ub) / dx.Length + dub / dx.Length);
            }
            for (int i = 0; i < dx_hat.Length; i++)
            {
                dgamma += delta_yi[i] * x_hat[i];
                dbata  += delta_yi[i];
            }
            // Parameter update (lazily create the per-parameter Adam states).

            if (AdamGamma == null)
            {
                AdamGamma = new Adam();
            }
            if (AdamBata == null)
            {
                AdamBata = new Adam();
            }
            Gamma -= AdamGamma.GetAdam(dgamma);
            // BUG FIX: the shift parameter must be stepped with its gradient dbata
            // (mirroring the Gamma/dgamma update above), not with the field 'bata'.
            Bata  -= AdamBata.GetAdam(dbata);
            //BNAdam(dgamma, dbata);
            return(dx);
        }
Esempio n. 5
0
        /// <summary>
        /// Builds the GAN discriminator: a stack of 3x3 convolutions with LeakyReLU
        /// activations, flattened into a single sigmoid real/fake output, compiled
        /// with Adam and binary cross-entropy.
        /// </summary>
        /// <returns>The compiled discriminator model.</returns>
        static Sequential DefineDiscriminator()
        {
            var model = new Sequential();

            // First conv keeps the spatial size and fixes the RGB input shape.
            model.Add(new Conv2D(64, new Tuple <int, int>(3, 3), padding: "same", input_shape: (imageWidth, imageHeight, 3)));
            model.Add(new LeakyReLU(0.2f));

            // Each subsequent conv halves the spatial resolution (stride 2).
            foreach (int filters in new[] { 128, 128, 256, 256, 256 })
            {
                model.Add(new Conv2D(filters, new Tuple <int, int>(3, 3), strides: new Tuple <int, int>(2, 2), padding: "same"));
                model.Add(new LeakyReLU(0.2f));
            }

            model.Add(new Flatten());
            model.Add(new Dropout(0.4f));
            model.Add(new Dense(1, activation: "sigmoid"));

            model.Compile(new Adam(0.0002f, 0.5f), loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
            return model;
        }
Esempio n. 6
0
        /// <summary>
        /// Обучить нейронную сеть на накопленных исследованиях
        /// </summary>
        /// <param name="countLifes">На скольких жизней, начиная от последней, обучить модель</param>
        /// <param name="epochs">Количество эпох обучения. По умолчанию 1</param>
        /// <param name="learningRate">Норма обучения. По умолчанию 1e-3</param>
        /// <param name="trainType">Тип обучения. По умолчанию online</param>
        /// <param name="minLoss">ошибка, при которой обучение останавливается</param>
        /// <param name="optimizer">Оптимизатор. По умолчанию Adam</param>
        /// <param name="loss">Метрика ошибки. По умолчанию MSE</param>
        /// <summary>
        /// Trains the neural network on the accumulated experience.
        /// </summary>
        /// <param name="countLifes">How many lives, counting back from the last one, to train on.</param>
        /// <param name="epochs">Number of training epochs. Defaults to 1.</param>
        /// <param name="learningRate">Learning rate. Defaults to 1e-3.</param>
        /// <param name="trainType">Training type. Defaults to online.</param>
        /// <param name="minLoss">Loss at which training stops early.</param>
        /// <param name="optimizer">Optimizer. Defaults to Adam.</param>
        /// <param name="loss">Loss metric. Defaults to LossMeanSqrSqrt.</param>
        public void Train(int countLifes = 50, int epochs = 1, double learningRate = 1e-3, TrainType trainType = TrainType.Online, double minLoss = 0.0, IOptimizer optimizer = null, ILoss loss = null)
        {
            if (loss == null)
            {
                loss = new LossMeanSqrSqrt();
            }
            if (optimizer == null)
            {
                optimizer = new Adam();
            }

            int    start   = lifes.Count - countLifes;
            Vector rewards = GetRewards(start, lifes.Count);
            var    inputs  = new List <NNValue>();
            var    outputs = new List <NNValue>();

            // Build training pairs: for each remembered (state, action) of the selected
            // lives, reinforce the taken action on positive reward, and the complementary
            // action distribution otherwise.
            for (int i = 0; i < rewards.N; i++)
            {
                var conditions = lifes[start + i].GetConditions();
                foreach (var condition in conditions)
                {
                    var state  = condition.Item1;
                    var action = condition.Item2;

                    inputs.Add(state.ToNNValue());

                    if (rewards[i] > 0)
                    {
                        outputs.Add(new NNValue(action.probabilities.MaxOutVector().TransformVector(x => (x == -1) ? 0 : 1)));
                    }
                    else
                    {
                        outputs.Add(new NNValue((1.0 - action.probabilities).MaxOutVector().TransformVector(x => (x == -1) ? 0 : 1)));
                    }
                }
            }

            #region Shuffle
            // BUG FIX: inputs/outputs are freshly built lists indexed from 0. The old code
            // used 'start' (an index into 'lifes') as the lower bound, which left the first
            // elements unshuffled and skipped shuffling entirely when start >= inputs.Count.
            for (int i = 0; i < inputs.Count; i++)
            {
                var a     = random.Next(0, inputs.Count);
                var b     = random.Next(0, inputs.Count);
                var temp1 = inputs[a];
                var temp2 = outputs[a];

                inputs[a]  = inputs[b];
                outputs[a] = outputs[b];

                inputs[b]  = temp1;
                outputs[b] = temp2;
            }
            #endregion

            #region Train
            DataSetNoReccurent dataSetNoReccurent = new DataSetNoReccurent(inputs.ToArray(), outputs.ToArray(), loss);
            Trainer            trainer            = new Trainer(graphBackward, trainType, optimizer);
            trainer.Train(epochs, learningRate, model, dataSetNoReccurent, minLoss);
            #endregion
        }
Esempio n. 7
0
            /// <summary>
            /// Adam behaves as a singleton: every GetInstance() call yields the same object.
            /// </summary>
            public void Adam_is_unique()
            {
                var first  = Adam.GetInstance();
                var second = Adam.GetInstance();

                Assert.IsTrue(first is Adam);
                Assert.AreEqual(first, second);
            }
Esempio n. 8
0
 /// <summary>
 /// Returns the single Eve instance; Eve can only be obtained through Adam.
 /// </summary>
 /// <param name="adam">The Adam instance Eve is created from; must not be null.</param>
 /// <returns>The unique Eve instance.</returns>
 /// <exception cref="ArgumentNullException">Thrown when <paramref name="adam"/> is null.</exception>
 public static Eve GetInstance(Adam adam)
 {
     if (adam is null)
     {
         // Name the offending parameter so callers get a useful diagnostic.
         throw new ArgumentNullException(nameof(adam));
     }
     return _eve;
 }
Esempio n. 9
0
        /// <summary>
        /// Trains a tiny 2-2-1 network on the XOR truth table with Adam and prints the
        /// predictions for all four input combinations.
        /// </summary>
        public static void Run()
        {
            // Number of training iterations.
            const int learningCount = 10000;

            // XOR inputs.
            Real[][] trainData =
            {
                new Real[] { 0, 0 },
                new Real[] { 1, 0 },
                new Real[] { 0, 1 },
                new Real[] { 1, 1 }
            };

            // XOR labels.
            Real[][] trainLabel =
            {
                new Real[] { 0 },
                new Real[] { 1 },
                new Real[] { 1 },
                new Real[] { 0 }
            };

            // The network layout is listed into a FunctionStack.
            FunctionStack <Real> nn = new FunctionStack <Real>(
                new Linear <Real>(2, 2, name: "l1 Linear"),
                new Sigmoid <Real>(name: "l1 ReLU"),
                new Linear <Real>(2, 1, name: "l2 Linear")
                );

            // Declare the optimizer (Adam in this sample) and attach it to the network.
            Adam <Real> adam = new Adam <Real>();
            adam.SetUp(nn);

            Console.WriteLine("Training...");
            for (int i = 0; i < learningCount; i++)
            {
                // Mean squared error is the loss for each of the four samples.
                for (int sample = 0; sample < trainData.Length; sample++)
                {
                    Trainer.Train(nn, trainData[sample], trainLabel[sample], new MeanSquaredError <Real>());
                }

                // Updating once per pass makes the four samples act as one mini batch.
                adam.Update();
            }

            // Show the training result.
            Console.WriteLine("Test Start...");
            foreach (Real[] val in trainData)
            {
                NdArray <Real> result = nn.Predict(val)[0];
                Console.WriteLine(val[0] + " xor " + val[1] + " = " + (result.Data[0] > 0.5 ? 1 : 0) + " " + result);
            }
        }
Esempio n. 10
0
        /// <summary>
        /// Smoke-trains a small dense network on the XOR truth table, then times
        /// Predict vs PredictOnBatch over a single test sample.
        /// </summary>
        public void TrainXOR()
        {
            try {
                //Load train data
                float[,] testX = new float[, ] {
                    { 0, 1 },
                };
                float[,] x = new float[, ] {
                    { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
                };
                float[] y = new float[] { 0, 1, 1, 0 };

                //Build sequential model
                var model = new Sequential();
                model.Add(new Dense(32, activation: "relu", input_shape: new Shape(2)));
                model.Add(new Dense(32, activation: "relu"));
                model.Add(new Dropout(0.1d));
                model.Add(new Dense(1, activation: "sigmoid"));

                //Compile and train
                var optimizer = new Adam();
                model.Compile(optimizer: optimizer, loss: "mse", metrics: new string[] { "accuracy" });
                model.Fit(x, y, batch_size: 2, epochs: 1000, verbose: 1);

                // Warm up both prediction paths so the timings below exclude first-call overhead.
                float[] predicts;
                predicts = model.Predict(x).GetData <float>();
                predicts = model.PredictOnBatch(x).GetData <float>();
                predicts = model.Predict(x).GetData <float>();
                predicts = model.PredictOnBatch(x).GetData <float>();
                predicts = model.Predict(x).GetData <float>();
                predicts = model.PredictOnBatch(x).GetData <float>();

                Stopwatch watch = new Stopwatch();
                watch.Restart();
                for (int i = 0; i < 5; ++i)
                {
                    predicts = model.PredictOnBatch(testX).GetData <float>();
                }
                watch.Stop();
                string batchMs = watch.GetElapsedMilliseconds().ToString();
                watch.Restart();
                for (int i = 0; i < 5; ++i)
                {
                    predicts = model.Predict(testX).GetData <float>();
                }
                watch.Stop();

                //MainWindow.Instance.Dispatcher.BeginInvoke(new Action(() => {
                //	MainWindow.Instance.DebugTextBox.Text = batchMs + " / " + watch.GetElapsedMilliseconds().ToString();
                //}));
            } catch (Exception ex) {
                // BUG FIX: the exception used to be swallowed silently (empty catch with an
                // unused 'ex'); at minimum surface it on stderr so failures are visible.
                Console.Error.WriteLine(ex);
            }
        }
Esempio n. 11
0
 /// <summary>
 /// 将静态变量出栈
 /// </summary>
 /// <summary>
 /// Pops the static variables back off the stack (restores previously saved state).
 /// </summary>
 public void LoadStaticValue()
 {
     bool nothingSaved = StaticValueStack == null || StaticValueStack.Count == 0;
     if (nothingSaved)
     {
         // Nothing to restore; log the call site and bail out.
         Console.WriteLine("StartNet->LoadStaticValue()");
         return;
     }
     AgentClass.GetStaticValue(StaticValueStack);
     ClassPublicValue.GetStaticValue(StaticValueStack);
     Adam.GetStackValue(StaticValueStack);
 }
Esempio n. 12
0
        /// <summary>
        /// Handles a registration post. Validation currently only gates the (not yet
        /// implemented) insert; both paths land on the Login view.
        /// </summary>
        /// <param name="user">Posted registration data.</param>
        /// <returns>The Login view.</returns>
        public ActionResult Register(Adam user)
        {
            // Removed the unused local 'count' left over from a copied implementation.
            if (ModelState.IsValid)
            {
                //insert user into database here
                return View("Login");
            }
            return View("Login");
        }
Esempio n. 13
0
 /// <summary>
 /// Returns the lazily created singleton Adam instance; the lock makes the first
 /// creation safe when several threads race here.
 /// </summary>
 /// <returns>The unique Adam instance.</returns>
 public static Adam GetInstance()
 {
     lock (locker)
     {
         // Create on first use; later calls reuse the cached instance.
         return uniqueInstance ?? (uniqueInstance = new Adam("Adam"));
     }
 }
Esempio n. 14
0
        /// <summary>
        /// Chains generator and discriminator into the combined GAN model. The
        /// discriminator is frozen so only the generator learns during GAN updates.
        /// </summary>
        /// <param name="generator">The generator model.</param>
        /// <param name="discriminator">The discriminator model (made non-trainable).</param>
        /// <returns>The compiled combined model.</returns>
        static Sequential DefineGan(BaseModel generator, BaseModel discriminator)
        {
            // Discriminator weights must not move while the generator trains through it.
            discriminator.SetTrainable(false);

            var gan = new Sequential();
            gan.Add(generator);
            gan.Add(discriminator);

            gan.Compile(loss: "binary_crossentropy", optimizer: new Adam(0.0002f, 0.5f));
            return gan;
        }
Esempio n. 15
0
        /// <summary>
        /// Builds the three models of the GAN: the image generator, the discriminator and
        /// the combined adversarial model, each compiled with its own optimizer.
        /// </summary>
        public void CreateModels()
        {
            var discriminatorOptimizer = new Adam(lr: 0.01f);
            var ganOptimizer           = new Adam(lr: 0.005f);

            // ----- Generator: widening dense stack, batch-normalized after each hidden layer.
            var generator      = new Sequential();
            var generatorInput = new Input(new Shape(ImageData.TotalInputShape));

            generator.Add(generatorInput);
            generator.Add(new Dense(128, activation: "relu"));
            generator.Add(new Dropout(0.3f));
            generator.Add(new BatchNormalization(momentum: 0.9f));
            foreach (int width in new[] { 256, 512, 2048 })
            {
                generator.Add(new Dense(width, activation: "relu"));
                generator.Add(new BatchNormalization(momentum: 0.9f));
            }
            generator.Add(new Dense(ImageData.OutputShape + ImageData.ManualInputShape, activation: "sigmoid"));

            generatorModel = generator;
            generatorModel.Compile(optimizer: ganOptimizer, loss: "mse", metrics: new string[] { "accuracy" });

            // ----- Discriminator: narrowing dense stack with heavy dropout.
            var discriminator = new Sequential();

            discriminator.Add(new Dense(2048, activation: "relu", input_shape: new Shape(ImageData.OutputShape + ImageData.ManualInputShape)));
            discriminator.Add(new Dropout(0.4f));
            foreach (int width in new[] { 512, 128, 32 })
            {
                discriminator.Add(new Dense(width, activation: "relu"));
                discriminator.Add(new Dropout(0.4f));
            }
            discriminator.Add(new Dense(1, activation: "sigmoid"));

            discriminatorModel = discriminator;
            discriminatorModel.Compile(optimizer: discriminatorOptimizer, loss: "binary_crossentropy", metrics: new string[] { "accuracy" });

            // ----- Adversarial model: generator feeding straight into the discriminator.
            var gan = new Sequential();

            //ganSeq.Add(new Concatenate(generatorModel.ToLayer(), generatorInput));
            gan.Add(generatorModel.ToLayer());
            gan.Add(discriminatorModel.ToLayer());

            ganModel = gan;
            ganModel.Compile(optimizer: ganOptimizer, loss: "binary_crossentropy", metrics: new string[] { "accuracy" });
        }
Esempio n. 16
0
 /// <summary>
 /// 将静态变量进栈
 /// </summary>
 /// <summary>
 /// Pushes the static variables onto the stack (saves the current state).
 /// </summary>
 public void SetStaticValue()
 {
     // Start from an empty stack: create it on first use, otherwise wipe the old contents.
     if (StaticValueStack == null)
     {
         StaticValueStack = new Stack <object>();
     }
     else
     {
         StaticValueStack.Clear();
     }

     // Each participant pushes its state and hands the stack back.
     StaticValueStack = Adam.SetStaticValue(StaticValueStack);
     StaticValueStack = ClassPublicValue.SetStaticValue(StaticValueStack);
     StaticValueStack = AgentClass.SetStaticValue(StaticValueStack);
 }
Esempio n. 17
0
            /// <summary>
            /// A named child with both a mother and a father can be created, across generations.
            /// </summary>
            public void Humans_can_reproduce_when_there_is_a_name_a_mother_and_a_father()
            {
                var adam  = Adam.GetInstance();
                var eve   = Eve.GetInstance(adam);
                var seth  = new Male("Seth", eve, adam);
                var azura = new Female("Azura", eve, adam);
                var enos  = new Male("Enos", azura, seth);

                Assert.AreEqual("Eve", eve.Name);
                Assert.AreEqual("Adam", adam.Name);
                Assert.AreEqual("Seth", seth.Name);
                Assert.AreEqual("Azura", azura.Name);
                Assert.AreEqual("Enos", ((Human)enos).Name);
                Assert.AreEqual(seth, ((Human)enos).Father);
                Assert.AreEqual(azura, ((Human)enos).Mother);
            }
Esempio n. 18
0
        /// <summary>
        /// Reproduction works whenever a child has a name, a mother and a father,
        /// including for grandchildren of the original couple.
        /// </summary>
        public void Human_can_reproduce_when_they_have_a_mother_and_father_and_have_a_name()
        {
            Adam   adam  = Adam.GetInstance();
            Eve    eve   = Eve.GetInstance(adam);
            Male   seth  = new Male("Seth", eve, adam);
            Female azura = new Female("Azura", eve, adam);
            Male   enos  = new Male("Enos", azura, seth);

            // The root couple and both generations carry their given names.
            Assert.AreEqual("Eve", eve.Name);
            Assert.AreEqual("Adam", adam.Name);
            Assert.AreEqual("Seth", seth.Name);
            Assert.AreEqual("Azura", azura.Name);

            // Grandchild properties are checked through the Human base view.
            Assert.AreEqual("Enos", ((Human)enos).Name);
            Assert.AreEqual(seth, ((Human)enos).Father);
            Assert.AreEqual(azura, ((Human)enos).Mother);
        }
Esempio n. 19
0
 /// <summary>
 /// Flattens this entry into the 12 display columns used by the results table.
 /// </summary>
 /// <returns>Columns: id, surname, name, year, points, Adam score ("old / new"),
 /// coefficient, rank, mark, group, new rank, place.</returns>
 public string[] str()
 {
     return new string[]
     {
         id.ToString(),
         surname,
         name,
         year.ToString(),
         points.ToString(),
         Adam.ToString() + " / " + Adam_new.ToString(),
         coef[3].ToString(),
         rank,
         mark,
         group,
         new_rank,
         place.ToString()
     };
 }
Esempio n. 20
0
        /// <summary>
        /// Grid-searches Adam hyper-parameters (hidden units x learning rate x L2 rate)
        /// with k-fold cross validation, writing one report per combination under the
        /// adamKFoldsReports directory.
        /// </summary>
        /// <param name="wholeSet">Complete data set to split into k folds.</param>
        /// <param name="k">Number of folds.</param>
        /// <param name="learningRates">Learning-rate candidates.</param>
        /// <param name="regularizationRates">L2 regularization-rate candidates.</param>
        /// <param name="humberOfHiddenNeurons">Hidden-layer size candidates.</param>
        /// <param name="numOfEpochs">Epochs per training run.</param>
        public void ScreenAdam(AA1_MLP.Entities.DataSet wholeSet, int k, List <double> learningRates, List <double> regularizationRates, List <int> humberOfHiddenNeurons, int numOfEpochs)
        {
            string reportsDirectory = "adamKFoldsReports";

            // Start from a clean report folder for this screening run.
            if (Directory.Exists(reportsDirectory))
            {
                Directory.Delete(reportsDirectory, true);
            }
            Directory.CreateDirectory(reportsDirectory);

            AdamParams passedParams = new AdamParams();
            IOptimizer trainer      = new Adam();

            passedParams.numberOfEpochs = numOfEpochs;
            passedParams.batchSize      = 10;

            // One k-fold run per (hidden units, learning rate, regularization rate) triple.
            foreach (int hiddenUnits in humberOfHiddenNeurons)
            {
                foreach (double learningRate in learningRates)
                {
                    foreach (double regularizationRate in regularizationRates)
                    {
                        passedParams.learningRate        = learningRate;
                        passedParams.regularization      = Regularizations.L2;
                        passedParams.regularizationRate  = regularizationRate;
                        passedParams.NumberOfHiddenUnits = hiddenUnits;

                        RunKFoldWithSetOfParams(wholeSet, k, passedParams, trainer, reportsDirectory);
                    }
                }
            }
        }
Esempio n. 21
0
 /// <summary>
 /// Authenticates a user. Only the credential fields take part in validation; all
 /// other model fields are excluded. Staff and regular users are routed to Home,
 /// anything else is shown an error status page.
 /// </summary>
 /// <param name="user">Posted login form data.</param>
 /// <returns>A redirect to Home on success, a status view on failure, or the login
 /// form with validation errors.</returns>
 public ActionResult Login(Adam user)
 {
     // Only the credentials matter for login; drop validation for every other field.
     string[] ignoredFields =
     {
         "FirstName", "LastName", "Email", "PassConf", "Location",
         "UserId", "RegDate", "EventNo", "EventOwner"
     };
     foreach (string field in ignoredFields)
     {
         ModelState.Remove(field);
     }

     if (!ModelState.IsValid)
     {
         return View(user);
     }

     user.Username = dao.CheckLogin(user);
     if (user.Username == null)
     {
         ViewBag.Status = "Error! " + dao.message;
         return View("Status");
     }

     if (user.UserType == Role.Staff)
     {
         Session["name"] = "Staff";
         return RedirectToAction("Index", "Home");
     }
     if (user.UserType == Role.User)
     {
         Session["name"] = user.UserId;
         return RedirectToAction("Index", "Home");
     }

     // Authenticated but with an unrecognized role: report the DAO's message.
     ViewBag.Status = "Error! " + dao.message;
     return View("Status");
 }
Esempio n. 22
0
    /// <summary>
    /// Eve is a singleton derived from Adam: repeated GetInstance calls return the same
    /// object, no other way to construct her exists, and the type cannot be subclassed.
    /// </summary>
    public void Eve_is_unique_and_created_from_a_rib_of_adam()
    {
        var adam      = Adam.GetInstance();
        var eve       = Eve.GetInstance(adam);
        var secondEve = Eve.GetInstance(adam);

        Assert.IsTrue(eve is Eve);
        Assert.AreEqual(eve, secondEve);

        // GetInstance() is the only static method on Eve
        Assert.AreEqual(1, typeof(Eve).GetMethods().Count(x => x.IsStatic));

        // Eve has no public or internal constructor
        Assert.IsFalse(typeof(Eve).GetConstructors(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
                       .Any(x => x.IsPublic || x.IsAssembly));

        // Eve cannot be overridden
        Assert.IsTrue(typeof(Eve).IsSealed);
    }
Esempio n. 23
0
 /// <summary>
 /// Handles a login post; fields other than the credentials and names are excluded
 /// from validation. A valid model lands on the Profile view, otherwise the form is
 /// redisplayed with its errors.
 /// </summary>
 /// <param name="user">Posted login form data.</param>
 public ActionResult Login(Adam user)
 {
     // Exclude everything that is not part of the login form from validation.
     string[] ignoredFields =
     {
         "Email", "PassConf", "Location", "UserId",
         "RegDate", "EventNo", "EventOwner"
     };
     foreach (string field in ignoredFields)
     {
         ModelState.Remove(field);
     }

     if (!ModelState.IsValid)
     {
         return View(user);
     }

     //insert into database here
     return View("Profile");
 }
Esempio n. 24
0
        /// <summary>
        /// Trains a small Linear-LSTM-Linear network on generated sequence data with Adam,
        /// then runs a prediction on one test sequence.
        /// </summary>
        public static void Run()
        {
            DataMaker      dataMaker = new DataMaker(STEPS_PER_CYCLE, NUMBER_OF_CYCLES);
            NdArray <Real> trainData = dataMaker.Make();

            // The network layout is listed into a FunctionStack.
            FunctionStack <Real> model = new FunctionStack <Real>(
                new Linear <Real>(1, 5, name: "Linear l1"),
                new LSTM <Real>(5, 5, name: "LSTM l2"),
                new Linear <Real>(5, 1, name: "Linear l3")
                );

            // Declare the optimizer and attach it to the model.
            Adam <Real> adam = new Adam <Real>();
            adam.SetUp(model);

            // Training loop.
            Console.WriteLine("Training...");
            for (int epoch = 0; epoch < TRAINING_EPOCHS; epoch++)
            {
                NdArray <Real>[] sequences = dataMaker.MakeMiniBatch(trainData, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);

                Real loss = ComputeLoss(model, sequences);

                adam.Update();

                // The LSTM keeps state between batches; clear it before the next epoch.
                model.ResetState();

                bool shouldReport = epoch != 0 && epoch % DISPLAY_EPOCH == 0;
                if (shouldReport)
                {
                    Console.WriteLine("[{0}]training loss:\t{1}", epoch, loss);
                }
            }

            Console.WriteLine("Testing...");
            NdArray <Real>[] testSequences = dataMaker.MakeMiniBatch(trainData, MINI_BATCH_SIZE, LENGTH_OF_SEQUENCE);

            // Predict forward from one arbitrary sample of the test batch.
            int sample_index = 45;
            predict(testSequences[sample_index], model, PREDICTION_LENGTH);
        }
Esempio n. 25
0
    /// <summary>
    /// Both parents are required: Male/Female each expose exactly one way to be
    /// constructed, and passing null for either parent must throw.
    /// </summary>
    public void Father_and_mother_are_essential_for_reproduction()
    {
        var visibleCtors = BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance;

        // There is just 1 way to reproduce for each sex.
        Assert.AreEqual(1, typeof(Male).GetConstructors(visibleCtors).Count(x => x.IsPublic || x.IsAssembly));
        Assert.AreEqual(1, typeof(Female).GetConstructors(visibleCtors).Count(x => x.IsPublic || x.IsAssembly));

        var adam = Adam.GetInstance();
        var eve  = Eve.GetInstance(adam);

        // Every combination with a missing parent must be rejected.
        Assert.Throws <ArgumentNullException>(() => new Male("Seth", null, null));
        Assert.Throws <ArgumentNullException>(() => new Male("Abel", eve, null));
        Assert.Throws <ArgumentNullException>(() => new Male("Seth", null, adam));
        Assert.Throws <ArgumentNullException>(() => new Female("Azura", null, null));
        Assert.Throws <ArgumentNullException>(() => new Female("Awan", eve, null));
        Assert.Throws <ArgumentNullException>(() => new Female("Dina", null, adam));
        Assert.Throws <ArgumentNullException>(() => new Female("Eve", null, null));
        Assert.Throws <ArgumentNullException>(() => new Male("Adam", null, null));
    }
Esempio n. 26
0
        /// <summary>
        /// Registers a new user. On a valid model the user is inserted and a status page
        /// reports success or the DAO error; an invalid model falls back to the Login view.
        /// </summary>
        /// <param name="user">Posted registration data.</param>
        public ActionResult Register(Adam user)
        {
            if (!ModelState.IsValid)
            {
                return View("Login");
            }

            // A single inserted row signals success; anything else carries the DAO message.
            int inserted = dao.Insert(user);
            ViewBag.Status = inserted == 1
                ? "User created"
                : "Error! " + dao.message;
            return View("Status");
        }
Esempio n. 27
0
        /// <summary>
        /// Builds a minimal one-convolution network, attaches an Adam optimizer and runs
        /// a single prediction over synthetic input data.
        /// </summary>
        private static void RunAsync()
        {
            // Synthetic 2x3x4 input filled with evenly spaced values in [0, 1).
            var trainData = new NdArray(new[] { 2, 3, 4 });
            for (int i = 0; i < trainData.Data.Length; i++)
            {
                trainData.Data[i] = (float)i / trainData.Data.Length;
            }

            // A single 3x3 convolution from 2 channels down to 1.
            var functions = new List <Function>();
            functions.Add(new Convolution2D(2, 1, 3));
            var nn = new FunctionStack(functions.ToArray());

            nn.Compress();
            nn.SetOptimizer(new Adam());

            var result = nn.Predict(trainData)[0];
        }
Esempio n. 28
0
                /// <summary>
                /// Instantiates the training algorithm named in the task parameters
                /// (unknown names fall back to Adadelta) and populates its settings
                /// from the serialized JSON parameter blob.
                /// </summary>
                /// <returns>The configured training algorithm.</returns>
                private ITrainingAlgorithm CreateAlgorithm()
                {
                    ITrainingAlgorithm algorithm;

                    switch (this.TaskParameters.Algorithm.Name)
                    {
                    case "Adagrad":
                        algorithm = new Adagrad();
                        break;

                    case "Adam":
                        algorithm = new Adam();
                        break;

                    case "RMSProp":
                        algorithm = new RMSProp();
                        break;

                    case "SGD":
                        algorithm = new SGD();
                        break;

                    case "Adadelta":
                    default:
                        // Unrecognized names deliberately fall back to Adadelta.
                        algorithm = new Adadelta();
                        break;
                    }

                    // Overlay the serialized parameters onto the fresh algorithm instance.
                    JsonSerializer jsonSerializer = new JsonSerializer();

                    using (JTokenReader jtokenReader = new JTokenReader(this.TaskParameters.Algorithm.Parameters))
                    {
                        jsonSerializer.Populate(jtokenReader, algorithm);
                    }

                    return algorithm;
                }
Esempio n. 29
0
        /// <summary>
        /// Visualizes 30 Adam optimization steps on Function2, starting from (-4, 2):
        /// records the trajectory in screen coordinates, then plots each point and
        /// connects consecutive points with line segments.
        /// </summary>
        private void DrawAdam(Graphics graphics, DrawF draw)
        {
            ArrayList trajectory = new ArrayList();
            PointF2D  current    = new PointF2D(-4.0f, 2.0f);
            Function2 surface    = new Function2();
            Adam      adam       = new Adam(0.17f, 0.9f, 0.999f);

            // Optimization pass: snapshot the current position (in block/screen space),
            // then take one Adam step along the gradient. The point is updated in place.
            for (int step = 0; step < 30; step++)
            {
                trajectory.Add(draw.getBlockPoint(current.X, current.Y));
                PointF2D gradient = surface.DiffFormula(current.X, current.Y);
                adam.Update(current, gradient);
            }

            // Drawing pass. The very first segment degenerates to a single point
            // (previous == first element), matching the original behavior.
            PointF2D previous = (PointF2D)trajectory[0];

            foreach (PointF2D visited in trajectory)
            {
                draw.drawPoint(graphics, Brushes.Blue, visited);
                draw.drawLine(graphics, previous, visited);
                previous = visited;
            }
        }
Esempio n. 30
0
        /// <summary>
        /// Entry point: loads the pre-split CUP dataset (10 inputs, 2 targets, standardized)
        /// from hard-coded paths and trains an Adamax model for 100k epochs via LastTrain.
        /// </summary>
        /// <remarks>
        /// Fix: the original assigned <c>adamtrainer = new Adam()</c> and then immediately
        /// overwrote it with <c>new Adamax()</c> — a dead store that was never used.
        /// The large blocks of commented-out experiment variants (SGD, k-fold screening,
        /// seed sweeps, test-set prediction) were removed; consult version control for them.
        /// </remarks>
        static void Main(string[] args)
        {
            // Loading and parsing the cup dataset splits.
            // NOTE(review): paths are machine-specific; consider moving them to configuration.
            AA1_MLP.DataManagers.CupDataManager dm = new AA1_MLP.DataManagers.CupDataManager();
            DataSet trainDS = dm.LoadData(@"C:\Users\Ronin\Documents\monks\Monks\UsedFiles\TrainValSplits\60percenttrain.txt", 10, 2, standardize: true);
            DataSet testDS  = dm.LoadData(@"C:\Users\Ronin\Documents\monks\Monks\UsedFiles\TrainValSplits\60percenttest.txt", 10, 2, standardize: true);

            Console.WriteLine("Training Adamax");

            // Adamax reuses the Adam hyper-parameter set (AdamParams).
            AdamParams adampassedParams = new AdamParams();
            IOptimizer adamtrainer      = new Adamax();

            adampassedParams.numberOfEpochs      = 100000;
            adampassedParams.batchSize           = 10;
            adampassedParams.trainingSet         = trainDS;
            adampassedParams.validationSet       = testDS;
            adampassedParams.learningRate        = 0.001;
            adampassedParams.regularization      = Regularizations.L2;
            adampassedParams.regularizationRate  = 0.001;
            adampassedParams.NumberOfHiddenUnits = 100;
            adampassedParams.parallelize         = false;

            // Train and report under the "100kadamax" experiment tag with seed 1.
            LastTrain(testDS, adampassedParams, adamtrainer, "100kadamax", 1);
        }
Esempio n. 31
0
        /// <summary>
        /// Trains a convolutional network on paired image datasets (A = inputs, B = targets),
        /// periodically writing source/result images to disk and saving the final network.
        /// Relies on class members not visible here: StartSide, EndSide, InputSize, OutputSize,
        /// LatentSize, BatchSize, superres_enc_front, superres_dec_back.
        /// </summary>
        public void Train()
        {
            string dir = "ND_OPT_ConvAutoencoder_Data";

            // Output folders for per-iteration snapshots.
            Directory.CreateDirectory($@"{dir}");
            Directory.CreateDirectory($@"{dir}\Results");
            Directory.CreateDirectory($@"{dir}\Sources");

            // Same image directory loaded at two different side lengths:
            // a_dataset at StartSide (network input), b_dataset at EndSide (target).
            AnimeDatasets a_dataset = new AnimeDatasets(StartSide, @"I:\Datasets\VAE_Dataset\White", @"I:\Datasets\VAE_Dataset\White\conv");//@"I:\Datasets\anime-faces\combined", @"I:\Datasets\anime-faces\combined_small");

            a_dataset.InitializeDataset();

            AnimeDatasets b_dataset = new AnimeDatasets(EndSide, @"I:\Datasets\VAE_Dataset\White", @"I:\Datasets\VAE_Dataset\White\conv");//@"I:\Datasets\anime-faces\combined", @"I:\Datasets\anime-faces\combined_small");

            b_dataset.InitializeDataset();

            // NOTE(review): the variable is named "sgd" but actually holds an Adam
            // optimizer (lr = 0.001) — consider renaming for clarity.
            Adam      sgd       = new Adam(0.001f);
            Quadratic quadratic = new Quadratic();

            NRandom r  = new NRandom(0);
            NRandom r2 = new NRandom(0);

            // Reusable buffer for the loss gradient; cleared after each step.
            Matrix loss_deriv = new Matrix(OutputSize, 1, MemoryFlags.ReadWrite, true);


            #region Setup Database
            Matrix data_vec = new Matrix(LatentSize, 1, MemoryFlags.ReadOnly, false);

            // Preload every training image into pinned float buffers and device matrices.
            // NOTE(review): the b_* arrays are sized by a_dataset.TrainingFiles.Count, not
            // b_dataset's — this assumes both datasets enumerate the same files; confirm.
            Matrix[]  a_dataset_vec = new Matrix[a_dataset.TrainingFiles.Count];
            float[][] a_dataset_f   = new float[a_dataset.TrainingFiles.Count][];

            Matrix[]  b_dataset_vec = new Matrix[a_dataset.TrainingFiles.Count];
            float[][] b_dataset_f   = new float[a_dataset.TrainingFiles.Count][];

            for (int i = 0; i < a_dataset.TrainingFiles.Count; i++)
            {
                a_dataset_f[i]   = new float[InputSize];
                a_dataset_vec[i] = new Matrix(InputSize, 1, MemoryFlags.ReadOnly, false);
                a_dataset.LoadImage(a_dataset.TrainingFiles[i], a_dataset_f[i]);
                a_dataset_vec[i].Write(a_dataset_f[i]);

                b_dataset_f[i]   = new float[OutputSize];
                b_dataset_vec[i] = new Matrix(OutputSize, 1, MemoryFlags.ReadOnly, false);
                b_dataset.LoadImage(b_dataset.TrainingFiles[i], b_dataset_f[i]);
                b_dataset_vec[i].Write(b_dataset_f[i]);
            }
            #endregion

            // Main loop: 20000 logical iterations, each expanded to BatchSize sub-steps.
            for (int i0 = 000; i0 < 20000 * BatchSize; i0++)
            {
                // NOTE(review): only the first half of the training set is ever sampled
                // (count / 2) — verify whether this split is intentional (e.g. held-out half).
                int idx = (r.Next() % (a_dataset.TrainingFiles.Count / 2));

                // Forward through the encoder front, compare against the B target.
                var out_img = superres_enc_front.ForwardPropagate(a_dataset_vec[idx]);
                quadratic.LossDeriv(out_img[0], b_dataset_vec[idx], loss_deriv, 0);

                // Backward pass and parameter update are driven through the decoder back half.
                // NOTE(review): forward uses superres_enc_front but gradients/updates go through
                // superres_dec_back — presumably they share layers; confirm against the class.
                superres_dec_back.ResetLayerErrors();
                superres_dec_back.ComputeGradients(loss_deriv);
                superres_dec_back.ComputeLayerErrors(loss_deriv);
                superres_dec_back.UpdateLayers(sgd);

                loss_deriv.Clear();

                // Once per batch, dump the current input image and the network's output.
                if (i0 % BatchSize == 0)
                {
                    a_dataset.SaveImage($@"{dir}\Sources\{i0 / BatchSize}.png", a_dataset_f[idx]);
                    b_dataset.SaveImage($@"{dir}\Results\{i0 / BatchSize}.png", out_img[0].Read());
                }

                Console.Clear();
                Console.Write($"Iteration: {i0 / BatchSize}, Sub-Batch: {i0 % BatchSize}");
            }

            superres_enc_front.Save($@"{dir}\network_final.bin");
            Console.WriteLine("DONE.");
        }