Example #1
0
 internal Session(INetwork network, IAgenda agenda, IWorkingMemory workingMemory, IEventAggregator eventAggregator)
 {
     _network = network;
     _agenda = agenda;
     _workingMemory = workingMemory;
     _eventAggregator = eventAggregator;

     // The execution context bundles this session's memory, agenda and event
     // stream; activating the network propagates any existing facts through it.
     _executionContext = new ExecutionContext(this, _workingMemory, _agenda, _eventAggregator);
     _network.Activate(_executionContext);
 }
Example #2
0
        /// <summary>
        /// Trains a small feed-forward network on the XOR data set and prints
        /// the outputs for two sample inputs.
        /// </summary>
        public static void Run()
        {
            var rng = new Random();
            DataSet data = new XorDataSetGenerator();

            // Topology and training hyper-parameters for the XOR task.
            const int inputDimension = 2;
            const int hiddenDimension = 3;
            const int outputDimension = 1;
            const int hiddenLayers = 1;
            const double learningRate = 0.001;
            const double initParamsStdDev = 0.08;

            INetwork nn = NetworkBuilder.MakeFeedForward(
                inputDimension, hiddenDimension, hiddenLayers, outputDimension,
                data.GetModelOutputUnitToUse(), data.GetModelOutputUnitToUse(),
                initParamsStdDev, rng);

            const int reportEveryNthEpoch = 10;
            const int trainingEpochs = 100000;

            Trainer.train<NeuralNetwork>(trainingEpochs, learningRate, nn, data, reportEveryNthEpoch, rng);

            Console.WriteLine("Training Completed.");
            Console.WriteLine("Test: 1,1");

            // Inference only: the graph is created with backprop disabled.
            Graph inferenceGraph = new Graph(false);
            Matrix firstOutput = nn.Activate(new Matrix(new double[] { 1, 1 }), inferenceGraph);
            Console.WriteLine("Test: 1,1. Output:" + firstOutput.W[0]);

            Graph secondGraph = new Graph(false);
            Matrix secondOutput = nn.Activate(new Matrix(new double[] { 0, 1 }), secondGraph);
            Console.WriteLine("Test: 0,1. Output:" + secondOutput.W[0]);

            Console.WriteLine("done.");
        }
Example #3
0
        /// <summary>
        /// Runs every sequence through the recurrent network once, optionally
        /// training it with truncated backpropagation, and returns the average
        /// reported loss over all steps that had a target output.
        /// </summary>
        /// <param name="learningRate">Step size used when updating model parameters.</param>
        /// <param name="network">The recurrent network to evaluate/train.</param>
        /// <param name="sequences">Data sequences to iterate over.</param>
        /// <param name="applyTraining">When true, gradients are backpropagated and parameters updated.</param>
        /// <param name="lossTraining">Loss used to compute gradients during training.</param>
        /// <param name="lossReporting">Loss used for the returned average (may differ from the training loss).</param>
        /// <returns>
        /// Average reporting loss, or the offending value immediately if any
        /// step's loss is NaN/Infinity (early-exit diagnostic).
        /// </returns>
        public static double Pass(double learningRate, INetwork network, List <DataSequence> sequences,
                                  bool applyTraining, ILoss lossTraining, ILoss lossReporting)
        {
            double numerLoss = 0;
            double denomLoss = 0;

            // NOTE(review): the RNN config is deserialized from disk on every call;
            // callers invoking this per epoch pay that I/O cost each time.
            RnnConfig rnnConfig = (RnnConfig)Serializer.Deserialize(NetworkBuilder.Config.RnnConfigFile);

            foreach (DataSequence seq in sequences)
            {
                network.ResetState();
                Graph g = new Graph(applyTraining);
                network.GenerateDropout(applyTraining);

                for (int i = 0; i < seq.Steps.Count; ++i)
                {
                    DataStep step = seq.Steps[i];

                    // Generate in dropout
                    // A column is "dropped" when its transformed input value is
                    // numerically zero (|v| below the 1e-7 tolerance).
                    bool[] dropped = new bool[step.Input.W.Length];
                    for (int col = 0; col < dropped.Length; ++col)
                    {
                        dropped[col] = Math.Abs(rnnConfig.GetTransformed(0, i, col, step.Input.W[col])) < 0.0000001;
                    }

                    Matrix output = network.Activate(step.Input, g, dropped);

                    if (step.TargetOutput != null)
                    {
                        double loss = lossReporting.Measure(output, step.TargetOutput);
                        if (Double.IsNaN(loss) || Double.IsInfinity(loss))
                        {
                            return(loss);
                        }
                        numerLoss += loss;
                        denomLoss++;
                        if (applyTraining)
                        {
                            lossTraining.Backward(output, step.TargetOutput);
                        }
                    }

                    // Truncated backprop-through-time: flush gradients and start a
                    // fresh graph every 10 steps.
                    // NOTE(review): this condition is also true at i == 0, so the
                    // very first step triggers an (almost empty) backward pass.
                    if (i % 10 == 0 && applyTraining)
                    {
                        g.Backward();                             //backprop dw values
                        UpdateModelParams(network, learningRate); //update params

                        g = new Graph(applyTraining);
                        network.GenerateDropout(applyTraining);
                    }
                }
            }
            // May be NaN if no step in any sequence had a target output (0/0).
            return(numerLoss / denomLoss);
        }
Example #4
0
        /// <summary>
        /// One pass of mini-batch training: sequences are sampled randomly (with
        /// replacement) into batches of <c>BatchSize</c>, and gradients are
        /// accumulated over a whole batch before a single parameter update.
        /// </summary>
        /// <param name="learningRate">Step size used when updating model parameters.</param>
        /// <param name="network">The network to evaluate/train.</param>
        /// <param name="sequences">The data set to sample batches from.</param>
        /// <param name="isTraining">Whether training (backprop + updates) is performed.</param>
        /// <param name="lossFunction">Loss function used for both reporting and gradients.</param>
        /// <returns>
        /// Average loss over all steps with a target output, or the offending
        /// value immediately if any step's loss is NaN/Infinity.
        /// </returns>
        public double PassBatch(double learningRate, INetwork network, List <DataSequence> sequences,
                                bool isTraining, ILoss lossFunction)
        {
            double numerLoss = 0;
            double denomLoss = 0;
            // passes = ceil(sequences.Count / BatchSize), so the whole-data-set
            // size is covered even when it is not a multiple of BatchSize.
            int    index, passes = (sequences.Count % BatchSize == 0) ? sequences.Count / BatchSize : sequences.Count / BatchSize + 1;

            for (int j = 0; j < passes; j++)
            {
                // One graph per batch: gradients from every sequence in the batch
                // accumulate into it before the single Backward() below.
                GraphCPU g = new GraphCPU(isTraining);

                for (int i = 0; i < BatchSize; i++)
                {
                    // Random sampling with replacement; a given sequence may appear
                    // more than once (or not at all) in an epoch.
                    index = random.Next(sequences.Count);
                    var seq = sequences[index];

                    network.ResetState();
                    foreach (DataStep step in seq.Steps)
                    {
                        NNValue output = network.Activate(step.Input, g);
                        if (step.TargetOutput != null)
                        {
                            double loss = lossFunction.Measure(output, step.TargetOutput);
                            if (Double.IsNaN(loss) || Double.IsInfinity(loss))
                            {
                                return(loss);
                            }
                            numerLoss += loss;
                            denomLoss++;
                            if (isTraining)
                            {
                                lossFunction.Backward(output, step.TargetOutput);
                            }
                        }
                    }
                }

                if (isTraining)
                {
                    g.Backward();                             //backprop dw values
                    UpdateModelParams(network, learningRate); //update params
                }
            }
            // May be NaN if no sampled step had a target output (0/0).
            return(numerLoss / denomLoss);
        }
Example #5
0
        /// <summary>
        /// Online training pass: every sequence is run through the network on its
        /// own graph, with one backward pass and parameter update per sequence.
        /// </summary>
        /// <param name="learningRate">Step size used when updating model parameters.</param>
        /// <param name="network">The network to evaluate/train.</param>
        /// <param name="sequences">The data set to iterate over.</param>
        /// <param name="applyTraining">Whether training (backprop + updates) is performed.</param>
        /// <param name="lossTraining">Loss function used for reporting and gradients.</param>
        /// <returns>
        /// Average loss over all steps with a target output, or the offending
        /// value immediately if any step's loss is NaN/Infinity.
        /// </returns>
        public double Pass(double learningRate, INetwork network, List <DataSequence> sequences,
                           bool applyTraining, ILoss lossTraining)
        {
            double lossSum = 0;
            double lossCount = 0;

            foreach (DataSequence sequence in sequences)
            {
                GraphCPU graph = new GraphCPU(applyTraining);

                network.ResetState();
                foreach (DataStep step in sequence.Steps)
                {
                    NNValue prediction = network.Activate(step.Input, graph);
                    if (step.TargetOutput == null)
                    {
                        continue;
                    }

                    double stepLoss = lossTraining.Measure(prediction, step.TargetOutput);
                    if (Double.IsNaN(stepLoss) || Double.IsInfinity(stepLoss))
                    {
                        return(stepLoss);
                    }

                    lossSum += stepLoss;
                    lossCount++;

                    if (applyTraining)
                    {
                        lossTraining.Backward(prediction, step.TargetOutput);
                    }
                }

                if (applyTraining)
                {
                    graph.Backward();                         // backprop dw values
                    UpdateModelParams(network, learningRate); // update params
                }
            }

            return(lossSum / lossCount);
        }
Example #6
0
 internal Session(
     INetwork network,
     IAgendaInternal agenda,
     IWorkingMemory workingMemory,
     IEventAggregator eventAggregator,
     IActionExecutor actionExecutor,
     IDependencyResolver dependencyResolver,
     IActionInterceptor actionInterceptor)
 {
     _network = network;
     _agenda = agenda;
     _workingMemory = workingMemory;
     _eventAggregator = eventAggregator;
     _actionExecutor = actionExecutor;
     DependencyResolver = dependencyResolver;
     ActionInterceptor = actionInterceptor;

     // The execution context must be built after the fields it reads are set;
     // activating the network propagates any existing facts through it.
     _executionContext = new ExecutionContext(this, _workingMemory, _agenda, _eventAggregator);
     _network.Activate(_executionContext);
 }
Example #7
0
File: Session.cs  Project: zhrjin/NRules
 // Propagates the session's execution context through the rete network,
 // producing activations for the facts currently in working memory.
 internal void Activate()
 {
     _network.Activate(_executionContext);
 }
Example #8
0
File: Session.cs  Project: arastoul/NRules
 // Initializes the agenda with the execution context before activating the
 // network, so rule activations produced by the network land on a ready agenda.
 internal void Activate()
 {
     _agenda.Initialize(_executionContext);
     _network.Activate(_executionContext);
 }
Example #9
0
        /// <summary>
        /// Trains an LSTM on the custom data set and writes predicted vs. target
        /// outputs for the first testing sequence to the configured files.
        /// Does nothing when both output files already exist and
        /// <c>Config.Reload</c> is false.
        /// </summary>
        public void Predict()
        {
            if (!Config.Reload &&
                File.Exists(Config.RnnPredictedXFile) &&
                File.Exists(Config.RnnPredictedYFile))
            {
                return;
            }

            Random rng = new Random(Config.Random.Next());

            CustomDataSet data = new CustomDataSet(Config);

            RnnConfig rnnConfig = (RnnConfig)Serializer.Deserialize(Config.RnnConfigFile);

            // Topology is derived from the first training step; the rest are
            // fixed hyper-parameters.
            int    inputDimension   = data.Training[0].Steps[0].Input.Rows;
            int    hiddenDimension  = 30;
            int    outputDimension  = data.Training[0].Steps[0].TargetOutput.Rows;
            int    hiddenLayers     = 1;
            double learningRate     = 0.01;
            double initParamsStdDev = 0.08;
            double dropout          = 0.5;
            double inDropout        = 0.8;

            INetwork nn = NetworkBuilder.MakeLstm(inputDimension,
                                                  hiddenDimension,
                                                  hiddenLayers,
                                                  outputDimension,
                                                  new LinearUnit(),
                                                  initParamsStdDev, rng, dropout, inDropout, Config);

            int reportEveryNthEpoch = 10;
            int trainingEpochs      = 100;

            Trainer.train <NeuralNetwork>(trainingEpochs, learningRate, nn, data, reportEveryNthEpoch, rng);

            // Hoisted: the original re-evaluated data.Testing.First() on every
            // loop iteration, re-enumerating the sequence each time.
            DataSequence testSequence = data.Testing.First();

            // using-statements guarantee the writers are flushed and closed even
            // if activation throws part-way (the original only called Close() on
            // the success path, leaking the handles on any exception).
            using (StreamWriter predictedXFile = new StreamWriter(Config.RnnPredictedXFile))
            using (StreamWriter predictedYFile = new StreamWriter(Config.RnnPredictedYFile))
            {
                for (int i = 0; i < testSequence.Steps.Count; ++i)
                {
                    DataStep ds = testSequence.Steps[i];

                    Graph g = new Graph(false);

                    // Input-dropout mask: a column is dropped when its transformed
                    // value is numerically zero (|v| below the 1e-7 tolerance).
                    bool[] dropped = new bool[ds.Input.W.Length];
                    for (int col = 0; col < dropped.Length; ++col)
                    {
                        dropped[col] = Math.Abs(rnnConfig.GetTransformed(0, i, col, ds.Input.W[col])) < 0.0000001;
                    }

                    Matrix input  = new Matrix(ds.Input.W);
                    Matrix output = nn.Activate(input, g, dropped);

                    // Semicolon-separated values. string.Join also handles an empty
                    // vector, where the original Substring(0, length - 1) threw.
                    predictedXFile.WriteLine(string.Join(";", output.W));
                    predictedYFile.WriteLine(string.Join(";", ds.TargetOutput.W));
                }
            }
        }