static void Main(string[] argv)
    {
        modshogun.init_shogun_with_defaults();
        double learn_rate = 1.0;
        int max_iter = 1000;

        DoubleMatrix traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        DoubleMatrix testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        DoubleMatrix trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        Labels labels = new Labels(trainlab);

        Perceptron perceptron = new Perceptron(feats_train, labels);
        perceptron.set_learn_rate(learn_rate);
        perceptron.set_max_iter(max_iter);
        perceptron.train();

        perceptron.set_features(feats_test);
        DoubleMatrix out_labels = perceptron.apply().get_labels();
        Console.WriteLine(out_labels.ToString());

        modshogun.exit_shogun();
    }
    public static void Main()
    {
        modshogun.init_shogun_with_defaults();
        double learn_rate = 1.0;
        int max_iter = 1000;

        double[,] traindata_real = Load.load_numbers("../data/fm_train_real.dat");
        double[,] testdata_real = Load.load_numbers("../data/fm_test_real.dat");

        // already tried double[][]
        double[] trainlab = Load.load_labels("../data/label_train_twoclass.dat");

        RealFeatures feats_train = new RealFeatures();
        feats_train.set_feature_matrix(traindata_real);
        RealFeatures feats_test = new RealFeatures();
        feats_test.set_feature_matrix(testdata_real);

        BinaryLabels labels = new BinaryLabels(trainlab);

        Perceptron perceptron = new Perceptron(feats_train, labels);
        perceptron.set_learn_rate(learn_rate);
        perceptron.set_max_iter(max_iter);
        perceptron.train();

        perceptron.set_features(feats_test);
        //  already tried double[][]
        double[] out_labels = BinaryLabels.obtain_from_generic(perceptron.apply()).get_labels();

        foreach (double item in out_labels)
            Console.Write(item);

        modshogun.exit_shogun();
    }
Example #3
        /// <summary>
        /// Runs this sample
        /// </summary>
        public static void Main(string[] args)
        {
            // create training set (logical AND function)
            DataSet trainingSet = new DataSet(2, 1);

            trainingSet.addRow(new DataSetRow(new double[] { 0, 0 }, new double[] { 0 }));
            trainingSet.addRow(new DataSetRow(new double[] { 0, 1 }, new double[] { 0 }));
            trainingSet.addRow(new DataSetRow(new double[] { 1, 0 }, new double[] { 0 }));
            trainingSet.addRow(new DataSetRow(new double[] { 1, 1 }, new double[] { 1 }));

            // create perceptron neural network
            NeuralNetwork myPerceptron = new Perceptron(2, 1);

            // learn the training set
            myPerceptron.learn(trainingSet);
            // test perceptron
            Console.WriteLine("Testing trained perceptron");
            testNeuralNetwork(myPerceptron, trainingSet);
            // save trained perceptron
            myPerceptron.save("mySamplePerceptron.nnet");
            // load saved neural network
            NeuralNetwork loadedPerceptron = NeuralNetwork.load("mySamplePerceptron.nnet");

            // test loaded neural network
            Console.WriteLine("Testing loaded perceptron");
            testNeuralNetwork(loadedPerceptron, trainingSet);
        }
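The sample above relies on a testNeuralNetwork helper that is not shown. A minimal sketch of such a helper, assuming the same Neuroph-style DataSet/NeuralNetwork API used in the sample (getRows, setInput, calculate, and getOutput are assumptions, not verified against this port), could look like this:

        // Hypothetical helper assumed by the sample above: run every row of the
        // data set through the network and print its inputs next to its outputs.
        public static void testNeuralNetwork(NeuralNetwork nnet, DataSet testSet)
        {
            foreach (DataSetRow row in testSet.getRows())
            {
                nnet.setInput(row.getInput());
                nnet.calculate();
                double[] output = nnet.getOutput();

                Console.WriteLine("Input: " + string.Join(", ", row.getInput()) +
                                  "  Output: " + string.Join(", ", output));
            }
        }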
Example #4
        public void Perceptron_CanBackProp_Test()
        {
            int howInputs   = 60;
            int howOnHidden = 60;

            Perceptron p = new Perceptron(
                ActivationFunctionType.Sigmoid,
                new int[] { howInputs, howOnHidden, 1 },
                false,
                0.7,
                0.3);

            double[] input = Enumerable.Range(1, howInputs).Select(i => (double)i).ToArray();
            double[] ideal = new double[] { 0.6 };

            double[] first = p.GetResult(input);

            System.Console.WriteLine(p.LossFunction(ideal));

            p.BackPropagation(ideal);

            double[] second = p.GetResult(input);

            System.Console.WriteLine(p.LossFunction(ideal));

            p.BackPropagation(ideal);

            double[] third = p.GetResult(input);

            System.Console.WriteLine(p.LossFunction(ideal));
        }
Example #5
    public void BackProp(Perceptron[] outputs)
    {
        int i;

        for (i = 0; i < outputPer.Length; i++)
        {
            Perceptron p     = outputPer[i];
            float      state = p.state;

            // Output delta: sigmoid derivative times (target state minus actual state).
            float error = state * (1f - state);
            error *= outputs[i].state - state;

            p.AdjustWeights(error);
        }

        for (i = 0; i < hiddenPer.Length; i++)
        {
            Perceptron p     = hiddenPer[i]; // was outputPer[i]: this loop walks the hidden layer
            float      state = p.state;

            float sum = 0f;

            // Accumulate the weighted error fed back from the output layer,
            // using a separate index so the outer loop counter is not clobbered.
            for (int j = 0; j < outputs.Length; j++)
            {
                float incomingW = outputs[j].GetIncomingWeight();
                sum += incomingW * outputs[j].error;
            }

            // Hidden delta: sigmoid derivative times the back-propagated error sum.
            float error = state * (1f - state) * sum;

            p.AdjustWeights(error);
        }
    }
Example #6
    // The network keeps training until the error probability drops below strictness
    private static void learnExample(float strictness)
    {
        Perceptron perceptron = new Perceptron(GESTURES_COUNT);

        perceptron.teach(GESTURE_FILES, strictness);
        perceptron.saveToFile();
    }
Example #7
    public void Backpropagate(decimal[] Cost, decimal BPrefix = 0)
    {
        decimal[] NonLinearZs = new decimal[Perceptrons.Length];

        for (int i = 0; i < Perceptrons.Length; i++)
        {
            NonLinearZs[i] = Perceptrons[i].GetZ();
        }

        for (int i = 0; i < Perceptrons.Length; i++)
        {
            Perceptron P = Perceptrons[i];

            BPrefix = 1m;

            // Prepare the prefix being: (dC0/da0) * (da0/dZ0)
            BPrefix *= 2 * (P.CurrentActivation - Convert.ToDecimal(LastLabels[i].Strength));
            BPrefix *= ActivationFunctions.GetAppropriateDerivativeActivationFunction(LayerActivationFunction)
                           (NonLinearZs, i)[i];

            // Update current weights.
            for (int j = 0; j < P.Weights.Length; j++)
            {
                decimal LR = Convert.ToDecimal(parentNeuralNetwork.LearningRate);
                P.Weights[j].Value -= LR * BPrefix * PreviousLayer.GetInput()[j];
            }

            // Tell last layer to propagate using this perceptron's relative prefix.
            PreviousLayer.Backpropagate(Cost, BPrefix);
        }
    }
Example #8
        public static void Perceptron()
        {
            var datas = Data.sample(100, args => Math.Sign(2 * args[0] + args[1]));

            Console.WriteLine(datas.Count);
            // datas.ForEach(data => Console.WriteLine(data.Item1[0] + " " + data.Item1[1] + " : " + data.Item2));

            var perceptron = new Perceptron
            {
                Weight = datas[0].Item1,
                Bias   = 0.3
            };

            Enumerable.Range(0, 3).ToList().ForEach(i =>
                                                    datas.ForEach(data => { perceptron.Renew(data.Item1, data.Item2); }));



            using (StreamWriter s = File.CreateText("./data.txt"))
            {
                datas.ForEach(data => s.WriteLine(data.Item1[0] + "," + data.Item1[1] + "," + data.Item2));
            }

            using (StreamWriter s = File.CreateText("./predict.txt"))
            {
                datas.Select(data => perceptron.Forward(data.Item1)).ToList().ForEach(s.WriteLine);
            }
            using (StreamWriter s = File.CreateText("./params.txt"))
            {
                perceptron.Weight.ForEach(s.WriteLine);
                s.WriteLine(perceptron.Bias);
            }
        }
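Renew and Forward are not defined in this snippet. A minimal sketch of a perceptron with those two methods, assuming Weight is a List<double> and the classic perceptron update rule with a fixed learning rate (the rate and the early return on correct predictions are assumptions, not the sample's actual implementation):

        // Hypothetical sketch of the perceptron used above, not the original source.
        public class Perceptron
        {
            public List<double> Weight { get; set; }
            public double Bias { get; set; }
            public double LearningRate { get; set; } = 0.1;

            // Sign of the weighted sum plus the bias.
            public double Forward(List<double> input)
            {
                double sum = Bias;
                for (int i = 0; i < Weight.Count; i++)
                    sum += Weight[i] * input[i];
                return Math.Sign(sum);
            }

            // Classic perceptron rule: adjust only when the prediction is wrong.
            public void Renew(List<double> input, double label)
            {
                if (Forward(input) == label)
                    return;

                for (int i = 0; i < Weight.Count; i++)
                    Weight[i] += LearningRate * label * input[i];
                Bias += LearningRate * label;
            }
        }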
Example #9
        public ActionResult Create(Perceptron perceptron)
        {
            int x = 0;
            NetworkInitializer networkInitializer = new NetworkInitializer();
            Perceptron         p = networkInitializer.CreatePerceptron(perceptron.Stock);

            if (ModelState.IsValid)
            {
                foreach (Perceptron pone in _db.Perceptrons)
                {
                    if (pone.Stock == p.Stock)
                    {
                        x += 1;
                    }
                }
                if (x == 0)
                {
                    _db.Perceptrons.Add(p);
                    _db.SaveChanges();
                    networkInitializer.CreateNetwork(p.Stock);
                    //networkInitializer.InitializeJoiningTables(p.Stock);
                    return(RedirectToAction("Index"));
                }
            }
            return(View(perceptron));
        }
Example #10
        private void Init()
        {
            //Init classes
            scalingFunction     = new ScalingFunction();
            weightsGenerator    = new WeightsGeneratorRNGCSP();
            activationFunctions = new ActivationFunctions();
            geneticAlgorithm    = new GeneticAlgorithm(weightsGenerator);
            neuralNetwork       = new List <NeuralNetwork>();
            perceptron          = new Perceptron();

            //Init Lists
            keyStore      = new List <BTCKeyStore>();
            dataSet       = new List <DataSet>();
            valkeyStore   = new List <BTCKeyStore>();
            valdataSet    = new List <DataSet>();
            neuralNetwork = new List <NeuralNetwork>();

            deathRate = 10;  //If too high, then chance plays an increasing role and skews the result.

            cb           = new CircularBuffer(deathRate);
            oldMetric    = double.MaxValue;
            attemptstats = new double[32];

            GenerateValidationDataset();
        }
        public void RunIterations_ModuleFiveQOne()
        {
            var input = new List <double> {
                .8, .9
            };
            var startingWeights = new List <double> {
                .24, .88
            };
            var bias          = 0;
            var desiredOutput = .15;
            var eta           = 5;

            // init perceptron
            Perceptron <List <double> > perceptron = new Perceptron <List <double> >
            {
                Bias         = bias,
                Weights      = startingWeights,
                DeltaWeights = new List <double>(),
                CurrentInput = input
            };

            _iterationCalulator.RunIterations(perceptron, input, desiredOutput, eta, 31);

            Debug.WriteLine("Press Enter to exit...");
            Console.ReadLine();
        }
Example #12
 public void SetLink(float weight, Perceptron from, Perceptron to, bool recurrent)
 {
     this.weight    = weight;
     this.from      = from;
     this.to        = to;
     this.recurrent = recurrent;
 }
        public void RunIterations_ModuleFourPerceptron()
        {
            var input = new List <double> {
                1, 0
            };
            var startingWeights = new List <double> {
                -.3, .6
            };
            var bias          = .2;
            var desiredOutput = .8;
            var eta           = .1;

            // init perceptron
            Perceptron <List <double> > perceptron = new Perceptron <List <double> >
            {
                Bias         = bias,
                Weights      = startingWeights,
                DeltaWeights = new List <double>(),
                CurrentInput = input
            };

            _iterationCalulator.RunIterations(perceptron, input, desiredOutput, eta, 3);

            Debug.WriteLine("Press Enter to exit...");
            Console.ReadLine();
        }
    // Use this for initialization
    void Start()
    {
        m_leftBottom = Camera.main.ViewportToWorldPoint(new Vector3(0, 0));
        m_rightUp    = Camera.main.ViewportToWorldPoint(new Vector3(1, 1));
        m_LineRenderer.SetPosition(0, new Vector2(m_leftBottom.x, F(m_leftBottom.x)));
        m_LineRenderer.SetPosition(1, new Vector2(m_rightUp.x, F(m_rightUp.x)));

        m_Perceptron = new Perceptron(3);

        m_XMLWriter = new XMLWriter(m_Perceptron);

        m_Trainer = new Trainer(m_XMLWriter, m_Perceptron, this, m_leftBottom, m_rightUp);

        //for (int point = 0; point < m_TrainingPoints.Length; point++)
        //{
        //    Vector2 position = new Vector2(Random.Range(m_leftBottom.x, m_rightUp.x), Random.Range(m_leftBottom.y, m_rightUp.y));
        //    int label = ComputeLabel(position);

        //    m_TrainingPoints[point] = new TrainingPoint(position, label);
        //    GameObject pointObject = Instantiate<GameObject>(Resources.Load<GameObject>("TrainingPoint"), position, Quaternion.identity);
        //    m_TrainingPoints[point].TrainingPointObject = pointObject;
        //}

        UpdateWeightUI();
    }
Example #15
        public INeuron CreateNeuron(IActivationFunction activationFunction)
        {
            INeuron             neuron = null;
            Func <string, bool> equals = value => string.Equals(Neuron, value, StringComparison.InvariantCultureIgnoreCase);

            if (equals("Perceptron"))
            {
                neuron = new Perceptron(activationFunction, Alpha);
            }
            else if (equals("Adaline"))
            {
                neuron = new Adaline(activationFunction, Alpha);
            }
            else if (equals("BackpropagationNeuron"))
            {
                neuron = new BackpropagationNeuron(activationFunction, Alpha, Momentum, Regularization);
            }
            else if (equals("SOMNeuron"))
            {
                neuron = new SOMNeuron(activationFunction, Alpha);
            }
            else
            {
                Console.WriteLine($"Wrong neuron: {Neuron}");
            }

            return(neuron);
        }
Example #16
        private void button1_Click(object sender, EventArgs e)
        {
            int    No_of_folds = int.Parse(textBox1.Text);
            double eta_start   = double.Parse(textBox2.Text);
            double eta_stop    = double.Parse(textBox3.Text);
            double eta_step    = double.Parse(textBox4.Text);
            int    epoch_start = int.Parse(textBox5.Text);
            int    epoch_stop  = int.Parse(textBox6.Text);
            int    epoch_step  = int.Parse(textBox7.Text);

            Console.WriteLine(No_of_folds);
            Console.WriteLine(eta_start);
            Console.WriteLine(eta_stop);
            Console.WriteLine(eta_step);
            Console.WriteLine(epoch_start);
            Console.WriteLine(epoch_stop);
            Console.WriteLine(epoch_step);

            this.Hide();
            Console.WriteLine("Optimising...");
            OptimizeParameters optparam = new OptimizeParameters();

            optparam.Set(eta_start, eta_stop, eta_step, epoch_start, epoch_stop, epoch_step);
            optparam.CrossValidate(ld.Data, ld.Targets, No_of_folds);
            Console.WriteLine("Optimal parameters\nEta : {0}\tEpochs: {1}", optparam.eta, optparam.num_epochs);


            perc = new Perceptron();
            perc.Train(ld.Data, ld.Targets, 1, optparam.num_epochs, optparam.eta);

            MessageBox.Show("Done");
            this.Close();
        }
Example #17
        public ActionResult Delete(int id)
        {
            Perceptron p = _db.Perceptrons.Find(id);

            _db.Perceptrons.Remove(p);
            _db.SaveChanges();
            return(RedirectToAction("Index"));
        }
Example #18
 public void SetUp()
 {
     _network     = new NetworkBuilder().Setup();
     _perceptron  = new Perceptron(_network.GetInputs());
     _activations = new List <double> {
         1d, 2d
     };
 }
Example #19
 void InitInputPerceptrons()
 {
     for (int i = 0; i < nbInputPerceptrons; i++)
     {
         Perceptron perceptron = new Perceptron(this);
         m_inputPerceptrons.Add(perceptron);
     }
 }
Example #20
 private void Reset(float[] DNA)
 {
     map             = new CleanedSpaceMap(robotWidth);
     negativeFitness = 0;
     GameObject.Find("Trail").GetComponent <TrailRenderer>().Clear();
     dead  = false;
     brain = new Perceptron(DNA);
 }
Example #21
 public Trainer(XMLWriter xmlWriter, Perceptron perceptron, MainManager mainManager, Vector2 bottomLeft, Vector2 upperRight)
 {
     m_XMLWriter   = xmlWriter;
     m_Perceptron  = perceptron;
     m_MainManager = mainManager;
     m_BottomLeft  = bottomLeft;
     m_UpperRight  = upperRight;
 }
Example #22
 public PerceptronSauvegarde(Perceptron p)
 {
     if (p is PerceptronFinal)
     {
         type = "Final";
     }
     senders = p.Senders.Select(x => x.Item2).ToList();
 }
Example #23
    // Unit Tested
    public NeuralNetwork CreateNetwork(bool bestOfAll = false, bool leader = false)
    {
        //newPerceptronsObjects.Clear();
        while (perceptrons.Count < newPerceptronsObjects.Count)
        {
            newPerceptronsObjects.RemoveAt(newPerceptronsObjects.Count - 1);
        }

        for (int i = 0; i < this.perceptrons.Count; i++)
        {
            if (i < newPerceptronsObjects.Count)
            {
                newPerceptronsObjects[i].SetPerceptron(perceptrons[i]);
            }
            else
            {
                Perceptron perceptron = new Perceptron(perceptrons[i]);

                newPerceptronsObjects.Add(perceptron);
            }
        }

        for (int i = 0; i < connections.Count; i++)
        {
            if (connections[i].enabled)
            {
                Perceptron from = null;
                Perceptron to   = null;
                int        index;

                index = GetElementPos(connections[i].from);
                from  = newPerceptronsObjects[index];

                index = GetElementPos(connections[i].to);
                to    = newPerceptronsObjects[index];


                from.AddOutLink(connections[i].weight, from, to, connections[i].recurrent);
                to.AddInLink(connections[i].weight, from, to, connections[i].recurrent);
            }
        }

        for (int i = 0; i < newPerceptronsObjects.Count; i++)
        {
            newPerceptronsObjects[i].Finish();
        }

        if (network != null)
        {
            network.SetNeuralNetwork(ref newPerceptronsObjects, inputs, bestOfAll, leader);
        }
        else
        {
            network = new NeuralNetwork(ref newPerceptronsObjects, inputs, bestOfAll, leader);
        }

        return(network);
    }
 void Start()
 {
     if (randomNet)
     {
         neuronsPerLayer = new int[] { 4, 4, 2 };
         inputs          = new float[2];
         p = new Perceptron(neuronsPerLayer, inputs);
     }
 }
        public static void MNIST2()
        {
            DataSet dataSet = new DataSet("mnist2.txt", ' ', 10, false);
            var     p       = new Perceptron(600, 3, ErrorFunction.CrossEntropy()).Layer(784, null).Layer(16, ActivationFunction.Sigmoid())
                              .Layer(16, ActivationFunction.Sigmoid()).Layer(10, ActivationFunction.Sigmoid());

            p.Train2(dataSet, 200);
            double mse = p.CalculateMeanErrorOverDataSet(dataSet);
        }
Example #26
 public Layer(int inputCount, int neuronCount)
 {
     Perceptrons = new Perceptron[neuronCount];
     for (int i = 0; i < Perceptrons.Length; i++)
     {
         Perceptrons[i] = new Perceptron(inputCount);
     }
     this.inputCount = inputCount;
 }
Example #27
        public void PerceptronSerializerPreservesBias()
        {
            var perceptronBefore = new Perceptron(_network.First());
            var asJson           = _perceptronSerializer.SerializeJson(perceptronBefore);
            var perceptronAfter  = _perceptronSerializer.DeserializeJson <IUnit, IConnection, IUnitActivation <IUnit> >(asJson);
            var biases           = perceptronAfter.Network.SelectMany(u => u).Where(u => u.UnitActivation.UnitType == UnitType.BiasUnit);

            biases.Select(u => u.UnitActivation.Properties.NetInput).ShouldAllBeEquivalentTo(Bias);
        }
Example #28
        public void PerceptronSerializerPreservesSlopeMultiplier()
        {
            var perceptronBefore = new Perceptron(_network.First());
            var asJson           = _perceptronSerializer.SerializeJson(perceptronBefore);
            var perceptronAfter  = _perceptronSerializer.DeserializeJson <IUnit, IConnection, IUnitActivation <IUnit> >(asJson);
            var slopes           = perceptronAfter.Network.SelectMany(u => u).Where(u => u.UnitActivation.UnitType == UnitType.NormalUnit);

            slopes.Select(u => u.UnitActivation.Properties.SlopeMultiplier).ShouldAllBeEquivalentTo(SlopeMultiplier);
        }
Example #29
    // Start is called before the first frame update
    void Start()
    {
        onGround = false;
        moving   = false;

        perceptron = new Perceptron(4);

        originalPos = transform.position;
    }
Example #30
        public void PerceptronWeightsAreChangedAfterTrainIteration()
        {
            LayerStructure structure  = new LayerStructure(9, new [] { 7 }, 5);
            Perceptron     perceptron = new Perceptron(GetInitParamsByLayerStructure(structure));

            perceptron.FillWeightsRandomly();

            Assert.IsFalse(true);
        }
Example #31
    // Copy
    public void Copy(Perceptron p)
    {
        for (int i = 0; i < Weights.Length; i++)
        {
            Weights[i] = p.Weights[i];
        }

        bias = p.bias;
    }
Example #32
        private async void SettingsPage_Loaded(object sender, RoutedEventArgs e)
        {
            trainSetConfig = await TrainSetConfigHelper.ParseConfigJson();

            imageProcessor = new ImageProcessor();
            perceptron     = new Perceptron();

            symbol1TextBlock.Text = trainSetConfig.Train1.Symbol;
            symbol2TextBlock.Text = trainSetConfig.Train2.Symbol;
        }
        public void GetOutput_WithoutActivationFunction_ShouldReturnDotProductOfInputValuesAndWeights()
        {
            var biasWeight = 0.5d;
            var inputs = new[] {-1.0d, -0.5d};
            var weights = new[] {0.1d, 0.5d};

            var perceptron = new Perceptron(weights, biasWeight, _passthroughActivation);
            double result = perceptron.GetOutput(inputs);

            Assert.AreEqual(0.15d, result, 0.01d);
        }
        public void GetOutput_WithoutActivationFunction_ShouldReturnSumOfInputs_WhenWeightsAreAllUnity()
        {
            double biasWeight = 0.1d;
            var inputs = new[] {0.5d, 0.25d};
            var weights = new[] {1.0d, 1.0d};

            var perceptron = new Perceptron(weights, biasWeight, _passthroughActivation);
            double result = perceptron.GetOutput(inputs);

            Assert.AreEqual(0.85d, result);
        }
        public void GetOutput_WithoutActivationFunction_ShouldReturnBiasValue_WhenThereAreNoInputs()
        {
            double biasWeight = 0.2d;
            var inputs = new double[] { };
            var weights = new double[] { };

            var perceptron = new Perceptron(weights, biasWeight, _passthroughActivation);
            double result = perceptron.GetOutput(inputs);

            Assert.AreEqual(0.2d, result);
        }
        public void Train_ShouldUpdateBiasWeight_WhenInputsAreZero_AndAnswerIsIncorrect()
        {
            var biasWeight = 0.0d;
            var inputs = new[] {0.0d, 0.0d};
            var weights = new[] {0.5d, 0.5d};

            double desiredAnswer = 1.0d;
            var perceptron = new Perceptron(weights, biasWeight, _passthroughActivation);
            perceptron.Train(inputs, desiredAnswer);

            Assert.AreEqual(0.1d, perceptron.BiasWeight);
        }
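The Train method exercised by the test above is not shown. A minimal sketch consistent with the expected values (the bias weight moving from 0 to 0.1 when the inputs are zero and the target is 1) is the perceptron delta rule with a learning rate of 0.1; the rate and the Weights member are assumptions inferred from the tests, not taken from the library:

        // Hypothetical sketch of Train, assuming a fixed 0.1 learning rate
        // inferred from the expected bias weight in the test above.
        public void Train(double[] inputs, double desiredAnswer)
        {
            const double learningRate = 0.1d;
            double error = desiredAnswer - GetOutput(inputs);

            for (int i = 0; i < Weights.Length; i++)
            {
                Weights[i] += learningRate * error * inputs[i];
            }

            BiasWeight += learningRate * error;
        }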
    void Start()
    {
        ptron = new Perceptron(3);

        //Make 2,000 training points.
        for (int i = 0; i < training.Length; i++)
        {
            float x = Random.Range(-m_Width/2,m_Width/2);
            float y = Random.Range(-m_Height/2,m_Height/2);
            //Is the correct answer 1 or -1?
            int answer = 1;
            if (y < f(x)) answer = -1;
            training[i] = new Trainer(x, y, answer);
        }
    }
Example #38
        public void Train_WithActivationFunction_ShouldPredictLogicGateOutput(string truthTableString)
        {
            var biasWeight = 0.0d;
            var weights = new[] { 0.0d, 0.0d };
            var perceptron = new Perceptron(weights, biasWeight, _stepActivation);
            IList<double[]> truthTable = ParseTable(truthTableString);

            for (var i = 0; i < 10; i++)    // train on the table 10 times through
            {
                PerformActionOnTruthTable(truthTable, perceptron.Train);
            }

            // assert the prediction of the perceptron
            PerformActionOnTruthTable(truthTable, (inputs, expected) => Assert.AreEqual(expected, perceptron.GetOutput(inputs)));
        }
Example #39
        public void Load(Stream stream)
        {
            if (stream == null)
                throw new ArgumentNullException("stream");

            perceptronMsd = new Perceptron();
            perceptronMsd.Load(stream);
            perceptronLemma = new PerceptronString();
            perceptronLemma.Load(stream);
        }
Example #40
        public void Train(IEnumerable<Sentence> sentences)
        {
            perceptronMsd = new Perceptron();
            perceptronLemma = new PerceptronString();

            IndexedSentence[] normalizedSentences =
                sentences.Select(s => new IndexedSentence((Reverse ? (s as IEnumerable<Token>).Reverse() : s).Select(t => Normalize(t)))).ToArray();

            HashSet<Tag> alltags = new HashSet<Tag>();
            foreach (IndexedSentence sentence in normalizedSentences)
            {
                foreach (Token token in sentence)
                {
                    alltags.Add(new Tag(token.CorrectTag.Msd, null));
                    foreach (Tag tag in token.PossibleTags)
                        alltags.Add(new Tag(tag.Msd, null));
                }
            }

            for (int iteration = 0; iteration < Iterations; iteration++)
            {
                if (IterationStarted != null)
                    IterationStarted(iteration + 1);

                foreach (IndexedSentence sentence in normalizedSentences.OrderBy(s => random.NextUInt()))
                {
                    foreach (Token token in sentence)
                    {
                        if (token.PossibleTags != null && token.PossibleTags.Length == 1)
                        {
                            token.PredictedTag = token.CorrectTag; // todo: possibleTags[0]?? what if it's not correct tag?
                            continue;
                        }

                        IEnumerable<Tag> possibleTags = token.PossibleTags.OrderBy(t => t.Msd).ToArray();
                        if (possibleTags == null || !possibleTags.Any())
                            possibleTags = alltags;

                        double? bestMsdScore = null;
                        Tag bestMsd = null;
                        Features bestMsdFeatures = null;

                        Features localFeaturesTag = GetFeatures(featureTemplatesTag, token, sentence, true);

                        foreach (Tag tag in possibleTags)
                        {
                            Tag tagMsd = new Tag(tag.Msd);
                            token.PredictedTag = tagMsd;

                            Features featuresTag = GetFeatures(featureTemplatesTag, token, sentence, false);
                            featuresTag.AddRange(localFeaturesTag);

                            double score = perceptronMsd.Score(featuresTag, tagMsd);
                            if (bestMsdScore == null || score > bestMsdScore.Value)
                            {
                                bestMsdScore = score;
                                bestMsd = tagMsd;
                                bestMsdFeatures = featuresTag;
                            }
                        }

                        perceptronMsd.Update(bestMsdFeatures, new Tag(token.CorrectTag.Msd), bestMsd);

                        if (token.CorrectTag.Lemma != null)
                        {
                            double? bestLemmaScore = null;
                            string bestLemma = null;
                            Features bestLemmaFeatures = null;

                            token.PredictedTag = new Tag(token.CorrectTag.Msd);
                            Features localFeaturesLemma = GetFeatures(featureTemplatesLemma, token, sentence, true);

                            foreach (Tag tag in possibleTags)
                            {
                                if (tag.Msd != bestMsd.Msd || tag.Lemma == null)
                                    continue;
                                token.PredictedTag = new Tag(token.CorrectTag.Msd, tag.Lemma);
                                Features featuresLemma = GetFeatures(featureTemplatesLemma, token, sentence, false);
                                featuresLemma.AddRange(localFeaturesLemma);
                                double scoreLemma = perceptronLemma.Score(featuresLemma, tag.Lemma);
                                if (bestLemmaScore == null || scoreLemma > bestLemmaScore.Value)
                                {
                                    bestLemmaScore = scoreLemma;
                                    bestLemma = tag.Lemma;
                                    bestLemmaFeatures = featuresLemma;
                                }
                            }

                            if (bestLemmaFeatures != null)
                                perceptronLemma.Update(bestLemmaFeatures, token.CorrectTag.Lemma, bestLemma);
                        }

                        token.PredictedTag = token.CorrectTag;
                    }
                }

                if (IterationFinished != null)
                    IterationFinished(iteration + 1);
            }

            if (Average)
            {
                perceptronMsd.AverageWeights();
                perceptronLemma.AverageWeights();
            }

            perceptronMsd.RemoveInsignificantWeights(WeightThreshold);
            perceptronLemma.RemoveInsignificantWeights(WeightThreshold);
        }
Example #41
 internal static HandleRef getCPtr(Perceptron obj) {
   return (obj == null) ? new HandleRef(null, IntPtr.Zero) : obj.swigCPtr;
 }
Example #42
        static void TrainingTest(List<string> tags)
        {
            //const string modelFile = "../../data/gene.key.model";
            //const string input = "../../data/gene.key";

            const string modelFile = "../../data/training/tag.model";
            const string input = "../../data/training/NYT_19980403_parsed.key";
            var perceptron = new Perceptron(input, modelFile, tags);
            perceptron.Train();
        }