Example #1
        public int GetWinner(NormalizeInput input)
        {
            var winner = 0;

            if (Output == null)
            {
                Output = new double[OutputNeuronCount];
            }

            var biggest = Double.MinValue;

            for (var i = 0; i < OutputNeuronCount; i++)
            {
                // Score this output neuron: dot product of the normalized input
                // with the neuron's weight row, scaled by the normalization factor.
                Output[i] = MatrixMath.DotProduct(input.InputMatrix, OutputWeights.GetRow(i)) * input.NormalizationFactor;

                // Map the activation from the [-1, 1] range into [0, 1].
                Output[i] = (Output[i] + 1.0) / 2.0;

                // Track the neuron with the highest activation so far.
                if (Output[i] > biggest)
                {
                    biggest = Output[i];
                    winner  = i;
                }

                // Clamp the stored activation to the [0, 1] range.
                if (Output[i] < 0)
                {
                    Output[i] = 0;
                }
                if (Output[i] > 1)
                {
                    Output[i] = 1;
                }
            }
            return winner;
        }
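GetWinner scores every output neuron by taking the dot product of the normalized input with that neuron's weight row, rescales the activation from [-1, 1] into [0, 1], clamps it, and returns the index of the strongest neuron. A minimal calling sketch, assuming a trained network object that exposes this method together with NormalizationType and Output as in the code above (the network and pattern variable names are placeholders, not part of the original listing):

        // Usage sketch (assumed surrounding context): `network` is an instance of the
        // class that defines GetWinner above; `pattern` is one raw input vector.
        double[] pattern = { 0.2, 0.7, 0.1 };
        var input = new NormalizeInput(pattern, network.NormalizationType);
        var winner = network.GetWinner(input);       // index of the best-matching neuron
        var activation = network.Output[winner];     // activation mapped into [0, 1]
        Console.WriteLine($"winner = {winner}, activation = {activation:F3}");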
Example #2
        public int GetWinner(NormalizeInput input)
        {
            var winner = 0;
            if (Output == null)
                Output = new double[OutputNeuronCount];

            var biggest = Double.MinValue;
            for (var i = 0; i < OutputNeuronCount; i++)
            {
                Output[i] = MatrixMath.DotProduct(input.InputMatrix, OutputWeights.GetRow(i)) * input.NormalizationFactor;
                Output[i] = (Output[i] + 1.0) / 2.0;

                if (Output[i] > biggest)
                {
                    biggest = Output[i];
                    winner = i;
                }

                if (Output[i] < 0)
                    Output[i] = 0;
                if (Output[i] > 1)
                    Output[i] = 1;
            }
            return winner;
        }
        protected void ForceWin()
        {
            int which         = 0;
            var outputWeights = Network.OutputWeights;
            var dist          = Double.MaxValue;

            // Find the training pattern whose winning activation is the weakest.
            for (var tset = 0; tset < TrainingSet.Length; tset++)
            {
                var best = Network.GetWinner(TrainingSet[tset]);

                var output = Network.Output;
                if (output[best] < dist)
                {
                    dist  = output[best];
                    which = tset;
                }
            }

            var input = new NormalizeInput(TrainingSet[which], Network.NormalizationType);

            Network.GetWinner(input);
            var output2 = Network.Output;

            // Among the neurons that have never won, pick the one that responds
            // most strongly to that weakest pattern.
            dist = Double.MinValue;
            var i = _outputNeuronCount;

            while (i-- > 0)
            {
                if (_neuronWinCounts[i] != 0)
                {
                    continue;
                }

                if (output2[i] > dist)
                {
                    dist  = output2[i];
                    which = i;
                }
            }

            // Copy the pattern's normalized input into that neuron's weight row
            // and renormalize it, forcing the neuron to win for this pattern.
            for (var j = 0; j < input.InputMatrix.Columns; j++)
            {
                outputWeights[which, j] = input.InputMatrix[0, j];
            }

            NormalizeRowWeight(outputWeights, which);
        }
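ForceWin is a dead-neuron recovery step: it locates the training pattern whose winning activation is weakest, then hands that pattern to an output neuron that has never won by copying the pattern's normalized input straight into the neuron's weight row and renormalizing it. A hedged sketch of where such a call would typically sit inside the training class (the surrounding check and its variable names are assumptions, not shown in the original):

        // Sketch only: after an error-evaluation pass, force dead neurons to win.
        EvaluateErrors();
        var anyDeadNeuron = false;
        for (var n = 0; n < _neuronWinCounts.Length; n++)
        {
            if (_neuronWinCounts[n] == 0)
            {
                anyDeadNeuron = true;
                break;
            }
        }
        if (anyDeadNeuron)
        {
            ForceWin();   // reassign a never-winning neuron to the weakest pattern
        }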
        public void EvaluateErrors()
        {
            // Reset the accumulated corrections and the per-neuron win counts.
            _correctionMatrix.Clear();
            for (var i = 0; i < _neuronWinCounts.Length; i++)
            {
                _neuronWinCounts[i] = 0;
            }

            _globalError = 0.0;

            // loop through all training sets to determine correction
            for (var tset = 0; tset < TrainingSet.Length; tset++)
            {
                var input = new NormalizeInput(TrainingSet[tset], Network.NormalizationType);
                var best  = Network.GetWinner(input);

                _neuronWinCounts[best]++;
                var bestRow = Network.OutputWeights.GetRow(best);

                double diff;
                var    length = 0.0;
                for (var i = 0; i < _inputNeuronCount; i++)
                {
                    // Squared distance between the normalized input and the winning row.
                    diff    = TrainingSet[tset][i] * input.NormalizationFactor - bestRow[0, i];
                    length += diff * diff;
                    if (LearningMethod == LearningMethod.Subtractive)
                    {
                        // Subtractive: accumulate the raw difference as the correction.
                        _correctionMatrix.Add(best, i, diff);
                    }
                    else
                    {
                        // Additive: build a scaled copy of the input on top of the winning row.
                        _workMatrix[0, i] = LearningRate * TrainingSet[tset][i] * input.NormalizationFactor + bestRow[0, i];
                    }
                }

                // Handle the synthetic input component appended by the normalization.
                diff    = input.SyntheticInput - bestRow[0, _inputNeuronCount];
                length += diff * diff;

                if (LearningMethod == LearningMethod.Subtractive)
                {
                    _correctionMatrix.Add(best, _inputNeuronCount, diff);
                }
                else
                {
                    _workMatrix[0, _inputNeuronCount] = LearningRate * input.SyntheticInput + bestRow[0, _inputNeuronCount];
                }

                // Track the worst (largest) squared distance seen this pass.
                if (length > _globalError)
                {
                    _globalError = length;
                }

                if (LearningMethod == LearningMethod.Additive)
                {
                    // Additive: renormalize the work row, then accumulate its delta
                    // against the old winning row as the correction.
                    NormalizeRowWeight(_workMatrix, 0);
                    for (var i = 0; i <= _inputNeuronCount; i++)
                    {
                        _correctionMatrix.Add(best, i, _workMatrix[0, i] - bestRow[0, i]);
                    }
                }
            }
            _globalError = Math.Sqrt(_globalError);
        }
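EvaluateErrors performs one pass over the training set: each pattern is assigned to its winning neuron, a correction for that neuron's weight row is accumulated in _correctionMatrix (the raw difference for the Subtractive method, or the delta of a renormalized, learning-rate-scaled copy of the input for the Additive method), and the largest pattern-to-row squared distance is kept, with its square root reported as _globalError. A rough outer-loop sketch under the assumption that a separate step, not shown in these examples, later applies the accumulated corrections to the output weights:

        // Sketch only: iterate until the worst-case error is acceptable.
        // The step that folds _correctionMatrix back into Network.OutputWeights
        // is not part of the snippets above and is only indicated by a comment.
        const double acceptableError = 0.01;
        for (var epoch = 0; epoch < 1000; epoch++)
        {
            EvaluateErrors();                 // fills _correctionMatrix and _globalError
            if (_globalError < acceptableError)
                break;
            // ...apply the accumulated corrections to the output weights here...
        }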
        public void EvaluateErrors()
        {
            correctionMatrix.Clear();
            for (var i = 0; i < neuronWinCounts.Length; i++)
                neuronWinCounts[i] = 0;

            globalError = 0.0;

            // loop through all training sets to determine correction
            for (var tset = 0; tset < TrainingSet.Length; tset++)
            {
                var input = new NormalizeInput(TrainingSet[tset], Network.NormalizationType);
                var best = Network.GetWinner(input);

                neuronWinCounts[best]++;
                var bestRow = Network.OutputWeights.GetRow(best);

                double diff;
                var length = 0.0;
                for (var i = 0; i < inputNeuronCount; i++)
                {
                    diff = TrainingSet[tset][i] * input.NormalizationFactor - bestRow[0, i];
                    length += diff * diff;
                    if (LearningMethod == LearningMethod.Subtractive)
                        correctionMatrix.Add(best, i, diff);
                    else
                        workMatrix[0, i] = LearningRate * TrainingSet[tset][i] * input.NormalizationFactor + bestRow[0, i];
                }

                diff = input.SyntheticInput - bestRow[0, inputNeuronCount];
                length += diff * diff;

                if (LearningMethod == LearningMethod.Subtractive)
                    correctionMatrix.Add(best, inputNeuronCount, diff);
                else
                    workMatrix[0, inputNeuronCount] = LearningRate * input.SyntheticInput + bestRow[0, inputNeuronCount];

                if (length > globalError)
                    globalError = length;

                if (LearningMethod == LearningMethod.Additive)
                {
                    NormalizeRowWeight(workMatrix, 0);
                    for (var i = 0; i <= inputNeuronCount; i++)
                        correctionMatrix.Add(best, i, workMatrix[0, i] - bestRow[0, i]);
                }
            }
            globalError = Math.Sqrt(globalError);
        }
        protected void ForceWin()
        {
            int which = 0;
            var outputWeights = Network.OutputWeights;
            var dist = Double.MaxValue;
            for (var tset = 0; tset < TrainingSet.Length; tset++)
            {
                var best = Network.GetWinner(TrainingSet[tset]);

                var output = Network.Output;
                if (output[best] < dist)
                {
                    dist = output[best];
                    which = tset;
                }
            }

            var input = new NormalizeInput(TrainingSet[which], Network.NormalizationType);
            Network.GetWinner(input);
            var output2 = Network.Output;

            dist = Double.MinValue;
            var i = outputNeuronCount;
            while (i-- > 0)
            {
                if (neuronWinCounts[i] != 0)
                    continue;

                if (output2[i] > dist)
                {
                    dist = output2[i];
                    which = i;
                }
            }

            for (var j = 0; j < input.InputMatrix.Columns; j++)
                outputWeights[which, j] = input.InputMatrix[0, j];

            NormalizeRowWeight(outputWeights, which);
        }