コード例 #1
0
 /// <summary>
 /// Creates the operation set from its three collaborating functions.
 /// </summary>
 public AnnBasicOperations(IActivationFunction activationFunction,
                           IErrorFunction errorFunction, IOptimizerFunction optimizerFunction)
 {
     _optimizerFunction  = optimizerFunction;
     _errorFunction      = errorFunction;
     _activationFunction = activationFunction;
 }
コード例 #2
0
 /// <summary>
 /// Builds a network from an error function and its three neuron layers.
 /// </summary>
 public NeuralNetwork(IErrorFunction errorFunction, List <InputNeuron> inputLayer, List <HiddenNeuron> hiddenLayer, List <OutputNeuron> outputLayer)
 {
     ErrorFunction = errorFunction;

     _outputLayer = outputLayer;
     _hiddenLayer = hiddenLayer;
     _inputLayer  = inputLayer;
 }
コード例 #3
0
ファイル: NN.cs プロジェクト: Hundo1018/NN
 /// <summary>
 /// Initializes a network with the given regularization and error functions;
 /// layers and training data start out empty and are filled in later.
 /// </summary>
 public Network(IRegularizationFunction rf, IErrorFunction ef)
 {
     errorFunction          = ef;
     regularizationFunction = rf;

     layers    = new List <Layer>();
     TrainData = new List <List <List <double> > >();
 }
コード例 #4
0
ファイル: NetworkTrainer.cs プロジェクト: tinrab/Brain
 /// <summary>
 /// Creates a trainer bound to a network, an explicit error function, and the
 /// two training rates.
 /// </summary>
 public NetworkTrainer(Network network, double learningRate, double regularizationRate, IErrorFunction errorFunction)
 {
     _network = network;

     _errorFunction      = errorFunction;
     _regularizationRate = regularizationRate;
     _learningRate       = learningRate;
 }
コード例 #5
0
        /// <summary>
        /// Construct a gradient worker.
        /// </summary>
        /// <param name="theNetwork">The network to train.</param>
        /// <param name="theOwner">The owner that is doing the training.</param>
        /// <param name="theTraining">The training data.</param>
        /// <param name="theLow">The low index to use in the training data.</param>
        /// <param name="theHigh">The high index to use in the training data.</param>
        /// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
        /// <param name="ef">The error function used for the output delta.</param>
        public GradientWorker(FlatNetwork theNetwork,
                              Propagation theOwner, IMLDataSet theTraining,
                              int theLow, int theHigh, double[] theFlatSpots, IErrorFunction ef)
        {
            _network          = theNetwork;
            _owner            = theOwner;
            _training         = theTraining;
            _low              = theLow;
            _high             = theHigh;
            _flatSpot         = theFlatSpots;
            _ef               = ef;
            _errorCalculation = new ErrorCalculation();

            // Per-worker scratch buffers sized to the network being trained.
            _layerDelta = new double[theNetwork.LayerOutput.Length];
            _gradients  = new double[theNetwork.Weights.Length];
            _actual     = new double[theNetwork.OutputCount];

            // Cache the flat network's structural arrays for fast access.
            _weights         = theNetwork.Weights;
            _layerIndex      = theNetwork.LayerIndex;
            _layerCounts     = theNetwork.LayerCounts;
            _weightIndex     = theNetwork.WeightIndex;
            _layerOutput     = theNetwork.LayerOutput;
            _layerSums       = theNetwork.LayerSums;
            _layerFeedCounts = theNetwork.LayerFeedCounts;
        }
コード例 #6
0
        /// <summary>
        /// Creates a recurrent network trainer. All three rate slots are seeded
        /// with the same learning rate; dropout values start at 1.0 (presumably
        /// meaning "no dropout" — confirm against the dropout implementation).
        /// </summary>
        public RNN(
            int ephocs,
            int timeStepSize,
            int[] nodeLayer,
            double learningRate,
            double momentum,
            IFunction[] activationFunc,
            IErrorFunction errorFunc)
        {
            Ephocs         = ephocs;
            TimeStepSize   = timeStepSize;
            NodeLayer      = nodeLayer;
            Momentum       = momentum;
            ActivationFunc = activationFunc;
            ErrorFunc      = errorFunc;

            LearningRate = new[] { learningRate, learningRate, learningRate };
            DropoutValue = new[] { 1.0, 1.0, 1.0 };

            InitVariables();
        }
コード例 #7
0
ファイル: NetworkTrainer.cs プロジェクト: tinrab/Brain
 /// <summary>
 /// Creates a trainer that uses the squared-error function; both rates default
 /// to 0.1 when not supplied.
 /// </summary>
 public NetworkTrainer(Network network, double learningRate = 0.1, double regularizationRate = 0.1)
 {
     _network = network;

     _learningRate       = learningRate;
     _regularizationRate = regularizationRate;
     _errorFunction      = ErrorFunction.Square;
 }
コード例 #8
0
        /// <summary>
        /// Creates a feed-forward network trainer; every layer shares the same
        /// learning rate and starts with a dropout value of 1.0.
        /// </summary>
        public ANN(
            int ephocs,
            int[] nodeLayer,
            double learningRate,
            double momentum,
            IFunction[] activationFunc,
            IErrorFunction errorFunc)
        {
            int layerCount = nodeLayer.Length;

            Ephocs         = ephocs;
            Layer          = layerCount;
            NodeLayer      = nodeLayer;
            Momentum       = momentum;
            ActivationFunc = activationFunc;
            ErrorFunc      = errorFunc;

            LearningRate = new double[layerCount];
            DropoutValue = new double[layerCount];
            for (int layer = 0; layer < layerCount; layer++)
            {
                LearningRate[layer] = learningRate;
                DropoutValue[layer] = 1.0;
            }

            InitVariables();
        }
コード例 #9
0
        /// <summary>
        /// Runs one backpropagation pass over <paramref name="network"/>, accumulating
        /// error derivatives on every node and on every live input link.
        /// </summary>
        /// <param name="network">Layers of nodes; layer 0 is the input layer.</param>
        /// <param name="target">The desired output value.</param>
        /// <param name="errorFunc">Error function whose derivative seeds the output node.</param>
        public static void BackProp(List <List <Node> > network, double target, IErrorFunction errorFunc)
        {
            // The single output node is a special case: its derivative comes from
            // the user-defined error function.
            var outputNode = network[network.Count - 1][0];
            outputNode.OutputDer = errorFunc.Der(outputNode.Output, target);

            // Walk the layers backwards, stopping before the input layer.
            for (var layerIdx = network.Count - 1; layerIdx >= 1; layerIdx--)
            {
                var currentLayer = network[layerIdx];

                // 1) Derivative of the error with respect to each node's total input.
                foreach (var node in currentLayer)
                {
                    node.InputDer     = node.OutputDer * node.ActivationFunction.Der(node.TotalInput);
                    node.AccInputDer += node.InputDer;
                    node.NumAccumulatedDers++;
                }

                // 2) Derivative with respect to each live weight coming into the node.
                foreach (var node in currentLayer)
                {
                    foreach (var link in node.InputLinks)
                    {
                        if (link.IsDead)
                        {
                            continue;
                        }
                        link.ErrorDer     = node.InputDer * link.Source.Output;
                        link.AccErrorDer += link.ErrorDer;
                        link.NumAccumulatedDers++;
                    }
                }

                if (layerIdx == 1)
                {
                    continue;
                }

                // 3) Propagate the derivative back to the previous layer's outputs.
                foreach (var node in network[layerIdx - 1])
                {
                    node.OutputDer = 0;
                    foreach (var output in node.Outputs)
                    {
                        node.OutputDer += output.Weight * output.Dest.InputDer;
                    }
                }
            }
        }
コード例 #10
0
        /// <summary>
        /// Writes the type code and serialized parameters of an error function.
        /// Built-in functions get fixed codes 1-5; any other type must have been
        /// registered beforehand in <c>registered_functions_via_code</c>.
        /// </summary>
        /// <param name="error_func">The error function instance to serialize.</param>
        /// <exception cref="ArgumentException">
        /// The function's type is neither built in nor registered.
        /// </exception>
        /// <exception cref="InvalidOperationException">
        /// A registered serializer produced a parameter blob of the wrong length.
        /// </exception>
        public void Serialize(IErrorFunction error_func)
        {
            ushort      code;
            List <byte> parameters;

            switch (error_func)
            {
            case Errorest _:
                code       = 1;
                parameters = null;
                break;

            case Deprecated.ErrorStack e:
                code       = 2;
                parameters = new List <byte>();
                parameters.AddRange(BitConverter.GetBytes(e.IndexCount));
                break;

            case Classification _:
                code       = 3;
                parameters = null;
                break;

            case Tagging e:
                code       = 4;
                parameters = new List <byte>();
                parameters.AddRange(BitConverter.GetBytes(e.MinAccept));
                parameters.AddRange(BitConverter.GetBytes(e.MaxReject));
                break;

            case CrossEntropy _:
                code       = 5;
                parameters = null;
                break;

            default:
                var attr = GetAttribute(error_func.GetType());
                if (!registered_functions_via_code.ContainsKey(attr.code | ERROR_FUNCTION_SIGN))
                {
                    // BUG FIX: ArgumentException takes (message, paramName); the
                    // original call had the two arguments swapped.
                    throw new ArgumentException(
                              "this type of IErrorFunction is not registered.", nameof(error_func));
                }
                var serializer = registered_functions_via_code[attr.code | ERROR_FUNCTION_SIGN];
                code       = serializer.Code;
                parameters = new List <byte>();
                parameters.AddRange(serializer.Serialize(error_func) ?? new byte[0]);
                if (parameters.Count != serializer.ParameterLength)
                {
                    // Specific exception type instead of the bare System.Exception.
                    throw new InvalidOperationException("invalid parameters' length.");
                }
                break;
            }

            // Serialize the type code and parameters (null parameters mean "none").
            Serialize(code, parameters?.ToArray());
        }
コード例 #11
0
 /// <summary>
 /// Creates a snapshot of a network, validating the layers and error function
 /// via <see cref="CheckImageError"/> before storing anything.
 /// </summary>
 public NeuralNetworkImage(
     Layer[] layers, IErrorFunction error_fnc,
     IDataConvertor input_convertor, IDataConvertor output_convertor,
     IRegularization regularization)
 {
     // Throws on a malformed image; nothing is stored if validation fails.
     CheckImageError(layers, error_fnc);

     this.regularization   = regularization;
     this.output_convertor = output_convertor;
     this.input_convertor  = input_convertor;
     this.error_fnc        = error_fnc;
     this.layers           = layers;
 }
コード例 #12
0
        /// <summary>
        /// Validates a network image: layers must be non-null and non-empty, the
        /// error function must be set, and each layer's dimensions must line up
        /// with its neighbours.
        /// </summary>
        /// <param name="layers">The layers of the image under validation.</param>
        /// <param name="error_fnc">The error function of the image.</param>
        /// <exception cref="ArgumentNullException">layers or error_fnc is null.</exception>
        /// <exception cref="ArgumentException">layers is empty or dimensionally inconsistent.</exception>
        public static void CheckImageError(Layer[] layers, IErrorFunction error_fnc)
        {
            if (layers == null)
            {
                throw new ArgumentNullException(nameof(layers),
                                                "The nn-image's layers are undefined.");
            }

            if (layers.Length < 1)
            {
                // BUG FIX: ArgumentException takes (message, paramName); the
                // original call had the two arguments swapped.
                throw new ArgumentException("The nn-image's layers are empty.",
                                            nameof(layers));
            }

            if (error_fnc == null)
            {
                throw new ArgumentNullException(nameof(error_fnc),
                                                "The error function is undefined.");
            }

            // Collect every problem before throwing, so the caller sees all of them.
            string messages   = null;
            int?   prv_length = null;

            for (var i = 0; i < layers.Length; i++)
            {
                if (layers[i].Synapse.RowCount < 1)
                {
                    messages += $"\r\nThe number of neurons of layer {i} is zero.";
                }

                if (layers[i].Bias.Count != layers[i].Synapse.RowCount)
                {
                    messages += $"\r\nThe number of neuron biases of layer {i} is not correct.";
                }

                // Each layer's input width must match the previous layer's neuron count.
                if (prv_length != null && prv_length != layers[i].Synapse.ColumnCount)
                {
                    messages += $"\r\nThe number of synapses of layer {i} does not match the previous layer's neuron count.";
                }
                prv_length = layers[i].Synapse.RowCount;
            }

            if (messages != null)
            {
                throw new ArgumentException(messages, nameof(layers));
            }
        }
コード例 #13
0
ファイル: GradientWorker.cs プロジェクト: neismit/emds
 /// <summary>
 /// Construct a gradient worker. Deobfuscated: the original decompiled body used
 /// gotos and guards that are always true or always false (unsigned addition can
 /// never exceed uint.MaxValue, 0 == 0, 0xff != 0, 0 != 0); this is the
 /// equivalent straight-line initialization in the original execution order.
 /// </summary>
 public GradientWorker(FlatNetwork theNetwork, TrainFlatNetworkProp theOwner, IMLDataSet theTraining, int theLow, int theHigh, double[] theFlatSpots, IErrorFunction ef)
 {
     this._x84e81691256999b2 = new ErrorCalculation();
     this._x87a7fc6a72741c2e = theNetwork;
     this._x823a2b9c8bf459c5 = theTraining;
     this._xd12d1dba8a023d95 = theLow;
     this._x628ea9b89457a2a9 = theHigh;
     this._x071bde1041617fce = theOwner;
     this._x0ba854627e1326f9 = theFlatSpots;

     // Scratch buffers sized to the network.
     this._x58c3d5da5c5c72db = new double[theNetwork.LayerOutput.Length];
     this._xe05127febf8b7904 = new double[theNetwork.Weights.Length];
     this._xd505507cf33ae543 = new double[theNetwork.OutputCount];

     // Cached references into the flat network's arrays.
     this._x2f33d779e5a20b28 = theNetwork.Weights;
     this._xb25095f37f20a1c1 = theNetwork.LayerIndex;
     this._xe05f7b8f952f0ba4 = theNetwork.LayerCounts;
     this._x7d5bf19d36074a85 = theNetwork.WeightIndex;
     this._x5e72e5e601f79c78 = theNetwork.LayerOutput;
     this._x59e01312f2f4aa96 = theNetwork.LayerSums;
     this._xc99b49dd213196ca = theNetwork.LayerFeedCounts;
     this._x2cb049236d33bbda = ef;

     this._x61830ac74d65acc3 = BasicMLDataPair.CreatePair(theNetwork.InputCount, theNetwork.OutputCount);
 }
コード例 #14
0
        /// <summary>
        /// Builds the final image from the accumulated builder state, then clears
        /// every field so the state cannot leak into the next image.
        /// </summary>
        private NeuralNetworkImage CloseCurrentImage()
        {
            var image = new NeuralNetworkImage(
                layers.ToArray(),
                error_func, in_cvrt, out_cvrt, regularization);

            // Reset the builder for the next image.
            last_layer_input_count = 0;
            layers         = null;
            error_func     = null;
            in_cvrt        = null;
            out_cvrt       = null;
            regularization = null;

            return image;
        }
コード例 #15
0
        /// <summary>
        ///     Construct the gradient calculation class.
        /// </summary>
        /// <param name="theNetwork">The network to use.</param>
        /// <param name="ef">The error function to use.</param>
        /// <param name="theOwner">The owner (usually a trainer).</param>
        public GradientCalc(BasicNetwork theNetwork,
                            IErrorFunction ef, IGradientCalcOwner theOwner)
        {
            _network      = theNetwork;
            _owner        = theOwner;
            errorFunction = ef;

            // Working buffers sized to the network.
            _layerDelta = new double[theNetwork.LayerOutput.Length];
            _gradients  = new double[theNetwork.Weights.Length];
            _actual     = new double[theNetwork.OutputCount];

            // Direct references into the network's arrays.
            _weights     = theNetwork.Weights;
            _layerOutput = theNetwork.LayerOutput;
            _layerSums   = theNetwork.LayerSums;
        }
コード例 #16
0
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
        {
            _network   = network;
            _flat      = network.Flat;
            _training  = training;
            _indexable = training;

            // One gradient slot per weight, plus the previous iteration's gradients.
            var weightCount = _flat.Weights.Length;
            Gradients     = new double[weightCount];
            _lastGradient = new double[weightCount];

            _numThreads        = 0;
            _reportedException = null;

            // Defaults: flat-spot correction on, linear error function.
            FixFlatSpot   = true;
            ErrorFunction = new LinearErrorFunction();
        }
コード例 #17
0
        /// <summary>
        /// Train a flat network multithreaded.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data to use.</param>
        protected TrainFlatNetworkProp(FlatNetwork network,
                                       IMLDataSet training)
        {
            _network   = network;
            _training  = training;
            _indexable = training;

            // Current and previous gradient buffers, one slot per weight.
            var weightCount = network.Weights.Length;
            Gradients     = new double[weightCount];
            _lastGradient = new double[weightCount];

            _numThreads        = 0;
            _reportedException = null;

            // Defaults: flat-spot correction enabled, linear error function.
            FixFlatSpot   = true;
            ErrorFunction = new LinearErrorFunction();
        }
コード例 #18
0
ファイル: TestUtils.cs プロジェクト: lcfcosta/machine.academy
        /// <summary>
        /// Trains a fixed-topology network on synthetic one-hot data and compares
        /// the resulting output for a fixed test vector against a reference.
        /// </summary>
        public static void TestTraining(Network network, float[] referenceOutput, IErrorFunction errorFunc, TrainingSuite.TrainingConfig.Regularization regularization, float regularizationLambda, float learningRate)
        {
            var layerConfig = new List <int> { 5, 33, 12, 51, 5 };

            #region Training
            var trainingData = new List <TrainingSuite.TrainingData>();
            for (int i = 0; i < 1000; i++)
            {
                var input         = new float[layerConfig[0]];
                var desiredOutput = new float[layerConfig[layerConfig.Count - 1]];

                // One-hot input with the matching one-hot desired output.
                int hot = (i * 13426) % 5;
                input[hot]         = 1.0f;
                desiredOutput[hot] = 1.0f;

                trainingData.Add(new TrainingSuite.TrainingData(input, desiredOutput));
            }

            var suite = new TrainingSuite(trainingData);
            suite.config.epochs = 2;
            suite.config.shuffleTrainingData = false;
            suite.config.miniBatchSize       = 13;

            suite.config.costFunction         = errorFunc;
            suite.config.regularization       = regularization;
            suite.config.regularizationLambda = regularizationLambda;
            suite.config.learningRate         = learningRate;

            var promise = network.Train(suite, ComputeDeviceFactory.CreateFallbackComputeDevice());
            promise.Await();
            #endregion

            var testInput = new float[] { 0.3f, 0.4f, 0.6f, 0.1f, 0.5f };
            var result    = network.Compute(testInput, ComputeDeviceFactory.CreateFallbackComputeDevice());

            Utils.CheckNetworkError(referenceOutput, result);
        }
コード例 #19
0
        /// <summary>
        /// Creates a feed-forward network trainer whose per-layer learning rates
        /// and dropout values are supplied directly by the caller.
        /// </summary>
        public ANN(
            int ephocs,
            int[] nodeLayer,
            double[] learningRate,
            double momentum,
            double[] dropoutValue,
            IFunction[] activationFunc,
            IErrorFunction errorFunc)
        {
            NodeLayer      = nodeLayer;
            Layer          = nodeLayer.Length;
            Ephocs         = ephocs;
            LearningRate   = learningRate;
            DropoutValue   = dropoutValue;
            Momentum       = momentum;
            ActivationFunc = activationFunc;
            ErrorFunc      = errorFunc;

            InitVariables();
        }
コード例 #20
0
        /// <summary>
        /// Maps the three enum selectors onto concrete function implementations
        /// and wraps them in an <c>AnnBasicOperations</c>. An unrecognized enum
        /// value yields null for that slot, exactly as the original switches did.
        /// </summary>
        public ISupervisedOperations SupervisedOperations(EActivationFunction act,
                                                          EErrorFunction err,
                                                          EOptimizerFunction opt)
        {
            IActivationFunction activationFunction = act switch
            {
                EActivationFunction.Sigmoid  => (IActivationFunction) new SigmoidFunction(),
                EActivationFunction.LeakRelu => new LeakReluFunction(),
                _ => null,
            };

            IErrorFunction errorFunction = err switch
            {
                EErrorFunction.Dense   => (IErrorFunction) new DenseErrorFunction(),
                EErrorFunction.Desired => new DesiredErrorFunction(),
                _ => null,
            };

            IOptimizerFunction optimizerFunction = opt switch
            {
                EOptimizerFunction.SGD => (IOptimizerFunction) new SGDOptimizerFunction(),
                _ => null,
            };

            return new AnnBasicOperations(activationFunction,
                                          errorFunction,
                                          optimizerFunction);
        }
コード例 #21
0
        /// <summary>
        /// Sets the error function (and optional regularization) used to correct
        /// the network during training.
        /// </summary>
        /// <param name="error_func">The error function; must not be null.</param>
        /// <param name="regularization">Optional regularization; may be null.</param>
        /// <returns>This initializer, for call chaining.</returns>
        /// <exception cref="InvalidOperationException">Layers have not been configured yet.</exception>
        /// <exception cref="ArgumentNullException">error_func is null.</exception>
        public NeuralNetworkInitializer SetCorrection(
            IErrorFunction error_func, IRegularization regularization = null)
        {
            // Specific exception types instead of the bare System.Exception the
            // original threw; callers catching Exception still catch these.
            if (layers == null)
            {
                throw new InvalidOperationException("The layers input is not set yet.");
            }

            if (layers.Count < 1)
            {
                throw new InvalidOperationException("The layers output is not set yet.");
            }

            this.error_func = error_func ??
                              throw new ArgumentNullException(nameof(error_func),
                                                              "The error function is undefined.");

            last_layer_input_count = -1;
            this.regularization    = regularization;

            return(this);
        }
コード例 #22
0
        /// <summary>
        /// Creates a recurrent network trainer with per-layer learning rates and
        /// dropout values supplied directly by the caller, plus a BPTT truncation
        /// depth.
        /// </summary>
        public RNN(
            int ephocs,
            int timeStepSize,
            int bpttTruncate,
            int[] nodeLayer,
            double[] learningRate,
            double momentum,
            double[] dropoutValue,
            IFunction[] activationFunc,
            IErrorFunction errorFunc)
        {
            Ephocs         = ephocs;
            TimeStepSize   = timeStepSize;
            BpttTruncate   = bpttTruncate;
            NodeLayer      = nodeLayer;
            LearningRate   = learningRate;
            DropoutValue   = dropoutValue;
            Momentum       = momentum;
            ActivationFunc = activationFunc;
            ErrorFunc      = errorFunc;

            InitVariables();
        }
コード例 #23
0
        /// <summary>
        /// Construct a gradient worker.
        /// </summary>
        /// <param name="theNetwork">The network to train.</param>
        /// <param name="theOwner">The owner that is doing the training.</param>
        /// <param name="theTraining">The training data.</param>
        /// <param name="theLow">The low index to use in the training data.</param>
        /// <param name="theHigh">The high index to use in the training data.</param>
        /// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
        /// <param name="ef">The error function used for the output delta.</param>
        public GradientWorker(FlatNetwork theNetwork,
                                 TrainFlatNetworkProp theOwner, IMLDataSet theTraining,
                                 int theLow, int theHigh, double[] theFlatSpots, IErrorFunction ef)
        {
            _errorCalculation = new ErrorCalculation();
            _ef       = ef;
            _owner    = theOwner;
            _network  = theNetwork;
            _training = theTraining;
            _low      = theLow;
            _high     = theHigh;
            _flatSpot = theFlatSpots;

            // Per-worker scratch buffers.
            _layerDelta = new double[theNetwork.LayerOutput.Length];
            _gradients  = new double[theNetwork.Weights.Length];
            _actual     = new double[theNetwork.OutputCount];

            // Cached structural arrays of the flat network.
            _weights         = theNetwork.Weights;
            _layerIndex      = theNetwork.LayerIndex;
            _layerCounts     = theNetwork.LayerCounts;
            _weightIndex     = theNetwork.WeightIndex;
            _layerOutput     = theNetwork.LayerOutput;
            _layerSums       = theNetwork.LayerSums;
            _layerFeedCounts = theNetwork.LayerFeedCounts;

            // Reusable data pair sized for this network's input/output widths.
            _pair = BasicMLDataPair.CreatePair(theNetwork.InputCount,
                                               theNetwork.OutputCount);
        }
コード例 #24
0
        /// <summary>
        /// Runs one forward pass through every layer, then backpropagates the error
        /// signal and returns the scalar error for this example.
        /// </summary>
        /// <param name="input">Input vector; its length must equal InputSize.</param>
        /// <param name="expectedOutput">Target vector; its length must equal OutputSize.</param>
        /// <param name="actualOutput">
        /// Optional buffer that receives the final layer's output; when null a
        /// scratch buffer from ArraySource is used for the last layer as well.
        /// </param>
        /// <param name="learningRate">Step size passed to each layer's Train call.</param>
        /// <param name="errorFunction">Produces the error value and the output error signal.</param>
        /// <returns>The error reported by <paramref name="errorFunction"/>.</returns>
        /// <exception cref="ArgumentException">A vector has the wrong length.</exception>
        /// <exception cref="InvalidOperationException">The final vector size disagrees with OutputSize.</exception>
        public double Train(double[] input, double[] expectedOutput, double[]?actualOutput, double learningRate, IErrorFunction errorFunction)
        {
            if (input.Length != InputSize)
            {
                throw new ArgumentException("Expected vector to be of size " + InputSize + ".", nameof(input));
            }
            if (expectedOutput.Length != OutputSize)
            {
                throw new ArgumentException("Expected vector to be of size " + OutputSize + ".", nameof(expectedOutput));
            }

            // vectors[0] is the input; vectors[i + 1] holds the output of layer i.
            var vectors = new double[Layers.Count + 1][];

            vectors[0] = input;

            for (int i = 0; i < Layers.Count; i++)
            {
                var layer = Layers[i];

                // The caller's actualOutput buffer (when given) backs the last layer's output.
                if (actualOutput == null || i != Layers.Count - 1)
                {
                    vectors[i + 1] = ArraySource(layer.OutputSize);
                }
                else
                {
                    vectors[i + 1] = actualOutput;
                }

                layer.Evaluate(vectors[i], vectors[i + 1]);
            }

            var output = vectors[vectors.Length - 1];

            if (output.Length != OutputSize)
            {
                throw new InvalidOperationException("Internal error.");
            }

            // Error value plus its derivative with respect to the network output.
            var outputErrorSignal = ArraySource(OutputSize);
            var error             = errorFunction.Derivative(expectedOutput, output, outputErrorSignal);

            // Backward pass: each layer consumes the error signal at its output and
            // produces the signal for the layer before it.
            for (int i = Layers.Count - 1; i >= 0; i--)
            {
                var layer            = Layers[i];
                var inputErrorSignal = ArraySource(layer.InputSize);
                layer.Train(vectors[i], vectors[i + 1], outputErrorSignal, inputErrorSignal, learningRate);
                outputErrorSignal = inputErrorSignal;
            }

            return(error);
        }
コード例 #25
0
        /// <summary>
        /// Sets the error function and returns the builder for call chaining.
        /// </summary>
        public NeuralNetworkBuilder SetErrorFunction(IErrorFunction errorFunction)
        {
            _errorFunction = errorFunction;
            return this;
        }
コード例 #26
0
 /// <summary>
 /// Stores this node's delta: the error derivative at the output multiplied by
 /// the activation derivative at the net (pre-activation) output.
 /// </summary>
 /// <param name="errorFunction">Supplies the error derivative.</param>
 /// <param name="expectedOutput">The target value for this node.</param>
 public void CalculateDeltaError(IErrorFunction errorFunction, double expectedOutput)
 {
     _deltaError = errorFunction.InvokeForDerivate(Output, expectedOutput) * _activationFunction.InvokeForDerivative(NetOutput);
 }
コード例 #27
0
 /// <summary>
 /// Stores this node's error as reported by the error function for the node's
 /// current output against the expected value.
 /// </summary>
 /// <param name="errorFunction">Supplies the error value.</param>
 /// <param name="expectedOutput">The target value for this node.</param>
 public void CalculateError(IErrorFunction errorFunction, double expectedOutput)
 {
     _error = errorFunction.Invoke(Output, expectedOutput);
 }
コード例 #28
0
ファイル: TestUtils.cs プロジェクト: lcfcosta/machine.academy
        /// <summary>
        /// Trains two identical networks — one on the CPU fallback device, one on
        /// the first OpenCL device — with the same training suite, then checks
        /// that both produce the same output for a fixed test input.
        /// </summary>
        /// <param name="errorFunc">Cost function for the training suite.</param>
        /// <param name="regularization">Regularization mode for the suite.</param>
        /// <param name="regularizationLambda">Regularization strength.</param>
        /// <param name="learningRate">Learning rate for the suite.</param>
        public static void TestOpenCLTrainingWithConfig(IErrorFunction errorFunc, TrainingSuite.TrainingConfig.Regularization regularization, float regularizationLambda, float learningRate)
        {
            List <int> layerConfig = new List <int>();

            layerConfig.Add(10);
            layerConfig.Add(512);
            layerConfig.Add(12);
            layerConfig.Add(3);
            layerConfig.Add(51);
            layerConfig.Add(30);

            // Both trained networks start from the same random weights via JSON round-trip.
            Network networkReference     = Network.CreateNetworkInitRandom(layerConfig.ToArray(), new SigmoidActivation());
            var     jsonData             = networkReference.ExportToJSON();
            Network networkCpuTrained    = Network.CreateNetworkFromJSON(jsonData);
            Network networkOpenCLTrained = Network.CreateNetworkFromJSON(jsonData);

            var cpuCalculator    = ComputeDeviceFactory.CreateFallbackComputeDevice();
            var openCLCalculator = GetFirstOpenCLDevice();

            var rnd = new Random();
            List <TrainingSuite.TrainingData> trainingData = new List <TrainingSuite.TrainingData>();

            for (int i = 0; i < 1000; i++)
            {
                float[] input  = new float[layerConfig[0]];
                float[] output = new float[layerConfig[layerConfig.Count - 1]];

                var idx = rnd.Next(0, input.Length);
                // NOTE(review): the hot input index is drawn again here, so it is
                // usually different from idx, which drives the output pattern below.
                // Confirm whether input[idx] = 1.0f was intended instead.
                input[rnd.Next(0, input.Length)] = 1.0f;

                for (int j = 0; j < 10; j++)
                {
                    output[j * 3 + 0] = idx * 0.1f;
                    output[j * 3 + 1] = 1.0f - (idx * 0.1f);
                    output[j * 3 + 2] = idx * 0.05f;
                }

                trainingData.Add(new TrainingSuite.TrainingData(input, output));
            }

            TrainingSuite suite = new TrainingSuite(trainingData);

            suite.config.epochs = 1;
            suite.config.shuffleTrainingData = false;
            suite.config.miniBatchSize       = 13;

            suite.config.costFunction         = errorFunc;
            suite.config.regularization       = regularization;
            suite.config.regularizationLambda = regularizationLambda;
            suite.config.learningRate         = learningRate;

            var promise1 = networkCpuTrained.Train(suite, cpuCalculator);
            var promise2 = networkOpenCLTrained.Train(suite, openCLCalculator);

            promise1.Await();
            promise2.Await();

            Assert.IsTrue(promise1.IsReady() && promise2.IsReady());

            float[] testInput = new float[layerConfig[0]];

            // Both outputs are computed on the CPU device, so only the trained
            // weights differ between the two networks.
            var cpuTrainedOutput    = networkCpuTrained.Compute(testInput, cpuCalculator);
            var openCLTrainedOutput = networkOpenCLTrained.Compute(testInput, cpuCalculator);

            CheckNetworkError(cpuTrainedOutput, openCLTrainedOutput);
        }
コード例 #29
0
        /// <summary>
        /// Train a flat network multithreaded.
        /// </summary>
        /// <param name="network">The network to train.</param>
        /// <param name="training">The training data to use.</param>
        protected TrainFlatNetworkProp(FlatNetwork network,
                                    IMLDataSet training)
        {
            _network = network;
            _training = training;
            _indexable = training;

            // Current and previous gradients, one slot per weight.
            int weightCount = network.Weights.Length;
            Gradients = new double[weightCount];
            _lastGradient = new double[weightCount];

            _numThreads = 0;
            _reportedException = null;

            // Defaults: flat-spot correction enabled, linear error function.
            FixFlatSpot = true;
            ErrorFunction = new LinearErrorFunction();
        }
コード例 #30
0
ファイル: Propagation.cs プロジェクト: jongh0/MTree
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training) : base(TrainingImplementationType.Iterative)
        {
            _network = network;
            _flat = network.Flat;
            _training = training;
            _indexable = training;
            _numThreads = 0;
            _reportedException = null;

            // One gradient slot per weight, plus the previous iteration's gradients.
            Gradients = new double[_flat.Weights.Length];
            _lastGradient = new double[_flat.Weights.Length];

            // Defaults: flat-spot correction on, linear error function.
            FixFlatSpot = true;
            ErrorFunction = new LinearErrorFunction();
        }
コード例 #31
0
ファイル: TestUtils.cs プロジェクト: zbendefy/machine.academy
        /// <summary>
        /// Trains two identical networks — one on the CPU fallback device, one on
        /// the first OpenCL device — with the same suite, then checks both produce
        /// the same output for a fixed test input. Optionally alternates ReLU with
        /// sigmoid activations across layers.
        /// </summary>
        /// <param name="errorFunc">Cost function for the training suite.</param>
        /// <param name="regularization">Regularization mode for the suite.</param>
        /// <param name="regularizationLambda">Regularization strength.</param>
        /// <param name="learningRate">Learning rate for the suite.</param>
        /// <param name="mix_activations">When true, every other layer uses ReLU instead of sigmoid.</param>
        public static void TestOpenCLTrainingWithConfig(IErrorFunction errorFunc, TrainingConfig.Regularization regularization, float regularizationLambda, float learningRate, bool mix_activations = false)
        {
            IActivationFunction alternateActivation = new SigmoidActivation();

            if (mix_activations)
            {
                alternateActivation = new ReLUActivation();
            }

            int input_neurons = 10;
            var layer_config  = new List <Tuple <IActivationFunction, int> >();

            layer_config.Add(new Tuple <IActivationFunction, int>(new SigmoidActivation(), 512));
            layer_config.Add(new Tuple <IActivationFunction, int>(alternateActivation, 12));
            layer_config.Add(new Tuple <IActivationFunction, int>(new SigmoidActivation(), 3));
            layer_config.Add(new Tuple <IActivationFunction, int>(alternateActivation, 51));
            layer_config.Add(new Tuple <IActivationFunction, int>(new SigmoidActivation(), 30));

            // Both trained networks start from the same random weights via JSON round-trip.
            Network networkReference     = Network.CreateNetworkInitRandom(input_neurons, layer_config);
            var     jsonData             = networkReference.ExportToJSON();
            Network networkCpuTrained    = Network.CreateNetworkFromJSON(jsonData);
            Network networkOpenCLTrained = Network.CreateNetworkFromJSON(jsonData);

            var cpuCalculator    = ComputeDeviceFactory.CreateFallbackComputeDevice();
            var openCLCalculator = GetFirstOpenCLDevice();

            var rnd = new Random();
            List <TrainingSuite.TrainingData> trainingData = new List <TrainingSuite.TrainingData>();

            for (int i = 0; i < 1000; i++)
            {
                float[] input  = new float[input_neurons];
                float[] output = new float[layer_config.Last().Item2];

                var idx = rnd.Next(0, input.Length);
                // NOTE(review): the hot input index is drawn again here, so it is
                // usually different from idx, which drives the output pattern below.
                // Confirm whether input[idx] = 1.0f was intended instead.
                input[rnd.Next(0, input.Length)] = 1.0f;

                for (int j = 0; j < 10; j++)
                {
                    output[j * 3 + 0] = idx * 0.1f;
                    output[j * 3 + 1] = 1.0f - (idx * 0.1f);
                    output[j * 3 + 2] = idx * 0.05f;
                }

                trainingData.Add(new TrainingSuite.TrainingData(input, output));
            }

            TrainingSuite suite = new TrainingSuite(trainingData);

            suite.config.epochs = 1;
            suite.config.shuffleTrainingData = false;
            suite.config.miniBatchSize       = 13;

            suite.config.costFunction         = errorFunc;
            suite.config.regularization       = regularization;
            suite.config.regularizationLambda = regularizationLambda;
            suite.config.learningRate         = learningRate;

            var promise1 = networkCpuTrained.Train(suite, cpuCalculator);
            var promise2 = networkOpenCLTrained.Train(suite, openCLCalculator);

            promise1.Await();
            promise2.Await();

            Assert.IsTrue(promise1.IsReady() && promise2.IsReady());

            float[] testInput = new float[input_neurons];

            // Both outputs are computed on the CPU device, so only the trained
            // weights differ between the two networks.
            var cpuTrainedOutput    = networkCpuTrained.Compute(testInput, cpuCalculator);
            var openCLTrainedOutput = networkOpenCLTrained.Compute(testInput, cpuCalculator);

            ValidateFloatArray(cpuTrainedOutput, openCLTrainedOutput);
        }
コード例 #32
0
ファイル: GradientCalc.cs プロジェクト: legendvijay/aifh
        /// <summary>
        ///     Construct the gradient calculation class.
        /// </summary>
        /// <param name="theNetwork">The network to use.</param>
        /// <param name="ef">The error function to use.</param>
        /// <param name="theOwner">The owner (usually a trainer).</param>
        public GradientCalc(BasicNetwork theNetwork,
            IErrorFunction ef, IGradientCalcOwner theOwner)
        {
            errorFunction = ef;
            _owner        = theOwner;
            _network      = theNetwork;

            // Working buffers sized to the network.
            _actual     = new double[theNetwork.OutputCount];
            _gradients  = new double[theNetwork.Weights.Length];
            _layerDelta = new double[theNetwork.LayerOutput.Length];

            // Direct references into the network's arrays.
            _weights     = theNetwork.Weights;
            _layerOutput = theNetwork.LayerOutput;
            _layerSums   = theNetwork.LayerSums;
        }