Example #1
        /// <summary>
        /// Clone into the flat network passed in.
        /// </summary>
        ///
        /// <param name="result">The network to copy into.</param>
        public void CloneFlatNetwork(FlatNetwork result)
        {
            result._inputCount          = _inputCount;
            result._layerCounts         = EngineArray.ArrayCopy(_layerCounts);
            result._layerIndex          = EngineArray.ArrayCopy(_layerIndex);
            result._layerOutput         = EngineArray.ArrayCopy(_layerOutput);
            result._layerSums           = EngineArray.ArrayCopy(_layerSums);
            result._layerFeedCounts     = EngineArray.ArrayCopy(_layerFeedCounts);
            result._contextTargetOffset = EngineArray
                                          .ArrayCopy(_contextTargetOffset);
            result._contextTargetSize = EngineArray
                                        .ArrayCopy(_contextTargetSize);
            result._layerContextCount = EngineArray
                                        .ArrayCopy(_layerContextCount);
            result._biasActivation = EngineArray.ArrayCopy(_biasActivation);
            result._outputCount    = _outputCount;
            result._weightIndex    = _weightIndex;
            result._weights        = _weights;

            result._activationFunctions = new IActivationFunction[_activationFunctions.Length];
            for (int i = 0; i < result._activationFunctions.Length; i++)
            {
                result._activationFunctions[i] = (IActivationFunction)_activationFunctions[i].Clone();
            }

            result._beginTraining = _beginTraining;
            result._endTraining   = _endTraining;
        }
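Note that CloneFlatNetwork deep-copies the layer, context, and bias arrays but assigns _weights by reference, so the clone initially shares its weight array with the source. A minimal usage sketch (the network layout here is hypothetical):

    // Hypothetical setup: clone a small randomized network.
    var source = new FlatNetwork(2, 3, 0, 1, true);
    source.Randomize();

    var copy = new FlatNetwork();
    source.CloneFlatNetwork(copy);
    // copy owns fresh layer/context arrays, but (per the code above)
    // still points at the same weight array as source.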
Example #2
        /// <summary>
        /// Clone the network.
        /// </summary>
        ///
        /// <returns>A clone of the network.</returns>
        public virtual Object Clone()
        {
            var result = new FlatNetwork();

            CloneFlatNetwork(result);
            return result;
        }
Example #3
 public TrainFlatNetworkBackPropagation(FlatNetwork network, IMLDataSet training, double theLearningRate, double theMomentum)
     : base(network, training)
 {
     // Obfuscated field names replaced with descriptive guesses.
     this._momentum = theMomentum;
     this._learningRate = theLearningRate;
     this._lastDelta = new double[network.Weights.Length];
 }
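A usage sketch, assuming network and trainingSet already exist; the learning rate and momentum values are arbitrary:

    // Hypothetical usage: plain back-propagation with momentum.
    var train = new TrainFlatNetworkBackPropagation(network, trainingSet, 0.7, 0.3);
    train.Iteration(); // one training epoch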
Example #4
        /// <summary>
        /// Program entry point.
        /// </summary>
        /// <param name="app">Holds arguments and other info.</param>
        public void Execute(IExampleInterface app)
        {
            var network = new FlatNetwork(2, 4, 0, 1, false);
            network.Randomize();

            IMLDataSet trainingSet = new BasicMLDataSet(XORInput, XORIdeal);


            var train = new TrainFlatNetworkResilient(network, trainingSet);

            int epoch = 1;

            do
            {
                train.Iteration();
                Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
                epoch++;
            } while (train.Error > 0.01);

            var output = new double[1];
            // test the neural network
            Console.WriteLine(@"Neural Network Results:");
            foreach (IMLDataPair pair in trainingSet)
            {
                double[] input = pair.Input.Data;
                network.Compute(input, output);
                Console.WriteLine(input[0] + @"," + input[1] + @":" + output[0]);
            }
        }
Example #5
 public TrainFlatNetworkQPROP(FlatNetwork network, IMLDataSet training, double theLearningRate)
     : base(network, training)
 {
     this.LearningRate = theLearningRate;
     this.LastDelta = new double[base.Network.Weights.Length];
     this.Decay = 0.0001;
     this.OutputEpsilon = 0.35;
 }
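Quickprop needs only a learning rate; the constructor seeds Decay at 0.0001 and OutputEpsilon at 0.35. A usage sketch, assuming network and trainingSet already exist:

    // Hypothetical usage: QPROP with an arbitrary learning rate.
    var train = new TrainFlatNetworkQPROP(network, trainingSet, 2.0);
    train.Iteration();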
Example #6
 public TrainFlatNetworkSCG(FlatNetwork network, IMLDataSet training)
     : base(network, training)
 {
     // Reconstructed from obfuscated decompiler output: the goto-based
     // control flow and opaque predicates reduce to straight-line code.
     // Field names are descriptive guesses for the obfuscated identifiers.
     this._success = true;
     this._delta = 0.0;
     this._lambda = 1E-06;
     this._lambda2 = 0.0;
     this._oldError = 0.0;
     this._magP = 0.0;
     this._restart = false;

     this._weights = EngineArray.ArrayCopy(network.Weights);
     int num = this._weights.Length;
     this._oldWeights = new double[num];
     this._oldGradient = new double[num];
     this._p = new double[num];
     this._r = new double[num];
     this._mustInit = true;
 }
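Unlike the back-propagation and QPROP trainers, SCG takes no learning rate; the constructor only copies the weights and allocates scratch vectors sized to the weight count. A usage sketch, assuming network and trainingSet already exist:

    // Hypothetical usage: SCG is parameter-free apart from the data.
    var train = new TrainFlatNetworkSCG(network, trainingSet);
    train.Iteration();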
Example #7
        /// <inheritdoc/>
        public virtual void Init(BasicNetwork theNetwork, IMLDataSet theTraining)
        {

            int weightCount = theNetwork.Structure.Flat.Weights.Length;
            _flat = theNetwork.Flat;
            _training = theTraining;
            _network = theNetwork;
            _gradients = new double[weightCount];
            _hessianMatrix = new Matrix(weightCount, weightCount);
            _hessian = _hessianMatrix.Data;
        }
Example #8
 /// <inheritdoc/>
 public virtual void Init(BasicNetwork theNetwork, IMLDataSet theTraining)
 {
     int weightCount = theNetwork.Structure.Flat.Weights.Length;
     flat = theNetwork.Flat;
     training = theTraining;
     network = theNetwork;
     gradients = new double[weightCount];
     hessianMatrix = new Matrix(weightCount, weightCount);
     hessian = hessianMatrix.Data;
     derivative = new double[weightCount];
 }
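The Hessian is allocated as a weightCount x weightCount matrix, so memory grows quadratically in the number of weights. A quick arithmetic sketch (the 2-4-1 layout is hypothetical):

    // For a hypothetical 2-4-1 network with bias neurons:
    // weightCount = (2+1)*4 + (4+1)*1 = 17, so the Hessian
    // holds 17 * 17 = 289 doubles.
    int weightCount = theNetwork.Structure.Flat.Weights.Length;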
Example #9
        public static void Evaluate(FlatNetwork network, IMLDataSet trainingSet)
        {
            double[] output = new double[1];
            foreach (IMLDataPair pair in trainingSet)
            {
                network.Compute(pair.Input.Data, output);
                Console.WriteLine(pair.Input[0] + @"," + pair.Input[1]
                        + @", actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
            }
        }
Example #10
        /// <summary>
        /// Construct a cross validation trainer.
        /// </summary>
        ///
        /// <param name="train">The training</param>
        /// <param name="k">The number of folds.</param>
        public CrossValidationKFold(IMLTrain train, int k) : base(train.Method, (FoldedDataSet) train.Training)
        {
            _train = train;
            Folded.Fold(k);

            _flatNetwork = ((BasicNetwork) train.Method).Structure.Flat;

            _networks = new NetworkFold[k];
            for (int i = 0; i < _networks.Length; i++)
            {
                _networks[i] = new NetworkFold(_flatNetwork);
            }
        }
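The cast in the base-constructor call means the wrapped trainer must already be training against a FoldedDataSet. A setup sketch, assuming network and trainingSet exist; the inner trainer and fold count are arbitrary:

    // Hypothetical setup: fold the data, wrap any IMLTrain, then cross-validate.
    var folded = new FoldedDataSet(trainingSet);
    IMLTrain inner = new Backpropagation(network, folded);
    var kfold = new CrossValidationKFold(inner, 4);
    kfold.Iteration();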
Example #11
 public GradientWorker(FlatNetwork theNetwork, TrainFlatNetworkProp theOwner, IMLDataSet theTraining, int theLow, int theHigh, double[] theFlatSpots, IErrorFunction ef)
 {
     // Reconstructed from obfuscated decompiler output; field names are
     // descriptive guesses based on the clean version in Example #32.
     this._errorCalculation = new ErrorCalculation();
     this._network = theNetwork;
     this._training = theTraining;
     this._low = theLow;
     this._high = theHigh;
     this._owner = theOwner;
     this._flatSpot = theFlatSpots;

     this._layerDelta = new double[this._network.LayerOutput.Length];
     this._gradients = new double[this._network.Weights.Length];
     this._actual = new double[this._network.OutputCount];

     this._weights = this._network.Weights;
     this._layerIndex = this._network.LayerIndex;
     this._layerCounts = this._network.LayerCounts;
     this._weightIndex = this._network.WeightIndex;
     this._layerOutput = this._network.LayerOutput;
     this._layerSums = this._network.LayerSums;
     this._layerFeedCounts = this._network.LayerFeedCounts;
     this._ef = ef;

     this._pair = BasicMLDataPair.CreatePair(this._network.InputCount, this._network.OutputCount);
 }
Example #12
 public TrainFlatNetworkResilient(FlatNetwork network, IMLDataSet training, double zeroTolerance, double initialUpdate, double maxStep)
     : base(network, training)
 {
     // Opaque decompiler predicates removed; field names are
     // descriptive guesses for the obfuscated identifiers.
     this._updateValues = new double[network.Weights.Length];
     this._zeroTolerance = zeroTolerance;
     this._maxStep = maxStep;
     this._lastGradient = new double[base.Network.Weights.Length];
     this._lastDelta = new double[base.Network.Weights.Length];

     for (int i = 0; i < this._updateValues.Length; i++)
     {
         this._updateValues[i] = initialUpdate;
     }
 }
Example #13
 protected TrainFlatNetworkProp(FlatNetwork network, IMLDataSet training)
 {
     // Dead branches from the decompiler removed; field names are
     // descriptive guesses matching the Propagation constructor in
     // Example #19.
     this._training = training;
     this._network = network;
     this.Gradients = new double[this._network.Weights.Length];
     this._lastGradient = new double[this._network.Weights.Length];

     this._indexable = training;
     this._numThreads = 0;
     this._reportedException = null;
     this.FixFlatSpot = true;
     this.ErrorFunction = new LinearErrorFunction();
 }
Example #14
 public CrossValidationKFold(IMLTrain train, int k)
     : base(train.Method, (FoldedDataSet) train.Training)
 {
     // Unreachable decompiler branches removed; field names are
     // descriptive guesses matching the clean version in Example #10.
     this._train = train;
     base.Folded.Fold(k);

     this._flatNetwork = ((BasicNetwork) train.Method).Structure.Flat;

     this._networks = new NetworkFold[k];
     for (int num = 0; num < this._networks.Length; num++)
     {
         this._networks[num] = new NetworkFold(this._flatNetwork);
     }
 }
Example #15
 /// <summary>
 /// Construct a fold from the specified flat network.
 /// </summary>
 ///
 /// <param name="flat">THe flat network.</param>
 public NetworkFold(FlatNetwork flat)
 {
     _weights = EngineArray.ArrayCopy(flat.Weights);
     _output = EngineArray.ArrayCopy(flat.LayerOutput);
 }
Example #16
 /// <summary>
 /// Copy weights and output to the network.
 /// </summary>
 ///
 /// <param name="target">The network to copy to.</param>
 public void CopyToNetwork(FlatNetwork target)
 {
     EngineArray.ArrayCopy(_weights, target.Weights);
     EngineArray.ArrayCopy(_output, target.LayerOutput);
 }
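Together with the constructor from Example #15, this lets a NetworkFold act as a snapshot of a network's state. A sketch, assuming flat is an existing FlatNetwork:

    var fold = new NetworkFold(flat);  // capture weights and layer output
    // ... train or otherwise mutate the network here ...
    fold.CopyToNetwork(flat);          // roll back to the snapshot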
Example #17
        /// <summary>
        ///     Create an array of activations based on a flat network.
        /// </summary>
        /// <param name="flat">The flat network.</param>
        /// <returns>An array of activation-function codes, one per activation function.</returns>
        public int[] CreateActivations(FlatNetwork flat)
        {
            var result = new int[flat.ActivationFunctions.Length];
            for (int i = 0; i < flat.ActivationFunctions.Length; i++)
            {
                IActivationFunction af = flat.ActivationFunctions[i];

                if (af is ActivationLinear)
                {
                    result[i] = 0;
                }
                else if (af is ActivationTANH)
                {
                    result[i] = 1;
                }
                else if (af is ActivationSigmoid)
                {
                    result[i] = 2;
                }
                else if (af is ActivationElliottSymmetric)
                {
                    result[i] = 3;
                }
                else if (af is ActivationElliott)
                {
                    result[i] = 4;
                }
            }

            return result;
        }
Example #18
 /// <summary>
 ///     Create an array of activation parameters, one per activation
 ///     function in the flat network, each initialized to 1.
 /// </summary>
 /// <param name="flat">The flat network to use as a model.</param>
 /// <returns>The new array.</returns>
 public double[] CreateParams(FlatNetwork flat)
 {
     var result = new double[flat.ActivationFunctions.Length];
     EngineArray.Fill(result, 1);
     return result;
 }
Example #19
        /// <summary>
        /// Construct a propagation object.
        /// </summary>
        ///
        /// <param name="network">The network.</param>
        /// <param name="training">The training set.</param>
        protected Propagation(IContainsFlat network, IMLDataSet training)
            : base(TrainingImplementationType.Iterative)
        {
            _network = network;
            _flat = network.Flat;
            _training = training;

            Gradients = new double[_flat.Weights.Length];
            _lastGradient = new double[_flat.Weights.Length];

            _indexable = training;
            _numThreads = 0;
            _reportedException = null;
            FixFlatSpot = true;
            ErrorFunction = new LinearErrorFunction();
        }
Example #20
 public void FinalizeStructure()
 {
     // Reconstructed from the obfuscated decompiler output; the logic
     // matches the clean version in Example #22.
     if (this._layers.Count < 2)
     {
         throw new NeuralNetworkError("There must be at least two layers before the structure is finalized.");
     }

     var layerArray = new FlatLayer[this._layers.Count];
     for (int num = 0; num < this._layers.Count; num++)
     {
         var layer = (BasicLayer) this._layers[num];
         if (layer.Activation == null)
         {
             layer.Activation = new ActivationLinear();
         }
         layerArray[num] = layer;
     }

     this._flat = new FlatNetwork(layerArray);
     this.FinalizeLimit();
     this._layers.Clear();
     this.EnforceLimit();
 }
Example #21
        /// <summary>
        /// Clone into the flat network passed in.
        /// </summary>
        ///
        /// <param name="result">The network to copy into.</param>
        public void CloneFlatNetwork(FlatNetwork result)
        {
            result._inputCount = _inputCount;
            result._layerCounts = EngineArray.ArrayCopy(_layerCounts);
            result._layerIndex = EngineArray.ArrayCopy(_layerIndex);
            result._layerOutput = EngineArray.ArrayCopy(_layerOutput);
            result._layerSums = EngineArray.ArrayCopy(_layerSums);
            result._layerFeedCounts = EngineArray.ArrayCopy(_layerFeedCounts);
            result._contextTargetOffset = EngineArray
                .ArrayCopy(_contextTargetOffset);
            result._contextTargetSize = EngineArray
                .ArrayCopy(_contextTargetSize);
            result._layerContextCount = EngineArray
                .ArrayCopy(_layerContextCount);
            result._biasActivation = EngineArray.ArrayCopy(_biasActivation);
            result._outputCount = _outputCount;
            result._weightIndex = _weightIndex;
            result._weights = _weights;

            result._activationFunctions = new IActivationFunction[_activationFunctions.Length];
            for (int i = 0; i < result._activationFunctions.Length; i++)
            {
                result._activationFunctions[i] = (IActivationFunction) _activationFunctions[i].Clone();
            }

            result._beginTraining = _beginTraining;
            result._endTraining = _endTraining;
        }
Example #22
        /// <summary>
        /// Build the synapse and layer structure. This method should be called after
        /// you are done adding layers to a network, or change the network's logic
        /// property.
        /// </summary>
        ///
        public void FinalizeStructure()
        {
            if (_layers.Count < 2)
            {
                throw new NeuralNetworkError(
                    "There must be at least two layers before the structure is finalized.");
            }

            var flatLayers = new FlatLayer[_layers.Count];

            for (int i = 0; i < _layers.Count; i++)
            {
                var layer = (BasicLayer) _layers[i];
                if (layer.Activation == null)
                {
                    layer.Activation = new ActivationLinear();
                }

                flatLayers[i] = layer;
            }

            _flat = new FlatNetwork(flatLayers);

            FinalizeLimit();
            _layers.Clear();
            EnforceLimit();
        }
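In practice FinalizeStructure is reached through BasicNetwork. A typical construction sketch (the layer sizes and activations are arbitrary):

    // Hypothetical usage: add layers, then finalize before training.
    var network = new BasicNetwork();
    network.AddLayer(new BasicLayer(null, true, 2));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 4));
    network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
    network.Structure.FinalizeStructure();
    network.Reset(); // randomize the newly created weight array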
Example #23
 private void CheckWithModel(FlatNetwork model, FlatNetwork pruned)
 {
     Assert.AreEqual(model.Weights.Length, pruned.Weights.Length);
     Assert.AreEqual(model.ContextTargetOffset, pruned.ContextTargetOffset);
     Assert.AreEqual(model.ContextTargetSize, pruned.ContextTargetSize);
     Assert.AreEqual(model.LayerCounts, pruned.LayerCounts);
     Assert.AreEqual(model.LayerFeedCounts, pruned.LayerFeedCounts);
     Assert.AreEqual(model.LayerIndex, pruned.LayerIndex);
     Assert.AreEqual(model.LayerOutput.Length, pruned.LayerOutput.Length);
     Assert.AreEqual(model.WeightIndex, pruned.WeightIndex);
 }
Example #24
        public static long BenchmarkEncogFlat(double[][] input, double[][] output)
        {
            var network = new FlatNetwork(input[0].Length, HIDDEN_COUNT, 0,
                                          output[0].Length, false);
            network.Randomize();
            var trainingSet = new BasicMLDataSet(input, output);

            var train = new TrainFlatNetworkBackPropagation(
                network, trainingSet, 0.7, 0.7);

            var sw = new Stopwatch();
            sw.Start();
            // run epoch of learning procedure
            for (int i = 0; i < ITERATIONS; i++)
            {
                train.Iteration();
            }
            sw.Stop();

            return sw.ElapsedMilliseconds;
        }
Example #25
 public void CloneFlatNetwork(FlatNetwork result)
 {
     // Reconstructed from the obfuscated decompiler output; the logic
     // matches the clean versions in Examples #1 and #21.
     result._inputCount = this._inputCount;
     result._layerCounts = EngineArray.ArrayCopy(this._layerCounts);
     result._layerIndex = EngineArray.ArrayCopy(this._layerIndex);
     result._layerOutput = EngineArray.ArrayCopy(this._layerOutput);
     result._layerSums = EngineArray.ArrayCopy(this._layerSums);
     result._layerFeedCounts = EngineArray.ArrayCopy(this._layerFeedCounts);
     result._contextTargetOffset = EngineArray.ArrayCopy(this._contextTargetOffset);
     result._contextTargetSize = EngineArray.ArrayCopy(this._contextTargetSize);
     result._layerContextCount = EngineArray.ArrayCopy(this._layerContextCount);
     result._biasActivation = EngineArray.ArrayCopy(this._biasActivation);
     result._outputCount = this._outputCount;
     result._weightIndex = this._weightIndex;
     result._weights = this._weights;

     result._activationFunctions = new IActivationFunction[this._activationFunctions.Length];
     for (int num = 0; num < result._activationFunctions.Length; num++)
     {
         result._activationFunctions[num] = (IActivationFunction) this._activationFunctions[num].Clone();
     }

     result._beginTraining = this._beginTraining;
     result._endTraining = this._endTraining;
 }
Example #26
        /// <summary>
        /// Construct the chain rule worker. 
        /// </summary>
        /// <param name="theNetwork">The network to calculate a Hessian for.</param>
        /// <param name="theTraining">The training data.</param>
        /// <param name="theLow">The low range.</param>
        /// <param name="theHigh">The high range.</param>
        public ChainRuleWorker(FlatNetwork theNetwork, IMLDataSet theTraining, int theLow, int theHigh)
        {
            int weightCount = theNetwork.Weights.Length;

            _training = theTraining;
            _flat = theNetwork;

            _layerDelta = new double[_flat.LayerOutput.Length];
            _actual = new double[_flat.OutputCount];
            _derivative = new double[weightCount];
            _totDeriv = new double[weightCount];
            _gradients = new double[weightCount];

            _weights = _flat.Weights;
            _layerIndex = _flat.LayerIndex;
            _layerCounts = _flat.LayerCounts;
            _weightIndex = _flat.WeightIndex;
            _layerOutput = _flat.LayerOutput;
            _layerSums = _flat.LayerSums;
            _layerFeedCounts = _flat.LayerFeedCounts;
            _low = theLow;
            _high = theHigh;
        }
Example #27
 /// <summary>
 /// Copy the weights and output from the network.
 /// </summary>
 ///
 /// <param name="source">The network to copy from.</param>
 public void CopyFromNetwork(FlatNetwork source)
 {
     EngineArray.ArrayCopy(source.Weights, _weights);
     EngineArray.ArrayCopy(source.LayerOutput, _output);
 }
Example #28
        /// <summary>
        /// Read an object.
        /// </summary>
        ///
        public Object Read(Stream mask0)
        {
            var result = new BasicNetwork();
            var flat = new FlatNetwork();
            var ins0 = new EncogReadHelper(mask0);
            EncogFileSection section;

            while ((section = ins0.ReadNextSection()) != null)
            {
                if (section.SectionName.Equals("BASIC")
                    && section.SubSectionName.Equals("PARAMS"))
                {
                    IDictionary<String, String> paras = section.ParseParams();
                    EngineArray.PutAll(paras, result.Properties);
                }
                if (section.SectionName.Equals("BASIC")
                    && section.SubSectionName.Equals("NETWORK"))
                {
                    IDictionary<String, String> p = section.ParseParams();

                    flat.BeginTraining = EncogFileSection.ParseInt(p,
                                                                   BasicNetwork.TagBeginTraining);
                    flat.ConnectionLimit = EncogFileSection.ParseDouble(p,
                                                                        BasicNetwork.TagConnectionLimit);
                    flat.ContextTargetOffset = EncogFileSection.ParseIntArray(
                        p, BasicNetwork.TagContextTargetOffset);
                    flat.ContextTargetSize = EncogFileSection.ParseIntArray(
                        p, BasicNetwork.TagContextTargetSize);
                    flat.EndTraining = EncogFileSection.ParseInt(p,
                                                                 BasicNetwork.TagEndTraining);
                    flat.HasContext = EncogFileSection.ParseBoolean(p,
                                                                    BasicNetwork.TagHasContext);
                    flat.InputCount = EncogFileSection.ParseInt(p,
                                                                PersistConst.InputCount);
                    flat.LayerCounts = EncogFileSection.ParseIntArray(p,
                                                                      BasicNetwork.TagLayerCounts);
                    flat.LayerFeedCounts = EncogFileSection.ParseIntArray(p,
                                                                          BasicNetwork.TagLayerFeedCounts);
                    flat.LayerContextCount = EncogFileSection.ParseIntArray(
                        p, BasicNetwork.TagLayerContextCount);
                    flat.LayerIndex = EncogFileSection.ParseIntArray(p,
                                                                     BasicNetwork.TagLayerIndex);
                    flat.LayerOutput = section.ParseDoubleArray(p, PersistConst.Output);
                    flat.LayerSums = new double[flat.LayerOutput.Length];
                    flat.OutputCount = EncogFileSection.ParseInt(p,
                                                                 PersistConst.OutputCount);
                    flat.WeightIndex = EncogFileSection.ParseIntArray(p,
                                                                      BasicNetwork.TagWeightIndex);
                    flat.Weights = section.ParseDoubleArray(p, PersistConst.Weights);
                    flat.BiasActivation = section.ParseDoubleArray(p, BasicNetwork.TagBiasActivation);
                }
                else if (section.SectionName.Equals("BASIC")
                         && section.SubSectionName.Equals("ACTIVATION"))
                {
                    int index = 0;

                    flat.ActivationFunctions = new IActivationFunction[flat.LayerCounts.Length];


                    foreach (String line in section.Lines)
                    {
                        IActivationFunction af;
                        IList<String> cols = EncogFileSection
                            .SplitColumns(line);
                        String name = ReflectionUtil.AfPath
                                      + cols[0];
                        try
                        {
                            af = (IActivationFunction) ReflectionUtil.LoadObject(name);
                        }
                        catch (TypeLoadException e)
                        {
                            throw new PersistError(e);
                        }
                        catch (TargetException e)
                        {
                            throw new PersistError(e);
                        }
                        catch (MemberAccessException e)
                        {
                            throw new PersistError(e);
                        }

                        for (int i = 0; i < af.ParamNames.Length; i++)
                        {
                            af.Params[i] = 
                                        CSVFormat.EgFormat.Parse(cols[i + 1]);
                        }

                        flat.ActivationFunctions[index++] = af;
                    }
                }
            }

            result.Structure.Flat = flat;

            return result;
        }
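This Read method belongs to Encog's persistence layer. A usage sketch; the PersistBasicNetwork class name and the file name are assumptions:

    // Hypothetical usage: deserialize a BasicNetwork from an Encog stream.
    using (var stream = File.OpenRead("network.eg"))
    {
        var network = (BasicNetwork) new PersistBasicNetwork().Read(stream);
    }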
Example #29
 public virtual object Clone()
 {
     FlatNetwork result = new FlatNetwork();
     this.CloneFlatNetwork(result);
     return result;
 }
Example #30
 /// <summary>
 /// Clone the network.
 /// </summary>
 ///
 /// <returns>A clone of the network.</returns>
 public virtual Object Clone()
 {
     var result = new FlatNetwork();
     CloneFlatNetwork(result);
     return result;
 }
Example #31
        /// <summary>
        ///     Construct the chain rule worker.
        /// </summary>
        /// <param name="theNetwork">The network to calculate a Hessian for.</param>
        /// <param name="theTraining">The training data.</param>
        /// <param name="theLow">The low range.</param>
        /// <param name="theHigh">The high range.</param>
        public ChainRuleWorker(FlatNetwork theNetwork, IMLDataSet theTraining, int theLow, int theHigh)
        {
            _weightCount = theNetwork.Weights.Length;
            _hessian = EngineArray.AllocateDouble2D(_weightCount, _weightCount);

            _training = theTraining;
            _flat = theNetwork;

            _layerDelta = new double[_flat.LayerOutput.Length];
            _actual = new double[_flat.OutputCount];
            _totDeriv = new double[_weightCount];
            _gradients = new double[_weightCount];

            _weights = _flat.Weights;
            _layerIndex = _flat.LayerIndex;
            _layerCounts = _flat.LayerCounts;
            _weightIndex = _flat.WeightIndex;
            _layerOutput = _flat.LayerOutput;
            _layerSums = _flat.LayerSums;
            _layerFeedCounts = _flat.LayerFeedCounts;
            _low = theLow;
            _high = theHigh;
        }
Example #32
        /// <summary>
        /// Construct a gradient worker.
        /// </summary>
        ///
        /// <param name="theNetwork">The network to train.</param>
        /// <param name="theOwner">The owner that is doing the training.</param>
        /// <param name="theTraining">The training data.</param>
        /// <param name="theLow">The low index to use in the training data.</param>
        /// <param name="theHigh">The high index to use in the training data.</param>
        /// <param name="theFlatSpots">Holds an array of flat spot constants.</param>
        public GradientWorker(FlatNetwork theNetwork,
                                 Propagation theOwner, IMLDataSet theTraining,
                                 int theLow, int theHigh, double[] theFlatSpots, IErrorFunction ef)
        {
            _errorCalculation = new ErrorCalculation();
            _network = theNetwork;
            _training = theTraining;
            _low = theLow;
            _high = theHigh;
            _owner = theOwner;
            _flatSpot = theFlatSpots;

            _layerDelta = new double[_network.LayerOutput.Length];
            _gradients = new double[_network.Weights.Length];
            _actual = new double[_network.OutputCount];

            _weights = _network.Weights;
            _layerIndex = _network.LayerIndex;
            _layerCounts = _network.LayerCounts;
            _weightIndex = _network.WeightIndex;
            _layerOutput = _network.LayerOutput;
            _layerSums = _network.LayerSums;
            _layerFeedCounts = _network.LayerFeedCounts;
            _ef = ef;

            _pair = BasicMLDataPair.CreatePair(_network.InputCount,
                                              _network.OutputCount);
        }
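The theLow/theHigh pair gives each worker a disjoint slice of the training set, which is how the propagation trainers parallelize gradient accumulation. A sketch of splitting records across two workers (all the surrounding objects are assumed to exist):

    // Hypothetical: divide N records between two workers.
    int n = (int) trainingSet.Count;
    var w1 = new GradientWorker(flat, owner, trainingSet, 0, n / 2 - 1, flatSpots, ef);
    var w2 = new GradientWorker(flat, owner, trainingSet, n / 2, n - 1, flatSpots, ef);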