Example #1
        internal Training(NeuralNetwork network, BufferAllocator allocator, TrainingMode mode)
            : base(network, DetermineIterationRepeat(network), allocator)
        {
            Contract.Requires(network != null);
            Contract.Requires(allocator != null);

            Mode = mode;

            // Pick the gradient computing algorithm from the network's structure:
            // BPTT or RTLR for recurrent networks, plain backpropagation otherwise,
            // and none at all when the network carries no gradient information.
            if ((network.StructuralElementFlags & NNStructuralElement.GradientInformation) != 0)
            {
                if (network.IsRecurrent)
                {
                    GCAlgo = network.RecurrentOptions.Algorithm == RLAlgorithm.BPTT ? GradientComputingAlgorithm.BPTT : GradientComputingAlgorithm.RTLR;
                }
                else
                {
                    GCAlgo = GradientComputingAlgorithm.BP;
                }
            }
            else
            {
                GCAlgo = GradientComputingAlgorithm.None;
            }

            // Streamed training is only valid with RTLR, or with no gradient computation at all.
            if (Mode == TrainingMode.Streamed && GCAlgo != GradientComputingAlgorithm.RTLR && GCAlgo != GradientComputingAlgorithm.None)
            {
                throw new InvalidOperationException("Only RTLR is allowed for streamed training. Use a recurrent network with the RTLR algorithm in RecurrentOptions.");
            }

            // BPTT has to remember the error vector of every iteration, so reserve a stack for them.
            if (GCAlgo == GradientComputingAlgorithm.BPTT)
            {
                savedErrorVectors = new ErrorVectorStack(network, allocator);
            }
        }
        protected override void Build(BufferAllocator allocator, ConnectedLayerGroups connectedLayerGroups, NNInitParameters initPars)
        {
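            // Wire up the input and output interfaces first, then build the forward pass;
            // the backward pass is only built when it is enabled for this computation.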
            InitializeInputAndOutput(connectedLayerGroups);

            BuildForwardComputation(allocator, connectedLayerGroups, (ManagedNNInitParameters)initPars);
            if (IsBackwardEnabled) BuildBackwardComputation(connectedLayerGroups);
        }
Example #3
        internal void InitializeAlgo(BufferAllocator allocator, LearningRule rule, ConnectedLayer[] connectedLayers, ManagedNNInitParameters initPars)
        {
            Contract.Requires(rule != null);
            Contract.Requires(connectedLayers != null);
            Contract.Requires(connectedLayers.Length > 0);
            Contract.Requires(initPars != null);

            Rule = rule;
            ConnectedLayers = connectedLayers;
            RunParallel = initPars.RunParallel;
            Ininitalize(allocator);
        }
 private void BuildForwardComputation(BufferAllocator allocator, ConnectedLayerGroups connectedLayerGroups, ManagedNNInitParameters initPars)
 {
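     // One LayerForwardCompute per layer, stored as a jagged array indexed
     // first by connected-layer group and then by layer within the group.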
     forwardComputeGroups = new LayerForwardCompute[connectedLayerGroups.Groups.Count][];
     for (int groupIndex = 0; groupIndex < connectedLayerGroups.Groups.Count; groupIndex++)
     {
         var group = connectedLayerGroups.Groups[groupIndex];
         forwardComputeGroups[groupIndex] = new LayerForwardCompute[group.Count];
         for (int layerIndex = 0; layerIndex < group.Count; layerIndex++)
         {
             forwardComputeGroups[groupIndex][layerIndex] = CreateLayerForwardCompute(group[layerIndex], initPars);
         }
     }
 }
Example #5
        internal ErrorVectorStack(NeuralNetwork network, BufferAllocator allocator)
        {
            Contract.Requires(network != null);
            Contract.Requires(allocator != null);

            this.network = network;
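            // Reserve one error vector for each recurrent iteration (up to MaxIterations);
            // each vector is as long as the network's output interface.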
            int maxSize = network.RecurrentOptions.MaxIterations;
            int errorVectorLength = network.OutputInterfaceLength;

            errorVectors = new IntRange[maxSize];
            for (int idx = 0; idx < maxSize; idx++)
            {
                errorVectors[idx] = allocator.Alloc(errorVectorLength);
            }
        }
        public WeightRelatedValues(BufferAllocator allocator, ConnectedLayer[] layers)
        {
            Contract.Requires(allocator != null);
            Contract.Requires(layers != null);
            Contract.Requires(layers.Length > 0);

            buffers = new IntRange[layers.Length][];
            for (int layerIndex = 0; layerIndex < layers.Length; layerIndex++)
            {
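                // For every weighted-input buffer of this layer, allocate a value range
                // of the same size as the corresponding weight buffer.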
                var wib = layers[layerIndex].WeightedInputBuffers;

                buffers[layerIndex] = new IntRange[wib.Length];
                for (int buffIndex = 0; buffIndex < wib.Length; buffIndex++)
                {
                    buffers[layerIndex][buffIndex] = allocator.Alloc(wib[buffIndex].WeightBuffer.Size);
                }
            }
            this.layers = layers;
        }
Example #7
 internal Validation(NeuralNetwork network, BufferAllocator allocator)
     : base(network, 1, allocator)
 {
     Contract.Requires(network != null);
     Contract.Requires(allocator != null);
 }
Example #8
 internal StreamedTraining(NeuralNetwork network, BufferAllocator allocator) :
     base(network, allocator, TrainingMode.Streamed)
 {
     Contract.Requires(network != null);
     Contract.Requires(allocator != null);
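     // TrainingMode.Streamed makes the Training constructor require RTLR
     // (or no gradient computation at all) for this network.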
 }
Example #9
 // Hook for algorithm-specific setup; the base implementation does nothing.
 protected virtual void Ininitalize(BufferAllocator allocator) { }
Example #10
        protected override void InitializeLearningAlgorithms(BufferAllocator allocator, LearningLayerGroups learningLayerGroups, NNInitParameters initPars)
        {
            algorithms = new LearningAlgorithm[learningLayerGroups.Count];
            var biAlgos = new LinkedList<LearningAlgorithm>();
            var aeAlgos = new LinkedList<LearningAlgorithm>();

            int idx = 0;
            foreach (var group in learningLayerGroups)
            {
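                // Create and initialize the algorithm prescribed by the group's learning rule,
                // and register it for the before-iteration and/or error-based passes it takes part in.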
                var algo = CreateAlgorithmForRule(group.Rule);
                algo.InitializeAlgo(allocator, group.Rule, group.ConnectedLayers.ToArray(), (ManagedNNInitParameters)initPars);
                algorithms[idx++] = algo;
                if (algo.Rule.IsBeforeIterationRule) biAlgos.AddLast(algo);
                if (algo.Rule.IsErrorBasedRule) aeAlgos.AddLast(algo);
            }

            beforeIterationAlgorithms = biAlgos.ToArray();
            errorBasedAlgorithms = aeAlgos.ToArray();
        }
Example #11
        protected override void Built(BufferAllocator allocator, ConnectedLayerGroups connectedLayerGroups, NNInitParameters initPars)
        {
            // Remember how much buffer space the build has allocated:
            allocatedBuffSize = allocator.Size;

            // RTLR: set up the P-value propagator over every forward compute step:
            if ((StructuralElementFlags & NNStructuralElement.RTLRInformation) != 0)
            {
                pValProp = new PValuePropagator(connectedLayerGroups.IndexTable, forwardComputeGroups.SelectMany(g => g));
            }
        }