Used to configure a flat layer. Flat layers are only used during setup; a FlatNetwork does not retain them afterwards.
Ejemplo n.º 1
0
 /// <summary>
 /// Construct a flat layer. Records the activation function, neuron count
 /// and bias activation; the layer starts with no context connection.
 /// </summary>
 ///
 /// <param name="activation">The activation function.</param>
 /// <param name="count">The neuron count.</param>
 /// <param name="biasActivation">The bias activation (value fed into the bias neuron).</param>
 /// <param name="paras">The activation parameters.
 /// NOTE(review): accepted but never stored by this constructor — confirm
 /// whether it should be assigned to a field.</param>
 public FlatLayer(IActivationFunction activation, int count,
                  double biasActivation, double[] paras)
 {
     this.activation     = activation;
     this.count          = count;
     this.biasActivation = biasActivation;
     // No context layer feeds this layer until ContextFedBy is set externally.
     this.contextFedBy   = null;
 }
Ejemplo n.º 2
0
 /// <summary>
 /// Build a flat layer from its activation function, neuron count and
 /// bias activation value. The layer is created without any context
 /// connection (its context source stays null until wired up externally).
 /// </summary>
 /// <param name="activation">Activation function used by this layer.</param>
 /// <param name="count">Number of neurons in the layer.</param>
 /// <param name="biasActivation">Activation value for the bias neuron.</param>
 /// <param name="paras">Activation parameters.</param>
 public FlatLayer(IActivationFunction activation, int count,
                  double biasActivation, double[] paras)
 {
     // The assignments are independent, so order is immaterial.
     this.contextFedBy   = null;
     this.biasActivation = biasActivation;
     this.count          = count;
     this.activation     = activation;
 }
Ejemplo n.º 3
0
        /// <summary>
        /// Construct a flat neural network with up to two hidden layers.
        /// </summary>
        ///
        /// <param name="input">Neurons in the input layer.</param>
        /// <param name="hidden1">Neurons in the first hidden layer; zero for no first hidden layer.</param>
        /// <param name="hidden2">Neurons in the second hidden layer; zero for no second hidden layer.</param>
        /// <param name="output">Neurons in the output layer.</param>
        /// <param name="tanh">True for a tanh activation, false for sigmoid.</param>
        public FlatNetwork(int input, int hidden1, int hidden2,
                           int output, bool tanh)
        {
            // Single shared activation parameter: the slope.
            double[] slopeParam = new double[] { 1 };

            IActivationFunction act;
            if (tanh)
            {
                act = new ActivationTANH();
            }
            else
            {
                act = new ActivationSigmoid();
            }

            FlatLayer[] netLayers;
            if ((hidden1 == 0) && (hidden2 == 0))
            {
                // No hidden layers: input feeds output directly.
                netLayers = new FlatLayer[]
                {
                    new FlatLayer(act, input,
                            FlatNetwork.DEFAULT_BIAS_ACTIVATION, slopeParam),
                    new FlatLayer(act, output,
                            FlatNetwork.NO_BIAS_ACTIVATION, slopeParam)
                };
            }
            else if ((hidden1 == 0) || (hidden2 == 0))
            {
                // Exactly one hidden layer; use whichever count is non-zero.
                int hiddenCount = Math.Max(hidden1, hidden2);
                netLayers = new FlatLayer[]
                {
                    new FlatLayer(act, input,
                            FlatNetwork.DEFAULT_BIAS_ACTIVATION, slopeParam),
                    new FlatLayer(act, hiddenCount,
                            FlatNetwork.DEFAULT_BIAS_ACTIVATION, slopeParam),
                    new FlatLayer(act, output,
                            FlatNetwork.NO_BIAS_ACTIVATION, slopeParam)
                };
            }
            else
            {
                // Two hidden layers.
                netLayers = new FlatLayer[]
                {
                    new FlatLayer(act, input,
                            FlatNetwork.DEFAULT_BIAS_ACTIVATION, slopeParam),
                    new FlatLayer(act, hidden1,
                            FlatNetwork.DEFAULT_BIAS_ACTIVATION, slopeParam),
                    new FlatLayer(act, hidden2,
                            FlatNetwork.DEFAULT_BIAS_ACTIVATION, slopeParam),
                    new FlatLayer(act, output,
                            FlatNetwork.NO_BIAS_ACTIVATION, slopeParam)
                };
            }

            // This constructor never builds a connection-limited network.
            this.isLimited = false;
            this.connectionLimit = 0.0d;

            Init(netLayers);
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Construct an RBF flat network.
        /// </summary>
        /// <param name="inputCount">The number of input neurons (also the number of dimensions).</param>
        /// <param name="hiddenCount">The number of hidden neurons.</param>
        /// <param name="outputCount">The number of output neurons.</param>
        /// <param name="rbf">The radial basis functions to use.</param>
        public FlatNetworkRBF(int inputCount, int hiddenCount,
                 int outputCount, IRadialBasisFunction[] rbf)
        {
            this.rbf = rbf;

            // All three layers are linear (slope 1) with no bias activation.
            double[] slopeParam = new double[] { 1.0 };
            FlatLayer[] netLayers = new FlatLayer[]
            {
                new FlatLayer(new ActivationLinear(), inputCount, 0.0, slopeParam),
                new FlatLayer(new ActivationLinear(), hiddenCount, 0.0, slopeParam),
                new FlatLayer(new ActivationLinear(), outputCount, 0.0, slopeParam)
            };

            Init(netLayers);
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Construct an RBF flat network.
        /// </summary>
        /// <param name="inputCount">The number of input neurons (also the number of dimensions).</param>
        /// <param name="hiddenCount">The number of hidden neurons.</param>
        /// <param name="outputCount">The number of output neurons.</param>
        /// <param name="rbf">The radial basis functions to use.</param>
        public FlatNetworkRBF(int inputCount, int hiddenCount,
                              int outputCount, IRadialBasisFunction[] rbf)
        {
            this.rbf = rbf;
            FlatLayer[] layers = new FlatLayer[3];

            // Single shared activation parameter: slope of 1.
            double[] slope = new double[1];
            slope[0] = 1.0;

            // NOTE(review): all three layers are linear with zero bias; the RBF
            // behavior presumably comes from overridden computation elsewhere in
            // FlatNetworkRBF — confirm.
            layers[0] = new FlatLayer(new ActivationLinear(), inputCount, 0.0,
                                      slope);
            layers[1] = new FlatLayer(new ActivationLinear(), hiddenCount, 0.0,
                                      slope);
            layers[2] = new FlatLayer(new ActivationLinear(), outputCount, 0.0,
                                      slope);

            Init(layers);
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Initialize this flat network from an array of layers, flattening the
        /// layer structure into parallel index arrays. The arrays are filled in
        /// REVERSE layer order: index 0 describes the output layer and the last
        /// index describes the input layer.
        /// </summary>
        ///
        /// <param name="layers">The layers of the network to create, ordered
        /// from input (index 0) to output (last index).</param>
        public void Init(FlatLayer[] layers)
        {
            int layerCount = layers.Length;

            // First layer supplies the input size, last layer the output size.
            this.inputCount = layers[0].Count;
            this.outputCount = layers[layerCount - 1].Count;

            // Per-layer parallel arrays (indexed output-first, see summary).
            this.layerCounts = new int[layerCount];
            this.layerContextCount = new int[layerCount];
            this.weightIndex = new int[layerCount];
            this.layerIndex = new int[layerCount];
            this.activationFunctions = new IActivationFunction[layerCount];
            this.layerFeedCounts = new int[layerCount];
            this.contextTargetOffset = new int[layerCount];
            this.contextTargetSize = new int[layerCount];
            this.biasActivation = new double[layerCount];

            int index = 0;        // position in the reversed arrays
            int neuronCount = 0;  // total neurons across all layers
            int weightCount = 0;  // total inter-layer weights

            // Walk the layers from output back to input; index runs forward
            // while i runs backward, producing the reversed storage order.
            for (int i = layers.Length - 1; i >= 0; i--)
            {

                FlatLayer layer = layers[i];
                // nextLayer is the layer one step closer to the input.
                FlatLayer nextLayer = null;

                if (i > 0)
                {
                    nextLayer = layers[i - 1];
                }

                this.biasActivation[index] = layer.BiasActivation;
                this.layerCounts[index] = layer.TotalCount;
                this.layerFeedCounts[index] = layer.Count;
                this.layerContextCount[index] = layer.ContectCount;
                this.activationFunctions[index] = layer.Activation;

                neuronCount += layer.TotalCount;

                // Weights connect this layer's feed neurons (Count) to every
                // neuron (TotalCount) of the layer closer to the input.
                if (nextLayer != null)
                {
                    weightCount += layer.Count * nextLayer.TotalCount;
                }

                // weightIndex/layerIndex are running prefix sums giving each
                // layer's starting offset into the weights/layerOutput arrays.
                if (index == 0)
                {
                    this.weightIndex[index] = 0;
                    this.layerIndex[index] = 0;
                }
                else
                {
                    this.weightIndex[index] = this.weightIndex[index - 1]
                            + (this.layerCounts[index] * this.layerFeedCounts[index - 1]);
                    this.layerIndex[index] = this.layerIndex[index - 1]
                            + this.layerCounts[index - 1];
                }

                // Find any layer whose context is fed by the current layer and
                // record where its context neurons live in the flattened output.
                // NOTE(review): these two arrays are written at index i (forward
                // order) while all arrays above use index (reversed order) —
                // confirm this asymmetry is intentional.
                int neuronIndex = 0;
                for (int j = layers.Length - 1; j >= 0; j--)
                {
                    if (layers[j].ContextFedBy == layer)
                    {
                        this.contextTargetSize[i] = layers[j].ContectCount;
                        this.contextTargetOffset[i] = neuronIndex
                                + layers[j].TotalCount
                                - layers[j].ContectCount;
                    }
                    neuronIndex += layers[j].TotalCount;
                }

                index++;
            }

            // By default, training covers every layer.
            this.beginTraining = 0;
            this.endTraining = this.layerCounts.Length - 1;

            this.weights = new double[weightCount];
            this.layerOutput = new double[neuronCount];

            // Reset all context neurons to their initial state.
            ClearContext();
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Construct a flat neural network with up to two hidden layers.
        /// </summary>
        ///
        /// <param name="input">Neurons in the input layer.</param>
        /// <param name="hidden1">Neurons in the first hidden layer. Zero for no first hidden layer.</param>
        /// <param name="hidden2">Neurons in the second hidden layer. Zero for no second hidden layer.</param>
        /// <param name="output">Neurons in the output layer.</param>
        /// <param name="tanh">True if this is a tanh activation, false for sigmoid.</param>
        public FlatNetwork(int input, int hidden1, int hidden2,
                int output, bool tanh)
        {
            double[] paras = new double[1];
            FlatLayer[] layers;
            // All layers share the same activation function instance.
            IActivationFunction act = (tanh) ? (IActivationFunction)(new ActivationTANH())
                    : (IActivationFunction)(new ActivationSigmoid());
            paras[0] = 1; // slope

            if ((hidden1 == 0) && (hidden2 == 0))
            {
                // No hidden layers: input feeds output directly.
                layers = new FlatLayer[2];
                layers[0] = new FlatLayer(act, input,
                        FlatNetwork.DEFAULT_BIAS_ACTIVATION, paras);
                layers[1] = new FlatLayer(act, output,
                        FlatNetwork.NO_BIAS_ACTIVATION, paras);
            }
            else if ((hidden1 == 0) || (hidden2 == 0))
            {
                // Exactly one hidden layer; use whichever count is non-zero.
                int count = Math.Max(hidden1, hidden2);
                layers = new FlatLayer[3];
                layers[0] = new FlatLayer(act, input,
                        FlatNetwork.DEFAULT_BIAS_ACTIVATION, paras);
                layers[1] = new FlatLayer(act, count,
                        FlatNetwork.DEFAULT_BIAS_ACTIVATION, paras);
                layers[2] = new FlatLayer(act, output,
                        FlatNetwork.NO_BIAS_ACTIVATION, paras);
            }
            else
            {
                // Two hidden layers.
                layers = new FlatLayer[4];
                layers[0] = new FlatLayer(act, input,
                        FlatNetwork.DEFAULT_BIAS_ACTIVATION, paras);
                layers[1] = new FlatLayer(act, hidden1,
                        FlatNetwork.DEFAULT_BIAS_ACTIVATION, paras);
                layers[2] = new FlatLayer(act, hidden2,
                        FlatNetwork.DEFAULT_BIAS_ACTIVATION, paras);
                layers[3] = new FlatLayer(act, output,
                        FlatNetwork.NO_BIAS_ACTIVATION, paras);
            }

            // This constructor never builds a connection-limited network.
            this.isLimited = false;
            this.connectionLimit = 0.0d;

            Init(layers);
        }
Ejemplo n.º 8
0
 /// <summary>
 /// Create a flat network from an array of layers. All setup is delegated
 /// to Init.
 /// </summary>
 ///
 /// <param name="layers">The layers, ordered from input to output.</param>
 public FlatNetwork(FlatLayer[] layers)
 {
     Init(layers);
 }
Ejemplo n.º 9
0
        /// <summary>
        /// Initialize this flat network from an array of layers. The layer
        /// structure is flattened into parallel index arrays, stored in
        /// REVERSE order: position 0 of every array describes the output
        /// layer, the last position the input layer.
        /// </summary>
        ///
        /// <param name="layers">The layers of the network to create, ordered
        /// from input (index 0) to output (last index).</param>
        public void Init(FlatLayer[] layers)
        {
            int layerCount = layers.Length;

            // Input size comes from the first layer, output size from the last.
            this.inputCount  = layers[0].Count;
            this.outputCount = layers[layerCount - 1].Count;

            // One slot per layer in each parallel array (output-first order).
            this.layerCounts         = new int[layerCount];
            this.layerContextCount   = new int[layerCount];
            this.weightIndex         = new int[layerCount];
            this.layerIndex          = new int[layerCount];
            this.activationFunctions = new IActivationFunction[layerCount];
            this.layerFeedCounts     = new int[layerCount];
            this.contextTargetOffset = new int[layerCount];
            this.contextTargetSize   = new int[layerCount];
            this.biasActivation      = new double[layerCount];

            int index       = 0;  // destination slot in the reversed arrays
            int neuronCount = 0;  // running total of all neurons
            int weightCount = 0;  // running total of all weights

            // i walks backward through the layers while index walks forward,
            // which is what produces the reversed storage order.
            for (int i = layers.Length - 1; i >= 0; i--)
            {
                FlatLayer layer     = layers[i];
                // The layer one step closer to the input (null at the input).
                FlatLayer nextLayer = null;

                if (i > 0)
                {
                    nextLayer = layers[i - 1];
                }

                this.biasActivation[index]      = layer.BiasActivation;
                this.layerCounts[index]         = layer.TotalCount;
                this.layerFeedCounts[index]     = layer.Count;
                this.layerContextCount[index]   = layer.ContectCount;
                this.activationFunctions[index] = layer.Activation;

                neuronCount += layer.TotalCount;

                // Weight block size: this layer's feed neurons times every
                // neuron of the preceding (input-side) layer.
                if (nextLayer != null)
                {
                    weightCount += layer.Count * nextLayer.TotalCount;
                }

                // Prefix sums: starting offsets of each layer within the
                // weights array and the layerOutput array.
                if (index == 0)
                {
                    this.weightIndex[index] = 0;
                    this.layerIndex[index]  = 0;
                }
                else
                {
                    this.weightIndex[index] = this.weightIndex[index - 1]
                                              + (this.layerCounts[index] * this.layerFeedCounts[index - 1]);
                    this.layerIndex[index] = this.layerIndex[index - 1]
                                             + this.layerCounts[index - 1];
                }

                // Locate any layer whose context is fed by the current layer
                // and record the size/offset of its context neurons.
                // NOTE(review): written at the forward index i, unlike the
                // reversed index used by the arrays above — confirm this is
                // intentional.
                int neuronIndex = 0;
                for (int j = layers.Length - 1; j >= 0; j--)
                {
                    if (layers[j].ContextFedBy == layer)
                    {
                        this.contextTargetSize[i]   = layers[j].ContectCount;
                        this.contextTargetOffset[i] = neuronIndex
                                                      + layers[j].TotalCount
                                                      - layers[j].ContectCount;
                    }
                    neuronIndex += layers[j].TotalCount;
                }

                index++;
            }

            // Default training range covers all layers.
            this.beginTraining = 0;
            this.endTraining   = this.layerCounts.Length - 1;

            this.weights     = new double[weightCount];
            this.layerOutput = new double[neuronCount];

            // Reset context neurons to their initial state.
            ClearContext();
        }
Ejemplo n.º 10
0
        /// <summary>
        /// Create the flat (vectorized) equivalent of the object-graph network
        /// and store it in this.flat. If the network fails validation, the
        /// flat-update mode is set to Never and this.flat is left null.
        /// </summary>
        public void Flatten()
        {
            // NOTE(review): isRBF is never set to true in this method (the RBF
            // case returns early below), so the "if (isRBF)" block near the
            // end appears to be dead code — confirm.
            bool isRBF = false;
            // Bidirectional maps between object-graph layers and flat layers.
            IDictionary<ILayer, FlatLayer> regular2flat = new Dictionary<ILayer, FlatLayer>();
            IDictionary<FlatLayer, ILayer> flat2regular = new Dictionary<FlatLayer, ILayer>();            
            // (source, target) BasicLayer pairs bridged by a ContextLayer.
            IList<ObjectPair<ILayer, ILayer>> contexts = new List<ObjectPair<ILayer, ILayer>>();            
            this.flat = null;

            ValidateForFlat val = new ValidateForFlat();

            // IsValid returns null when the network can be flattened.
            if (val.IsValid(this.network) == null)
            {
                // Special case: a 3-layer network with an RBF middle layer
                // becomes a FlatNetworkRBF and we are done.
                if (this.layers.Count == 3
                        && this.layers[1] is RadialBasisFunctionLayer)
                {
                    RadialBasisFunctionLayer rbf = (RadialBasisFunctionLayer)this.layers[1];
                    this.flat = new FlatNetworkRBF(this.network.InputCount,
                            rbf.NeuronCount, this.network.OutputCount,
                            rbf.RadialBasisFunction);
                    FlattenWeights();
                    this.flatUpdate = FlatUpdateNeeded.None;
                    return;
                }

                // One flat layer per non-context layer; filled back-to-front.
                int flatLayerCount = CountNonContext();
                FlatLayer[] flatLayers = new FlatLayer[flatLayerCount];                

                int index = flatLayers.Length - 1;
                foreach (ILayer layer in this.layers)
                {

                    if (layer is ContextLayer)
                    {
                        // Context layers are not flattened directly: record the
                        // BasicLayer pair they bridge so the link can be
                        // re-created on the flat layers afterwards.
                        ISynapse inboundSynapse = network.Structure
                                .FindPreviousSynapseByLayerType(layer,
                                        typeof(BasicLayer));
                        ISynapse outboundSynapse = network
                                .Structure
                                .FindNextSynapseByLayerType(layer, typeof(BasicLayer));

                        if (inboundSynapse == null)
                            throw new NeuralNetworkError(
                                    "Context layer must be connected to by one BasicLayer.");

                        if (outboundSynapse == null)
                            throw new NeuralNetworkError(
                                    "Context layer must connect to by one BasicLayer.");

                        ILayer inbound = inboundSynapse.FromLayer;
                        ILayer outbound = outboundSynapse.ToLayer;

                        contexts
                                .Add(new ObjectPair<ILayer, ILayer>(inbound, outbound));
                    }
                    else
                    {
                        double bias = this.FindNextBias(layer);

                        IActivationFunction activationType;
                        double[] param = new double[1];

                        // Layers without an activation function are treated as
                        // linear with a slope parameter of 1.
                        if (layer.ActivationFunction == null)
                        {
                            activationType = new ActivationLinear();
                            param = new double[1];
                            param[0] = 1;
                        }
                        else
                        {
                            activationType = layer.ActivationFunction;
                            param = layer.ActivationFunction.Params;
                        }

                        FlatLayer flatLayer = new FlatLayer(activationType, layer
                                .NeuronCount, bias, param);

                        regular2flat[layer] = flatLayer;
                        flat2regular[flatLayer] = layer;
                        flatLayers[index--] = flatLayer;
                    }
                }

                // now link up the context layers
                foreach (ObjectPair<ILayer, ILayer> context in contexts)
                {
                    // link the context layer on the FlatLayer
                    ILayer layer = context.B;
                    ISynapse synapse = this.network
                            .Structure
                            .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                    FlatLayer from = regular2flat[context.A];
                    FlatLayer to = regular2flat[synapse.FromLayer];
                    to.ContextFedBy = from;
                }

                this.flat = new FlatNetwork(flatLayers);

                // update the context indexes on the non-flat network
                for (int i = 0; i < flatLayerCount; i++)
                {
                    FlatLayer fedBy = flatLayers[i].ContextFedBy;
                    if (fedBy != null)
                    {
                        // NOTE(review): indexes flatLayers[i + 1]; if the last
                        // flat layer ever had a non-null ContextFedBy this
                        // would be out of range — presumably prevented by the
                        // network structure, confirm.
                        ILayer fedBy2 = flat2regular[flatLayers[i + 1]];
                        ISynapse synapse = FindPreviousSynapseByLayerType(fedBy2, typeof(ContextLayer));
                        if (synapse == null)
                            throw new NeuralNetworkError("Can't find parent synapse to context layer.");
                        ContextLayer context = (ContextLayer)synapse.FromLayer;

                        // find fedby index
                        int fedByIndex = -1;
                        for(int j=0;j<flatLayerCount;j++)
                        {
                            if( flatLayers[j]==fedBy )
                            {
                                fedByIndex = j;
                                break;
                            }
                        }

                        if (fedByIndex == -1)
                            throw new NeuralNetworkError("Can't find layer feeding context.");

                        context.FlatContextIndex = this.flat.ContextTargetOffset[fedByIndex];
                    }
                }

                // RBF networks will not train every layer
                if (isRBF)
                {
                    this.flat.EndTraining = flatLayers.Length - 1;
                }

                FlattenWeights();

                // NOTE(review): empty block — connection-limited handling
                // appears unfinished.
                if (this.IsConnectionLimited)
                {

                }

                this.flatUpdate = FlatUpdateNeeded.None;
            }
            else
                this.flatUpdate = FlatUpdateNeeded.Never;
        }