/// <summary>
/// Build the synapse and layer structure. This method should be called
/// after you are done adding layers to a network, or after changing the
/// network's logic property.
/// </summary>
public void FinalizeStructure()
{
    if (_layers.Count < 2)
    {
        throw new NeuralNetworkError(
            "There must be at least two layers before the structure is finalized.");
    }

    var flatLayers = new FlatLayer[_layers.Count];

    for (int i = 0; i < _layers.Count; i++)
    {
        var layer = (BasicLayer)_layers[i];

        // layers without an explicit activation default to linear
        if (layer.Activation == null)
        {
            layer.Activation = new ActivationLinear();
        }

        flatLayers[i] = layer;
    }

    _flat = new FlatNetwork(flatLayers);

    FinalizeLimit();
    _layers.Clear();
    EnforceLimit();
}
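// A hedged usage sketch (not from the source): assuming an Encog-style
// BasicNetwork whose AddLayer method appends to _layers, finalizing might
// look like this. BasicNetwork, AddLayer, and ActivationSigmoid are
// assumptions here, not confirmed by the snippet above.
var network = new BasicNetwork();
network.AddLayer(new BasicLayer(null, true, 2));                     // input; null activation becomes ActivationLinear
network.AddLayer(new BasicLayer(new ActivationSigmoid(), true, 3));  // hidden
network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1)); // output
network.FinalizeStructure(); // builds the FlatNetwork and clears _layers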
/// <inheritdoc />
public string Part2()
{
    int imageSize = 25 * 6;

    List<FlatLayer> layers = new List<FlatLayer>(ints.Count / imageSize);
    for (int i = 0; i < ints.Count / imageSize; i++)
    {
        layers.Add(new FlatLayer(ints.Skip(i * imageSize).Take(imageSize).ToList()));
    }

    FlatLayer output = new FlatLayer(new List<int>());
    for (int i = 0; i < imageSize; i++)
    {
        // take the first non-transparent pixel, scanning from the top layer down
        for (int j = 0; j < layers.Count; j++)
        {
            if (layers[j].Pixels[i] < 2)
            {
                output.Pixels.Add(layers[j].Pixels[i]);
                break;
            }
        }

        // transparent all the way down: nothing was appended for pixel i
        if (output.Pixels.Count <= i)
        {
            output.Pixels.Add(2);
        }
    }

    for (int i = 0; i < 6; i++)
    {
        for (int j = 0; j < 25; j++)
        {
            switch (output.Pixels[i * 25 + j])
            {
                case 0:
                    Console.BackgroundColor = ConsoleColor.Black;
                    Console.ForegroundColor = ConsoleColor.White;
                    Console.Write(output.Pixels[i * 25 + j]);
                    Console.ResetColor();
                    break;
                case 1:
                    Console.BackgroundColor = ConsoleColor.White;
                    Console.ForegroundColor = ConsoleColor.Black;
                    Console.Write(output.Pixels[i * 25 + j]);
                    Console.ResetColor();
                    break;
            }
        }
        Console.Write("\n");
    }

    return "";
}
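// A minimal sketch of the FlatLayer shape Part2 relies on, inferred from the
// calls above (a constructor taking the layer's pixels and a Pixels list).
// The real class in this project may carry more state.
public class FlatLayer
{
    public List<int> Pixels { get; }

    public FlatLayer(List<int> pixels)
    {
        Pixels = pixels;
    }
}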
public static FlatNetwork ToFlatNetwork(this MatrixNetwork @this)
{
    var net = new FlatNetwork();

    foreach (var layer in @this.Layers)
    {
        var flatLayer = new FlatLayer();

        // each matrix row holds the incoming weights of one neuron
        for (var i = 0; i < layer.Weights.RowCount; i++)
        {
            var flatNeuron = new FlatNeuron();
            flatNeuron.Weights.AddRange(layer.Weights.Row(i));
            flatNeuron.Bias = layer.Biases[i];
            flatLayer.Neurons.Add(flatNeuron);
        }

        net.Layers.Add(flatLayer);
    }

    return net;
}
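// A sketch of the flat DTO shapes ToFlatNetwork assumes, inferred from the
// member accesses above (Layers, Neurons, Weights, Bias). These definitions
// are illustrative, not the project's actual types.
public class FlatNeuron
{
    public List<double> Weights { get; } = new List<double>();
    public double Bias { get; set; }
}

public class FlatLayer
{
    public List<FlatNeuron> Neurons { get; } = new List<FlatNeuron>();
}

public class FlatNetwork
{
    public List<FlatLayer> Layers { get; } = new List<FlatLayer>();
}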
/// <summary>
/// Create the flat neural network.
/// </summary>
public void Flatten()
{
    bool isRBF = false;
    IDictionary<ILayer, FlatLayer> regular2flat = new Dictionary<ILayer, FlatLayer>();
    IDictionary<FlatLayer, ILayer> flat2regular = new Dictionary<FlatLayer, ILayer>();
    IList<ObjectPair<ILayer, ILayer>> contexts = new List<ObjectPair<ILayer, ILayer>>();
    this.flat = null;

    ValidateForFlat val = new ValidateForFlat();

    if (val.IsValid(this.network) == null)
    {
        // special case: a three-layer network with an RBF hidden layer
        if (this.layers.Count == 3
            && this.layers[1] is RadialBasisFunctionLayer)
        {
            RadialBasisFunctionLayer rbf = (RadialBasisFunctionLayer)this.layers[1];
            this.flat = new FlatNetworkRBF(this.network.InputCount,
                rbf.NeuronCount, this.network.OutputCount,
                rbf.RadialBasisFunction);
            FlattenWeights();
            this.flatUpdate = FlatUpdateNeeded.None;
            return;
        }

        int flatLayerCount = CountNonContext();
        FlatLayer[] flatLayers = new FlatLayer[flatLayerCount];

        int index = flatLayers.Length - 1;
        foreach (ILayer layer in this.layers)
        {
            if (layer is ContextLayer)
            {
                ISynapse inboundSynapse = network.Structure
                    .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
                ISynapse outboundSynapse = network.Structure
                    .FindNextSynapseByLayerType(layer, typeof(BasicLayer));

                if (inboundSynapse == null)
                {
                    throw new NeuralNetworkError(
                        "Context layer must be connected to by one BasicLayer.");
                }

                if (outboundSynapse == null)
                {
                    throw new NeuralNetworkError(
                        "Context layer must connect to one BasicLayer.");
                }

                ILayer inbound = inboundSynapse.FromLayer;
                ILayer outbound = outboundSynapse.ToLayer;

                contexts.Add(new ObjectPair<ILayer, ILayer>(inbound, outbound));
            }
            else
            {
                double bias = this.FindNextBias(layer);

                IActivationFunction activationType;
                double[] param;

                // layers without an explicit activation default to linear
                if (layer.ActivationFunction == null)
                {
                    activationType = new ActivationLinear();
                    param = new double[1];
                    param[0] = 1;
                }
                else
                {
                    activationType = layer.ActivationFunction;
                    param = layer.ActivationFunction.Params;
                }

                FlatLayer flatLayer = new FlatLayer(activationType,
                    layer.NeuronCount, bias, param);

                regular2flat[layer] = flatLayer;
                flat2regular[flatLayer] = layer;
                flatLayers[index--] = flatLayer;
            }
        }

        // now link up the context layers
        foreach (ObjectPair<ILayer, ILayer> context in contexts)
        {
            // link the context layer on the FlatLayer
            ILayer layer = context.B;
            ISynapse synapse = this.network.Structure
                .FindPreviousSynapseByLayerType(layer, typeof(BasicLayer));
            FlatLayer from = regular2flat[context.A];
            FlatLayer to = regular2flat[synapse.FromLayer];
            to.ContextFedBy = from;
        }

        this.flat = new FlatNetwork(flatLayers);

        // update the context indexes on the non-flat network
        for (int i = 0; i < flatLayerCount; i++)
        {
            FlatLayer fedBy = flatLayers[i].ContextFedBy;
            if (fedBy != null)
            {
                ILayer fedBy2 = flat2regular[flatLayers[i + 1]];
                ISynapse synapse = FindPreviousSynapseByLayerType(fedBy2, typeof(ContextLayer));
                if (synapse == null)
                {
                    throw new NeuralNetworkError("Can't find parent synapse to context layer.");
                }
                ContextLayer context = (ContextLayer)synapse.FromLayer;

                // find the index of the flat layer that feeds the context
                int fedByIndex = -1;
                for (int j = 0; j < flatLayerCount; j++)
                {
                    if (flatLayers[j] == fedBy)
                    {
                        fedByIndex = j;
                        break;
                    }
                }

                if (fedByIndex == -1)
                {
                    throw new NeuralNetworkError("Can't find layer feeding context.");
                }

                context.FlatContextIndex = this.flat.ContextTargetOffset[fedByIndex];
            }
        }

        // RBF networks will not train every layer
        if (isRBF)
        {
            this.flat.EndTraining = flatLayers.Length - 1;
        }

        FlattenWeights();

        if (this.IsConnectionLimited)
        {
        }

        this.flatUpdate = FlatUpdateNeeded.None;
    }
    else
    {
        this.flatUpdate = FlatUpdateNeeded.Never;
    }
}
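// A minimal sketch of the CountNonContext helper referenced above, assuming
// it simply counts the layers that are not ContextLayer instances (context
// layers get no FlatLayer of their own, as seen in the foreach loop).
private int CountNonContext()
{
    int count = 0;
    foreach (ILayer layer in this.layers)
    {
        if (!(layer is ContextLayer))
        {
            count++;
        }
    }
    return count;
}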
public examML()
{
    layer1 = new FlatLayer(2);
    layer2 = new NodeLayer(2);
}
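// The constructor above presupposes two instance fields and a NodeLayer type;
// a hedged guess at their declarations (assumptions, not from the source):
private FlatLayer layer1;
private NodeLayer layer2;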