/// <summary>
/// Builds a five-branch convolutional network for 28x28 matrix inputs.
/// Each branch runs 28x28 -> 24x24 convolution, 24x24 -> 12x12 subsampling,
/// 12x12 -> 8x8 convolution and 8x8 -> 4x4 subsampling; the merged 4*4*5 = 80
/// features feed a fully connected classifier with 10 outputs.
/// </summary>
/// <param name="dataSet">Training data set (currently unused by the builder).</param>
/// <returns>The composed network.</returns>
public static NeuralNetwork<Matrix> Create(IDataSet<Matrix, Vector> dataSet)
{
    const int branches = 5;

    // Stage 1: 28x28 -> 24x24 convolution, one per branch.
    var stage1 = new ISingleLayer<Matrix, Matrix>[branches];
    for (var branch = 0; branch < branches; ++branch)
    {
        stage1[branch] = new MatrixConvolutor(28, 28, 24, 24, new Tanh());
    }

    // Stage 2: 24x24 -> 12x12 subsampling.
    var stage2 = new ISingleLayer<Matrix, Matrix>[branches];
    for (var branch = 0; branch < branches; ++branch)
    {
        stage2[branch] = new MatrixSubsampler(24, 24, 12, 12, new Tanh());
    }

    // Stage 3: 12x12 -> 8x8 convolution.
    var stage3 = new ISingleLayer<Matrix, Matrix>[branches];
    for (var branch = 0; branch < branches; ++branch)
    {
        stage3[branch] = new MatrixConvolutor(12, 12, 8, 8, new Tanh());
    }

    // Stage 4: 8x8 -> 4x4 subsampling.
    var stage4 = new ISingleLayer<Matrix, Matrix>[branches];
    for (var branch = 0; branch < branches; ++branch)
    {
        stage4[branch] = new MatrixSubsampler(8, 8, 4, 4, new Tanh());
    }

    var splitter = new Splitter<Matrix, Matrix>(stage1);
    var subsampling = new Applicator<Matrix, Matrix>(stage2);
    var convolution = new Applicator<Matrix, Matrix>(stage3);
    var merger = new MatrixMerger<Matrix>(stage4);
    var classifier = new FullyConnectedLayer(16 * branches, 10, new Tanh());

    var pipeline = CompositeLayer<Vector, Vector[], Vector>.Compose(
        splitter, subsampling, convolution, merger, classifier);
    return new NeuralNetwork<Matrix>(pipeline);
}
/// <summary>
/// Builds a 6-6-1 multilayer perceptron with bipolar-threshold activation and
/// configures simple backpropagation training (half-Euclid error, step 0.1,
/// up to 10000 epochs), storing the result in <c>network</c>.
/// </summary>
public NeuralGenTrainTask()
{
    // Fix: the original created two time-seeded Random instances back-to-back;
    // before .NET 6 those can receive identical seeds, so both layers would be
    // initialized with the same pseudo-random sequence. Share one instance.
    var random = new Random();

    var layers = new ISingleLayer<double>[2];
    layers[0] = new SingleLayer(6, 6, new Neuro.MLP.ActivateFunction.BipolarTreshhold(), random);
    layers[1] = new SingleLayer(6, 1, new Neuro.MLP.ActivateFunction.BipolarTreshhold(), random);
    var mLayer = new MultiLayer(layers);

    var config = new DifferintiableLearningConfig(new Neuro.MLP.ErrorFunction.HalfEuclid())
    {
        Step = 0.1,                 // learning rate
        OneImageMinError = 0.01,    // per-sample stopping error
        MinError = 0.5,             // total-error stopping threshold
        MinChangeError = 0.0000001, // stop when error improvement stalls
        UseRandomShuffle = true,    // shuffle training set each epoch
        MaxEpoch = 10000
    };

    var learn = new SimpleBackPropogation(config);
    network = new MultiLayerNeuralNetwork(mLayer, learn);
}
/// <summary>
/// Builds a fully connected feed-forward network whose layer widths are
/// input size, the given hidden sizes in order, then output size.
/// </summary>
/// <param name="dataSet">Data set used to read input/output dimensions.</param>
/// <param name="activator">Activation function shared by every layer.</param>
/// <param name="hiddenSizes">Hidden-layer widths; may be empty.</param>
/// <returns>The composed network.</returns>
public static NeuralNetwork<Vector> Create(IDataSet<Vector, Vector> dataSet, IActivator activator, List<int> hiddenSizes)
{
    // No hidden layers requested: fall back to the simpler overload.
    if (hiddenSizes.Count == 0)
    {
        return Create(dataSet, activator);
    }

    // Full chain of widths: input, hidden..., output.
    var widths = new List<int> { dataSet.FirstInput.Size };
    widths.AddRange(hiddenSizes);
    widths.Add(dataSet.FirstOutput.Size);

    // One fully connected layer per adjacent pair of widths.
    var layers = new ISingleLayer<Vector, Vector>[widths.Count - 1];
    for (var layer = 0; layer < layers.Length; ++layer)
    {
        layers[layer] = new FullyConnectedLayer(widths[layer], widths[layer + 1], activator);
    }

    return new NeuralNetwork<Vector>(LayerCompositor.ComposeGeteroneneous(layers));
}
/// <summary>
/// Builds a shallow five-branch convolutional network for 28x28 inputs:
/// 28x28 -> 24x24 convolution and 24x24 -> 12x12 subsampling per branch,
/// then two fully connected layers (720 -> 50 -> 10).
/// </summary>
/// <param name="dataSet">Training data set (currently unused by the builder).</param>
/// <returns>The composed network.</returns>
public static NeuralNetwork<Matrix> CreateSemi(IDataSet<Matrix, Vector> dataSet)
{
    var count = 5;

    var a = new ISingleLayer<Matrix, Matrix>[count];
    for (var i = 0; i < count; ++i)
    {
        a[i] = new MatrixConvolutor(28, 28, 24, 24, new Tanh());
    }

    var b = new ISingleLayer<Matrix, Matrix>[count];
    for (var i = 0; i < count; ++i)
    {
        b[i] = new MatrixSubsampler(24, 24, 12, 12, new Tanh());
    }

    var splitter = new Splitter<Matrix, Matrix>(a);
    var merger = new MatrixMerger<Matrix>(b);

    // Merger emits 12*12 = 144 features per branch -> 144 * count inputs.
    var classif = new FullyConnectedLayer(144 * count, 50, new Tanh());
    // Fix: the second layer declared an input size of 100, but the previous
    // layer outputs 50 values — a dimension mismatch in the composed chain.
    var classif2 = new FullyConnectedLayer(50, 10, new Tanh());

    var comp = CompositeLayer<Vector, Vector[], Vector>.Compose(splitter, merger, classif, classif2);
    return new NeuralNetwork<Matrix>(comp);
}
/// <summary>
/// Builds a twelve-branch convolutional network for 96x96 NORB-style inputs:
/// 96x96 -> 92x92 convolution, then two subsampling stages (92 -> 46 -> 23)
/// per branch, merged into a 23*23*12 feature vector classified into 5 outputs.
/// </summary>
/// <param name="dataSet">Training data set (currently unused by the builder).</param>
/// <returns>The composed network.</returns>
public static NeuralNetwork<Matrix> CreateNorb(IDataSet<Matrix, Vector> dataSet)
{
    var count = 12;
    // Fix: removed the unused local `branch = 5` that the original declared
    // but never read.

    var a = new ISingleLayer<Matrix, Matrix>[count];
    for (var i = 0; i < count; ++i)
    {
        a[i] = new MatrixConvolutor(96, 96, 92, 92, new Tanh());
    }

    var b = new ISingleLayer<Matrix, Matrix>[count];
    for (var i = 0; i < count; ++i)
    {
        b[i] = new MatrixSubsampler(92, 92, 46, 46, new Tanh());
    }

    var c = new ISingleLayer<Matrix, Matrix>[count];
    for (var i = 0; i < count; ++i)
    {
        c[i] = new MatrixSubsampler(46, 46, 23, 23, new Tanh());
    }

    var splitter = new Splitter<Matrix, Matrix>(a);
    var applicator1 = new Applicator<Matrix, Matrix>(b);
    var merger = new MatrixMerger<Matrix>(c);
    var classif = new FullyConnectedLayer(23 * 23 * count, 5, new Tanh());

    var comp = CompositeLayer<Vector, Vector[], Vector>.Compose(splitter, applicator1, merger, classif);
    return new NeuralNetwork<Matrix>(comp);
}