public FuNet1()
{
    try
    {
        foreach (UIManager.LookAndFeelInfo info in UIManager.InstalledLookAndFeels)
        {
            if ("Nimbus".Equals(info.Name))
            {
                UIManager.LookAndFeel = info.ClassName;
                break;
            }
        }
    }
    catch (Exception)
    {
        // If Nimbus is not available, you can set the GUI to another look and feel.
    }

    try
    {
        // Load a previously trained network from disk and the MNIST test set (10,000 samples).
        // NOTE: load() is presumably a static factory in the underlying API; calling it
        // through the (possibly null) field mirrors the converted Java source.
        network = network.load(new FileInputStream("/home/mithquissir/Desktop/cnn/5-50-100/30.nnet"));
        testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000);
    }
    catch (Exception e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }

    initComponents();
}
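// For context, a minimal sketch of how a checkpoint file like 30.nnet above could be
// produced. This is an assumption, not code from this project: it presumes the C# port
// mirrors Neuroph's NeuralNetwork.save(String) API for serializing a trained network.
// The helper name SaveCheckpoint is illustrative.
static void SaveCheckpoint(NeuralNetwork network, string path)
{
    // Persist the trained weights so that FuNet1() above can reload them later.
    network.save(path);
}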
/// <param name="args"> Command line parameters used to initialize parameters of multi layer neural network optimizer /// [0] - maximal number of epochs during learning /// [1] - learning error stop condition /// [2] - learning rate used during learning process /// [3] - number of validation folds /// [4] - max number of layers in neural network /// [5] - min neuron count per layer /// [6] - max neuron count per layer /// [7] - neuron increment count </param> //JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET: //ORIGINAL LINE: public static void main(String[] args) throws java.io.IOException public static void Main(string[] args) { int maxIter = 10000; //Integer.parseInt(args[0]); double maxError = 0.01; // Double.parseDouble(args[1]); double learningRate = 0.2; // Double.parseDouble(args[2]); int validationFolds = Convert.ToInt32(args[3]); int maxLayers = Convert.ToInt32(args[4]); int minNeuronCount = Convert.ToInt32(args[5]); int maxNeuronCount = Convert.ToInt32(args[6]); int neuronIncrement = Convert.ToInt32(args[7]); LOG.info("MLP learning for MNIST started....."); DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 60000); DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000); BackPropagation bp = new BackPropagation(); bp.MaxIterations = maxIter; bp.MaxError = maxError; bp.LearningRate = learningRate; // commented out due to errors // KFoldCrossValidation errorEstimationMethod = new KFoldCrossValidation(neuralNet, trainSet, validationFolds); // // NeuralNetwork neuralNet = new MultilayerPerceptronOptimazer<>() // .withLearningRule(bp) // .withErrorEstimationMethod(errorEstimationMethod) // .withMaxLayers(maxLayers) // .withMaxNeurons(maxNeuronCount) // .withMinNeurons(minNeuronCount) // .withNeuronIncrement(neuronIncrement) // .createOptimalModel(trainSet); LOG.info("Evaluating model on Test Set....."); // commented out due to errors // Evaluation.runFullEvaluation(neuralNet, testSet); LOG.info("MLP learning for MNIST successfully finished....."); }
/// <param name="args"> Command line parameters used to initialize parameters of convolutional network /// [0] - maximal number of epochs during learning /// [1] - learning error stop condition /// [2] - learning rate used during learning process /// [3] - number of feature maps in 1st convolutional layer /// [4] - number of feature maps in 2nd convolutional layer /// [5] - number of feature maps in 3rd convolutional layer </param> public static void Main(string[] args) { try { int maxIter = 10000; // Integer.parseInt(args[0]); double maxError = 0.01; //Double.parseDouble(args[1]); double learningRate = 0.2; // Double.parseDouble(args[2]); int layer1 = Convert.ToInt32(args[3]); int layer2 = Convert.ToInt32(args[4]); int layer3 = Convert.ToInt32(args[5]); LOG.info("{}-{}-{}", layer1, layer2, layer3); DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 100); DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000); Dimension2D inputDimension = new Dimension2D(32, 32); Dimension2D convolutionKernel = new Dimension2D(5, 5); Dimension2D poolingKernel = new Dimension2D(2, 2); ConvolutionalNetwork convolutionNetwork = (new ConvolutionalNetwork.Builder()).withInputLayer(32, 32, 1).withConvolutionLayer(5, 5, layer1).withPoolingLayer(2, 2).withConvolutionLayer(5, 5, layer2).withPoolingLayer(2, 2).withConvolutionLayer(5, 5, layer3).withFullConnectedLayer(10).build(); ConvolutionalBackpropagation backPropagation = new ConvolutionalBackpropagation(); backPropagation.LearningRate = learningRate; backPropagation.MaxError = maxError; backPropagation.MaxIterations = maxIter; backPropagation.addListener(new LearningListener(convolutionNetwork, testSet)); backPropagation.ErrorFunction = new MeanSquaredError(); convolutionNetwork.LearningRule = backPropagation; convolutionNetwork.learn(trainSet); Evaluation.runFullEvaluation(convolutionNetwork, testSet); } catch (IOException e) { Console.WriteLine(e.ToString()); Console.Write(e.StackTrace); } }