/// <param name="args"> command line arguments which represent paths to persisted neural network /// [0] - location of neural network </param> //JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET: //ORIGINAL LINE: public static void main(String[] args) throws java.io.IOException public static void Main(string[] args) { DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000); NeuralNetwork nn = NeuralNetwork.load(new FileInputStream(args[0])); Evaluation.runFullEvaluation(nn, testSet); }
/// <summary>
/// Calculates weights for the Hopfield net to learn the specified training set
/// </summary>
/// <param name="trainingSet"> training set to learn </param>
public override void learn(DataSet trainingSet)
{
    int M = trainingSet.size();
    int N = neuralNetwork.getLayerAt(0).NeuronsCount;
    Layer hopfieldLayer = neuralNetwork.getLayerAt(0);

    for (int i = 0; i < N; i++)
    {
        for (int j = 0; j < N; j++)
        {
            if (j == i)
            {
                continue;
            }

            Neuron ni = hopfieldLayer.getNeuronAt(i);
            Neuron nj = hopfieldLayer.getNeuronAt(j);
            Connection cij = nj.getConnectionFrom(ni);
            Connection cji = ni.getConnectionFrom(nj);

            double w = 0;
            for (int k = 0; k < M; k++)
            {
                DataSetRow trainingSetRow = trainingSet.getRowAt(k);
                double pki = trainingSetRow.Input[i];
                double pkj = trainingSetRow.Input[j];
                w = w + pki * pkj;
            } // k

            cij.Weight.Value = w;
            cji.Weight.Value = w;
        } // j
    } // i
}
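// The nested loops above implement the Hebbian outer-product rule for a Hopfield network:
// for M training patterns p_k, the weight between neurons i and j is
//
//     w_ij = sum_{k=1}^{M} p_ki * p_kj,   with w_ii = 0 (no self-connections),
//
// and the resulting weight matrix is symmetric, which is why cij and cji both
// receive the same value w.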
/// <param name="args"> the command line arguments </param> public static void Main(string[] args) { NeuralNetwork nnet = NeuralNetwork.createFromFile("MicrNetwork.nnet"); DataSet dataSet = DataSet.load("MicrDataColor.tset"); Evaluation.runFullEvaluation(nnet, dataSet); }
protected internal override void populateInternalDataStructure(DataSet dataSet)
{
    foreach (DataSetRow r in dataSet.Rows)
    {
        dataDeque.Enqueue(r);
    }
}
private static DataSet loadDataSet()
{
    DataSet irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);
    irisDataSet.shuffle();
    return irisDataSet;
}
/// <param name="args"> command line arguments which represent paths to persisted neural networks /// [0] - location of first neural network /// [1] - location of second neural network </param> //JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET: //ORIGINAL LINE: public static void main(String[] args) throws java.io.IOException public static void Main(string[] args) { DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000); NeuralNetwork nn1 = NeuralNetwork.load(new FileInputStream(args[0])); NeuralNetwork nn2 = NeuralNetwork.load(new FileInputStream(args[1])); (new McNemarTest()).evaluateNetworks(nn1, nn2, testSet); }
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public static void main(String[] args) throws java.io.IOException
public static void Main(string[] args)
{
    DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 200);
    DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10000);

    BackPropagation learningRule = createLearningRule();
    NeuralNetwork neuralNet = (new MultilayerPerceptronOptimazer<BackPropagation>()).withLearningRule(learningRule).createOptimalModel(trainSet);

    Evaluation.runFullEvaluation(neuralNet, testSet);
}
public static void Main(string[] args)
{
    DataSet irisDataSet = loadDataSet();
    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(4, 15, 3);
    configureLearningRule(neuralNet);

    neuralNet.learn(irisDataSet);
    Evaluation.runFullEvaluation(neuralNet, irisDataSet);
}
public static void Main(string[] args)
{
    string inputFileName = "/iris_data.txt";
    DataSet irisDataSet = DataSet.createFromFile(inputFileName, 4, 3, ",", false);

    BackPropagation learningRule = createLearningRule();
    NeuralNetwork neuralNet = (new MultilayerPerceptronOptimazer<BackPropagation>()).withLearningRule(learningRule).createOptimalModel(irisDataSet);

    neuralNet.learn(irisDataSet);
    Evaluation.runFullEvaluation(neuralNet, irisDataSet);
}
private void findMaxAndMinVectors(DataSet dataSet)
{
    int inputSize = dataSet.InputSize;
    int outputSize = dataSet.OutputSize;

    maxIn = new double[inputSize];
    minIn = new double[inputSize];
    for (int i = 0; i < inputSize; i++)
    {
        maxIn[i] = double.MinValue; // start max at the lowest possible value so any data value raises it
        minIn[i] = double.MaxValue; // start min at the highest possible value so any data value lowers it
    }

    maxOut = new double[outputSize];
    minOut = new double[outputSize];
    for (int i = 0; i < outputSize; i++)
    {
        maxOut[i] = double.MinValue;
        minOut[i] = double.MaxValue;
    }

    foreach (DataSetRow dataSetRow in dataSet.Rows)
    {
        double[] input = dataSetRow.Input;
        for (int i = 0; i < inputSize; i++)
        {
            if (input[i] > maxIn[i])
            {
                maxIn[i] = input[i];
            }
            if (input[i] < minIn[i])
            {
                minIn[i] = input[i];
            }
        }

        double[] output = dataSetRow.DesiredOutput;
        for (int i = 0; i < outputSize; i++)
        {
            if (output[i] > maxOut[i])
            {
                maxOut[i] = output[i];
            }
            if (output[i] < minOut[i])
            {
                minOut[i] = output[i];
            }
        }
    }
}
public static void Main(string[] args)
{
    DataSet trainingSet = new DataSet(2, 1);
    trainingSet.addRow(new DataSetRow(new double[] { 0, 0 }, new double[] { 0 }));
    trainingSet.addRow(new DataSetRow(new double[] { 0, 1 }, new double[] { 1 }));
    trainingSet.addRow(new DataSetRow(new double[] { 1, 0 }, new double[] { 1 }));
    trainingSet.addRow(new DataSetRow(new double[] { 1, 1 }, new double[] { 0 }));

    MultiLayerPerceptron neuralNet = new MultiLayerPerceptron(TransferFunctionType.TANH, 2, 3, 1);
    neuralNet.learn(trainingSet);

    Evaluation.runFullEvaluation(neuralNet, trainingSet);
}
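// A minimal sketch of checking the trained XOR network row by row. It assumes the
// converted API keeps Neuroph's setInput()/calculate() methods and exposes the
// result as an Output property (hypothetical names here), mirroring the Java original.
foreach (DataSetRow row in trainingSet.Rows)
{
    neuralNet.setInput(row.Input);   // feed one input pair
    neuralNet.calculate();           // forward pass through the MLP
    double[] output = neuralNet.Output;
    Console.WriteLine("{0}, {1} -> {2:F3}", row.Input[0], row.Input[1], output[0]);
}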
public static void Main(string[] args)
{
    try
    {
        DataSet trainSet = MNISTDataSet.createFromFile(MNISTDataSet.TRAIN_LABEL_NAME, MNISTDataSet.TRAIN_IMAGE_NAME, 60);
        DataSet testSet = MNISTDataSet.createFromFile(MNISTDataSet.TEST_LABEL_NAME, MNISTDataSet.TEST_IMAGE_NAME, 10);

        ConvolutionalNetwork convolutionNetwork = (new ConvolutionalNetwork.Builder())
                .withInputLayer(32, 32, 1)
                .withConvolutionLayer(5, 5, 6)
                .withPoolingLayer(2, 2)
                .withConvolutionLayer(5, 5, 16)
                .withPoolingLayer(2, 2)
                .withConvolutionLayer(5, 5, 120)
                .withFullConnectedLayer(84)
                .withFullConnectedLayer(10)
                .build();

        // add transfer function and its properties
        // we need an output RBF Euclidean layer - implement the original LeNet5 - and make sure it works
        // + create an RBF Euclidean and add it to the output layer: EuclideanRBF
        // - amplitude for tanh - add a parameter
        // the convolution layer currently uses RectifiedLinear.class - they should all be tanh
        // why does the learning event fire twice? does it log twice?

        ConvolutionalBackpropagation backPropagation = new ConvolutionalBackpropagation();
        backPropagation.LearningRate = 0.001;
        backPropagation.MaxError = 0.01;
        //backPropagation.setMaxIterations(1000);
        backPropagation.ErrorFunction = new MeanSquaredError();

        convolutionNetwork.LearningRule = backPropagation;
        backPropagation.addListener(new LearningListener());

        // System.out.println("Started training...");
        convolutionNetwork.learn(trainSet);
        // System.out.println("Done training!");

        // CrossValidation crossValidation = new CrossValidation(convolutionNetwork, trainSet, 6);
        // crossValidation.run();
        // ClassificationMetrics validationResult = crossValidation.computeErrorEstimate(convolutionNetwork, trainSet);
        // Evaluation.runFullEvaluation(convolutionNetwork, testSet);

        convolutionNetwork.save("mnist.nnet");
        // System.out.println(crossValidation.getResult());
    }
    catch (IOException e)
    {
        Console.WriteLine(e.ToString());
        Console.Write(e.StackTrace);
    }
}
internal double[] maxIn, maxOut; // these contain max values for in and out columns

public virtual void normalize(DataSet dataSet)
{
    findMaxVectors(dataSet);

    foreach (DataSetRow row in dataSet.Rows)
    {
        double[] normalizedInput = normalizeMax(row.Input, maxIn);
        row.Input = normalizedInput;

        if (dataSet.Supervised)
        {
            double[] normalizedOutput = normalizeMax(row.DesiredOutput, maxOut);
            row.DesiredOutput = normalizedOutput;
        }
    }
}
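// normalizeMax (not shown here) is presumably a per-column division by the corresponding
// entry of maxIn / maxOut found by findMaxVectors:
//
//     x'_i = x_i / max_i
//
// which maps a column into [-1, 1] when max_i is the largest magnitude seen in that column.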
public override void doLearningEpoch(DataSet trainingSet)
{
    base.doLearningEpoch(trainingSet);

    if (currentIteration > 0)
    {
        if (useDynamicLearningRate)
        {
            adjustLearningRate();
        }
        if (useDynamicMomentum)
        {
            adjustMomentum();
        }
    }
}
internal double[] minIn, minOut; // contain min values for in and out columns

public virtual void normalize(DataSet dataSet)
{
    // find min and max vectors
    findMaxAndMinVectors(dataSet);

    foreach (DataSetRow row in dataSet.Rows)
    {
        double[] normalizedInput = normalizeMaxMin(row.Input, minIn, maxIn);
        row.Input = normalizedInput;

        if (dataSet.Supervised)
        {
            double[] normalizedOutput = normalizeMaxMin(row.DesiredOutput, minOut, maxOut);
            row.DesiredOutput = normalizedOutput;
        }
    }
}
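// normalizeMaxMin (not shown here) is presumably the standard min-max rescaling of each
// column into [0, 1] using the vectors found by findMaxAndMinVectors:
//
//     x'_i = (x_i - min_i) / (max_i - min_i)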
private double[] scaleFactorIn, scaleFactorOut; // hold scaling values for all columns

public virtual void normalize(DataSet dataSet)
{
    findMaxVectors(dataSet);
    findScaleVectors();

    foreach (DataSetRow dataSetRow in dataSet.Rows)
    {
        double[] normalizedInput = normalizeScale(dataSetRow.Input, scaleFactorIn);
        dataSetRow.Input = normalizedInput;

        if (dataSet.Supervised)
        {
            double[] normalizedOutput = normalizeScale(dataSetRow.DesiredOutput, scaleFactorOut);
            dataSetRow.DesiredOutput = normalizedOutput;
        }
    }
}
public virtual void normalize(DataSet dataSet)
{
    double[] maxInput = DataSetStatistics.calculateMaxByColumns(dataSet);
    double[] minInput = DataSetStatistics.calculateMinByColumns(dataSet);
    double[] meanInput = DataSetStatistics.calculateMean(dataSet);

    foreach (DataSetRow row in dataSet.Rows)
    {
        double[] normalizedInput = row.Input;

        for (int i = 0; i < dataSet.InputSize; i++)
        {
            double divider = maxInput[i] - minInput[i] == 0 ? 1 : maxInput[i] - minInput[i];
            normalizedInput[i] = (normalizedInput[i] - meanInput[i]) / divider;
        }

        row.Input = normalizedInput;
    }
}
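// Written out, the loop above centers each input column on its mean and scales by its range:
//
//     x'_i = (x_i - mean_i) / (max_i - min_i)
//
// with the divider forced to 1 when max_i == min_i, so constant columns are centered
// without a division by zero.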
/// <summary>
/// This method does one learning epoch for the unsupervised learning rules.
/// It iterates through the training set and trains network weights for each
/// element. Stops learning after one epoch.
/// </summary>
/// <param name="trainingSet"> training set for training network </param>
public override void doLearningEpoch(DataSet trainingSet)
{
    base.doLearningEpoch(trainingSet);
    // stop learning after one epoch -- there is no other stopping criterion
    // for this rule, so the number of iterations must be limited here
    stopLearning();
}