/// <summary>
/// Trains an ANN-Miner network on <paramref name="dataset"/> and prints the
/// classification quality of the network before and after the final step.
/// </summary>
/// <param name="dataset">Dataset used for both training and evaluation.</param>
static void TestANNMiner(Dataset dataset)
{
    IClassificationMeasure measure = new AccuracyMeasure();
    ILearningMethod learningMethod = new BackPropagation(0.1, 10, 0.9, false);
    // Hidden layer size: product of attribute count and target vector length.
    int hiddenUnitCount = dataset.Metadata.Attributes.Length * dataset.Metadata.Target.Length;
    IActivationFunction activationFunction = new SigmoidActivationFunction();

    ISolutionQualityEvaluator<ConnectionDC> evaluator = new NNClassificationQualityEvaluator(measure, learningMethod, hiddenUnitCount, activationFunction);
    IHeuristicsCalculator<ConnectionDC> calculator = new DefaultHeuristicCalculator<ConnectionDC>();
    ILocalSearch<ConnectionDC> localSearch = new DefaultRemovalLocalSearch<ConnectionDC>(evaluator);
    IComponentInvalidator<ConnectionDC> invalidator = new NNConnectorInvalidator();
    Problem<ConnectionDC> problem = new Problem<ConnectionDC>(invalidator, calculator, evaluator, localSearch);

    NeuralNetwork network_before = null;
    NeuralNetwork network_final = SingleTest.CreateNeuralNet_ANNMiner(problem, hiddenUnitCount, true, false, dataset, ref network_before);

    double quality_before = SingleTest.TestClassifier(network_before, dataset, measure);
    double quality_final = SingleTest.TestClassifier(network_final, dataset, measure);

    // Fix: the original printed the identical "ANN -" label for both values
    // (and misspelled "quality"), so the two output lines could not be told
    // apart. Label them explicitly.
    Console.WriteLine("ANN - before:" + quality_before);
    Console.WriteLine("ANN - final:" + quality_final);
}
/// <summary>
/// Verifies the sigmoid function output for a given input, comparing at two
/// decimal places of precision.
/// </summary>
/// <param name="x">Input value fed to the sigmoid.</param>
/// <param name="expected">Expected sigmoid output for <paramref name="x"/>.</param>
public void Sigmoid_ShouldReturnCorrectValues(double x, double expected)
{
    var sigmoid = new SigmoidActivationFunction();

    double rounded = Math.Round(sigmoid.Function(x), 2);

    Assert.Equal(Math.Round(expected, 2), rounded);
}
/// <summary>
/// Checks that the sigmoid derivative matches the expected value for a
/// negative input, to ten decimal places.
/// </summary>
/// <param name="inputValue">Value at which the derivative is evaluated.</param>
/// <param name="expectedResult">Expected derivative value.</param>
public void DerviativeProducesCorrectResultNegativeValue(double inputValue, double expectedResult)
{
    var sut = SigmoidActivationFunction.Create();

    var result = sut.Derivative(inputValue);

    DoubleAssertionHelpers.AssertWithPrecision(expectedResult, result, 10);
}
/// <summary>
/// Checks that the sigmoid activation matches the expected value for a
/// negative input, to ten decimal places.
/// </summary>
/// <param name="activationValue">Value at which the activation is evaluated.</param>
/// <param name="expectedResult">Expected activation value.</param>
public void ActivateProducesCorrectResultNegativeValue(double activationValue, double expectedResult)
{
    var sut = SigmoidActivationFunction.Create();

    var result = sut.Activate(activationValue);

    DoubleAssertionHelpers.AssertWithPrecision(expectedResult, result, 10);
}
/// <summary>
/// Trains a backpropagation network on the dataset and reports its accuracy
/// (as a percentage) and size on the console.
/// </summary>
/// <param name="dataset">Dataset used for both training and evaluation.</param>
static void TestBackProbagation(Dataset dataset)
{
    IActivationFunction activationFunction = new SigmoidActivationFunction();

    // Hidden layer size: mean of the attribute count and target vector length.
    int hiddenUnitCount = (dataset.Metadata.Attributes.Length + dataset.Metadata.Target.Length) / 2;

    NeuralNetwork network = SingleTest.CreateNeuralNet_BP(dataset, hiddenUnitCount, 0.9, 0.01, 1000, activationFunction);

    AccuracyMeasure measure = new AccuracyMeasure();
    double quality = SingleTest.TestClassifier(network, dataset, measure);

    Console.WriteLine(measure.ToString() + ":" + Math.Round(quality * 100, 2));
    Console.WriteLine("Size:" + network.Size);
}
/// <summary>
/// Demo entry point: prints sample sigmoid values, runs the network demos,
/// then blocks so the console window stays open.
/// </summary>
public Program()
{
    Console.WriteLine("Activation Function Testing");

    var sigmoidActivationFunction = new SigmoidActivationFunction();
    Console.WriteLine($"{sigmoidActivationFunction.Execute(1.0)}");
    Console.WriteLine($"{sigmoidActivationFunction.ExecuteDerivative(1.0)}");
    Console.WriteLine();

    Console.WriteLine("Running Test Network Test: ");
    //RunTestNeuralNetwork( );
    Console.WriteLine();

    Console.WriteLine("Running Adding Network Test: ");
    RunXORNeuralNetwork();
    Console.WriteLine();

    // Fix: the original ended with `while (true) { ; }`, a busy-wait that pins
    // a CPU core at 100% just to keep the window open. Block on a key press
    // instead, which idles the thread.
    Console.ReadKey();
}
/// <summary>The sigmoid derivative at 0 is 0.25 (within 1e-5).</summary>
public void DerivativeTest()
{
    var sigmoid = new SigmoidActivationFunction();

    Assert.AreEqual(0.25, sigmoid.Derivative(0.0), 0.00001);
}
/// <summary>The sigmoid activation at 0 is 0.5 (within 1e-5).</summary>
public void ActivationTest()
{
    var sigmoid = new SigmoidActivationFunction();

    Assert.AreEqual(0.5, sigmoid.Activation(0.0), 0.00001);
}
/// <summary>
/// Runs the ANN-Miner experiment over every dataset listed in "datasets.txt",
/// once per quality-evaluation measure (QEM) and per cross-validation fold.
/// For each fold it trains a network via ANN-Miner, evaluates accuracy of the
/// network before and after the final step on the held-out fold, prints both
/// results, and persists them (with network sizes) via SaveResults.
/// Uses the class-level fields _currentFold and _folds for fold iteration.
/// NOTE(review): on any exception the remaining folds for the current measure
/// are skipped (`break`) after logging — presumably to abandon datasets that
/// fail to train; confirm `continue` was not intended instead.
/// </summary>
public static void RunANNMiner_QEM() { AccuracyMeasure testMeasure = new AccuracyMeasure(); foreach (string dataset in GetDatasetFolds("datasets.txt")) { //---------------------------------------- Console.WriteLine("Data Table:" + dataset); //---------------------------------------- foreach (IClassificationMeasure measure in GetMeasures()) { for (_currentFold = 0; _currentFold < _folds; _currentFold++) { //---------------------------------------- Console.WriteLine(dataset + " - Fold:" + _currentFold.ToString() + " - " + measure.ToString()); //---------------------------------------- DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold); DataMining.Data.Dataset trainingSet = tables[0]; DataMining.Data.Dataset testingSet = tables[1]; double quality_before = 0.0; double quality_final = 0.0; double size_before = 0.0; double size_final = 0.0; ILearningMethod learningMethod = new BackPropagation(0.05, 10, 0.9, false); int hiddenUnitCount = (trainingSet.Metadata.Attributes.Length + trainingSet.Metadata.Target.Length); IActivationFunction activationFunction = new SigmoidActivationFunction(); NNClassificationQualityEvaluator evaluator = new NNClassificationQualityEvaluator(measure, learningMethod, hiddenUnitCount, activationFunction); NNConnectionHeuristicCalculator calculator = new NNConnectionHeuristicCalculator(0.7); DefaultRemovalLocalSearch <ConnectionDC> localSearch = new DefaultRemovalLocalSearch <ConnectionDC>(evaluator); NNConnectorInvalidator invalidator = new NNConnectorInvalidator(); Problem <ConnectionDC> problem = new Problem <ConnectionDC>(invalidator, calculator, evaluator, localSearch); NeuralNetwork network_before = null; try { NeuralNetwork network_final = SingleTest.CreateNeuralNet_ANNMiner(problem, hiddenUnitCount, true, false, trainingSet, ref network_before); quality_before = SingleTest.TestClassifier(network_before, testingSet, testMeasure); quality_before = Math.Round(quality_before * 100, 2); quality_final = 
SingleTest.TestClassifier(network_final, testingSet, testMeasure); quality_final = Math.Round(quality_final * 100, 2); size_before = network_before.Size; size_final = network_final.Size; //---------------------------------------- Console.WriteLine("ANNMiner - before:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + measure.ToString() + ":" + quality_before.ToString()); Console.WriteLine("ANNMiner - final:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + measure.ToString() + ":" + quality_final.ToString()); Console.WriteLine("---------------------------------------------------"); //---------------------------------------- SaveResults(dataset, "ANNMiner - before", measure.ToString(), quality_before.ToString(), size_before.ToString()); SaveResults(dataset, "ANNMiner - final", measure.ToString(), quality_final.ToString(), size_final.ToString()); } catch (Exception ex) { LogError(ex); break; } } } Console.WriteLine("---------------------------------------------------"); Console.WriteLine("---------------------------------------------------"); Console.WriteLine("---------------------------------------------------"); } }
/// <summary>
/// Runs the GHCNN experiment over every dataset listed in "datasets.txt".
/// For each cross-validation fold it trains a network, evaluates accuracy of
/// the network before and after the final step on the held-out fold, then
/// saves per-dataset averages (quality, size, elapsed time) via SaveResults.
/// Uses the class-level fields _currentFold, _folds and stopWatch.
/// </summary>
public static void RunGHCNN()
{
    foreach (string dataset in GetDatasetFolds("datasets.txt"))
    {
        //----------------------------------------
        Console.WriteLine("Data Table:" + dataset);
        //----------------------------------------
        double avgQualityBefore = 0;
        double avgSizeBefore = 0;
        double avgQulityAfter = 0;
        double avgSizeAfter = 0;

        for (_currentFold = 0; _currentFold < _folds; _currentFold++)
        {
            //----------------------------------------
            Console.WriteLine("Fold:" + _currentFold.ToString());
            //----------------------------------------
            DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold);
            DataMining.Data.Dataset trainingSet = tables[0];
            DataMining.Data.Dataset testingSet = tables[1];

            double quality_before = 0.0;
            double quality_final = 0.0;
            double size_before = 0.0;
            double size_final = 0.0;

            IClassificationMeasure testMeasure = new AccuracyMeasure();
            ILearningMethod learningMethod = new BackPropagation(_acoLearningRateNW, _acoEpochsNW, 0.9, false);
            // Hidden layer size: attribute count plus target vector length.
            int hiddenUnitCount = (trainingSet.Metadata.Attributes.Length + trainingSet.Metadata.Target.Length);
            IActivationFunction activationFunction = new SigmoidActivationFunction();
            IClassificationMeasure trainingMeasure = new QLFunction();
            NNClassificationQualityEvaluator evaluator = new NNClassificationQualityEvaluator(trainingMeasure, learningMethod, hiddenUnitCount, activationFunction);
            NNConnectionHeuristicCalculator calculator = new NNConnectionHeuristicCalculator(0.7);
            DefaultRemovalLocalSearch<ConnectionDC> localSearch = new DefaultRemovalLocalSearch<ConnectionDC>(evaluator);
            NNConnectorInvalidator invalidator = new NNConnectorInvalidator();
            Problem<ConnectionDC> problem = new Problem<ConnectionDC>(invalidator, calculator, evaluator, localSearch);

            NeuralNetwork network_before = null;
            try
            {
                stopWatch.Reset();
                stopWatch.Start();
                NeuralNetwork network_final = SingleTest.CreateNeuralNet_GHCNN(problem, hiddenUnitCount, true, false, trainingSet, ref network_before);
                stopWatch.Stop();

                quality_before = SingleTest.TestClassifier(network_before, testingSet, testMeasure);
                quality_before = Math.Round(quality_before * 100, 2);
                avgQualityBefore += quality_before;

                quality_final = SingleTest.TestClassifier(network_final, testingSet, testMeasure);
                quality_final = Math.Round(quality_final * 100, 2);
                avgQulityAfter += quality_final;

                size_before = network_before.Size;
                size_final = network_final.Size;
                avgSizeBefore += size_before;
                // Fix: the original read `avgSizeAfter += avgSizeAfter;`, which
                // never accumulates the fold's final network size — the saved
                // "GHCNN - final" size was always 0.
                avgSizeAfter += size_final;

                //----------------------------------------
                Console.WriteLine("GHCNN - before:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + testMeasure.ToString() + ":" + quality_before.ToString());
                Console.WriteLine("GHCNN - final:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + testMeasure.ToString() + ":" + quality_final.ToString());
                Console.WriteLine("---------------------------------------------------");
                //----------------------------------------
            }
            catch (Exception ex)
            {
                LogError(ex);
                break;
            }
        }

        avgQualityBefore /= _folds;
        avgQulityAfter /= _folds;
        avgSizeBefore /= _folds;
        avgSizeAfter /= _folds;

        SaveResults(dataset, "GHCNN - before", avgQualityBefore.ToString(), avgSizeBefore.ToString(), stopWatch.ElapsedMilliseconds.ToString());
        SaveResults(dataset, "GHCNN - final", avgQulityAfter.ToString(), avgSizeAfter.ToString(), stopWatch.ElapsedMilliseconds.ToString());

        Console.WriteLine("---------------------------------------------------");
        Console.WriteLine("---------------------------------------------------");
        Console.WriteLine("---------------------------------------------------");
    }
}
/// <summary>
/// Runs the plain backpropagation baseline over every dataset listed in
/// "datasets.txt". For each cross-validation fold it trains a network via
/// SingleTest.CreateNeuralNet_BP, evaluates accuracy on the held-out fold,
/// prints the result, and finally saves the per-dataset average quality and
/// size via SaveResults. Uses the class-level fields _currentFold, _folds,
/// _bpLearningRate, _bpEpochs and stopWatch.
/// NOTE(review): stopWatch is Reset at each fold but its elapsed time is only
/// saved after the loop, so the persisted time reflects the LAST fold only —
/// confirm whether a per-dataset total was intended.
/// </summary>
public static void RunBackPropagation() { foreach (string dataset in GetDatasetFolds("datasets.txt")) { //---------------------------------------- Console.WriteLine("Data Table:" + dataset); //---------------------------------------- double avgQuality = 0; double avgSize = 0; for (_currentFold = 0; _currentFold < _folds; _currentFold++) { //---------------------------------------- Console.WriteLine("Fold:" + _currentFold.ToString()); //---------------------------------------- DataMining.Data.Dataset[] tables = LoadTrainingAndTestingData(dataset, _currentFold); DataMining.Data.Dataset trainingSet = tables[0]; DataMining.Data.Dataset testingSet = tables[1]; double quality = 0.0; double size = 0.0; AccuracyMeasure testMeasure = new AccuracyMeasure(); IActivationFunction activationFunction = new SigmoidActivationFunction(); //int hiddenUnitCount = trainingSet.Metadata.Attributes.Length * trainingSet.Metadata.Target.Length; int hiddenUnitCount = (trainingSet.Metadata.Attributes.Length + trainingSet.Metadata.Target.Length); try { stopWatch.Reset(); stopWatch.Start(); NeuralNetwork network = SingleTest.CreateNeuralNet_BP(trainingSet, hiddenUnitCount, 0.9, _bpLearningRate, _bpEpochs, activationFunction); stopWatch.Stop(); quality = SingleTest.TestClassifier(network, testingSet, testMeasure); quality = Math.Round(quality * 100, 2); size = network.Size; avgQuality += quality; avgSize += size; //---------------------------------------- Console.WriteLine("Backprop:" + dataset + "- Fold:" + _currentFold.ToString() + "=>" + testMeasure.ToString() + ":" + quality.ToString()); Console.WriteLine("---------------------------------------------------"); //---------------------------------------- } catch (Exception ex) { LogError(ex); break; } } avgQuality /= _folds; avgSize /= _folds; SaveResults(dataset, "BackProp", avgQuality.ToString(), avgSize.ToString(), stopWatch.ElapsedMilliseconds.ToString()); Console.WriteLine("---------------------------------------------------"); 
Console.WriteLine("---------------------------------------------------"); Console.WriteLine("---------------------------------------------------"); } }
/// <summary>
/// Creates a fresh sigmoid activation function as the system under test
/// before each test runs.
/// </summary>
public void TestInitialize()
{
    sut = new SigmoidActivationFunction();
}
/// <summary>
/// sigmoid(0.23) with steepness coefficient 1 should be ~0.557247854598556.
/// </summary>
public void CalculateOutput_InputPassed_CorrectOutput()
{
    var sigmoidFunction = new SigmoidActivationFunction(1);

    // Fix: compare doubles with an explicit tolerance instead of the exact
    // Assert.AreEqual(double, double) overload — bit-exact floating-point
    // equality is fragile across runtimes and JIT optimization levels.
    Assert.AreEqual(0.557247854598556, sigmoidFunction.CalculateOutput(0.23), 1e-12);
}
/// <summary>
/// The configured activation function's output at 0.6 matches that of a
/// freshly constructed sigmoid, to within 1e-8.
/// </summary>
public void SigmoidActivationFunctionTrainingReturnsExpectedActivation()
{
    var reference = new SigmoidActivationFunction();
    var expected = reference.Activation(0.6d);

    var actual = _activationFunction.Activation(0.6);

    actual.Should().BeApproximately(expected, 0.00000001d);
}
/// <summary>
/// Creates a fresh sigmoid activation function before each test runs.
/// </summary>
public void SetUp()
{
    _activationFunction = new SigmoidActivationFunction();
}