/// <summary>
/// Function goes through collection (batch) of the network inputs and for each of them computes the output.
/// Computed output is then compared with a corresponding ideal output.
/// The error Abs(ideal - computed) of every output value is passed to the result error statistics.
/// </summary>
/// <param name="inputCollection">Collection of the network inputs (batch)</param>
/// <param name="idealOutputCollection">Collection of the ideal outputs (batch)</param>
/// <returns>Error statistics</returns>
public BasicStat ComputeBatchErrorStat(List<double[]> inputCollection, List<double[]> idealOutputCollection)
{
    BasicStat errStat = new BasicStat();
    //NOTE(review): errStat is shared by all parallel iterations - assumes BasicStat.AddSampleValue
    //is thread-safe (the out-parameter overload relies on the same assumption); confirm.
    Parallel.For(0, inputCollection.Count, row =>
    {
        double[] computedOutputVector = Compute(inputCollection[row]);
        //Fix: the original loop condition was "i < 1", so only the first output value
        //ever contributed to the error statistics. Iterate all output values, consistent
        //with the overload that exposes the computed outputs.
        for (int i = 0; i < _numOfOutputValues; i++)
        {
            double error = idealOutputCollection[row][i] - computedOutputVector[i];
            errStat.AddSampleValue(Math.Abs(error));
        }
    });
    return (errStat);
}
/// <summary>
/// Computes the transformed value from the moving window of the last seen values.
/// </summary>
/// <param name="data">Collection of natural values of the already known input fields</param>
public double Next(double[] data)
{
    double inputValue = data[_fieldIdx];
    //Reject undefined input
    if (double.IsNaN(inputValue))
    {
        throw new InvalidOperationException($"Invalid data value at input field index {_fieldIdx} (NaN).");
    }
    //Push the new value into the moving window
    _lastValues.Enqueue(inputValue, true);
    //Collect statistics over the current window content
    BasicStat windowStat = new BasicStat();
    for (int idx = 0; idx < _lastValues.Count; idx++)
    {
        windowStat.AddSampleValue(_lastValues.GetElementAt(idx, true));
    }
    //Return the configured statistical attribute of the window
    switch (_settings.Output)
    {
        case OutputValue.Sum:
            return windowStat.Sum;
        case OutputValue.NegSum:
            return windowStat.NegSum;
        case OutputValue.PosSum:
            return windowStat.PosSum;
        case OutputValue.SumOfSquares:
            return windowStat.SumOfSquares;
        case OutputValue.Min:
            return windowStat.Min;
        case OutputValue.Max:
            return windowStat.Max;
        case OutputValue.Mid:
            return windowStat.Mid;
        case OutputValue.Span:
            return windowStat.Span;
        case OutputValue.ArithAvg:
            return windowStat.ArithAvg;
        case OutputValue.MeanSquare:
            return windowStat.MeanSquare;
        case OutputValue.RootMeanSquare:
            return windowStat.RootMeanSquare;
        case OutputValue.Variance:
            return windowStat.Variance;
        case OutputValue.StdDev:
            return windowStat.StdDev;
        case OutputValue.SpanDev:
            return windowStat.SpanDev;
        default:
            return 0;
    }
}
/// <summary>
/// Function goes through collection (batch) of the network inputs and for each of them computes the output.
/// Computed output is then compared with a corresponding ideal output.
/// The error Abs(ideal - computed) is passed to the result error statistics.
/// </summary>
/// <param name="inputCollection">Collection of the network inputs (batch)</param>
/// <param name="idealOutputCollection">Collection of the ideal outputs (batch)</param>
/// <param name="computedOutputCollection">Collection of the computed outputs (batch)</param>
/// <returns>Error statistics</returns>
public BasicStat ComputeBatchErrorStat(List<double[]> inputCollection, List<double[]> idealOutputCollection, out List<double[]> computedOutputCollection)
{
    BasicStat errStat = new BasicStat();
    //Each parallel iteration writes only its own slot of this array
    double[][] allComputedOutputs = new double[idealOutputCollection.Count][];
    //NOTE(review): errStat is shared by all parallel iterations - assumes BasicStat.AddSampleValue
    //is thread-safe; confirm.
    Parallel.For(0, inputCollection.Count, row =>
    {
        double[] computed = Compute(inputCollection[row]);
        allComputedOutputs[row] = computed;
        for (int i = 0; i < _numOfOutputValues; i++)
        {
            errStat.AddSampleValue(Math.Abs(idealOutputCollection[row][i] - computed[i]));
        }
    });
    computedOutputCollection = new List<double[]>(allComputedOutputs);
    return errStat;
}
/// <summary>
/// Computes the rescalled range (range of the cumulative deviations divided by
/// the standard deviation of the deviations).
/// </summary>
public double Compute()
{
    //No data -> zero range
    if (_valueCollection.Count == 0)
    {
        return 0;
    }
    double mean = _sum / _valueCollection.Count;
    BasicStat deviationStat = new BasicStat();
    Interval cumulativeRange = new Interval();
    double runningDeviation = 0;
    foreach (double value in _valueCollection)
    {
        double deviation = value - mean;
        deviationStat.AddSampleValue(deviation);
        runningDeviation += deviation;
        cumulativeRange.Adjust(runningDeviation);
    }
    //Avoid division by zero when all values equal the mean
    if (deviationStat.StdDev == 0)
    {
        return 0;
    }
    return (cumulativeRange.Max - cumulativeRange.Min) / deviationStat.StdDev;
}
/// <summary>
/// Selects the given number of neurons by distance from the reference neuron.
/// For each pick, a target distance is sampled from the Gaussian distribution around avgDistance
/// (bounded to the really available distance range) and the not-yet-selected neuron whose distance
/// is closest to that target is taken.
/// </summary>
/// <param name="refNeuron">Reference neuron</param>
/// <param name="availableNeurons">Neurons to select from</param>
/// <param name="avgDistance">Desired average distance of the selected neurons</param>
/// <param name="count">Number of neurons to be selected</param>
private List<INeuron> SelectNeuronsByDistance(INeuron refNeuron, INeuron[] availableNeurons, double avgDistance, int count)
{
    List<INeuron> selectedNeurons = new List<INeuron>(count);
    List<INeuron> remainingNeurons = new List<INeuron>(availableNeurons);
    List<double> remainingDistances = new List<double>(availableNeurons.Length);
    //Fill and analyze all distances
    BasicStat allDistancesStat = new BasicStat();
    for (int i = 0; i < availableNeurons.Length; i++)
    {
        double distance = refNeuron.Placement.ComputeEuclideanDistance(availableNeurons[i].Placement);
        remainingDistances.Add(distance);
        allDistancesStat.AddSampleValue(distance);
    }
    for (int n = 0; n < count; n++)
    {
        //Sample the target distance for this pick
        double targetDistance = _rand.NextGaussianDouble(avgDistance, 1).Bound(allDistancesStat.Min, allDistancesStat.Max);
        //Fix: the original compared candidates against avgDistance, leaving the sampled
        //distance unused, so every pick degenerated to "closest to avgDistance".
        //Compare against the sampled target instead.
        int selectedNIdx = 0;
        double err = Math.Abs(remainingDistances[selectedNIdx] - targetDistance);
        for (int i = 1; i < remainingDistances.Count; i++)
        {
            double cmpErr = Math.Abs(remainingDistances[i] - targetDistance);
            if (cmpErr < err)
            {
                selectedNIdx = i;
                err = cmpErr;
            }
        }
        selectedNeurons.Add(remainingNeurons[selectedNIdx]);
        //Removed: selectedDistancesStat was accumulated but never read
        remainingNeurons.RemoveAt(selectedNIdx);
        remainingDistances.RemoveAt(selectedNIdx);
    }
    return selectedNeurons;
}
/// <summary>
/// Function checks given output features and sets general enabling/disabling switches.
/// A feature is enabled when its value span exceeds the configured minimum and it is not
/// among the configured ratio of the weakest features.
/// </summary>
/// <param name="predictorsCollection">Collection of regression predictors</param>
private void InitOutputFeaturesGeneralSwitches(List<double[]> predictorsCollection)
{
    int numOfFeatures = OutputFeatureDescriptorCollection.Count;
    //Allocate general switches, all disabled
    OutputFeatureGeneralSwitchCollection = new bool[numOfFeatures];
    OutputFeatureGeneralSwitchCollection.Populate(false);
    //Compute each predictor's value span in parallel (each iteration writes only its own slot)
    Tuple<int, double>[] featureSpans = new Tuple<int, double>[numOfFeatures];
    Parallel.For(0, numOfFeatures, i =>
    {
        BasicStat featureStat = new BasicStat();
        foreach (double[] row in predictorsCollection)
        {
            featureStat.AddSampleValue(row[i]);
        }
        //Use predictor's value span as a differentiator
        featureSpans[i] = new Tuple<int, double>(i, featureStat.Span);
    });
    //Sort collected predictor differentiators
    Array.Sort(featureSpans, CompareOutputFeature);
    //Enable predictors, rejecting the tail given by the reduction ratio
    int numOfPredictorsToBeRejected = (int)Math.Round(OutputFeatureDescriptorCollection.Count * _preprocessorCfg.PredictorsReductionRatio);
    int firstIndexToBeRejected = featureSpans.Length - numOfPredictorsToBeRejected;
    NumOfActiveOutputFeatures = 0;
    for (int i = 0; i < featureSpans.Length; i++)
    {
        if (i < firstIndexToBeRejected && featureSpans[i].Item2 > _preprocessorCfg.PredictorValueMinSpan)
        {
            //Enable predictor
            OutputFeatureGeneralSwitchCollection[featureSpans[i].Item1] = true;
            ++NumOfActiveOutputFeatures;
        }
    }
    return;
}
//Methods
/// <summary>
/// Interactive playground: exercises several library components in sequence
/// (feature filter, pulse generator, random distributions, activation functions,
/// linear algebra). Writes results to the console and pauses on Console.ReadLine().
/// </summary>
public void Run()
{
    //Filter test: feed 1500 random samples, then check filter/reverse round-trip of 0.5
    RealFeatureFilter rff = new RealFeatureFilter(new Interval(-1, 1));
    for (int i = 1; i <= 1500; i++)
    {
        rff.Update(_rand.NextDouble() * _rand.Next(0, 10000));
    }
    double featureValue = 0.5;
    double filterValue = rff.ApplyFilter(featureValue);
    double reverseValue = rff.ApplyReverse(filterValue);
    Console.WriteLine($"Feature: {featureValue} Filter: {filterValue} Reverse: {reverseValue}");
    //Pulse generator test: collect statistics of the periods between nonzero pulses
    BasicStat sampleStat = new BasicStat();
    sampleStat.Reset();
    PulseGeneratorSettings modSettings = new PulseGeneratorSettings(1, 1.5, PulseGeneratorSettings.TimingMode.Poisson);
    IGenerator generator = new PulseGenerator(modSettings);
    int steps = 10000;
    double period = 0;
    for (int i = 0; i < steps; i++)
    {
        ++period;
        double sample = generator.Next();
        //Console.WriteLine(sample);
        if (sample != 0)
        {
            //Pulse fired - record the elapsed period and restart the counter
            sampleStat.AddSampleValue(period);
            period = 0;
        }
    }
    Console.WriteLine($"Mean: {sampleStat.ArithAvg} StdDev: {sampleStat.StdDev} Min: {sampleStat.Min} Max: {sampleStat.Max}");
    Console.ReadLine();
    //Random distributions test: 200 samples of the filtered Gaussian
    BasicStat rStat = new BasicStat();
    for (int i = 0; i < 200; i++)
    {
        double r = _rand.NextFilterredGaussianDouble(0.5, 1, -0.5, 1);
        rStat.AddSampleValue(r);
        Console.WriteLine(r);
    }
    Console.WriteLine($"Mean: {rStat.ArithAvg} StdDev: {rStat.StdDev} Min: {rStat.Min} Max: {rStat.Max}");
    Console.ReadLine();
    //Activation tests: print the convergence of an exponentially fading sum (decay 0.1)
    double fadingSum = 0;
    for (int i = 0; i < 1000; i++)
    {
        fadingSum *= (1d - 0.1);
        fadingSum += 1d;
        Console.WriteLine(fadingSum);
    }
    Console.ReadLine();
    IActivationFunction testAF = ActivationFactory.Create(new SimpleIFSettings(refractoryPeriods: 0), new Random(0));
    TestActivation(testAF, 100, 3.5, 10, 70);
    SimpleIFSettings setup = new SimpleIFSettings();
    FindAFInputBorders(ActivationFactory.Create(setup, new Random(0)), -0.1, 20);
    //Linear algebra test: 36 values -> 6x6 matrix
    double[] flatData =
    {
        0.2, 5, 17.3, 1.01, 54, 7,
        2.2, 5.5, 12.13, 11.57, 5.71, -85,
        -70.1, 15, -18.3, 0.3, 42, -6.25,
        0.042, 1, 7.75, -81.01, -21.29, 5.44,
        0.1, 4, -4.3, 18.01, 7.12, -3.14,
        -80.1, 24.4, 4.3, 12.03, 2.789, -13
    };
    Matrix testM = new Matrix(6, 6, flatData);
    /*
     * //Inversion test
     * Matrix resultM = new Matrix(testM);
     * resultM.SingleThreadInverse();
     */
    /*
     * //Transpose test
     * Matrix resultM = testM.Transpose();
     */
    /*
     * //Multiply test
     * Matrix resultM = Matrix.Multiply(testM, testM);
     * for (int i = 0; i < resultM.NumOfRows; i++)
     * {
     *     Console.WriteLine($"{resultM.Data[i][0]}; {resultM.Data[i][1]}; {resultM.Data[i][2]}; {resultM.Data[i][3]}; {resultM.Data[i][4]}; {resultM.Data[i][5]}");
     * }
     */
    ;
    //Build a 3x3 jagged data array row by row
    int numOfweights = 3;
    int xIdx, dIdx = 0;
    double[][] data = new double[3][];
    data[dIdx] = new double[numOfweights];
    xIdx = -1;
    data[dIdx][++xIdx] = 2;
    data[dIdx][++xIdx] = 1;
    data[dIdx][++xIdx] = 3;
    ++dIdx;
    data[dIdx] = new double[numOfweights];
    xIdx = -1;
    data[dIdx][++xIdx] = 1;
    data[dIdx][++xIdx] = 3;
    data[dIdx][++xIdx] = -3;
    ++dIdx;
    data[dIdx] = new double[numOfweights];
    xIdx = -1;
    data[dIdx][++xIdx] = -2;
    data[dIdx][++xIdx] = 4;
    data[dIdx][++xIdx] = 4;
    //Matrix M = new Matrix(data, true);
    //Matrix I = M.Inverse(false);
    //Matrix identity = M * I; //Must lead to identity matrix
    //Build a scaled identity and multiply its transpose by itself
    Matrix I = new Matrix(3, 3);
    I.AddScalarToDiagonal(1);
    Matrix X = new Matrix(I);
    X.Multiply(0.1);
    Matrix XT = X.Transpose();
    Matrix R = XT * X;
    Console.ReadLine();
    ///* toggle-comment block: adding a leading "/" above disables this section
    SimpleIFSettings settings = new SimpleIFSettings(new RandomValueSettings(15, 15),
                                                     new RandomValueSettings(0.05, 0.05),
                                                     new RandomValueSettings(5, 5),
                                                     new RandomValueSettings(20, 20),
                                                     0);
    IActivationFunction af = ActivationFactory.Create(settings, new Random(0));
    //*/
    TestActivation(af, 800, 0.15, 10, 600);
    return;
}