/// <summary>
/// Verifies that DeNormalize is the exact inverse of Normalize on a large
/// random matrix, and that normalized values land inside [iA, iB].
/// </summary>
public void DeNormalizeGeneralCorrectnessTest()
{
    int m = 10000, n = 100;
    double epsilon = 0.000000001;
    // Fixed seed: an unseeded Random produces different test data on every
    // run, so a failure could not be reproduced. Seeding keeps the test
    // deterministic without changing what it verifies.
    Random rnd = new Random(42);
    double[][] iData = new double[m][];
    for (int i = 0; i < m; ++i)
    {
        iData[i] = new double[n];
        for (int j = 0; j < n; ++j)
        {
            iData[i][j] = rnd.Next();
        }
    }
    double iA = 0.1;
    double iB = 0.9;
    DataNormalizer target = new DataNormalizer(iData, iA, iB);
    double[][] normalized = target.Normalize(iData);
    // The global min/max of the normalized matrix must hit the target bounds.
    Assert.AreEqual(iA, normalized.Min(row => row.Min()), epsilon);
    Assert.AreEqual(iB, normalized.Max(row => row.Max()), epsilon);
    // rnd.Next() is uniform over [0, int.MaxValue), so the normalized average
    // should sit near the middle of [0.1, 0.9]; allow a loose 0.1 tolerance.
    double maxAllowedError = 0.1, expectedAvg = 0.5, actualAvg = normalized.Average(row => row.Average());
    Assert.AreEqual(expectedAvg, actualAvg, maxAllowedError);
    // Round trip: denormalizing must reconstruct the original matrix.
    double[][] actual = target.DeNormalize(normalized);
    AssertMatrixAreEqual(iData, actual);
}
/// <summary>
/// STEP 3 of the pipeline: normalizes the training and evaluation data files
/// using the file locations published by DataFilesInfoGetter.
/// </summary>
static void Step3()
{
    Console.WriteLine("STEP 3: Normalize data...");

    var normalizer = new DataNormalizer();
    normalizer.Normalize(
        DataFilesInfoGetter.BaseFile,
        DataFilesInfoGetter.TrainingFile,
        DataFilesInfoGetter.NormalizedTrainingFile,
        DataFilesInfoGetter.EvaluateFile,
        DataFilesInfoGetter.NormalizedEvaluateFile,
        DataFilesInfoGetter.EncogAnalystFile);
}
// Updates the chart entry for a single stat: stores the normalized value in
// the coordinate's magnitude, then moves both the polygon vertex and the
// matching child transform to the coordinate's position.
// NOTE(review): `position` is read from _coordinates[index].Vector BEFORE the
// magnitude is assigned — if Vector is derived from magnitude, the stale
// position is used here. Confirm whether Vector recomputes from magnitude.
public void UpdateStat(Stats stat, float value) { var index = (int)stat; var position = _coordinates[index].Vector; _coordinates[index].magnitude = DataNormalizer.Normalize(stat, value); _polygon.SetPointPosition(index, position); transform.GetChild(index).localPosition = position; }
// Trains a new ANN forecasting model on the given samples.
// iInput:  samplesNum rows of inputSize features each.
// iOutput: samplesNum rows of exactly one target value each.
// Throws ArgumentException on any row-count or row-length mismatch.
// Network topology, activation function, epoch cap and stop threshold are all
// read from ModelParametersDict.
// The normalizer is fit over the concatenation of input rows and output rows,
// and the same normalizer instance is handed to the model.
// NOTE(review): MSE is evaluated against the raw (unnormalized) iInput/iOutput
// while training runs on the normalized subsets — this assumes
// ANNforecastingModel/ErrorCalculator apply the normalizer internally; confirm
// against those implementations.
// NOTE(review): `epochsCount++ <= maxIterNum` allows one epoch beyond
// maxIterNum — confirm the off-by-one is intended.
// Training stops when either the epoch cap is reached or the error improvement
// between consecutive epochs drops below stopError.
public IForecastingModel TrainNewModel(double[][] iInput, double[][] iOutput) { int inputSize = iInput[0].Length, samplesNum = iOutput.Length; if (samplesNum != iInput.Length) throw new ArgumentException(); for (int i = 0; i < samplesNum;++i) if (iInput[i].Length != inputSize || iOutput[i].Length != 1) //iInput isn't a square matrix or iOutput isn't a vector throw new ArgumentException(); int[] neuronsCount = (int[]) ModelParametersDict[NeuronsInLayersKey]; string activationFunction = (string) ModelParametersDict[ActivationFunctionKey]; long maxIterNum = (long) ModelParametersDict[MaxIterationsNumberKey]; double stopError = (double)ModelParametersDict[StopErrorKey]; ActivationNetwork netToTrain = new ActivationNetwork(ActivationFunctionsDict[activationFunction], inputSize, neuronsCount); DataNormalizer normalizer = new DataNormalizer(iInput.Concat(iOutput).ToArray()); IForecastingModel aModel = new ANNforecastingModel(netToTrain, normalizer); ISupervisedLearning teacher = new ResilientBackpropagationLearning(netToTrain); double[][] trainInputSet, trainOutputSet; TrainingSubsetGenerator.GenerateRandomly(iInput, iOutput, out trainInputSet, out trainOutputSet, iMultiplier: TrainSubsetMultiplier); trainInputSet = normalizer.Normalize(trainInputSet); trainOutputSet = normalizer.Normalize(trainOutputSet); long epochsCount = 0; double nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput), prevError; do { prevError = nextError; teacher.RunEpoch(trainInputSet, trainOutputSet); nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput); } while (epochsCount++ <= maxIterNum && Math.Abs(prevError - nextError) >= stopError); return aModel; }
// Trains a new ANN forecasting model on the given samples.
// iInput:  samplesNum rows of inputSize features each.
// iOutput: samplesNum rows of exactly one target value each.
// Throws ArgumentException on any row-count or row-length mismatch.
// Network topology, activation function, epoch cap and stop threshold are all
// read from ModelParametersDict.
// The normalizer is fit over the concatenation of input rows and output rows,
// and the same normalizer instance is handed to the model.
// NOTE(review): MSE is evaluated against the raw (unnormalized) iInput/iOutput
// while training runs on the normalized subsets — this assumes
// ANNforecastingModel/ErrorCalculator apply the normalizer internally; confirm
// against those implementations.
// NOTE(review): `epochsCount++ <= maxIterNum` allows one epoch beyond
// maxIterNum — confirm the off-by-one is intended.
// Training stops when either the epoch cap is reached or the error improvement
// between consecutive epochs drops below stopError.
public IForecastingModel TrainNewModel(double[][] iInput, double[][] iOutput) { int inputSize = iInput[0].Length, samplesNum = iOutput.Length; if (samplesNum != iInput.Length) { throw new ArgumentException(); } for (int i = 0; i < samplesNum; ++i) { if (iInput[i].Length != inputSize || iOutput[i].Length != 1) //iInput isn't a square matrix or iOutput isn't a vector { throw new ArgumentException(); } } int[] neuronsCount = (int[])ModelParametersDict[NeuronsInLayersKey]; string activationFunction = (string)ModelParametersDict[ActivationFunctionKey]; long maxIterNum = (long)ModelParametersDict[MaxIterationsNumberKey]; double stopError = (double)ModelParametersDict[StopErrorKey]; ActivationNetwork netToTrain = new ActivationNetwork(ActivationFunctionsDict[activationFunction], inputSize, neuronsCount); DataNormalizer normalizer = new DataNormalizer(iInput.Concat(iOutput).ToArray()); IForecastingModel aModel = new ANNforecastingModel(netToTrain, normalizer); ISupervisedLearning teacher = new ResilientBackpropagationLearning(netToTrain); double[][] trainInputSet, trainOutputSet; TrainingSubsetGenerator.GenerateRandomly(iInput, iOutput, out trainInputSet, out trainOutputSet, iMultiplier: TrainSubsetMultiplier); trainInputSet = normalizer.Normalize(trainInputSet); trainOutputSet = normalizer.Normalize(trainOutputSet); long epochsCount = 0; double nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput), prevError; do { prevError = nextError; teacher.RunEpoch(trainInputSet, trainOutputSet); nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput); }while (epochsCount++ <= maxIterNum && Math.Abs(prevError - nextError) >= stopError); return(aModel); }
/// <summary>
/// Checks that Normalize maps a small fixed matrix into [0, 1] with the
/// expected per-element results.
/// </summary>
public void GeneralCorrectnessTest()
{
    double a = 0, b = 1;
    double[][] input =
    {
        new double[] { 0, 2, 5 },
        new double[] { 10, 3, 3 },
        new double[] { 6, 2, 1 },
    };
    double[][] expected =
    {
        new double[] { 0.0, 0.2, 0.5 },
        new double[] { 1.0, 0.3, 0.3 },
        new double[] { 0.6, 0.2, 0.1 },
    };

    var target = new DataNormalizer(input, a, b);
    double[][] actual = target.Normalize(input);

    AssertMatrixAreEqual(expected, actual);
}
/// <summary>
/// Round-trip test: Normalize followed by DeNormalize must reproduce the
/// original random matrix, and normalized values must span [0.1, 0.9].
/// </summary>
public void DeNormalizeGeneralCorrectnessTest()
{
    const int rows = 10000, cols = 100;
    const double tolerance = 0.000000001;
    var generator = new Random();

    // Build a rows x cols matrix of random non-negative integers.
    var source = new double[rows][];
    for (int r = 0; r < rows; ++r)
    {
        var line = new double[cols];
        for (int c = 0; c < cols; ++c)
        {
            line[c] = generator.Next();
        }
        source[r] = line;
    }

    const double lower = 0.1;
    const double upper = 0.9;
    var target = new DataNormalizer(source, lower, upper);
    double[][] normalized = target.Normalize(source);

    // The normalized matrix must hit the requested bounds exactly.
    Assert.AreEqual(lower, normalized.Min(row => row.Min()), tolerance);
    Assert.AreEqual(upper, normalized.Max(row => row.Max()), tolerance);

    // Uniform random source data should average near the middle of the range.
    const double maxAllowedError = 0.1;
    const double expectedAvg = 0.5;
    double actualAvg = normalized.Average(row => row.Average());
    Assert.AreEqual(expectedAvg, actualAvg, maxAllowedError);

    // DeNormalize must invert Normalize element for element.
    double[][] roundTripped = target.DeNormalize(normalized);
    AssertMatrixAreEqual(source, roundTripped);
}
/// <summary>
/// Verifies Normalize on a fixed 3x3 matrix against hand-computed values
/// for the target range [0, 1].
/// </summary>
public void GeneralCorrectnessTest()
{
    double lower = 0, upper = 1;
    double[][] data =
    {
        new[] { 0.0, 2.0, 5.0 },
        new[] { 10.0, 3.0, 3.0 },
        new[] { 6.0, 2.0, 1.0 },
    };
    double[][] want =
    {
        new[] { 0.0, 0.2, 0.5 },
        new[] { 1.0, 0.3, 0.3 },
        new[] { 0.6, 0.2, 0.1 },
    };

    DataNormalizer normalizer = new DataNormalizer(data, lower, upper);
    double[][] got = normalizer.Normalize(data);

    AssertMatrixAreEqual(want, got);
}