Example #1
        public void DeNormalizeGeneralCorrectnessTest()
        {
            int    m = 10000, n = 100;
            double epsilon = 0.000000001;
            Random rnd     = new Random();

            double[][] iData = new double[m][];
            for (int i = 0; i < m; ++i)
            {
                iData[i] = new double[n];
                for (int j = 0; j < n; ++j)
                {
                    iData[i][j] = rnd.Next();
                }
            }
            double         iA     = 0.1;
            double         iB     = 0.9;
            DataNormalizer target = new DataNormalizer(iData, iA, iB);

            double[][] normalized = target.Normalize(iData);
            Assert.AreEqual(iA, normalized.Min(row => row.Min()), epsilon);
            Assert.AreEqual(iB, normalized.Max(row => row.Max()), epsilon);
            double maxAllowedError = 0.1, expectedAvg = 0.5, actualAvg = normalized.Average(row => row.Average());

            Assert.AreEqual(expectedAvg, actualAvg, maxAllowedError);
            double[][] actual = target.DeNormalize(normalized);
            AssertMatrixAreEqual(iData, actual);
        }
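Both correctness tests on this page call an AssertMatrixAreEqual helper that is not shown here. A minimal sketch of such a helper, assuming an element-wise comparison with a small tolerance (only the name and call shape come from the tests; the default tolerance and the body are assumptions):

        // Hypothetical helper, not part of the examples above: compares two
        // jagged matrices element by element within a small tolerance.
        private static void AssertMatrixAreEqual(double[][] expected, double[][] actual, double epsilon = 1e-9)
        {
            Assert.AreEqual(expected.Length, actual.Length);
            for (int i = 0; i < expected.Length; ++i)
            {
                Assert.AreEqual(expected[i].Length, actual[i].Length);
                for (int j = 0; j < expected[i].Length; ++j)
                {
                    Assert.AreEqual(expected[i][j], actual[i][j], epsilon);
                }
            }
        }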
Example #2
 public void ThrowsIsEmptyOrNull()
 {
     Assert.Throws<EmptyDataSetException>(() => { DataNormalizer.SigmoidNormalizer(null); });
     Assert.Throws<EmptyDataSetException>(() => { DataNormalizer.SigmoidNormalizer(new double[] { }); });
     Assert.Throws<EmptyDataSetException>(() => { DataNormalizer.HyperbolicNormalizer(null); });
     Assert.Throws<EmptyDataSetException>(() => { DataNormalizer.HyperbolicNormalizer(new double[] { }); });
 }
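The test only fixes the guard behaviour of the two factory methods. A plausible shared guard consistent with it might look like the following (the helper name EnsureNotEmpty is hypothetical; only the exception type comes from the test):

 // Hypothetical guard implied by the test above: reject a null or empty data set.
 private static void EnsureNotEmpty(double[] iValues)
 {
     if (iValues == null || iValues.Length == 0)
     {
         throw new EmptyDataSetException();
     }
 }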
Example #3
 public ANNforecastingModel(Network iNet, DataNormalizer iNormalizer)
     : base(null)
 {
     InputSize = iNet.InputsCount;
     forecastingNet = iNet;
     normalizer = iNormalizer;
 }
Example #4
 public ANNforecastingModel(Network iNet, DataNormalizer iNormalizer)
     : base(null)
 {
     InputSize      = iNet.InputsCount;
     forecastingNet = iNet;
     normalizer     = iNormalizer;
 }
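The constructor only stores the network and the normalizer. A prediction path would presumably normalize a sample, run it through the network, and map the result back; a minimal sketch assuming AForge.NET's Network.Compute and the Normalize/DeNormalize pair used in the tests on this page (the method name Forecast is an assumption):

 // Hypothetical use of the stored fields: normalize one sample, run the
 // network, then map the single output back to the original scale.
 public double Forecast(double[] iSample)
 {
     double[] normalizedInput  = normalizer.Normalize(new[] { iSample })[0];
     double[] normalizedOutput = forecastingNet.Compute(normalizedInput);
     return normalizer.DeNormalize(new[] { normalizedOutput })[0][0];
 }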
Example #5
        static void Step3()
        {
            Console.WriteLine("STEP 3: Normalize data...");
            DataNormalizer normalizer = new DataNormalizer();

            normalizer.Normalize(DataFilesInfoGetter.BaseFile, DataFilesInfoGetter.TrainingFile, DataFilesInfoGetter.NormalizedTrainingFile, DataFilesInfoGetter.EvaluateFile,
                                 DataFilesInfoGetter.NormalizedEvaluateFile, DataFilesInfoGetter.EncogAnalystFile);
        }
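This overload of Normalize works on file names plus an Encog analyst file, which suggests the normalizer wraps Encog's analyst API. As an assumption about what happens inside, a typical Encog-based CSV normalization looks roughly like this (none of it is the project's actual code; the file-name variables stand in for the DataFilesInfoGetter properties):

            // Assumed outline of an Encog analyst based normalization (hypothetical).
            var analyst = new EncogAnalyst();
            var wizard  = new AnalystWizard(analyst);
            wizard.Wizard(new FileInfo(baseFile), true, AnalystFileFormat.DecpntComma);

            var norm = new AnalystNormalizeCSV();
            norm.Analyze(new FileInfo(trainingFile), true, CSVFormat.English, analyst);
            norm.Normalize(new FileInfo(normalizedTrainingFile));
            analyst.Save(new FileInfo(analystFile));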
Example #6
 public void SetUp()
 {
     initial_values = new double[]
     {
         -2, -1, 0, 1, 2, 3
     };
     sigm_normalizer  = DataNormalizer.SigmoidNormalizer(initial_values);
     hyper_normalizer = DataNormalizer.HyperbolicNormalizer(initial_values);
 }
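The fixture builds one normalizer per squashing function but does not show what those functions compute. For reference, sigmoid and hyperbolic normalization conventionally squash each value with 1/(1+e^-x) and tanh(x); whether DataNormalizer applies exactly these, or rescales the input first, is an assumption:

 // Reference squashing functions only; the actual DataNormalizer behaviour
 // is not shown on this page.
 static double Sigmoid(double x)    => 1.0 / (1.0 + Math.Exp(-x));
 static double Hyperbolic(double x) => Math.Tanh(x);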
Example #7
    public void UpdateStat(Stats stat, float value)
    {
        var index    = (int)stat;
        var position = _coordinates[index].Vector;

        _coordinates[index].magnitude = DataNormalizer.Normalize(stat, value);
        _polygon.SetPointPosition(index, position);
        transform.GetChild(index).localPosition = position;
    }
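DataNormalizer.Normalize(stat, value) is used here as a static overload that turns a raw stat into a magnitude for the polygon point. Its body is not shown; a plausible sketch, assuming each stat has a known maximum and the result is clamped to [0, 1] (the MaxValuePerStat lookup is hypothetical):

    // Hypothetical static overload implied by the call above (Unity's Mathf
    // and an assumed per-stat maximum table).
    public static float Normalize(Stats stat, float value)
    {
        float max = MaxValuePerStat[stat];
        return Mathf.Clamp01(value / max);
    }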
Example #8
            public IForecastingModel TrainNewModel(double[][] iInput, double[][] iOutput)
            {
                int inputSize = iInput[0].Length, samplesNum = iOutput.Length;

                if (samplesNum != iInput.Length)
                {
                    throw new ArgumentException("Number of input samples must match number of output samples.");
                }

                for (int i = 0; i < samplesNum; ++i)
                {
                    if (iInput[i].Length != inputSize || iOutput[i].Length != 1) //a row of iInput differs in length from the first row, or an iOutput row isn't a single value
                    {
                        throw new ArgumentException("All input rows must have the same length and each output row must contain exactly one value.");
                    }
                }

                int[]  neuronsCount       = (int[])ModelParametersDict[NeuronsInLayersKey];
                string activationFunction = (string)ModelParametersDict[ActivationFunctionKey];
                long   maxIterNum         = (long)ModelParametersDict[MaxIterationsNumberKey];
                double stopError          = (double)ModelParametersDict[StopErrorKey];

                ActivationNetwork   netToTrain = new ActivationNetwork(ActivationFunctionsDict[activationFunction], inputSize, neuronsCount);
                DataNormalizer      normalizer = new DataNormalizer(iInput.Concat(iOutput).ToArray());
                IForecastingModel   aModel     = new ANNforecastingModel(netToTrain, normalizer);
                ISupervisedLearning teacher    = new ResilientBackpropagationLearning(netToTrain);

                double[][] trainInputSet, trainOutputSet;
                TrainingSubsetGenerator.GenerateRandomly(iInput, iOutput, out trainInputSet, out trainOutputSet, iMultiplier: TrainSubsetMultiplier);

                trainInputSet  = normalizer.Normalize(trainInputSet);
                trainOutputSet = normalizer.Normalize(trainOutputSet);

                long   epochsCount = 0;
                double nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput), prevError;

                do
                {
                    prevError = nextError;
                    teacher.RunEpoch(trainInputSet, trainOutputSet);
                    nextError = ErrorCalculator.CalculateMSE(aModel, iInput, iOutput);
                } while (epochsCount++ <= maxIterNum && Math.Abs(prevError - nextError) >= stopError);
                return aModel;
            }
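TrainNewModel expects one output value per sample and as many input rows as output rows. A minimal calling sketch under those constraints (the trainer instance and the sample values are hypothetical):

            // Hypothetical call site: three samples, two features each, one target per sample.
            double[][] inputs =
            {
                new double[] { 1.0, 2.0 },
                new double[] { 2.0, 3.0 },
                new double[] { 3.0, 4.0 }
            };
            double[][] outputs =
            {
                new double[] { 3.0 },
                new double[] { 5.0 },
                new double[] { 7.0 }
            };
            IForecastingModel model = trainer.TrainNewModel(inputs, outputs);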
Example #9
        static void Main(string[] args)
        {
            Stopwatch stopwatch = new Stopwatch();

            stopwatch.Start();

            IExcelImporter<TranslateString> excelImporter = new ExcelImporter<TranslateString>();
            string fileName = @"123.xlsx";
            var    data     = excelImporter.GetData(fileName, "LoadedString", "CellCode");

            //PrintData(data);

            IDataNormalizer normalizer = new DataNormalizer();

            //repository

            IDictionaryRepository repository = new DbDictionaryRepository();

            //IDictionaryProcessor dictionaryProcessor = new DictionaryProcessor();
            //dictionaryProcessor.UpdateOriginalNormalizedAndTranslatedNormalizedFields();

            normalizer.DataNormalize(ref data);

            ITranslateProcessor processor = new DbNormalizedTranslateProcessor();

            processor.TranslateProcess(data);

            stopwatch.Stop();

            PrintData(data, TranslateStringState.TranslatedNormalized, stopwatch.ElapsedMilliseconds / 1000.0);

            Console.ReadLine();

            Console.WriteLine("Saving...");
            IExcelExporter<TranslateString> excelExporter = new ExcelExporter<TranslateString>();

            excelExporter.ExportData(data, fileName, "TranslatedString", "CellCode");
            Console.WriteLine("Done!");
            Console.ReadLine();
        }
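The pipeline calls a single member on IDataNormalizer, so the interface it relies on is roughly the following; the element collection type is an assumption, since data is only declared with var:

        // Shape of the interface as implied by the call above (collection type assumed).
        public interface IDataNormalizer
        {
            void DataNormalize(ref List<TranslateString> data);
        }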
Example #10
        public void GeneralCorrectnessTest()
        {
            double[][] iData = new double[][]
            {
                new double[] { 0, 2, 5 },
                new double[] { 10, 3, 3 },
                new double[] { 6, 2, 1 }
            };
            double a = 0, b = 1;

            double[][] expected = new double[][]
            {
                new double[] { 0.0, 0.2, 0.5 },
                new double[] { 1.0, 0.3, 0.3 },
                new double[] { 0.6, 0.2, 0.1 }
            };
            double[][]     actual;
            DataNormalizer target = new DataNormalizer(iData, a, b);

            actual = target.Normalize(iData);
            AssertMatrixAreEqual(expected, actual);
        }
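With a = 0, b = 1 and the input spanning 0 to 10, the expected matrix is simply the input divided by 10, which matches a linear min-max mapping computed over the whole matrix. A sketch of the mapping consistent with both this test and the DeNormalize test above (the actual DataNormalizer implementation is not shown):

        // Min-max mapping consistent with the expected values above: the global
        // minimum maps to a, the global maximum maps to b, everything else linearly.
        static double MinMaxMap(double x, double min, double max, double a, double b)
            => a + (x - min) * (b - a) / (max - min);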