Ejemplo n.º 1
0
        /// <summary>
        /// Build a DataNormalization that reads the five columns of FILENAME
        /// and range-maps each one into [0.1, 0.9], writing the results into
        /// the supplied 2D array.
        /// </summary>
        /// <param name="outputArray">Target array that receives the normalized rows.</param>
        /// <returns>The configured (but not yet processed) normalization object.</returns>
        public DataNormalization Create(double[][] outputArray)
        {
            IInputField a;
            IInputField b;
            IInputField c;
            IInputField d;
            IInputField e;

            var norm = new DataNormalization();

            // Hoist the repeated FILENAME.ToString() conversion.
            string file = FILENAME.ToString();

            norm.Report = new NullStatusReportable();
            // FIX: the original assigned Storage twice — first a
            // NormalizationStorageCSV (dead, immediately overwritten), then
            // the array storage. Only the array storage assignment is kept.
            norm.Storage = new NormalizationStorageArray2D(outputArray);
            norm.AddInputField(a = new InputFieldCSV(false, file, 0));
            norm.AddInputField(b = new InputFieldCSV(false, file, 1));
            norm.AddInputField(c = new InputFieldCSV(false, file, 2));
            norm.AddInputField(d = new InputFieldCSV(false, file, 3));
            norm.AddInputField(e = new InputFieldCSV(false, file, 4));
            norm.AddOutputField(new OutputFieldRangeMapped(a, 0.1, 0.9));
            norm.AddOutputField(new OutputFieldRangeMapped(b, 0.1, 0.9));
            norm.AddOutputField(new OutputFieldRangeMapped(c, 0.1, 0.9));
            norm.AddOutputField(new OutputFieldRangeMapped(d, 0.1, 0.9));
            norm.AddOutputField(new OutputFieldRangeMapped(e, 0.1, 0.9));
            return(norm);
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Called internally to open the CSV file.
        /// </summary>
        private void OpenCSV()
        {
            // clear out any CSV files already there
            this.csvMap.Clear();
            this.readCSV.Clear();

            // only add each CSV once
            IDictionary <String, ReadCSV> uniqueFiles = new Dictionary <String, ReadCSV>();

            // find the unique files
            foreach (IInputField field in this.inputFields)
            {
                if (field is InputFieldCSV)
                {
                    InputFieldCSV csvField = (InputFieldCSV)field;
                    String        file     = csvField.File;
                    if (!uniqueFiles.ContainsKey(file))
                    {
                        ReadCSV csv = new ReadCSV(file, false,
                                                  this.csvFormat);
                        uniqueFiles[file] = csv;
                        this.readCSV.Add(csv);
                    }
                    this.csvMap[csvField] = uniqueFiles[file];
                }
            }
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Copy a slice of the source CSV into the target file. All 55 columns
        /// pass through unmodified (OutputFieldDirect); only the row range
        /// selected by the index segregator is written.
        /// </summary>
        /// <param name="source">CSV file to read (with headers).</param>
        /// <param name="target">CSV file to write.</param>
        /// <param name="start">First index of the sample range to keep.</param>
        /// <param name="stop">Last index of the sample range to keep.</param>
        /// <param name="size">Total size used by the index segregator.</param>
        public void Copy(FileInfo source, FileInfo target, int start, int stop, int size)
        {
            const int fieldCount = 55;
            var fields = new IInputField[fieldCount];

            var norm = new DataNormalization {
                Report = this, Storage = new NormalizationStorageCSV(target.ToString())
            };

            // Wire every column straight through: one input field and one
            // pass-through output field per column.
            for (int col = 0; col < fieldCount; col++)
            {
                var input = new InputFieldCSV(true, source.ToString(), col);
                fields[col] = input;
                norm.AddInputField(input);
                norm.AddOutputField(new OutputFieldDirect(input));
            }

            // Keep only the requested part, i.e. the training or eval slice.
            norm.AddSegregator(new IndexSampleSegregator(start, stop, size));

            norm.Process();
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Balance the source CSV by a single integer-valued column: at most
        /// <paramref name="count"/> rows are kept per distinct value of that
        /// column. All 55 columns pass through unmodified, and the per-value
        /// counts are printed when done.
        /// </summary>
        /// <param name="source">CSV file to read (with headers).</param>
        /// <param name="target">CSV file to write.</param>
        /// <param name="field">Zero-based index of the column to balance on.</param>
        /// <param name="count">Maximum rows retained per distinct value.</param>
        public void Narrow(FileInfo source, FileInfo target, int field, int count)
        {
            const int fieldCount = 55;
            var fields = new IInputField[fieldCount];

            var norm = new DataNormalization {
                Report = this, Storage = new NormalizationStorageCSV(target.ToString())
            };

            // Every column is copied through unchanged.
            for (int col = 0; col < fieldCount; col++)
            {
                var input = new InputFieldCSV(true, source.ToString(), col);
                fields[col] = input;
                norm.AddInputField(input);
                norm.AddOutputField(new OutputFieldDirect(input));
            }

            // Cap the number of rows per distinct value of the chosen column.
            var segregator = new IntegerBalanceSegregator(fields[field], count);
            norm.AddSegregator(segregator);

            norm.Process();
            Console.WriteLine(@"Samples per tree type:");
            Console.WriteLine(segregator.DumpCounts());
        }
Ejemplo n.º 5
0
        /// <summary>
        /// Called internally to obtain the current value for an input field.
        /// CSV fields read from their mapped reader's current row; dataset
        /// fields read from the current pair (input elements first, then
        /// ideal); every other field type computes its own value.
        /// </summary>
        /// <param name="field">The input field to determine.</param>
        /// <param name="index">The current index.</param>
        /// <returns>The value for this input field.</returns>
        private double DetermineInputFieldValue(IInputField field,
                                                int index)
        {
            double value;

            if (field is InputFieldCSV)
            {
                // CSV-backed field: pull the column from the shared reader.
                var csvField = (InputFieldCSV)field;
                value = this.csvMap[field].GetDouble(csvField.Offset);
            }
            else if (field is InputFieldNeuralDataSet)
            {
                // Dataset-backed field: the offset addresses the input vector
                // first, then continues into the ideal vector.
                var neuralField = (InputFieldNeuralDataSet)field;
                NeuralDataFieldHolder holder = this.dataSetFieldMap[field];
                INeuralDataPair pair = holder.Pair;
                int offset = neuralField.Offset;
                value = offset < pair.Input.Count
                    ? pair.Input[offset]
                    : pair.Ideal[offset - pair.Input.Count];
            }
            else
            {
                // Any other field type supplies its value directly.
                value = field.GetValue(index);
            }

            field.CurrentValue = value;
            return value;
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Normalize the four iris measurement columns into an in-memory
        /// ML data set and print how many rows were produced. The species
        /// column is read but yields no output field.
        /// </summary>
        /// <param name="app">The example host (unused here).</param>
        public void Execute(IExampleInterface app)
        {
            string inputFile = "C:\\jth\\iris.csv";
            var normalize = new DataNormalization();

            IInputField sepalL, sepalW, petalL, petalW;
            normalize.AddInputField(sepalL = new InputFieldCSV(true, inputFile, "sepal_l"));
            normalize.AddInputField(sepalW = new InputFieldCSV(true, inputFile, "sepal_w"));
            normalize.AddInputField(petalL = new InputFieldCSV(true, inputFile, "petal_l"));
            normalize.AddInputField(petalW = new InputFieldCSV(true, inputFile, "petal_w"));
            normalize.AddInputField(new InputFieldCSV(false, inputFile, "species"));

            normalize.AddOutputField(new OutputFieldRangeMapped(sepalL));
            normalize.AddOutputField(new OutputFieldRangeMapped(sepalW));
            normalize.AddOutputField(new OutputFieldRangeMapped(petalL));
            normalize.AddOutputField(new OutputFieldRangeMapped(petalW));
            //normalize.AddOutputField(new OutputOneOf(1,0));

            var store = new NormalizationStorageMLDataSet(4, 0);
            normalize.Storage = store;
            normalize.Report = new ConsoleStatusReportable();

            normalize.Process(true);
            Console.WriteLine(store.DataSet.Count);
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Normalize the four iris measurement columns into an in-memory
        /// ML data set and report how many rows were validated. The species
        /// column is read but yields no output field.
        /// </summary>
        /// <param name="app">The example host (unused here).</param>
        public void Execute(IExampleInterface app)
        {
            string inputFile = @"C:\Development\AI\machinelearning\encog-dotnet-core-master\encog-core-test\Resources\iris.csv";
            var normalize = new DataNormalization();

            IInputField sepalL, sepalW, petalL, petalW;
            normalize.AddInputField(sepalL = new InputFieldCSV(true, inputFile, "sepal_l"));
            normalize.AddInputField(sepalW = new InputFieldCSV(true, inputFile, "sepal_w"));
            normalize.AddInputField(petalL = new InputFieldCSV(true, inputFile, "petal_l"));
            normalize.AddInputField(petalW = new InputFieldCSV(true, inputFile, "petal_w"));
            normalize.AddInputField(new InputFieldCSV(false, inputFile, "species"));

            normalize.AddOutputField(new OutputFieldRangeMapped(sepalL));
            normalize.AddOutputField(new OutputFieldRangeMapped(sepalW));
            normalize.AddOutputField(new OutputFieldRangeMapped(petalL));
            normalize.AddOutputField(new OutputFieldRangeMapped(petalW));
            //normalize.AddOutputField(new OutputOneOf(1,0));

            var store = new NormalizationStorageMLDataSet(4, 0);
            normalize.Storage = store;
            normalize.Report = new ConsoleStatusReportable();

            normalize.Process(true);
            Console.WriteLine(store.DataSet.Count + " Datasets validated");
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Step 3 of the pipeline: normalize the balanced training data
        /// (forest-cover style layout: 10 continuous columns, 4 wilderness
        /// flags, 40 soil-type flags, 1 cover-type class column) and write
        /// the result to the configured normalized-data CSV file.
        /// </summary>
        /// <param name="useOneOf">
        /// True to encode the cover type as one-of-n output fields;
        /// false to use equilateral encoding instead.
        /// </param>
        /// <returns>The processed normalization object.</returns>
        public DataNormalization Step3(bool useOneOf)
        {
            Console.WriteLine(@"Step 3: Normalize training data");
            // The 10 continuous measurement columns (0-9 of the balance file).
            IInputField inputElevation;
            IInputField inputAspect;
            IInputField inputSlope;
            IInputField hWater;
            IInputField vWater;
            IInputField roadway;
            IInputField shade9;
            IInputField shade12;
            IInputField shade3;
            IInputField firepoint;
            // Binary indicator columns: 4 wilderness areas and 40 soil types.
            var         wilderness = new IInputField[4];
            var         soilType   = new IInputField[40];
            // The class column to be encoded by one-of-n or equilateral.
            IInputField coverType;

            var norm = new DataNormalization
            {
                Report  = this,
                Storage = new NormalizationStorageCSV(_config.NormalizedDataFile.ToString())
            };

            // Columns 0-9: the continuous inputs, read with normalization (true).
            norm.AddInputField(inputElevation = new InputFieldCSV(true, _config.BalanceFile.ToString(), 0));
            norm.AddInputField(inputAspect    = new InputFieldCSV(true, _config.BalanceFile.ToString(), 1));
            norm.AddInputField(inputSlope     = new InputFieldCSV(true, _config.BalanceFile.ToString(), 2));
            norm.AddInputField(hWater         = new InputFieldCSV(true, _config.BalanceFile.ToString(), 3));
            norm.AddInputField(vWater         = new InputFieldCSV(true, _config.BalanceFile.ToString(), 4));
            norm.AddInputField(roadway        = new InputFieldCSV(true, _config.BalanceFile.ToString(), 5));
            norm.AddInputField(shade9         = new InputFieldCSV(true, _config.BalanceFile.ToString(), 6));
            norm.AddInputField(shade12        = new InputFieldCSV(true, _config.BalanceFile.ToString(), 7));
            norm.AddInputField(shade3         = new InputFieldCSV(true, _config.BalanceFile.ToString(), 8));
            norm.AddInputField(firepoint      = new InputFieldCSV(true, _config.BalanceFile.ToString(), 9));

            // Columns 10-13: wilderness indicator flags.
            for (int i = 0; i < 4; i++)
            {
                norm.AddInputField(wilderness[i] = new InputFieldCSV(true, _config.BalanceFile.ToString(), 10 + i));
            }

            // Columns 14-53: soil-type indicator flags.
            for (int i = 0; i < 40; i++)
            {
                norm.AddInputField(soilType[i] = new InputFieldCSV(true, _config.BalanceFile.ToString(), 14 + i));
            }

            // Column 54: the class label, read without normalization (false).
            norm.AddInputField(coverType = new InputFieldCSV(false, _config.BalanceFile.ToString(), 54));

            // Range-map each continuous input into the output.
            norm.AddOutputField(new OutputFieldRangeMapped(inputElevation));
            norm.AddOutputField(new OutputFieldRangeMapped(inputAspect));
            norm.AddOutputField(new OutputFieldRangeMapped(inputSlope));
            norm.AddOutputField(new OutputFieldRangeMapped(hWater));
            norm.AddOutputField(new OutputFieldRangeMapped(vWater));
            norm.AddOutputField(new OutputFieldRangeMapped(roadway));
            norm.AddOutputField(new OutputFieldRangeMapped(shade9));
            norm.AddOutputField(new OutputFieldRangeMapped(shade12));
            norm.AddOutputField(new OutputFieldRangeMapped(shade3));
            norm.AddOutputField(new OutputFieldRangeMapped(firepoint));

            // Soil-type flags pass through unmodified.
            // NOTE(review): the 4 wilderness fields are added as inputs above
            // but get no output fields here, so they are dropped from the
            // normalized file — confirm this is intentional and not an
            // omission of a matching OutputFieldDirect loop.
            for (int i = 0; i < 40; i++)
            {
                norm.AddOutputField(new OutputFieldDirect(soilType[i]));
            }

            // Encode the class column with the requested scheme.
            if (useOneOf)
            {
                BuildOutputOneOf(norm, coverType);
            }
            else
            {
                BuildOutputEquilateral(norm, coverType);
            }

            norm.Process();
            return(norm);
        }