Example #1
0
        /// <summary>
        /// Entry point: configures .NET Remoting from the application's config file,
        /// then round-trips a generic <see cref="DataSet"/> and a typed
        /// <see cref="TestDataset"/> through a remote <see cref="ICrossVersionTest"/>
        /// proxy, logging each step to the console.
        /// </summary>
        static void Main(string[] args)
        {
            // Remoting channels and endpoints are declared in the app's .config file.
            String filename = AppDomain.CurrentDomain.SetupInformation.ConfigurationFile;

            RemotingConfiguration.Configure(filename);

            try
            {
                ICrossVersionTest tst = (ICrossVersionTest)RemotingHelper.CreateProxy(typeof(ICrossVersionTest));

                Console.WriteLine("Read generic");
                DataSet ds = tst.GetDataset();
                Console.WriteLine("Store generic");
                tst.StoreDataset(ds);
                Console.WriteLine("Read typed");
                TestDataset tds = tst.GetTestDataset();
                // Fixed label: this step STORES the TYPED dataset; the previous
                // message "Read untyped" described neither the action nor the type.
                Console.WriteLine("Store typed");
                tst.StoreTestDataset(tds);
            }
            catch (Exception ex)
            {
                // Any remoting/serialization failure is reported rather than rethrown,
                // so the "Done." prompt below is always reached.
                Console.WriteLine("Test " + ex.ToString());
            }

            Console.WriteLine("Done.");
            Console.ReadLine();
        }
        /// <summary>
        /// Runs the CreateInstances command on the constant breast-cancer dataset,
        /// writing the instances and their statistics to text files, then checks
        /// both outputs against the stored baselines.
        /// </summary>
        public void TestCreateTextInstancesConstant()
        {
            TestDataset breast = TestDatasets.breastCancerConst;
            string trainPath = GetDataPath(breast.trainFilename);

            // Prefix output names with the test name to keep concurrent runs isolated.
            string namePrefix = TestContext.TestName + "-";
            string instancesName = namePrefix + "BreastCancer.txt";
            string statisticsName = namePrefix + "BreastCancer.stats.txt";

            string instancesFile = DeleteOutputPath(Dir, instancesName);
            string statisticsFile = DeleteOutputPath(Dir, statisticsName);

            // Full command line: CreateInstances plus a stats-enabled text writer.
            string commandLine =
                string.Format(
                    "c=CreateInstances {0} {1} cifile={2} cistatsfile={3}",
                    trainPath, breast.extraSettings, instancesFile, statisticsFile)
                + " writer=TextInstanceWriter{stats+} disableTracking+";

            var parsedArgs = new TLCArguments();
            Assert.IsTrue(CmdParser.ParseArguments(commandLine, parsedArgs));

            RunExperiments.Run(parsedArgs);

            // Both the instances file and its stats file must match their baselines.
            CheckEquality(Dir, instancesName);
            CheckEquality(Dir, statisticsName);
            Done();
        }
        /// <summary>
        /// Trains an INI model for the given predictor/dataset pair, runs the external
        /// evaluation executable against the resulting model, baseline-checks the
        /// evaluation output directory, and records the run in either the success or
        /// the failure list.
        /// </summary>
        /// <param name="successTestInformation">Collects runs whose evaluation exited with code 0 and passed the baseline check.</param>
        /// <param name="failureTestInformation">Collects runs that failed the baseline check or exited with a non-zero code.</param>
        /// <param name="predictor">Predictor (and its arguments) to train.</param>
        /// <param name="dataset">Dataset supplying the data file and instance settings.</param>
        /// <param name="evaluationOutputDirPrefix">Prefix for the per-run evaluation output directory.</param>
        /// <param name="extraSettings">Optional extra training settings, merged with the dataset's instance settings.</param>
        /// <param name="extraTag">Optional tag folded into the expected model filename.</param>
        public void RunIniFileEvaluationTest(
            List <IniModelTestInformation> successTestInformation,
            List <IniModelTestInformation> failureTestInformation,
            PredictorAndArgs predictor,
            TestDataset dataset,
            string evaluationOutputDirPrefix,
            string[] extraSettings = null,
            string extraTag        = ""
            )
        {
            string outName = ExpectedFilename("Train", predictor, dataset, extraTag);

            // NOTE(review): training uses dataset.testFilename, not trainFilename —
            // confirm this is intentional and not a copy/paste slip.
            string[] extraTrainingSettings           = JoinOptions(GetInstancesSettings(dataset), extraSettings);
            string   trainDataset                    = dataset.testFilename;
            InternalLearnRunParameters runParameters = TrainForIniModel(
                predictor,
                trainDataset,
                outName,
                extraTrainingSettings,
                ModelType.ModelKind.Ini);

            // The produced INI model itself must match its baseline before evaluating.
            CheckEqualityNormalized(runParameters.BaselineDir, runParameters.ModelFilename);
            string modelFilePath       = GetOutputPath(runParameters.BaselineDir, runParameters.ModelFilename);
            string trainDatasetPath    = GetDataPath(trainDataset);
            string evaluationOutputDir = GetOutputDir(evaluationOutputDirPrefix + @"\Dirs\" + outName);

            // EnsureEmptyDirectory returns null on success; anything else is a setup failure.
            Assert.IsNull(EnsureEmptyDirectory(evaluationOutputDir));

            string cmd = string.Format(EvaluationCommandLineFormat, modelFilePath, evaluationOutputDir, trainDatasetPath);
            string dir = Path.GetFullPath(EvaluationExecutorDir);

            Log("Working directory for evaluation: {0}", dir);
            Log("Evaluation command line: {0}", cmd);
            ProcessDebugInformation processDebugInformation = RunCommandLine(cmd, dir);

            if (processDebugInformation.ExitCode == 0)
            {
                // Evaluation ran; classify by whether its output matched the baselines.
                KeyValuePair <Exception, List <string> > baselineCheckDebugInformation =
                    DirectoryBaselineCheck(evaluationOutputDir);
                IniModelTestInformation iniModelTestInformation =
                    new IniModelTestInformation(modelFilePath, trainDatasetPath, evaluationOutputDir, cmd, runParameters, processDebugInformation, baselineCheckDebugInformation);
                if (baselineCheckDebugInformation.Key == null)
                {
                    successTestInformation.Add(iniModelTestInformation);
                }
                else
                {
                    failureTestInformation.Add(iniModelTestInformation);
                }
            }
            else
            {
                // Non-zero exit code: record the failure with an empty baseline result.
                IniModelTestInformation iniModelTestInformation =
                    new IniModelTestInformation(modelFilePath, trainDatasetPath, evaluationOutputDir, cmd, runParameters, processDebugInformation, new KeyValuePair <Exception, List <string> >(null, null));
                failureTestInformation.Add(iniModelTestInformation);
            }
        }
Example #4
0
        /// <summary>
        /// Core gradient-descent training loop: repeatedly shuffles the mini-batches,
        /// backpropagates each batch through the network with the given updater, and
        /// (per epoch) checks for weight overflow. Supports cooperative cancellation
        /// and optional batch-level progress reporting.
        /// NOTE: this snippet is truncated in this excerpt — the tail of the epoch
        /// loop (validation/test evaluation and the final result) is not shown.
        /// </summary>
        private static TrainingSessionResult Optimize(
            SequentialNetwork network,
            BatchesCollection miniBatches,
            int epochs, float dropout,
            [NotNull] WeightsUpdater updater,
            [CanBeNull] IProgress <BatchProgress> batchProgress,
            [CanBeNull] IProgress <TrainingProgressEventArgs> trainingProgress,
            [CanBeNull] ValidationDataset validationDataset,
            [CanBeNull] TestDataset testDataset,
            CancellationToken token)
        {
            // Setup: capture the start time and the per-epoch evaluation reports.
            DateTime startTime = DateTime.Now;
            List <DatasetEvaluationResult>
            validationReports = new List <DatasetEvaluationResult>(),
                testReports   = new List <DatasetEvaluationResult>();

            // Local factory for the session result, closing over the reports above.
            TrainingSessionResult PrepareResult(TrainingStopReason reason, int loops)
            {
                return(new TrainingSessionResult(reason, loops, DateTime.Now.Subtract(startTime).RoundToSeconds(), validationReports, testReports));
            }

            // Convergence manager for the validation dataset (null when no validation set).
            RelativeConvergence convergence = validationDataset == null
                ? null
                : new RelativeConvergence(validationDataset.Tolerance, validationDataset.EpochsInterval);

            // Optional batch monitor
            BatchProgressMonitor batchMonitor = batchProgress == null ? null : new BatchProgressMonitor(miniBatches.Count, batchProgress);

            // Create the training batches
            for (int i = 0; i < epochs; i++)
            {
                // Shuffle the training set
                miniBatches.CrossShuffle();

                // Gradient descent over the current batches
                for (int j = 0; j < miniBatches.BatchesCount; j++)
                {
                    // Cancellation is honored between batches, not mid-batch.
                    if (token.IsCancellationRequested)
                    {
                        return(PrepareResult(TrainingStopReason.TrainingCanceled, i));
                    }
                    network.Backpropagate(miniBatches.Batches[j], dropout, updater);
                    batchMonitor?.NotifyCompletedBatch(miniBatches.Batches[j].X.GetLength(0));
                }
                batchMonitor?.Reset();

                // Check for overflows: Break() marks the parallel loop incomplete when
                // any weighted layer fails weight validation.
                if (!Parallel.For(0, network._Layers.Length, (j, state) =>
                {
                    if (network._Layers[j] is WeightedLayerBase layer && !layer.ValidateWeights())
                    {
                        state.Break();
                    }
                }).IsCompleted)
Example #5
0
        /// <summary>
        /// Starts a training session: raises the training-starting event, resolves
        /// the weights updater matching the requested algorithm, and hands off to
        /// the core optimization loop.
        /// </summary>
        public static TrainingSessionResult TrainNetwork(
            [NotNull] SequentialNetwork network, [NotNull] BatchesCollection batches,
            int epochs, float dropout,
            [NotNull] ITrainingAlgorithmInfo algorithm,
            [CanBeNull] IProgress <BatchProgress> batchProgress,
            [CanBeNull] IProgress <TrainingProgressEventArgs> trainingProgress,
            [CanBeNull] ValidationDataset validationDataset,
            [CanBeNull] TestDataset testDataset,
            CancellationToken token)
        {
            SharedEventsService.TrainingStarting.Raise();

            /* The right optimizer is selected here, and the captured closure for each
             * of them also contains local temporary data, if needed. That temporary
             * data is managed, so the GC disposes it automatically and no extra
             * cleanup callback is needed when the training stops. */
            WeightsUpdater optimizer;
            if (algorithm is MomentumInfo momentum)
            {
                optimizer = WeightsUpdaters.Momentum(momentum, network);
            }
            else if (algorithm is StochasticGradientDescentInfo sgd)
            {
                optimizer = WeightsUpdaters.StochasticGradientDescent(sgd);
            }
            else if (algorithm is AdaGradInfo adagrad)
            {
                optimizer = WeightsUpdaters.AdaGrad(adagrad, network);
            }
            else if (algorithm is AdaDeltaInfo adadelta)
            {
                optimizer = WeightsUpdaters.AdaDelta(adadelta, network);
            }
            else if (algorithm is AdamInfo adam)
            {
                optimizer = WeightsUpdaters.Adam(adam, network);
            }
            else if (algorithm is AdaMaxInfo adamax)
            {
                optimizer = WeightsUpdaters.AdaMax(adamax, network);
            }
            else if (algorithm is RMSPropInfo rms)
            {
                optimizer = WeightsUpdaters.RMSProp(rms, network);
            }
            else
            {
                // Unknown (or null) algorithm info type.
                throw new ArgumentException("The input training algorithm type is not supported");
            }

            return Optimize(network, batches, epochs, dropout, optimizer, batchProgress, trainingProgress, validationDataset, testDataset, token);
        }
        /// <summary>
        /// Creates a shallow copy of this dataset descriptor: every field is copied
        /// as-is, so reference-typed fields still point at the same objects.
        /// </summary>
        public TestDataset Clone()
        {
            var copy = new TestDataset();

            copy.name              = name;
            copy.trainFilename     = trainFilename;
            copy.testFilename      = testFilename;
            copy.validFilename     = validFilename;
            copy.labelFilename     = labelFilename;
            copy.settings          = settings;
            copy.testSettings      = testSettings;
            copy.extraSettings     = extraSettings;
            copy.loaderSettings    = loaderSettings;
            copy.mamlExtraSettings = mamlExtraSettings;

            return copy;
        }
        /// <summary>
        /// Verifies that normalizing via the dedicated /norm switch and normalizing
        /// via a MinMaxNormalizer transform in the instances pipeline produce
        /// identical train and test instances.
        /// </summary>
        public void TestCreateTextInstancesWithNormalization()
        {
            TestDataset mnist = TestDatasets.mnistTiny28;
            string trainPath = GetDataPath(mnist.trainFilename);
            string testPath = GetDataPath(mnist.testFilename);

            string namePrefix = TestContext.TestName + "-";
            string trainOut1 = DeleteOutputPath(Dir, namePrefix + "Norm-Separate-Train.txt");
            string testOut1 = DeleteOutputPath(Dir, namePrefix + "Norm-Separate-Test.txt");
            string trainOut2 = DeleteOutputPath(Dir, namePrefix + "Norm-Trans-Train.txt");
            string testOut2 = DeleteOutputPath(Dir, namePrefix + "Norm-Trans-Test.txt");

            string transArgs = "inst=Trans{trans=RFF {rng=1}}";

            // First run: normalization requested through the standalone /norm switch.
            var commandLine1 = string.Format(
                "/c=CreateInstances {0} /test={1} /norm=MinMaxNormalizer /{2} /cifile={3} /citestfile={4}",
                trainPath, testPath, transArgs, trainOut1, testOut1);
            var parsed1 = new TLCArguments();
            Assert.IsTrue(CmdParser.ParseArguments(commandLine1, parsed1));
            RunExperiments.Run(parsed1);

            // Second run: normalization applied as a transform inside the pipeline.
            var commandLine2 = string.Format(
                "/c=CreateInstances {0} /test={1} /inst Trans{{trans=MinMaxNormalizer {2}}} /cifile={3} /citestfile={4}",
                trainPath, testPath, transArgs, trainOut2, testOut2);
            var parsed2 = new TLCArguments();
            Assert.IsTrue(CmdParser.ParseArguments(commandLine2, parsed2));
            RunExperiments.Run(parsed2);

            // Both approaches must yield identical training instances...
            var trainInstances1 = new TlcTextInstances(new TlcTextInstances.Arguments(), trainOut1);
            var trainInstances2 = new TlcTextInstances(new TlcTextInstances.Arguments(), trainOut2);
            CompareInstances(trainInstances1, trainInstances2);

            // ...and identical test instances.
            var testInstances1 = new TlcTextInstances(new TlcTextInstances.Arguments(), testOut1);
            var testInstances2 = new TlcTextInstances(new TlcTextInstances.Arguments(), testOut2);
            CompareInstances(testInstances1, testInstances2);

            Done();
        }
        /// <summary>
        /// Runs the CreateInstances command on the adult dataset with train, test and
        /// validation outputs (plus per-file statistics) and baseline-checks all six
        /// generated files.
        /// </summary>
        public void TestCreateTextInstances()
        {
            TestDataset adult = TestDatasets.adult;
            string trainPath = GetDataPath(adult.trainFilename);
            string testPath = GetDataPath(adult.testFilename);

            // Per-test prefix keeps the output files of concurrent runs apart.
            string namePrefix = TestContext.TestName + "-";

            string trainName = namePrefix + "Adult-Train.txt";
            string trainStatsName = namePrefix + "Adult-Train.stats.txt";
            string testName = namePrefix + "Adult-Test.txt";
            string testStatsName = namePrefix + "Adult-Test.stats.txt";
            string validName = namePrefix + "Adult-Valid.txt";
            string validStatsName = namePrefix + "Adult-Valid.stats.txt";

            string trainFile = DeleteOutputPath(Dir, trainName);
            string trainStatsFile = DeleteOutputPath(Dir, trainStatsName);
            string testFile = DeleteOutputPath(Dir, testName);
            string testStatsFile = DeleteOutputPath(Dir, testStatsName);
            string validFile = DeleteOutputPath(Dir, validName);
            string validStatsFile = DeleteOutputPath(Dir, validStatsName);

            // Note: the test data file doubles as the validation set (/valid={1}).
            string commandLine =
                string.Format(
                    "/c=CreateInstances {0} /test={1} /valid={1} /cacheinst=- {2} " +
                    "/cifile={3} /cistatsfile={4} /citestfile={5} /citeststatsfile={6} /civalidfile={7} /civalidstatsfile={8}",
                    trainPath, testPath, adult.extraSettings,
                    trainFile, trainStatsFile, testFile, testStatsFile, validFile, validStatsFile)
                + " /writer TextInstanceWriter{/stats=+} /disableTracking=+";

            var parsedArgs = new TLCArguments();
            Assert.IsTrue(CmdParser.ParseArguments(commandLine, parsedArgs));

            RunExperiments.Run(parsedArgs);

            // Every generated file must match its stored baseline.
            CheckEquality(Dir, trainName);
            CheckEquality(Dir, trainStatsName);
            CheckEquality(Dir, testName);
            CheckEquality(Dir, testStatsName);
            CheckEquality(Dir, validName);
            CheckEquality(Dir, validStatsName);
            Done();
        }
Example #9
0
        /// <summary>
        /// Builds an in-memory test dataset with 100 customers, five orders each.
        /// Key and company name are derived from the customer index; all remaining
        /// free-text columns are left empty.
        /// </summary>
        public static TestDataset GetTestDS()
        {
            var dataset = new TestDataset();

            for (int customerIndex = 0; customerIndex < 100; customerIndex++)
            {
                // Nine trailing string columns are intentionally blank.
                TestDataset.CustomersRow customer = dataset.Customers.AddCustomersRow(
                    "KEY" + customerIndex,
                    "Company " + customerIndex,
                    "", "", "", "", "", "", "", "", "");

                for (int orderIndex = 0; orderIndex < 5; orderIndex++)
                {
                    // Fixed employee/shipper ids; order dates are relative to "now"
                    // (evaluated per call, so each row gets a fresh timestamp).
                    dataset.Orders.AddOrdersRow(
                        customer,
                        7,
                        DateTime.Now,
                        DateTime.Now.AddDays(3),
                        DateTime.Now.AddDays(1),
                        2,
                        1,
                        "", "", "", "", "", "");
                }
            }

            return dataset;
        }
Example #10
0
 /// <summary>
 /// Receives a typed <see cref="TestDataset"/> from a remote caller. The body is
 /// empty: the dataset argument is ignored and nothing is stored here — presumably
 /// the call exists only to exercise serialization of the argument (TODO confirm).
 /// </summary>
 public void StoreTestDataset(TestDataset ds)
 {
 }
Example #11
0
        /// <summary>
        /// Core gradient-descent training loop. For each epoch: cross-shuffles the
        /// mini-batches, backpropagates every batch through the network with the given
        /// updater, checks for numeric overflow, then (optionally) reports training
        /// progress, checks validation-set convergence for early stopping, and
        /// evaluates the test dataset. Returns a session result describing why and
        /// when training stopped.
        /// </summary>
        private static TrainingSessionResult Optimize(
            NeuralNetworkBase network,
            BatchesCollection miniBatches,
            int epochs, float dropout,
            [NotNull] WeightsUpdater updater,
            [CanBeNull] IProgress <BatchProgress> batchProgress,
            [CanBeNull] IProgress <TrainingProgressEventArgs> trainingProgress,
            [CanBeNull] ValidationDataset validationDataset,
            [CanBeNull] TestDataset testDataset,
            CancellationToken token)
        {
            // Setup: capture the start time and the per-epoch evaluation reports.
            DateTime startTime = DateTime.Now;
            List <DatasetEvaluationResult>
            validationReports = new List <DatasetEvaluationResult>(),
                testReports   = new List <DatasetEvaluationResult>();

            // Local factory for the session result, closing over the reports above.
            TrainingSessionResult PrepareResult(TrainingStopReason reason, int loops)
            {
                return(new TrainingSessionResult(reason, loops, DateTime.Now.Subtract(startTime).RoundToSeconds(), validationReports, testReports));
            }

            // Convergence manager for the validation dataset (null when no validation set).
            RelativeConvergence convergence = validationDataset == null
                ? null
                : new RelativeConvergence(validationDataset.Tolerance, validationDataset.EpochsInterval);

            // Optional batch monitor
            BatchProgressMonitor batchMonitor = batchProgress == null ? null : new BatchProgressMonitor(miniBatches.Count, batchProgress);

            // Create the training batches
            for (int i = 0; i < epochs; i++)
            {
                // Shuffle the training set
                miniBatches.CrossShuffle();

                // Gradient descent over the current batches. BackpropagationInProgress
                // is raised for the duration of the inner loop and always cleared
                // before any return from it.
                BackpropagationInProgress = true;
                for (int j = 0; j < miniBatches.BatchesCount; j++)
                {
                    // Cancellation is honored between batches, not mid-batch.
                    if (token.IsCancellationRequested)
                    {
                        BackpropagationInProgress = false;
                        return(PrepareResult(TrainingStopReason.TrainingCanceled, i));
                    }
                    network.Backpropagate(miniBatches.Batches[j], dropout, updater);
                    batchMonitor?.NotifyCompletedBatch(miniBatches.Batches[j].X.GetLength(0));
                }
                BackpropagationInProgress = false;
                batchMonitor?.Reset();
                // Abort the session as soon as the network reports a numeric overflow.
                if (network.IsInNumericOverflow)
                {
                    return(PrepareResult(TrainingStopReason.NumericOverflow, i));
                }

                // Check the training progress (cost/accuracy over the training batches).
                if (trainingProgress != null)
                {
                    (float cost, _, float accuracy) = network.Evaluate(miniBatches);
                    trainingProgress.Report(new TrainingProgressEventArgs(i + 1, cost, accuracy));
                }

                // Check the validation dataset: feed accuracy into the convergence
                // tracker and stop early once it has converged.
                if (convergence != null)
                {
                    (float cost, _, float accuracy) = network.Evaluate(validationDataset.Dataset);
                    validationReports.Add(new DatasetEvaluationResult(cost, accuracy));
                    convergence.Value = accuracy;
                    if (convergence.HasConverged)
                    {
                        return(PrepareResult(TrainingStopReason.EarlyStopping, i));
                    }
                }

                // Evaluate the test dataset and report its progress, if one was given.
                if (testDataset != null)
                {
                    (float cost, _, float accuracy) = network.Evaluate(testDataset.Dataset);
                    testReports.Add(new DatasetEvaluationResult(cost, accuracy));
                    testDataset.ThreadSafeProgressCallback?.Report(new TrainingProgressEventArgs(i + 1, cost, accuracy));
                }
            }
            return(PrepareResult(TrainingStopReason.EpochsCompleted, epochs));
        }
Example #12
0
        /// <summary>
        /// Exercises KeyFieldLookup resolution order: per-dataset key field,
        /// explicit null key, per-model default, and finally the global default.
        /// </summary>
        public void CanLookupKeyFields()
        {
            // MODEL1: one dataset with its own key field, one explicitly without.
            var model1KeyFields = new XmlDataSourceKeyFields
            {
                ModelName        = "MODEL1",
                DefaultKeyField  = "MODEL1_ID",
                DatasetKeyFields = new List <XmlDatasetKeyField>
                {
                    new XmlDatasetKeyField
                    {
                        DatasetName = "DATASET_1_1",
                        KeyField    = "MODEL1_DATASET1_ID"
                    },
                    new XmlDatasetKeyField
                    {
                        DatasetName = "DATASET_1_2"
                    }
                }
            };

            // MODEL2: a single dataset with its own key field.
            var model2KeyFields = new XmlDataSourceKeyFields
            {
                ModelName        = "MODEL2",
                DefaultKeyField  = "MODEL2_ID",
                DatasetKeyFields = new List <XmlDatasetKeyField>
                {
                    new XmlDatasetKeyField
                    {
                        DatasetName = "DATASET_2_1",
                        KeyField    = "MODEL2_DATASET1_ID"
                    }
                }
            };

            var xmlKeyFields = new XmlKeyFields
            {
                DefaultKeyField     = "DEFAULT_ID",
                DataSourceKeyFields = new List <XmlDataSourceKeyFields>
                {
                    model1KeyFields,
                    model2KeyFields
                }
            };

            // Models/datasets use lower-case names on purpose: case should not matter.
            var model1 = new TestModel("model1");
            var model2 = new TestModel("model2");
            var model3 = new TestModel("model3");

            TestDataset dataset11 = model1.AddDataset(new TestDataset("dataset_1_1"));
            TestDataset dataset12 = model1.AddDataset(new TestDataset("dataset_1_2"));
            TestDataset dataset13 = model1.AddDataset(new TestDataset("dataset_1_3"));

            TestDataset dataset21 = model2.AddDataset(new TestDataset("dataset_2_1"));
            TestDataset dataset22 = model2.AddDataset(new TestDataset("dataset_2_2"));
            TestDataset dataset31 = model3.AddDataset(new TestDataset("dataset_3_1"));

            var lookup = new KeyFieldLookup(xmlKeyFields);

            // Dataset configured with an alternate key field.
            Assert.AreEqual("MODEL1_DATASET1_ID", lookup.GetKeyField(dataset11));

            // Dataset configured with null (--> use OBJECTID).
            Assert.IsNull(lookup.GetKeyField(dataset12));

            // Model configured, but not this dataset --> model default.
            Assert.AreEqual("MODEL1_ID", lookup.GetKeyField(dataset13));

            // Dataset configured with an alternate key field.
            Assert.AreEqual("MODEL2_DATASET1_ID", lookup.GetKeyField(dataset21));

            // Model configured, but not this dataset --> model default.
            Assert.AreEqual("MODEL2_ID", lookup.GetKeyField(dataset22));

            // Neither model nor dataset configured --> global default.
            Assert.AreEqual("DEFAULT_ID", lookup.GetKeyField(dataset31));
        }