public void RunPipelineTest()
        {
            // Creates learning api object
            LearningApi api = new LearningApi(TestHelpers.GetDescriptor());

            // Initialize data provider
            api.UseCsvDataProvider(m_iris_data_path, ',', false, 1);

            // Use mapper for data, which will extract (map) required columns
            api.UseDefaultDataMapper();

            // Use MinMax data normalizer
            //api.UseMinMaxNormalizer(m_stats.Select(x => x.Min).ToArray(), m_stats.Select(x => x.Max).ToArray());

            // We could also use another normalizer, like the Gauss data normalizer
            //api.UseGaussNormalizer(m_stats.Select(x => x.Mean).ToArray(), m_stats.Select(x => x.StDev).ToArray());

            // Prepares the ML algorithm and sets up its parameters
            api.UseBackPropagation(1, 0.2, 1.0, null);

            // Start the learning process
            api.Run();

            //  api.Train();
            //   api.TrainSample();

            IScore status = api.GetScore();

            //api.Train(vector)
            return;
        }
        public bool InitNeuralBackPropagationTest()
        {
            //  InitIrisMapperInJsonFormat_helper();

            // Creates learning api object
            LearningApi api = new LearningApi(TestHelpers.GetDescriptor());

            // Initialize data provider
            api.UseCsvDataProvider(m_IrisDataPath, ',', false, 1);

            // Use mapper for data, which will extract (map) required columns
            api.UseDefaultDataMapper();

            // Use MinMax data normalizer
            //api.UseMinMaxNormalizer();

            // We could also use another normalizer, like the Gauss data normalizer
            //api.UseGaussNormalizer(m_stats.Select(x => x.Mean).ToArray(), m_stats.Select(x => x.StDev).ToArray());

            // Prepares the ML algorithm and sets up its parameters
            api.UseBackPropagation(1, 0.2, 1.0, null);

            api.Run();

            IScore status = api.GetScore();

            //api.Train(vector)
            return(true);
        }
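        // TestHelpers.GetDescriptor() is not part of this excerpt. Below is a minimal sketch of
        // what such a descriptor for the Iris data might look like, using the DataDescriptor,
        // Column and ColumnType types that the tests below access via api.Context.DataDescriptor.
        // The column names, ids and layout are illustrative assumptions, not the original helper.
        private static DataDescriptor getIrisDescriptorSketch()
        {
            var des = new DataDescriptor();

            des.Features = new Column[5];
            des.Features[0] = new Column { Id = 1, Name = "sepal_length", Index = 0, Type = ColumnType.NUMERIC };
            des.Features[1] = new Column { Id = 2, Name = "sepal_width",  Index = 1, Type = ColumnType.NUMERIC };
            des.Features[2] = new Column { Id = 3, Name = "petal_length", Index = 2, Type = ColumnType.NUMERIC };
            des.Features[3] = new Column { Id = 4, Name = "petal_width",  Index = 3, Type = ColumnType.NUMERIC };
            des.Features[4] = new Column
            {
                Id = 5, Name = "species", Index = 4, Type = ColumnType.CLASS,
                Values = new string[] { "setosa", "versicolor", "virginica" }
            };

            // The class column is the label to be predicted.
            des.LabelIndex = 4;

            return des;
        }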
        public void RBMDataSample1Test()
        {
            var dataPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\rbm_sample1.csv");

            LearningApi api = new LearningApi(this.getDescriptorForRbm_sample1());

            // Initialize data provider
            api.UseCsvDataProvider(dataPath, ',', false, 1);
            api.UseDefaultDataMapper();
            api.UseRbm(0.2, 1000, 6, 3);

            RbmResult score = api.Run() as RbmResult;

            double[][] testData = new double[4][];

            testData[0] = new double[] { 1, 1, 0, 0, 0, 0 };
            testData[1] = new double[] { 0, 0, 0, 0, 1, 1 };
            testData[2] = new double[] { 0, 1, 0, 0, 0, 0 };
            testData[3] = new double[] { 0, 0, 0, 0, 1, 1 };

            var result = api.Algorithm.Predict(testData, api.Context);

            // NOT FINISHED.
            //Assert.True(result[0] == 1);
            //Assert.True(result[1] == 0);
            //Assert.True(result[2] == 0);
            //Assert.True(result[3] == 0);
            //Assert.True(result[4] == 1);
            //Assert.True(result[5] == 0);
        }
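        // getDescriptorForRbm_sample1() is not shown in this excerpt. A plausible sketch, assuming
        // rbm_sample1.csv consists of six binary columns (matching the six visible nodes passed to
        // UseRbm above) and no label column, since RBMs learn unsupervised. The column names and
        // the LabelIndex convention are assumptions.
        private DataDescriptor getDescriptorForRbm_sample1()
        {
            var des = new DataDescriptor { LabelIndex = -1 };

            des.Features = new Column[6];
            for (int i = 0; i < des.Features.Length; i++)
            {
                des.Features[i] = new Column { Id = i + 1, Name = $"bit{i + 1}", Index = i, Type = ColumnType.BINARY };
            }

            return des;
        }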
        public void calculateCorrelation_test1()
        {
            //
            LearningApi api = new LearningApi(null);

            // Initialize data provider
            api.UseCsvDataProvider(@"CorrelationMatrix/corellation_data.csv", ',', true, 0);


            // Custom action module that converts the raw dataset into column vectors
            api.UseActionModule <object[][], double[][]>((input, ctx) =>
            {
                return(toColumnVector(input));
            });


            // api.UseMinMaxNormalizer();


            var data = api.Run() as double[][];

            var prov = api.GetModule <CsvDataProvider>("CsvDataProvider");

            var strData = new List <string>();
            var hed     = prov.Header.ToList();

            hed.Insert(0, "");
            strData.Add(string.Join(",", hed.ToArray()));
            for (int i = 0; i < data.Length; i++)
            {
                var lst = new List <string>();
                lst.Add(prov.Header[i]);
                for (int k = 0; k < i; k++)
                {
                    lst.Add(" ");
                }

                for (int j = i; j < data.Length; j++)
                {
                    var corValue = data[i].CorrCoeffOf(data[j]);
                    if (double.IsNaN(corValue))
                    {
                        continue;
                    }
                    lst.Add(corValue.ToString("n5", CultureInfo.InvariantCulture));
                }


                strData.Add(string.Join(",", lst));
            }

            Assert.True("Col1,1.00000,0.16892,0.99111,0.75077,-0.82354,-0.85164" == strData[1]);

            System.IO.File.WriteAllLines(@"CorrelationMatrix/strCorrlation.txt", strData);
            //
            return;
        }
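        // toColumnVector(...) is referenced above but not defined in this excerpt. A sketch of what
        // it presumably does, given that the test then indexes the result per header column and
        // correlates columns against each other: transpose the row-oriented CSV records into one
        // double[] per column. Parsing via InvariantCulture is an assumption.
        private static double[][] toColumnVector(object[][] rows)
        {
            int cols = rows[0].Length;
            var columns = new double[cols][];

            for (int j = 0; j < cols; j++)
            {
                columns[j] = new double[rows.Length];
                for (int i = 0; i < rows.Length; i++)
                {
                    columns[j][i] = Convert.ToDouble(rows[i][j], CultureInfo.InvariantCulture);
                }
            }

            return columns;
        }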
        private object[][] getData(int cnt)
        {
            var dataPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), @"DataSet\Book2.csv");

            LearningApi api = new LearningApi(loadMetaData1());

            api.UseCsvDataProvider(dataPath, ',', false, 0);

            return(api.Run() as object[][]);
        }
        public void LogisticRegression_Test_Real_Example()
        {
            string m_binary_data_path = @"SampleData\binary\admit_binary.csv";

            var binary_path = System.IO.Path.Combine(Directory.GetCurrentDirectory(), m_binary_data_path);

            LearningApi api = new LearningApi(loadMetaData1());

            api.UseCsvDataProvider(binary_path, ',', false, 1);

            // Use mapper for data, which will extract (map) required columns
            api.UseDefaultDataMapper();

            api.UseMinMaxNormalizer();

            // Run logistic regression for 200 iterations with learningRate = 0.00012
            api.UseLogisticRegression(0.00012, 200);


            var score = api.Run();

            //
            // Prediction after the model is created.
            // Define the data for testing (prediction).
            LearningApi apiPrediction = new LearningApi(loadMetaData1());

            // A real dataset must be defined as object type, because the data can be numeric, binary, or categorical
            apiPrediction.UseActionModule <object[][], object[][]>((input, ctx) =>
            {
                var data = new object[5][]
                {
                    new object[] { 660, 3.88, 2, 1 },
                    new object[] { 580, 3.36, 2, 0 },
                    new object[] { 640, 3.17, 2, 0 },
                    new object[] { 640, 3.51, 2, 0 },
                    new object[] { 800, 3.05, 2, 1 },
                };
                return(data);
            });

            // Use mapper for data, which will extract (map) required columns
            apiPrediction.UseDefaultDataMapper();
            apiPrediction.UseMinMaxNormalizer();
            var testData = apiPrediction.Run();

            // Use the previously trained model
            var result = api.Algorithm.Predict(testData as double[][], api.Context) as LogisticRegressionResult;

            //
            Assert.Equal(0, Math.Round(result.PredictedValues[0], 0));
            Assert.Equal(0, Math.Round(result.PredictedValues[1], 0));
            Assert.Equal(0, Math.Round(result.PredictedValues[2], 0));
            Assert.Equal(0, Math.Round(result.PredictedValues[3], 0));
            Assert.Equal(0, Math.Round(result.PredictedValues[4], 0));
        }
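        // loadMetaData1() is not shown in this excerpt. A sketch of the descriptor it presumably
        // builds for admit_binary.csv, inferred from the prediction rows above (GRE score, GPA,
        // school rank, admit flag). The column names and types are assumptions, not the original
        // helper.
        private static DataDescriptor loadMetaData1Sketch()
        {
            var des = new DataDescriptor();

            des.Features = new Column[4];
            des.Features[0] = new Column { Id = 1, Name = "gre",   Index = 0, Type = ColumnType.NUMERIC };
            des.Features[1] = new Column { Id = 2, Name = "gpa",   Index = 1, Type = ColumnType.NUMERIC };
            des.Features[2] = new Column { Id = 3, Name = "rank",  Index = 2, Type = ColumnType.NUMERIC };
            des.Features[3] = new Column { Id = 4, Name = "admit", Index = 3, Type = ColumnType.BINARY };

            // The admit flag is the label that logistic regression learns to predict.
            des.LabelIndex = 3;

            return des;
        }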
        private object[][] getRealDataSample(string filePath)
        {
            //
            // Iris data file
            var irisPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), filePath);

            LearningApi api = new LearningApi(loadMetaData1());

            api.UseCsvDataProvider(irisPath, ',', false, 0);

            return(api.Run() as object[][]);
        }
        public void movieRecommendationTestCRbm(int iterations, double learningRate, int visNodes, int hidNodes)
        {
            Debug.WriteLine($"{iterations}-{visNodes}-{hidNodes}");

            LearningApi api = new LearningApi(getDescriptorForRbm(3898));

            // Initialize data provider
            api.UseCsvDataProvider(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\movieDatasetTrain.csv"), ',', false, 0);
            api.UseDefaultDataMapper();
            double[] featureVector = new double[] { 0, 0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75, 0.8, 0.85 };
            api.UseCRbm(featureVector, learningRate, iterations, visNodes, hidNodes);

            Stopwatch watch = new Stopwatch();

            watch.Start();
            RbmScore score = api.Run() as RbmScore;

            watch.Stop();

            var hiddenNodes  = score.HiddenValues;
            var hiddenBiases = score.HiddenBisases; // note: the misspelled property name comes from the library

            double[] learnedFeatures = new double[hidNodes];
            double[] learnedBiases   = new double[hidNodes];
            for (int i = 0; i < hidNodes; i++)
            {
                learnedFeatures[i] = hiddenNodes[i];
                learnedBiases[i]   = hiddenBiases[i];
            }

            using (StreamWriter tw = new StreamWriter($"PredictedDigit_I{iterations}_V{visNodes}_H{hidNodes}_learnedbias.txt"))
            {
                foreach (var item in score.HiddenBisases)
                {
                    tw.WriteLine(item);
                }
            }

            var testData = ReadData(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\movieDatasetTest.csv"));

            var result = api.Algorithm.Predict(testData, api.Context);

            var predictedData = ((RbmResult)result).VisibleNodesPredictions;

            var predictedHiddenNodes = ((RbmResult)result).HiddenNodesPredictions;

            var acc = testData.GetHammingDistance(predictedData);

            WriteDeepResult(iterations, new int[] { visNodes, hidNodes }, learningRate, acc, watch.ElapsedMilliseconds * 1000, predictedHiddenNodes);

            WriteOutputMatrix(iterations, new int[] { visNodes, hidNodes }, learningRate, predictedData, testData);
        }
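        // ReadData(...) is used by several RBM tests here but is not part of this excerpt. A
        // minimal sketch, assuming the test CSV files contain plain comma-separated numeric rows
        // without a header; the original helper may differ.
        private static double[][] ReadData(string path)
        {
            var rows = new List<double[]>();

            foreach (var line in File.ReadLines(path))
            {
                if (string.IsNullOrWhiteSpace(line))
                    continue;

                rows.Add(line.Split(',')
                             .Select(tok => double.Parse(tok, CultureInfo.InvariantCulture))
                             .ToArray());
            }

            return rows.ToArray();
        }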
        public void smileyTestCRbm(int iterations, double learningRate, int visNodes, int hidNodes)
        {
            LearningApi api = new LearningApi(getDescriptorForRbm(1600));

            // Initialize data provider
            api.UseCsvDataProvider(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\Smiley.csv"), ',', false, 0);
            api.UseDefaultDataMapper();
            double[] featureVector = new double[] { 0.1, 0.2 };
            api.UseCRbm(featureVector, learningRate, iterations, visNodes, hidNodes);

            Stopwatch watch = new Stopwatch();

            watch.Start();
            RbmScore score = api.Run() as RbmScore;

            watch.Stop();

            var hiddenNodes  = score.HiddenValues;
            var hiddenBiases = score.HiddenBisases; // note: the misspelled property name comes from the library

            double[] learnedFeatures = new double[hidNodes];
            double[] learnedBiases   = new double[hidNodes];
            for (int i = 0; i < hidNodes; i++)
            {
                learnedFeatures[i] = hiddenNodes[i];
                learnedBiases[i]   = hiddenBiases[i];
            }

            var testData = ReadData(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\SmileyTest.csv"));

            var result = api.Algorithm.Predict(testData, api.Context);

            var predictedData = ((RbmResult)result).VisibleNodesPredictions;

            var predictedHiddenNodes = ((RbmResult)result).HiddenNodesPredictions;

            var acc = testData.GetHammingDistance(predictedData);

            var ValTest  = calcDelta(predictedData, testData);
            var lossTest = ValTest / (visNodes);

            Debug.WriteLine($"lossTest: {lossTest}");

            WriteDeepResult(iterations, new int[] { visNodes, hidNodes }, acc, watch.ElapsedMilliseconds * 1000, predictedHiddenNodes);

            WriteOutputMatrix(iterations, new int[] { visNodes, hidNodes }, predictedData, testData);
        }
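        // calcDelta(...) feeds the lossTest value above but is not shown in this excerpt. A
        // plausible sketch: the summed absolute difference between predicted and actual visible
        // nodes, which, divided by the number of visible nodes, yields the per-node reconstruction
        // loss that is logged above.
        private static double calcDelta(double[][] predicted, double[][] actual)
        {
            double delta = 0.0;

            for (int i = 0; i < actual.Length; i++)
            {
                for (int j = 0; j < actual[i].Length; j++)
                {
                    delta += Math.Abs(predicted[i][j] - actual[i][j]);
                }
            }

            return delta;
        }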
        public void CRbm_ClassifierTest()
        {
            var dataPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\rbm_twoclass_sample.csv");

            LearningApi api = new LearningApi(this.getDescriptorForRbmTwoClassesClassifier());

            // Initialize data provider
            api.UseCsvDataProvider(dataPath, ';', false, 1);
            api.UseDefaultDataMapper();
            double[] featureVector = new double[] { 0.1, 0.2, 0.3 };
            api.UseCRbm(featureVector, 0.01, 1000, 10, 2);
            RbmResult score = api.Run() as RbmResult;

            double[][] testData = new double[5][];

            //
            // This test data contains two patterns. One is grouped at the left and one almost at the right.
            testData[0] = new double[] { 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
            testData[1] = new double[] { 1, 0, 1, 0, 0, 0, 0, 0, 0, 0 };
            testData[2] = new double[] { 0, 0, 0, 0, 0, 1, 1, 1, 0, 0 };
            testData[3] = new double[] { 0, 0, 0, 0, 0, 1, 0, 1, 0, 0 };

            // This will be classified as a third class.
            testData[4] = new double[] { 1, 1, 1, 0, 0, 1, 1, 1, 0, 0 };

            var result = api.Algorithm.Predict(testData, api.Context) as RbmResult;

            //
            // 2 * BIT1 + BIT2 of [0] and [1] should be the same.
            // We don't know how the RBM will classify the data. We only expect that
            // the same or a similar pattern of data will be assigned to the same class.
            // Note that we have two classes here (two hidden nodes).
            // The first and second data samples are of the same class. The third and fourth are also of the same class.

            // Here we check the first class.
            Assert.True(2 * result.HiddenNodesPredictions[0][0] + result.HiddenNodesPredictions[0][1] ==
                        2 * result.HiddenNodesPredictions[1][0] + result.HiddenNodesPredictions[1][1]);

            // Here is the test for the second class.
            Assert.True(2 * result.HiddenNodesPredictions[2][0] + result.HiddenNodesPredictions[2][1] ==
                        2 * result.HiddenNodesPredictions[3][0] + result.HiddenNodesPredictions[3][1]);

            printVector("Weights", result.Weights);
        }
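        // printVector(...) is not defined in this excerpt. A minimal sketch that dumps a weight
        // matrix to the debug output, assuming RbmResult.Weights is a jagged matrix (double[][]):
        private static void printVector(string name, double[][] matrix)
        {
            Debug.WriteLine(name);

            foreach (var row in matrix)
            {
                Debug.WriteLine(string.Join("\t", row.Select(v => v.ToString("n4", CultureInfo.InvariantCulture))));
            }
        }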
        public void CSVDataProviderTest_SecomData()
        {
            //
            // SECOM data file
            var secomPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), m_secom_data_path);

            LearningApi api = new LearningApi(TestHelpers.GetDescriptor(m_secom_data_mapper_path));

            api.UseCsvDataProvider(secomPath, ',', false, 1);

            var result = api.Run() as object[][];

            //get expected result
            var expected = GetReal_Secom_DataSet();

            for (int i = 0; i < result.Length; i++)
            {
                for (int j = 0; j < result[0].Length; j++)
                {
                    var col = api.Context.DataDescriptor.Features[j];
                    if (col.Type == ColumnType.STRING)
                    {
                        Assert.Equal(result[i][j], expected[i][j]);
                    }
                    else if (col.Type == ColumnType.NUMERIC)//numeric column
                    {
                        //var val1 = double.Parse(result[i][j].ToString());
                        //var val2 = double.Parse(expected[i][j].ToString());

                        Assert.Equal(result[i][j], expected[i][j]);
                    }
                    else if (col.Type == ColumnType.BINARY)//binary column
                    {
                        Assert.Equal(result[i][j].ToString(), expected[i][j].ToString());
                    }
                    else if (col.Type == ColumnType.CLASS)//class column
                    {
                        Assert.Equal(result[i][j].ToString(), expected[i][j].ToString());
                    }
                }
            }

            return;
        }
        public void CSVDataProviderTest_IrisData()
        {
            //
            // Iris data file
            var irisPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), m_iris_data_path);

            LearningApi api = new LearningApi(TestHelpers.GetDescriptor());

            api.UseCsvDataProvider(irisPath, ',', false, 1);

            var result = api.Run() as object[][];

            //get expected result
            var expected = GetReal_Iris_DataSet();

            for (int i = 0; i < result.Length; i++)
            {
                for (int j = 0; j < result[0].Length; j++)
                {
                    var col = api.Context.DataDescriptor.Features[j];
                    if (col.Type == ColumnType.STRING)
                    {
                        continue;
                    }
                    else if (col.Type == ColumnType.NUMERIC)//numeric column
                    {
                        var val1 = double.Parse((string)result[i][j], System.Globalization.NumberStyles.Any, CultureInfo.InvariantCulture);
                        var val2 = Convert.ToDouble(expected[i][j], CultureInfo.InvariantCulture);

                        Assert.Equal(val1, val2);
                    }
                    else if (col.Type == ColumnType.BINARY)//binary column
                    {
                        Assert.Equal(result[i][j].ToString(), expected[i][j].ToString());
                    }
                    else if (col.Type == ColumnType.CLASS)//class column
                    {
                        Assert.Equal(result[i][j].ToString(), expected[i][j].ToString());
                    }
                }
            }

            return;
        }
        public void DigitRecognitionDeepTest(int iterations, double learningRate, int[] layers)
        {
            Debug.WriteLine($"{iterations}-{String.Join("", layers)}");

            LearningApi api = new LearningApi(getDescriptorForRbmTwoClassesClassifier(4096));

            // Initialize data provider
            // TODO: Describe Digit Dataset.
            api.UseCsvDataProvider(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\DigitDataset.csv"), ',', false, 0);
            api.UseDefaultDataMapper();

            api.UseDeepRbm(learningRate, iterations, layers);

            Stopwatch watch = new Stopwatch();

            watch.Start();
            RbmDeepScore score = api.Run() as RbmDeepScore;

            watch.Stop();

            var testData = RbmHandwrittenDigitUnitTests.ReadData(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\predictiondigitdata.csv"));

            var result               = api.Algorithm.Predict(testData, api.Context) as RbmDeepResult;
            var accList              = new double[result.Results.Count];
            var predictions          = new double[result.Results.Count][];
            var predictedHiddenNodes = new double[result.Results.Count][];
            var timeInSeconds = watch.ElapsedMilliseconds / 1000.0;

            int i = 0;

            foreach (var item in result.Results)
            {
                predictions[i]          = item.First().VisibleNodesPredictions;
                predictedHiddenNodes[i] = item.Last().HiddenNodesPredictions;
                accList[i] = testData[i].GetHammingDistance(predictions[i]);
                i++;
            }

            RbmHandwrittenDigitUnitTests.WriteDeepResult(iterations, layers, accList, timeInSeconds * 1000, predictedHiddenNodes);

            // Write the predicted hidden nodes.
            RbmHandwrittenDigitUnitTests.WriteOutputMatrix(iterations, layers, predictions, testData);
        }
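        // WriteDeepResult and WriteOutputMatrix live in RbmHandwrittenDigitUnitTests and are not
        // shown in this excerpt. A sketch of what WriteDeepResult presumably does: persist the
        // per-sample accuracy and the predicted hidden-layer codes for offline inspection. The
        // signature matches the call above; the file name and layout are assumptions.
        internal static void WriteDeepResult(int iterations, int[] layers, double[] accList, double time, double[][] predictedHiddenNodes)
        {
            using (var sw = new StreamWriter($"DeepResult_I{iterations}_L{string.Join("-", layers)}.txt"))
            {
                sw.WriteLine($"Iterations: {iterations}, Layers: {string.Join("-", layers)}, Time: {time}");

                for (int i = 0; i < accList.Length; i++)
                {
                    sw.WriteLine($"{accList[i]}\t{string.Join(",", predictedHiddenNodes[i])}");
                }
            }
        }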
        public void smileyTestDeepRbm(int iterations, double learningRate, int[] layers)
        {
            LearningApi api = new LearningApi(getDescriptorForRbm(1600));

            // Initialize data provider
            api.UseCsvDataProvider(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\Smiley.csv"), ',', false, 0);
            api.UseDefaultDataMapper();
            api.UseDeepRbm(learningRate, iterations, layers);

            Stopwatch watch = new Stopwatch();

            watch.Start();
            RbmDeepScore score = api.Run() as RbmDeepScore;

            watch.Stop();

            var testData = ReadData(Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\SmileyTest.csv"));

            var result               = api.Algorithm.Predict(testData, api.Context) as RbmDeepResult;
            var accList              = new double[result.Results.Count];
            var predictions          = new double[result.Results.Count][];
            var predictedHiddenNodes = new double[result.Results.Count][];
            var timeInSeconds = watch.ElapsedMilliseconds / 1000.0;

            int i = 0;

            foreach (var item in result.Results)
            {
                predictions[i]          = item.First().VisibleNodesPredictions;
                predictedHiddenNodes[i] = item.Last().HiddenNodesPredictions;
                accList[i] = testData[i].GetHammingDistance(predictions[i]);
                i++;
            }
            var ValTest  = calcDelta(predictions, testData);
            var lossTest = ValTest / (layers.First());

            Debug.WriteLine($"lossTest: {lossTest}");

            WriteDeepResult(iterations, layers, accList, timeInSeconds / 60.0, predictedHiddenNodes);

            // Write the predicted hidden nodes.
            WriteOutputMatrix(iterations, layers, predictions, testData);
        }
        public void Save_Test()
        {
            // Creates learning api object
            LearningApi api = new LearningApi(TestHelpers.GetDescriptor());

            // Initialize data provider
            api.UseCsvDataProvider(m_IrisDataPath, ',', false, 1);

            // Use mapper for data, which will extract (map) required columns
            api.UseDefaultDataMapper();

            // Prepares the ML algorithm and sets up its parameters
            api.UseBackPropagation(1, 0.2, 1.0, null);

            api.Save("model1");

            var loadedApi = LearningApi.Load("model1");

            Assert.True(((BackPropagationNetwork)loadedApi.Algorithm).learningRate == ((BackPropagationNetwork)api.Algorithm).learningRate);
        }
        public void Rbm_ClassifierTest2()
        {
            var dataPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\rbm_sample2.csv");

            LearningApi api = new LearningApi(this.getDescriptorForRbmTwoClassesClassifier(21));

            // Initialize data provider
            api.UseCsvDataProvider(dataPath, ',', false, 1);
            api.UseDefaultDataMapper();
            api.UseDeepRbm(0.2, 10000, new int[] { 21, 9, 6, 2 });

            RbmResult score = api.Run() as RbmResult;

            var expectedResults = new Dictionary <int, List <double[]> >();

            // All test data that belong to the same class.
            List <double[]> testListClass1 = new List <double[]>();
            List <double[]> testListClass2 = new List <double[]>();

            //
            // This test data contains two patterns. One is grouped at the left and one almost at the right.
            // testListClass1 contains class 1.
            // testListClass2 contains class 2.
            testListClass1.Add(new double[] { 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
            testListClass1.Add(new double[] { 1, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });
            testListClass1.Add(new double[] { 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 });

            testListClass2.Add(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0 });
            testListClass2.Add(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0 });
            testListClass2.Add(new double[] { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 });

            expectedResults.Add(1, testListClass1);
            expectedResults.Add(2, testListClass2);

            validateClassificationResult(api, expectedResults);
        }
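        // validateClassificationResult(...) is not shown in this excerpt. A hedged sketch of its
        // presumable logic, based on how RbmDeepResult is traversed elsewhere in these tests:
        // predict each group of samples and assert that every sample of one expected class yields
        // the same hidden-node code as the first sample of that class. The traversal details are
        // assumptions, not the original helper.
        private static void validateClassificationResultSketch(LearningApi api, Dictionary<int, List<double[]>> expectedResults)
        {
            foreach (var cls in expectedResults)
            {
                var samples = cls.Value.ToArray();
                var res = api.Algorithm.Predict(samples, api.Context) as RbmDeepResult;

                // The hidden code of the first sample serves as the reference for the whole class.
                var reference = res.Results[0].Last().HiddenNodesPredictions;

                for (int i = 1; i < res.Results.Count; i++)
                {
                    Assert.True(reference.SequenceEqual(res.Results[i].Last().HiddenNodesPredictions));
                }
            }
        }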
        public void Rbm_ClassifierTest()
        {
            var dataPath = System.IO.Path.Combine(Directory.GetCurrentDirectory(), @"RestrictedBolzmannMachine2\Data\rbm_twoclass_sample.csv");

            LearningApi api = new LearningApi(this.getDescriptorForRbmTwoClassesClassifier(10));

            // Initialize data provider
            api.UseCsvDataProvider(dataPath, ';', false, 1);
            api.UseDefaultDataMapper();
            api.UseDeepRbm(0.2, 1000, new int[] { 10, 2 });

            RbmResult score = api.Run() as RbmResult;

            double[][] trainData = new double[6][];

            // All test data that belong to the same class.
            List <double[]> testListClass1 = new List <double[]>();
            List <double[]> testListClass2 = new List <double[]>();

            //
            // This test data contains two patterns. One is grouped at the left and one almost at the right.
            trainData[0] = new double[] { 1, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
            testListClass1.Add(trainData[0]);

            trainData[1] = new double[] { 1, 0, 1, 0, 0, 0, 0, 0, 0, 0 };
            testListClass1.Add(trainData[1]);

            trainData[2] = new double[] { 0, 1, 1, 0, 0, 0, 0, 0, 0, 0 };
            testListClass1.Add(trainData[2]);


            trainData[3] = new double[] { 0, 0, 0, 0, 0, 1, 1, 1, 0, 0 };
            testListClass2.Add(trainData[3]);

            trainData[4] = new double[] { 0, 0, 0, 0, 0, 1, 0, 1, 0, 0 };
            testListClass2.Add(trainData[4]);

            trainData[5] = new double[] { 0, 0, 0, 0, 0, 1, 1, 0, 0, 0 };
            testListClass2.Add(trainData[5]);


            // This would be classified as a third class.
            //testData[4] = new double[] { 1, 1, 1, 0, 0, 1, 1, 1, 0, 0 };

            RbmDeepResult result = api.Algorithm.Predict(trainData, api.Context) as RbmDeepResult;

            var expectedResults = new Dictionary <int, List <double[]> >();

            expectedResults.Add(1, testListClass1);
            expectedResults.Add(2, testListClass2);

            validateClassificationResult(api, expectedResults);

            //
            // 2 * BIT1 + BIT2 of [0] and [1] should be the same.
            // We don't know how the RBM will classify the data. We only expect that
            // the same or a similar pattern of data will be assigned to the same class.
            // Note that we have two classes here (two hidden nodes).
            // The first and second data samples are of the same class.
            // The third and fourth are also of the same class. See the data.

            //// Here we check the first class.
            //Assert.True(result.Results[0].ToArray()[0].HiddenNodesPredictions[0] == result.Results[1].ToArray()[0].HiddenNodesPredictions[0] &&
            //    result.Results[0].ToArray()[0].HiddenNodesPredictions[1] == result.Results[1].ToArray()[0].HiddenNodesPredictions[1]);

            //// Here is the test for the second class.
            //Assert.True(result.Results[2].ToArray()[0].HiddenNodesPredictions[0] == result.Results[3].ToArray()[0].HiddenNodesPredictions[0] &&
            //    result.Results[2].ToArray()[0].HiddenNodesPredictions[1] == result.Results[3].ToArray()[0].HiddenNodesPredictions[1]);
        }