Example #1
        /// <summary>
        /// Builds the internal probabilistic cluster chain and makes the "One Takes All" group operable.
        /// </summary>
        /// <param name="readoutUnitsResultsCollection">The collection of the collections of all readout units composite results.</param>
        /// <param name="readoutUnitsIdealValuesCollection">The collection of the collections of all readout units ideal values.</param>
        /// <param name="filters">The feature filters to be used to denormalize output data.</param>
        /// <param name="rand">The random object to be used.</param>
        /// <param name="controller">The build process controller (optional).</param>
        public void Build(List<CompositeResult[]> readoutUnitsResultsCollection,
                          List<double[]> readoutUnitsIdealValuesCollection,
                          BinFeatureFilter[] filters,
                          Random rand,
                          TNRNetBuilder.BuildControllerDelegate controller = null
                          )
        {
            if (DecisionMethod != OneTakesAllDecisionMethod.ClusterChain)
            {
                throw new InvalidOperationException("Wrong call of the Build method.");
            }
            OneTakesAllClusterChainDecisionSettings decisionCfg = (OneTakesAllClusterChainDecisionSettings)_groupCfg.DecisionCfg;
            //Prepare the training data bundle for the cluster chain
            VectorBundle trainingDataBundle = new VectorBundle(readoutUnitsIdealValuesCollection.Count);

            for (int sampleIdx = 0; sampleIdx < readoutUnitsIdealValuesCollection.Count; sampleIdx++)
            {
                double[] inputVector  = CreateInputVector(readoutUnitsResultsCollection[sampleIdx]);
                double[] outputVector = CreateOutputVector(readoutUnitsIdealValuesCollection[sampleIdx], filters);
                trainingDataBundle.AddPair(inputVector, outputVector);
            }


            //Cluster chain builder
            TNRNetClusterChainBuilder builder = new TNRNetClusterChainBuilder(Name,
                                                                              decisionCfg.ClusterChainCfg,
                                                                              rand,
                                                                              controller
                                                                              );

            builder.ChainBuildProgressChanged += OnChainBuildProgressChanged;
            ProbabilisticClusterChain          = builder.Build(trainingDataBundle, filters);
            return;
        }
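A minimal usage sketch of this Build overload follows. The group instance (otaGroup) and the three helper calls are assumptions introduced here for illustration; they are not part of the example above.

        //Hypothetical usage sketch (assumed instance and helper names).
        //otaGroup is an already configured "One Takes All" group using the ClusterChain decision method.
        List<CompositeResult[]> unitResults     = CollectReadoutUnitsResults();     //assumed helper
        List<double[]>          unitIdealValues = CollectReadoutUnitsIdealValues(); //assumed helper
        BinFeatureFilter[]      binFilters      = PrepareBinFeatureFilters();       //assumed helper
        otaGroup.Build(unitResults, unitIdealValues, binFilters, new Random(0));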
Example #2
        /// <summary>
        /// Builds the inner cluster chain.
        /// </summary>
        /// <param name="dataBundle">The data to be used for training.</param>
        /// <param name="filter">The feature filter to be used to denormalize output.</param>
        /// <param name="rand">The random object to be used (optional).</param>
        /// <param name="controller">The build process controller (optional).</param>
        public void Build(VectorBundle dataBundle,
                          FeatureFilterBase filter,
                          Random rand = null,
                          TNRNetBuilder.BuildControllerDelegate controller = null
                          )
        {
            rand = rand ?? new Random(0);
            TNRNetClusterChainBuilder builder = new TNRNetClusterChainBuilder(Name,
                                                                              _clusterChainCfg,
                                                                              rand,
                                                                              controller
                                                                              );

            builder.ChainBuildProgressChanged += OnChainBuildProgressChanged;
            _clusterChain = builder.Build(dataBundle, new FeatureFilterBase[] { filter });
            return;
        }
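A hedged usage sketch, assuming a holder instance (here called clusterChainComputer) that exposes this Build overload; the bundle loader is a placeholder. The single output filter mirrors the BinFeatureFilter construction used in Example #3.

        //Hypothetical usage sketch (assumed instance and helper names).
        VectorBundle trainingBundle = LoadTrainingBundle();                     //assumed helper
        FeatureFilterBase outputFilter = new BinFeatureFilter(Interval.IntZP1); //single output feature filter
        clusterChainComputer.Build(trainingBundle, outputFilter, new Random(0));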
Example #3
        /// <summary>
        /// Trains the network cluster chain to perform a classification task and then verifies its performance.
        /// </summary>
        /// <param name="name">The name of a classification task.</param>
        /// <param name="trainDataFile">The name of a csv datafile containing the training data.</param>
        /// <param name="verifyDataFile">The name of a csv datafile containing the verification data.</param>
        /// <param name="numOfClasses">The number of classes.</param>
        /// <param name="foldDataRatio">Specifies what part of training data is reserved for testing. It determines the size of data fold and also number of networks within the cluster.</param>
        private void PerformClassification(string name, string trainDataFile, string verifyDataFile, int numOfClasses, double foldDataRatio)
        {
            _log.Write($"{name} classification performed by the Probabilistic cluster chain ({numOfClasses.ToString(CultureInfo.InvariantCulture)} classes).");
            //Load csv data and create vector bundles
            _log.Write($"Loading {trainDataFile}...");
            CsvDataHolder trainCsvData = new CsvDataHolder(trainDataFile);
            VectorBundle  trainData    = VectorBundle.Load(trainCsvData, numOfClasses);

            _log.Write($"Loading {verifyDataFile}...");
            CsvDataHolder verifyCsvData = new CsvDataHolder(verifyDataFile);
            VectorBundle  verifyData    = VectorBundle.Load(verifyCsvData, numOfClasses);

            //Input data standardization
            //Allocation and preparation of the input feature filters
            FeatureFilterBase[] inputFeatureFilters = PrepareInputFeatureFilters(trainData);
            //Standardize training input data
            StandardizeInputVectors(trainData, inputFeatureFilters);
            //Standardize verification input data
            StandardizeInputVectors(verifyData, inputFeatureFilters);
            //Output data
            //Output data is already in the 0/1 form required by the SoftMax activation, so we don't
            //need to modify it. We only allocate the binary feature filters required by the cluster chain builder.
            FeatureFilterBase[] outputFeatureFilters = new BinFeatureFilter[numOfClasses];
            for (int i = 0; i < numOfClasses; i++)
            {
                outputFeatureFilters[i] = new BinFeatureFilter(Interval.IntZP1);
            }
            //Cluster chain configuration (we will have two chained clusters)
            //Configuration of the first cluster in the chain
            //End-networks configuration for the first cluster in the chain. For every testing fold, two end-networks with different structures will be trained.
            List<FeedForwardNetworkSettings> netCfgs1 = new List<FeedForwardNetworkSettings>
            {
                //The first FF network will have two hidden layers of 30 TanH activated neurons.
                //Output layer will have the SoftMax activation (it must be SoftMax because we will use the Probabilistic cluster).
                new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(),
                                               new HiddenLayersSettings(new HiddenLayerSettings(30, new AFAnalogTanHSettings()),
                                                                        new HiddenLayerSettings(30, new AFAnalogTanHSettings())
                                                                        ),
                                               new RPropTrainerSettings(3, 200)
                                               ),
                //The second FF network will have two hidden layers of 30 LeakyReLU activated neurons.
                //Output layer will have the SoftMax activation (it must be SoftMax because we will use the Probabilistic cluster).
                new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(),
                                               new HiddenLayersSettings(new HiddenLayerSettings(30, new AFAnalogLeakyReLUSettings()),
                                                                        new HiddenLayerSettings(30, new AFAnalogLeakyReLUSettings())
                                                                        ),
                                               new RPropTrainerSettings(3, 200)
                                               )
            };
            //The first probabilistic network cluster configuration instance
            TNRNetClusterProbabilisticSettings clusterCfg1 =
                new TNRNetClusterProbabilisticSettings(new TNRNetClusterProbabilisticNetworksSettings(netCfgs1),
                                                       new TNRNetClusterProbabilisticWeightsSettings()
                                                       );
            //Configuration of the second cluster in the chain
            //End-network configuration for the second cluster in the chain. For every testing fold, one end-network will be trained.
            List<FeedForwardNetworkSettings> netCfgs2 = new List<FeedForwardNetworkSettings>
            {
                //FF network will have two hidden layers of 30 Elliot activated neurons.
                //Output layer will have the SoftMax activation (it must be SoftMax because we will use the Probabilistic cluster chain).
                new FeedForwardNetworkSettings(new AFAnalogSoftMaxSettings(),
                                               new HiddenLayersSettings(new HiddenLayerSettings(30, new AFAnalogElliotSettings()),
                                                                        new HiddenLayerSettings(30, new AFAnalogElliotSettings())
                                                                        ),
                                               new RPropTrainerSettings(3, 200)
                                               )
            };
            //The second probabilistic network cluster configuration instance
            TNRNetClusterProbabilisticSettings clusterCfg2 =
                new TNRNetClusterProbabilisticSettings(new TNRNetClusterProbabilisticNetworksSettings(netCfgs2),
                                                       new TNRNetClusterProbabilisticWeightsSettings()
                                                       );

            //Probabilistic network cluster chain configuration instance
            ITNRNetClusterChainSettings chainCfg =
                new TNRNetClusterChainProbabilisticSettings(new CrossvalidationSettings(foldDataRatio),
                                                            new TNRNetClustersProbabilisticSettings(clusterCfg1,
                                                                                                    clusterCfg2
                                                                                                    )
                                                            );

            _log.Write($"Cluster configuration xml:");
            _log.Write(chainCfg.GetXml(true).ToString());
            //Training
            _log.Write($"Cluster chain training on {trainDataFile}...");
            //An instance of network cluster chain builder.
            TNRNetClusterChainBuilder builder =
                new TNRNetClusterChainBuilder("Probabilistic Cluster Chain", chainCfg);

            //Register progress event handler
            builder.ChainBuildProgressChanged += OnClusterChainBuildProgressChanged;
            //Build the trained network cluster chain.
            TNRNetClusterChain trainedClusterChain = builder.Build(trainData, outputFeatureFilters);

            //Verification
            _log.Write(string.Empty);
            _log.Write(string.Empty);
            _log.Write($"Cluster chain verification on {verifyDataFile}...");
            _log.Write(string.Empty);
            int numOfErrors = 0;

            for (int i = 0; i < verifyData.InputVectorCollection.Count; i++)
            {
                double[] computed = trainedClusterChain.Compute(verifyData.InputVectorCollection[i], out _);
                //Cluster result
                int computedWinnerIdx = computed.MaxIdx();
                //Real result
                int realWinnerIdx = verifyData.OutputVectorCollection[i].MaxIdx();

                if (computedWinnerIdx != realWinnerIdx)
                {
                    ++numOfErrors;
                }
                _log.Write($"({i + 1}/{verifyData.InputVectorCollection.Count}) Errors: {numOfErrors}", true);
            }
            _log.Write(string.Empty);
            _log.Write($"Accuracy {(1d - (double)numOfErrors / (double)verifyData.InputVectorCollection.Count).ToString(CultureInfo.InvariantCulture)}");
            _log.Write(string.Empty);

            return;
        }
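A hedged invocation sketch for this method; the task name, file names, and parameter values below are placeholders, not data referenced by the example.

        //Hypothetical invocation with placeholder arguments.
        PerformClassification("MyTask",
                              "MyTask_train.csv",
                              "MyTask_verify.csv",
                              numOfClasses: 3,
                              foldDataRatio: 0.1d
                              );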