Example 1
        /// <summary>
        /// Builds the internal probabilistic cluster chain and makes the "One Takes All" group operable.
        /// </summary>
        /// <param name="readoutUnitsResultsCollection">The collection of the collections of all readout units composite results.</param>
        /// <param name="readoutUnitsIdealValuesCollection">The collection of the collections of all readout units ideal values.</param>
        /// <param name="filters">The feature filters to be used to denormalize output data.</param>
        /// <param name="rand">The random object to be used.</param>
        /// <param name="controller">The build process controller (optional).</param>
        public void Build(List <CompositeResult[]> readoutUnitsResultsCollection,
                          List <double[]> readoutUnitsIdealValuesCollection,
                          BinFeatureFilter[] filters,
                          Random rand,
                          TNRNetBuilder.BuildControllerDelegate controller = null
                          )
        {
            if (DecisionMethod != OneTakesAllDecisionMethod.ClusterChain)
            {
                throw new InvalidOperationException("Wrong call of the Build method.");
            }
            OneTakesAllClusterChainDecisionSettings decisionCfg = (OneTakesAllClusterChainDecisionSettings)_groupCfg.DecisionCfg;
            //Prepare the training data bundle for the cluster chain
            VectorBundle trainingDataBundle = new VectorBundle(readoutUnitsIdealValuesCollection.Count);

            for (int sampleIdx = 0; sampleIdx < readoutUnitsIdealValuesCollection.Count; sampleIdx++)
            {
                double[] inputVector  = CreateInputVector(readoutUnitsResultsCollection[sampleIdx]);
                double[] outputVector = CreateOutputVector(readoutUnitsIdealValuesCollection[sampleIdx], filters);
                trainingDataBundle.AddPair(inputVector, outputVector);
            }


            //Cluster chain builder
            TNRNetClusterChainBuilder builder = new TNRNetClusterChainBuilder(Name,
                                                                              decisionCfg.ClusterChainCfg,
                                                                              rand,
                                                                              controller
                                                                              );

            builder.ChainBuildProgressChanged += OnChainBuildProgressChanged;
            ProbabilisticClusterChain          = builder.Build(trainingDataBundle, filters);
            return;
        }
Example 2
 //Constructor
 /// <summary>
 /// Creates an initialized instance.
 /// </summary>
 /// <param name="chainName">The name of the cluster chain.</param>
 /// <param name="clusterChainCfg">The configuration of the cluster chain.</param>
 /// <param name="rand">The random generator to be used (optional).</param>
 /// <param name="controller">The network build process controller (optional).</param>
 public TNRNetClusterChainBuilder(string chainName,
                                  ITNRNetClusterChainSettings clusterChainCfg,
                                  Random rand = null,
                                  TNRNetBuilder.BuildControllerDelegate controller = null
                                  )
 {
     _chainName       = chainName;
     _clusterChainCfg = clusterChainCfg;
     _rand            = rand ?? new Random(0);
     _controller      = controller;
     ResetProgressTracking();
     return;
 }
Example 3
 //Constructor
 /// <summary>
 /// Creates an initialized instance.
 /// </summary>
 /// <param name="clusterName">The name of the cluster to be built.</param>
 /// <param name="crossvalidationCfg">The crossvalidation configuration.</param>
 /// <param name="clusterCfg">The configuration of the cluster to be built.</param>
 /// <param name="rand">The random generator to be used (optional).</param>
 /// <param name="controller">The network build process controller (optional).</param>
 public TNRNetClusterBuilder(string clusterName,
                             CrossvalidationSettings crossvalidationCfg,
                             ITNRNetClusterSettings clusterCfg,
                             Random rand = null,
                             TNRNetBuilder.BuildControllerDelegate controller = null
                             )
 {
     _clusterName        = clusterName;
     _crossvalidationCfg = crossvalidationCfg;
     _clusterCfg         = clusterCfg;
     _rand       = rand ?? new Random(0);
     _controller = controller;
     ResetProgressTracking();
     return;
 }
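
The two builder constructors above share the same pattern: the random generator and the build controller are optional, and a deterministic Random(0) is substituted when no generator is supplied. A minimal construction sketch follows; the "MyCluster" name and the crossvalidationCfg and clusterCfg configuration instances are hypothetical and not part of the examples above.

 //Hypothetical usage sketch, not taken from the library sources above.
 //Omitting the optional rand argument makes the builder fall back to new Random(0),
 //so repeated builds behave deterministically.
 TNRNetClusterBuilder clusterBuilder = new TNRNetClusterBuilder("MyCluster",
                                                                crossvalidationCfg,
                                                                clusterCfg
                                                                );
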
Example 4
        /// <summary>
        /// Builds the inner cluster chain.
        /// </summary>
        /// <param name="dataBundle">The data to be used for training.</param>
        /// <param name="filter">The feature filter to be used to denormalize output.</param>
        /// <param name="rand">The random object to be used (optional).</param>
        /// <param name="controller">The build process controller (optional).</param>
        public void Build(VectorBundle dataBundle,
                          FeatureFilterBase filter,
                          Random rand = null,
                          TNRNetBuilder.BuildControllerDelegate controller = null
                          )
        {
            rand = rand ?? new Random(0);
            TNRNetClusterChainBuilder builder = new TNRNetClusterChainBuilder(Name,
                                                                              _clusterChainCfg,
                                                                              rand,
                                                                              controller
                                                                              );

            builder.ChainBuildProgressChanged += OnChainBuildProgressChanged;
            _clusterChain = builder.Build(dataBundle, new FeatureFilterBase[] { filter });
            return;
        }
Example 5
        /// <summary>
        /// Performs the training of the state machine.
        /// </summary>
        /// <param name="trainingData">The training data bundle.</param>
        /// <param name="controller">The build process controller (optional).</param>
        /// <returns>The training results.</returns>
        public TrainingResults Train(VectorBundle trainingData, TNRNetBuilder.BuildControllerDelegate controller = null)
        {
            //StateMachine reset
            Reset();
            VectorBundle readoutTrainingData;

            NeuralPreprocessor.PreprocessingOverview preprocessingOverview = null;
            if (NP == null)
            {
                //Neural preprocessor is bypassed
                readoutTrainingData = trainingData;
            }
            else
            {
                //Neural preprocessing
                readoutTrainingData = NP.InitializeAndPreprocessBundle(trainingData, out preprocessingOverview);
            }
            //Training of the readout layer
            ReadoutLayer.RegressionOverview regressionOverview = RL.Build(readoutTrainingData, BuildPredictorsMapper(), controller, Config.RandomizerSeek);
            //Return the training results
            return(new TrainingResults(preprocessingOverview, regressionOverview));
        }
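
A minimal usage sketch of Train, assuming an already configured StateMachine instance (stateMachine) and the training vectors prepared as lists of double arrays (inputVectors, idealVectors); only the VectorBundle constructor, AddPair and Train calls visible in the examples above are used.

        //Hypothetical usage sketch: stateMachine, inputVectors and idealVectors are assumed
        //to exist and are not part of the library code shown above.
        VectorBundle trainingData = new VectorBundle(inputVectors.Count);
        for (int i = 0; i < inputVectors.Count; i++)
        {
            trainingData.AddPair(inputVectors[i], idealVectors[i]);
        }
        //Runs the optional neural preprocessing and then builds the readout layer.
        TrainingResults results = stateMachine.Train(trainingData);
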
Example 6
        /// <summary>
        /// Builds the trained readout layer.
        /// </summary>
        /// <param name="dataBundle">The data to be used for training.</param>
        /// <param name="predictorsMapper">The mapper of specific predictors to readout units (optional).</param>
        /// <param name="controller">The build process controller (optional).</param>
        /// <param name="randomizerSeek">Specifies the random number generator initial seek (optional). A value greater than or equal to 0 will always ensure the same initialization.</param>
        /// <returns>The results of training.</returns>
        public RegressionOverview Build(VectorBundle dataBundle,
                                        PredictorsMapper predictorsMapper = null,
                                        TNRNetBuilder.BuildControllerDelegate controller = null,
                                        int randomizerSeek = 0
                                        )
        {
            if (Trained)
            {
                throw new InvalidOperationException("Readout layer is already built.");
            }
            //Basic checks
            int numOfPredictors = dataBundle.InputVectorCollection[0].Length;
            int numOfOutputs    = dataBundle.OutputVectorCollection[0].Length;

            if (numOfPredictors == 0)
            {
                throw new InvalidOperationException($"Number of predictors must be greater than 0.");
            }
            if (numOfOutputs != ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count)
            {
                throw new InvalidOperationException($"Incorrect length of output vectors.");
            }
            //Predictors mapper (specified or default)
            _predictorsMapper = predictorsMapper ?? new PredictorsMapper(numOfPredictors);
            //Allocation and preparation of feature filters
            //Predictors
            _predictorFeatureFilterCollection = new FeatureFilterBase[numOfPredictors];
            Parallel.For(0, _predictorFeatureFilterCollection.Length, nrmIdx =>
            {
                _predictorFeatureFilterCollection[nrmIdx] = new RealFeatureFilter(InternalDataRange, true, true);
                for (int pairIdx = 0; pairIdx < dataBundle.InputVectorCollection.Count; pairIdx++)
                {
                    //Adjust filter
                    _predictorFeatureFilterCollection[nrmIdx].Update(dataBundle.InputVectorCollection[pairIdx][nrmIdx]);
                }
            });
            //Output values
            _outputFeatureFilterCollection = new FeatureFilterBase[numOfOutputs];
            Parallel.For(0, _outputFeatureFilterCollection.Length, nrmIdx =>
            {
                _outputFeatureFilterCollection[nrmIdx] = FeatureFilterFactory.Create(InternalDataRange, ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection[nrmIdx].TaskCfg.FeatureFilterCfg);
                for (int pairIdx = 0; pairIdx < dataBundle.OutputVectorCollection.Count; pairIdx++)
                {
                    //Adjust output normalizer
                    _outputFeatureFilterCollection[nrmIdx].Update(dataBundle.OutputVectorCollection[pairIdx][nrmIdx]);
                }
            });
            //Data normalization
            //Allocation
            double[][] normalizedPredictorsCollection   = new double[dataBundle.InputVectorCollection.Count][];
            double[][] normalizedIdealOutputsCollection = new double[dataBundle.OutputVectorCollection.Count][];
            //Normalization
            Parallel.For(0, dataBundle.InputVectorCollection.Count, pairIdx =>
            {
                //Predictors
                double[] predictors = new double[numOfPredictors];
                for (int i = 0; i < numOfPredictors; i++)
                {
                    if (_predictorsMapper.PredictorGeneralSwitchCollection[i])
                    {
                        predictors[i] = _predictorFeatureFilterCollection[i].ApplyFilter(dataBundle.InputVectorCollection[pairIdx][i]);
                    }
                    else
                    {
                        predictors[i] = double.NaN;
                    }
                }
                normalizedPredictorsCollection[pairIdx] = predictors;
                //Outputs
                double[] outputs = new double[numOfOutputs];
                for (int i = 0; i < numOfOutputs; i++)
                {
                    outputs[i] = _outputFeatureFilterCollection[i].ApplyFilter(dataBundle.OutputVectorCollection[pairIdx][i]);
                }
                normalizedIdealOutputsCollection[pairIdx] = outputs;
            });

            //Random object initialization
            Random rand = (randomizerSeek < 0 ? new Random() : new Random(randomizerSeek));
            //Create shuffled copy of the data
            VectorBundle shuffledData = new VectorBundle(normalizedPredictorsCollection, normalizedIdealOutputsCollection);

            shuffledData.Shuffle(rand);

            //"One Takes All" groups input data space initialization
            List <CompositeResult[]> allReadoutUnitResults = new List <CompositeResult[]>(shuffledData.InputVectorCollection.Count);

            if (_oneTakesAllGroupCollection != null)
            {
                for (int i = 0; i < shuffledData.InputVectorCollection.Count; i++)
                {
                    allReadoutUnitResults.Add(new CompositeResult[ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count]);
                }
            }

            ResetProgressTracking();
            //Building of readout units
            for (_buildReadoutUnitIdx = 0; _buildReadoutUnitIdx < ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count; _buildReadoutUnitIdx++)
            {
                List <double[]> idealValueCollection = new List <double[]>(shuffledData.OutputVectorCollection.Count);
                //Transformation of ideal vectors to a single value vectors
                foreach (double[] idealVector in shuffledData.OutputVectorCollection)
                {
                    double[] value = new double[1];
                    value[0] = idealVector[_buildReadoutUnitIdx];
                    idealValueCollection.Add(value);
                }
                List <double[]> readoutUnitInputVectorCollection = _predictorsMapper.CreateVectorCollection(ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection[_buildReadoutUnitIdx].Name, shuffledData.InputVectorCollection);
                VectorBundle    readoutUnitDataBundle            = new VectorBundle(readoutUnitInputVectorCollection, idealValueCollection);
                _readoutUnitCollection[_buildReadoutUnitIdx].ReadoutUnitBuildProgressChanged += OnReadoutUnitBuildProgressChanged;
                _readoutUnitCollection[_buildReadoutUnitIdx].Build(readoutUnitDataBundle,
                                                                   _outputFeatureFilterCollection[_buildReadoutUnitIdx],
                                                                   rand,
                                                                   controller
                                                                   );
                //Add unit's all computed results into the input data for "One Takes All" groups
                if (_oneTakesAllGroupCollection != null)
                {
                    for (int sampleIdx = 0; sampleIdx < readoutUnitDataBundle.InputVectorCollection.Count; sampleIdx++)
                    {
                        allReadoutUnitResults[sampleIdx][_buildReadoutUnitIdx] = _readoutUnitCollection[_buildReadoutUnitIdx].Compute(readoutUnitDataBundle.InputVectorCollection[sampleIdx]);
                    }
                }
            }//Readout unit loop

            //One Takes All groups build
            if (_oneTakesAllGroupCollection != null)
            {
                foreach (OneTakesAllGroup group in _oneTakesAllGroupCollection)
                {
                    //Only a group having an inner probabilistic cluster chain has to be built
                    if (group.DecisionMethod == OneTakesAllGroup.OneTakesAllDecisionMethod.ClusterChain)
                    {
                        BinFeatureFilter[] groupFilters = new BinFeatureFilter[group.NumOfMemberClasses];
                        for (int i = 0; i < group.NumOfMemberClasses; i++)
                        {
                            groupFilters[i] = (BinFeatureFilter)_outputFeatureFilterCollection[group.MemberReadoutUnitIndexCollection[i]];
                        }
                        ++_buildOTAGroupIdx;
                        group.OTAGBuildProgressChanged += OnOTAGBuildProgressChanged;
                        group.Build(allReadoutUnitResults, shuffledData.OutputVectorCollection, groupFilters, rand, controller);
                    }
                }
            }

            //Readout layer is trained and ready
            Trained = true;
            return(new RegressionOverview(ReadoutUnitErrStatCollection));
        }
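
The normalization block inside Build follows a common pattern: one feature filter per data column, each filter first adjusted over all samples of its column and then applied to every value of that column, with Parallel.For distributing the per-column and per-row work. The following self-contained sketch illustrates that pattern with a simple, hypothetical min-max filter; it is not the library's FeatureFilterBase/RealFeatureFilter implementation.

using System.Threading.Tasks;

//MinMaxFilter is a hypothetical stand-in for the library's feature filter classes.
public class MinMaxFilter
{
    private double _min = double.MaxValue;
    private double _max = double.MinValue;

    //Adjusts the filter's range by a single sample value.
    public void Update(double value)
    {
        if (value < _min) _min = value;
        if (value > _max) _max = value;
    }

    //Rescales a value into the <0, 1> range established by previous Update calls.
    public double ApplyFilter(double value)
    {
        return _max == _min ? 0d : (value - _min) / (_max - _min);
    }
}

public static class NormalizationDemo
{
    //Normalizes every column of the data matrix using one filter per column.
    public static double[][] Normalize(double[][] data)
    {
        int numOfColumns = data[0].Length;
        MinMaxFilter[] filters = new MinMaxFilter[numOfColumns];
        //Adjust one filter per column over all rows (columns processed in parallel).
        Parallel.For(0, numOfColumns, colIdx =>
        {
            filters[colIdx] = new MinMaxFilter();
            for (int rowIdx = 0; rowIdx < data.Length; rowIdx++)
            {
                filters[colIdx].Update(data[rowIdx][colIdx]);
            }
        });
        //Apply the filters row by row (rows processed in parallel).
        double[][] normalized = new double[data.Length][];
        Parallel.For(0, data.Length, rowIdx =>
        {
            double[] row = new double[numOfColumns];
            for (int colIdx = 0; colIdx < numOfColumns; colIdx++)
            {
                row[colIdx] = filters[colIdx].ApplyFilter(data[rowIdx][colIdx]);
            }
            normalized[rowIdx] = row;
        });
        return normalized;
    }
}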