//Constructor
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">The name of the input field.</param>
/// <param name="idx">The zero-based index of the input field among other input fields.</param>
/// <param name="coordinates">The coordinates of the input neurons in 3D space.</param>
/// <param name="dataWorkingRange">The output range of the input data.</param>
/// <param name="featureFilterCfg">The configuration of the feature filter.</param>
/// <param name="spikesEncodingCfg">The configuration of the spikes coder.</param>
/// <param name="routeToReadout">Specifies whether to route the input field values to the readout layer.</param>
/// <param name="inputNeuronsStartIdx">The zero-based index of the first input neuron of this field among all input neurons.</param>
public InputField(string name,
                  int idx,
                  int[] coordinates,
                  Interval dataWorkingRange,
                  IFeatureFilterSettings featureFilterCfg,
                  InputSpikesCoderSettings spikesEncodingCfg,
                  bool routeToReadout,
                  int inputNeuronsStartIdx
                  )
{
    Name = name;
    Idx = idx;
    RouteToReadout = routeToReadout;
    _featureFilter = FeatureFilterFactory.Create(dataWorkingRange, featureFilterCfg);
    _iAnalogStimuli = 0;
    _currentDataIdx = 0;
    //Spikes encoder
    _spikesEncoder = new InputSpikesCoder(spikesEncodingCfg);
    //Analog neuron
    int verticalCycles = _spikesEncoder.Regime == InputEncoder.InputSpikesCoding.Vertical ? _spikesEncoder.LargestComponentLength : 1;
    AnalogNeuron = new AnalogInputNeuron(new NeuronLocation(InputEncoder.ReservoirID, inputNeuronsStartIdx, InputEncoder.PoolID, inputNeuronsStartIdx, idx, coordinates[0], coordinates[1], coordinates[2]),
                                         verticalCycles);
    ++inputNeuronsStartIdx;
    //Spiking neurons
    int spikingPopulationSize;
    if (_spikesEncoder.Regime == InputEncoder.InputSpikesCoding.Horizontal)
    {
        //Population encoding
        spikingPopulationSize = _spikesEncoder.AllSpikesFlatCollection.Length;
    }
    else if (_spikesEncoder.Regime == InputEncoder.InputSpikesCoding.Vertical)
    {
        //Spike-train encoding
        spikingPopulationSize = _spikesEncoder.ComponentSpikesCollection.Length;
    }
    else
    {
        //Forbidden encoding
        spikingPopulationSize = 0;
    }
    SpikingNeuronCollection = new SpikingInputNeuron[spikingPopulationSize];
    for (int i = 0; i < SpikingNeuronCollection.Length; i++)
    {
        SpikingNeuronCollection[i] = new SpikingInputNeuron(new NeuronLocation(InputEncoder.ReservoirID, inputNeuronsStartIdx, InputEncoder.PoolID, inputNeuronsStartIdx, idx, coordinates[0], coordinates[1], coordinates[2]));
        ++inputNeuronsStartIdx;
    }
    return;
}
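//A minimal usage sketch of the constructor above, following only the signature shown here.
//The field name, coordinates, working range and start index are hypothetical example values;
//both settings instances are assumed to be prepared by the caller, and an Interval(min, max)
//constructor is assumed.
private static InputField CreateExampleInputField(IFeatureFilterSettings filterCfg,
                                                  InputSpikesCoderSettings coderCfg)
{
    //Field "Temperature" at index 0, entry point [0, 0, 0], data normalized to <-1; 1>,
    //routed to the readout layer, occupying input neurons from index 0 upward
    return new InputField("Temperature", 0, new int[] { 0, 0, 0 },
                          new Interval(-1d, 1d), filterCfg, coderCfg, true, 0);
}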
/// <summary>
/// Initializes the collection of the preprocessor's feature filters.
/// </summary>
/// <param name="inputVectorCollection">The collection of input vectors.</param>
private void InitializeFeatureFilters(List<double[]> inputVectorCollection)
{
    //Instantiate filters
    _featureFilterCollection = new BaseFeatureFilter[_settings.InputConfig.ExternalFieldCollection.Count];
    Parallel.For(0, _featureFilterCollection.Length, i =>
    {
        _featureFilterCollection[i] = FeatureFilterFactory.Create(_dataRange, _settings.InputConfig.ExternalFieldCollection[i].FeatureFilterCfg);
        //Adjust filter using the i-th element of every input vector
        foreach (double[] vector in inputVectorCollection)
        {
            _featureFilterCollection[i].Update(vector[i]);
        }
    });
    return;
}
/// <summary>
/// Initializes the collection of the preprocessor's feature filters.
/// </summary>
/// <param name="inputPatternCollection">The collection of input patterns.</param>
private void InitializeFeatureFilters(List<List<double[]>> inputPatternCollection)
{
    //Instantiate and adjust feature filters
    _featureFilterCollection = new BaseFeatureFilter[_settings.InputConfig.ExternalFieldCollection.Count];
    Parallel.For(0, _settings.InputConfig.ExternalFieldCollection.Count, i =>
    {
        _featureFilterCollection[i] = FeatureFilterFactory.Create(_dataRange, _settings.InputConfig.ExternalFieldCollection[i].FeatureFilterCfg);
        foreach (List<double[]> pattern in inputPatternCollection)
        {
            foreach (double[] vector in pattern)
            {
                _featureFilterCollection[i].Update(vector[i]);
            }
        }
    });
    return;
}
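//A minimal sketch of the adjust-then-apply contract both overloads above rely on. The sample
//values are hypothetical; only the Update and ApplyFilter methods, which the surrounding code
//already uses, are assumed.
private static double NormalizeExample(BaseFeatureFilter filter, double[] samples, double value)
{
    //First pass: adjust the filter's internal statistics sample by sample
    foreach (double sample in samples)
    {
        filter.Update(sample);
    }
    //Second pass: normalize a value into the filter's output range
    return filter.ApplyFilter(value);
}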
//Constructor
/// <summary>
/// Creates an initialized instance.
/// </summary>
/// <param name="name">The name of the input field.</param>
/// <param name="idx">The index of the input field.</param>
/// <param name="coordinates">The input coordinates (entry point).</param>
/// <param name="dataWorkingRange">The input data range.</param>
/// <param name="featureFilterCfg">The feature filter configuration.</param>
/// <param name="spikeCodeCfg">The configuration of the input spike code.</param>
/// <param name="routeToReadout">Specifies whether to route the values as additional predictors to the readout.</param>
/// <param name="inputNeuronsStartIdx">The index of the first input neuron of this unit among all input neurons.</param>
public InputField(string name,
                  int idx,
                  int[] coordinates,
                  Interval dataWorkingRange,
                  IFeatureFilterSettings featureFilterCfg,
                  SpikeCodeSettings spikeCodeCfg,
                  bool routeToReadout,
                  int inputNeuronsStartIdx
                  )
{
    Name = name;
    Idx = idx;
    RouteToReadout = routeToReadout;
    _featureFilter = FeatureFilterFactory.Create(dataWorkingRange, featureFilterCfg);
    //Analog neuron
    AnalogNeuron = new AnalogInputNeuron(new NeuronLocation(InputEncoder.ReservoirID, inputNeuronsStartIdx, InputEncoder.PoolID, inputNeuronsStartIdx, idx, coordinates[0], coordinates[1], coordinates[2]));
    ++inputNeuronsStartIdx;
    //Spiking neurons
    _realSpikeCode = null;
    int populationSize = -1;
    switch (_featureFilter.Type)
    {
        case FeatureFilterBase.FeatureType.Real:
            _realSpikeCode = new SpikeCode(spikeCodeCfg);
            populationSize = _realSpikeCode.Code.Length;
            break;
        case FeatureFilterBase.FeatureType.Binary:
            populationSize = 1;
            break;
        case FeatureFilterBase.FeatureType.Enum:
            populationSize = ((EnumFeatureFilterSettings)featureFilterCfg).NumOfElements;
            break;
    }
    SpikingNeuronCollection = new SpikingInputNeuron[populationSize];
    for (int i = 0; i < SpikingNeuronCollection.Length; i++)
    {
        SpikingNeuronCollection[i] = new SpikingInputNeuron(new NeuronLocation(InputEncoder.ReservoirID, inputNeuronsStartIdx, InputEncoder.PoolID, inputNeuronsStartIdx, idx, coordinates[0], coordinates[1], coordinates[2]));
        ++inputNeuronsStartIdx;
    }
    return;
}
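//A minimal usage sketch of this constructor variant (hypothetical values; an Interval(min, max)
//constructor is assumed). With an enumerated feature the spiking population size equals the
//configured number of elements, so a 5-element enum field occupies 1 analog + 5 spiking input
//neurons, i.e. indices 0..5 here.
private static InputField CreateExampleEnumField(EnumFeatureFilterSettings enumFilterCfg,
                                                 SpikeCodeSettings spikeCodeCfg)
{
    return new InputField("DayOfWeek", 0, new int[] { 0, 0, 0 },
                          new Interval(-1d, 1d), enumFilterCfg, spikeCodeCfg, false, 0);
}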
/// <summary>
/// Builds the trained readout layer.
/// </summary>
/// <param name="dataBundle">The collection of input predictors and associated desired output values.</param>
/// <param name="predictorsMapper">Optional specific mapping of predictors to readout units.</param>
/// <param name="controller">Optional external regression controller.</param>
/// <returns>The results of the regression.</returns>
public RegressionOverview Build(VectorBundle dataBundle,
                                PredictorsMapper predictorsMapper = null,
                                TrainedNetworkBuilder.RegressionControllerDelegate controller = null
                                )
{
    //Basic checks
    int numOfPredictors = dataBundle.InputVectorCollection[0].Length;
    int numOfOutputs = dataBundle.OutputVectorCollection[0].Length;
    if (numOfPredictors == 0)
    {
        throw new InvalidOperationException("Number of predictors must be greater than 0.");
    }
    if (numOfOutputs != Settings.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count)
    {
        throw new InvalidOperationException("Incorrect length of output vectors.");
    }
    //Predictors mapper (specified or default)
    _predictorsMapper = predictorsMapper ?? new PredictorsMapper(numOfPredictors);
    //Allocation and preparation of feature filters
    //Predictors
    _predictorFeatureFilterCollection = new FeatureFilterBase[numOfPredictors];
    Parallel.For(0, _predictorFeatureFilterCollection.Length, nrmIdx =>
    {
        _predictorFeatureFilterCollection[nrmIdx] = new RealFeatureFilter(DataRange, true, true);
        for (int pairIdx = 0; pairIdx < dataBundle.InputVectorCollection.Count; pairIdx++)
        {
            //Adjust filter
            _predictorFeatureFilterCollection[nrmIdx].Update(dataBundle.InputVectorCollection[pairIdx][nrmIdx]);
        }
    });
    //Output values
    _outputFeatureFilterCollection = new FeatureFilterBase[numOfOutputs];
    Parallel.For(0, _outputFeatureFilterCollection.Length, nrmIdx =>
    {
        _outputFeatureFilterCollection[nrmIdx] = FeatureFilterFactory.Create(DataRange, Settings.ReadoutUnitsCfg.ReadoutUnitCfgCollection[nrmIdx].TaskCfg.FeatureFilterCfg);
        for (int pairIdx = 0; pairIdx < dataBundle.OutputVectorCollection.Count; pairIdx++)
        {
            //Adjust output normalizer
            _outputFeatureFilterCollection[nrmIdx].Update(dataBundle.OutputVectorCollection[pairIdx][nrmIdx]);
        }
    });
    //Data normalization
    //Allocation
    double[][] normalizedPredictorsCollection = new double[dataBundle.InputVectorCollection.Count][];
    double[][] normalizedIdealOutputsCollection = new double[dataBundle.OutputVectorCollection.Count][];
    //Normalization
    Parallel.For(0, dataBundle.InputVectorCollection.Count, pairIdx =>
    {
        //Predictors
        double[] predictors = new double[numOfPredictors];
        for (int i = 0; i < numOfPredictors; i++)
        {
            if (_predictorsMapper.PredictorGeneralSwitchCollection[i])
            {
                predictors[i] = _predictorFeatureFilterCollection[i].ApplyFilter(dataBundle.InputVectorCollection[pairIdx][i]);
            }
            else
            {
                predictors[i] = double.NaN;
            }
        }
        normalizedPredictorsCollection[pairIdx] = predictors;
        //Outputs
        double[] outputs = new double[numOfOutputs];
        for (int i = 0; i < numOfOutputs; i++)
        {
            outputs[i] = _outputFeatureFilterCollection[i].ApplyFilter(dataBundle.OutputVectorCollection[pairIdx][i]);
        }
        normalizedIdealOutputsCollection[pairIdx] = outputs;
    });
    //Random object initialization
    Random rand = new Random(0);
    //Create shuffled copy of the data
    VectorBundle shuffledData = new VectorBundle(normalizedPredictorsCollection, normalizedIdealOutputsCollection);
    shuffledData.Shuffle(rand);
    //Building of readout units
    for (int unitIdx = 0; unitIdx < Settings.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count; unitIdx++)
    {
        List<double[]> idealValueCollection = new List<double[]>(shuffledData.OutputVectorCollection.Count);
        //Transformation of ideal vectors to single-value vectors
        foreach (double[] idealVector in shuffledData.OutputVectorCollection)
        {
            double[] value = new double[1];
            value[0] = idealVector[unitIdx];
            idealValueCollection.Add(value);
        }
        List<double[]> readoutUnitInputVectorCollection = _predictorsMapper.CreateVectorCollection(Settings.ReadoutUnitsCfg.ReadoutUnitCfgCollection[unitIdx].Name, shuffledData.InputVectorCollection);
        VectorBundle readoutUnitDataBundle = new VectorBundle(readoutUnitInputVectorCollection, idealValueCollection);
        TrainedNetworkClusterBuilder readoutUnitBuilder = new TrainedNetworkClusterBuilder(Settings.ReadoutUnitsCfg.ReadoutUnitCfgCollection[unitIdx].Name,
                                                                                           Settings.GetReadoutUnitNetworksCollection(unitIdx),
                                                                                           DataRange,
                                                                                           Settings.ReadoutUnitsCfg.ReadoutUnitCfgCollection[unitIdx].TaskCfg.Type == ReadoutUnit.TaskType.Classification ? BinBorder : double.NaN,
                                                                                           rand,
                                                                                           controller
                                                                                           );
        //Register notification
        readoutUnitBuilder.RegressionEpochDone += OnRegressionEpochDone;
        //Build the trained readout unit. The trained unit becomes the predicting cluster member.
        _readoutUnitCollection[unitIdx] = new ReadoutUnit(unitIdx,
                                                          readoutUnitBuilder.Build(readoutUnitDataBundle,
                                                                                   Settings.TestDataRatio,
                                                                                   Settings.Folds,
                                                                                   Settings.Repetitions,
                                                                                   new FeatureFilterBase[] { _outputFeatureFilterCollection[unitIdx] }
                                                                                   )
                                                          );
    }//unitIdx
    //Readout layer is trained and ready
    Trained = true;
    return new RegressionOverview(ReadoutUnitErrStatCollection);
}
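//A minimal usage sketch of Build above (hypothetical caller code; assumes the enclosing class
//is named ReadoutLayer and that the raw data lists are prepared elsewhere with vector lengths
//matching the readout units configuration):
private static RegressionOverview ExampleBuildWithDefaults(ReadoutLayer layer,
                                                           List<double[]> inputs,
                                                           List<double[]> outputs)
{
    //Pair predictors with desired outputs; the default mapper and no external controller are used
    VectorBundle data = new VectorBundle(inputs, outputs);
    return layer.Build(data);
}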
/// <summary>
/// Builds the trained readout layer.
/// </summary>
/// <param name="dataBundle">The data to be used for training.</param>
/// <param name="predictorsMapper">The mapper of specific predictors to readout units (optional).</param>
/// <param name="controller">The build process controller (optional).</param>
/// <param name="randomizerSeek">Specifies the random number generator initial seek (optional). A value greater than or equal to 0 always ensures the same initialization.</param>
/// <returns>The results of training.</returns>
public RegressionOverview Build(VectorBundle dataBundle,
                                PredictorsMapper predictorsMapper = null,
                                TNRNetBuilder.BuildControllerDelegate controller = null,
                                int randomizerSeek = 0
                                )
{
    if (Trained)
    {
        throw new InvalidOperationException("Readout layer is already built.");
    }
    //Basic checks
    int numOfPredictors = dataBundle.InputVectorCollection[0].Length;
    int numOfOutputs = dataBundle.OutputVectorCollection[0].Length;
    if (numOfPredictors == 0)
    {
        throw new InvalidOperationException("Number of predictors must be greater than 0.");
    }
    if (numOfOutputs != ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count)
    {
        throw new InvalidOperationException("Incorrect length of output vectors.");
    }
    //Predictors mapper (specified or default)
    _predictorsMapper = predictorsMapper ?? new PredictorsMapper(numOfPredictors);
    //Allocation and preparation of feature filters
    //Predictors
    _predictorFeatureFilterCollection = new FeatureFilterBase[numOfPredictors];
    Parallel.For(0, _predictorFeatureFilterCollection.Length, nrmIdx =>
    {
        _predictorFeatureFilterCollection[nrmIdx] = new RealFeatureFilter(InternalDataRange, true, true);
        for (int pairIdx = 0; pairIdx < dataBundle.InputVectorCollection.Count; pairIdx++)
        {
            //Adjust filter
            _predictorFeatureFilterCollection[nrmIdx].Update(dataBundle.InputVectorCollection[pairIdx][nrmIdx]);
        }
    });
    //Output values
    _outputFeatureFilterCollection = new FeatureFilterBase[numOfOutputs];
    Parallel.For(0, _outputFeatureFilterCollection.Length, nrmIdx =>
    {
        _outputFeatureFilterCollection[nrmIdx] = FeatureFilterFactory.Create(InternalDataRange, ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection[nrmIdx].TaskCfg.FeatureFilterCfg);
        for (int pairIdx = 0; pairIdx < dataBundle.OutputVectorCollection.Count; pairIdx++)
        {
            //Adjust output normalizer
            _outputFeatureFilterCollection[nrmIdx].Update(dataBundle.OutputVectorCollection[pairIdx][nrmIdx]);
        }
    });
    //Data normalization
    //Allocation
    double[][] normalizedPredictorsCollection = new double[dataBundle.InputVectorCollection.Count][];
    double[][] normalizedIdealOutputsCollection = new double[dataBundle.OutputVectorCollection.Count][];
    //Normalization
    Parallel.For(0, dataBundle.InputVectorCollection.Count, pairIdx =>
    {
        //Predictors
        double[] predictors = new double[numOfPredictors];
        for (int i = 0; i < numOfPredictors; i++)
        {
            if (_predictorsMapper.PredictorGeneralSwitchCollection[i])
            {
                predictors[i] = _predictorFeatureFilterCollection[i].ApplyFilter(dataBundle.InputVectorCollection[pairIdx][i]);
            }
            else
            {
                predictors[i] = double.NaN;
            }
        }
        normalizedPredictorsCollection[pairIdx] = predictors;
        //Outputs
        double[] outputs = new double[numOfOutputs];
        for (int i = 0; i < numOfOutputs; i++)
        {
            outputs[i] = _outputFeatureFilterCollection[i].ApplyFilter(dataBundle.OutputVectorCollection[pairIdx][i]);
        }
        normalizedIdealOutputsCollection[pairIdx] = outputs;
    });
    //Random object initialization
    Random rand = (randomizerSeek < 0 ? new Random() : new Random(randomizerSeek));
    //Create shuffled copy of the data
    VectorBundle shuffledData = new VectorBundle(normalizedPredictorsCollection, normalizedIdealOutputsCollection);
    shuffledData.Shuffle(rand);
    //"One Takes All" groups input data space initialization
    List<CompositeResult[]> allReadoutUnitResults = new List<CompositeResult[]>(shuffledData.InputVectorCollection.Count);
    if (_oneTakesAllGroupCollection != null)
    {
        for (int i = 0; i < shuffledData.InputVectorCollection.Count; i++)
        {
            allReadoutUnitResults.Add(new CompositeResult[ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count]);
        }
    }
    ResetProgressTracking();
    //Building of readout units
    for (_buildReadoutUnitIdx = 0; _buildReadoutUnitIdx < ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection.Count; _buildReadoutUnitIdx++)
    {
        List<double[]> idealValueCollection = new List<double[]>(shuffledData.OutputVectorCollection.Count);
        //Transformation of ideal vectors to single-value vectors
        foreach (double[] idealVector in shuffledData.OutputVectorCollection)
        {
            double[] value = new double[1];
            value[0] = idealVector[_buildReadoutUnitIdx];
            idealValueCollection.Add(value);
        }
        List<double[]> readoutUnitInputVectorCollection = _predictorsMapper.CreateVectorCollection(ReadoutLayerCfg.ReadoutUnitsCfg.ReadoutUnitCfgCollection[_buildReadoutUnitIdx].Name, shuffledData.InputVectorCollection);
        VectorBundle readoutUnitDataBundle = new VectorBundle(readoutUnitInputVectorCollection, idealValueCollection);
        _readoutUnitCollection[_buildReadoutUnitIdx].ReadoutUnitBuildProgressChanged += OnReadoutUnitBuildProgressChanged;
        _readoutUnitCollection[_buildReadoutUnitIdx].Build(readoutUnitDataBundle,
                                                           _outputFeatureFilterCollection[_buildReadoutUnitIdx],
                                                           rand,
                                                           controller
                                                           );
        //Add all of the unit's computed results into the input data for "One Takes All" groups
        if (_oneTakesAllGroupCollection != null)
        {
            for (int sampleIdx = 0; sampleIdx < readoutUnitDataBundle.InputVectorCollection.Count; sampleIdx++)
            {
                allReadoutUnitResults[sampleIdx][_buildReadoutUnitIdx] = _readoutUnitCollection[_buildReadoutUnitIdx].Compute(readoutUnitDataBundle.InputVectorCollection[sampleIdx]);
            }
        }
    }//_buildReadoutUnitIdx
    //"One Takes All" groups build
    if (_oneTakesAllGroupCollection != null)
    {
        foreach (OneTakesAllGroup group in _oneTakesAllGroupCollection)
        {
            //Only a group having an inner probabilistic cluster has to be built
            if (group.DecisionMethod == OneTakesAllGroup.OneTakesAllDecisionMethod.ClusterChain)
            {
                BinFeatureFilter[] groupFilters = new BinFeatureFilter[group.NumOfMemberClasses];
                for (int i = 0; i < group.NumOfMemberClasses; i++)
                {
                    groupFilters[i] = (BinFeatureFilter)_outputFeatureFilterCollection[group.MemberReadoutUnitIndexCollection[i]];
                }
                ++_buildOTAGroupIdx;
                group.OTAGBuildProgressChanged += OnOTAGBuildProgressChanged;
                group.Build(allReadoutUnitResults, shuffledData.OutputVectorCollection, groupFilters, rand, controller);
            }
        }
    }
    //Readout layer is trained and ready
    Trained = true;
    return new RegressionOverview(ReadoutUnitErrStatCollection);
}
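//A minimal usage sketch of this Build variant (hypothetical caller code; assumes the enclosing
//class is named ReadoutLayer). A non-negative randomizerSeek makes the shuffle and the whole
//build deterministic; a negative value randomizes it.
private static RegressionOverview ExampleDeterministicBuild(ReadoutLayer layer, VectorBundle data)
{
    //randomizerSeek = 0 (the default) always yields the same initialization and results
    return layer.Build(data, null, null, 0);
}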
/// <summary>
/// Builds the readout layer.
/// Prepares the prediction clusters containing trained readout units.
/// </summary>
/// <param name="dataBundle">The collection of input predictors and associated desired output values.</param>
/// <param name="regressionController">The regression controller delegate.</param>
/// <param name="regressionControllerData">A user object passed to the regression controller.</param>
/// <param name="predictorsMapper">Optional specific mapping of predictors to readout units.</param>
/// <returns>The returned ResultBundle is something like a protocol:
/// it records, fold by fold (unit by unit), the predicted and corresponding ideal values.
/// This is the pessimistic approach. Real results on unseen data could be better due to the clustering.
/// </returns>
public ResultBundle Build(VectorBundle dataBundle,
                          ReadoutUnit.RegressionCallbackDelegate regressionController,
                          Object regressionControllerData,
                          PredictorsMapper predictorsMapper = null
                          )
{
    //Basic checks
    int numOfPredictors = dataBundle.InputVectorCollection[0].Length;
    int numOfOutputs = dataBundle.OutputVectorCollection[0].Length;
    if (numOfPredictors == 0)
    {
        throw new Exception("Number of predictors must be greater than 0.");
    }
    if (numOfOutputs != _settings.ReadoutUnitCfgCollection.Count)
    {
        throw new Exception("Incorrect number of ideal output values in the vector.");
    }
    //Predictors mapper (specified or default)
    _predictorsMapper = predictorsMapper ?? new PredictorsMapper(numOfPredictors);
    //Allocation and preparation of feature filters
    //Predictors
    _predictorFeatureFilterCollection = new BaseFeatureFilter[numOfPredictors];
    Parallel.For(0, _predictorFeatureFilterCollection.Length, nrmIdx =>
    {
        _predictorFeatureFilterCollection[nrmIdx] = new RealFeatureFilter(DataRange, true, true);
        for (int pairIdx = 0; pairIdx < dataBundle.InputVectorCollection.Count; pairIdx++)
        {
            //Adjust filter
            _predictorFeatureFilterCollection[nrmIdx].Update(dataBundle.InputVectorCollection[pairIdx][nrmIdx]);
        }
    });
    //Output values
    _outputFeatureFilterCollection = new BaseFeatureFilter[numOfOutputs];
    Parallel.For(0, _outputFeatureFilterCollection.Length, nrmIdx =>
    {
        _outputFeatureFilterCollection[nrmIdx] = FeatureFilterFactory.Create(DataRange, _settings.ReadoutUnitCfgCollection[nrmIdx].FeatureFilterCfg);
        for (int pairIdx = 0; pairIdx < dataBundle.OutputVectorCollection.Count; pairIdx++)
        {
            //Adjust output normalizer
            _outputFeatureFilterCollection[nrmIdx].Update(dataBundle.OutputVectorCollection[pairIdx][nrmIdx]);
        }
    });
    //Data normalization
    //Allocation
    double[][] predictorsCollection = new double[dataBundle.InputVectorCollection.Count][];
    double[][] idealOutputsCollection = new double[dataBundle.OutputVectorCollection.Count][];
    //Normalization
    Parallel.For(0, dataBundle.InputVectorCollection.Count, pairIdx =>
    {
        //Predictors
        double[] predictors = new double[numOfPredictors];
        for (int i = 0; i < numOfPredictors; i++)
        {
            if (_predictorsMapper.PredictorGeneralSwitchCollection[i])
            {
                predictors[i] = _predictorFeatureFilterCollection[i].ApplyFilter(dataBundle.InputVectorCollection[pairIdx][i]);
            }
            else
            {
                predictors[i] = double.NaN;
            }
        }
        predictorsCollection[pairIdx] = predictors;
        //Outputs
        double[] outputs = new double[numOfOutputs];
        for (int i = 0; i < numOfOutputs; i++)
        {
            outputs[i] = _outputFeatureFilterCollection[i].ApplyFilter(dataBundle.OutputVectorCollection[pairIdx][i]);
        }
        idealOutputsCollection[pairIdx] = outputs;
    });
    //Data processing
    //Random object initialization
    Random rand = new Random(0);
    //Allocation of computed and ideal vectors for the result comparative bundle
    List<double[]> validationComputedVectorCollection = new List<double[]>(idealOutputsCollection.Length);
    List<double[]> validationIdealVectorCollection = new List<double[]>(idealOutputsCollection.Length);
    for (int i = 0; i < idealOutputsCollection.Length; i++)
    {
        validationComputedVectorCollection.Add(new double[numOfOutputs]);
        validationIdealVectorCollection.Add(new double[numOfOutputs]);
    }
    //Test dataset size
    if (_settings.TestDataRatio > MaxRatioOfTestData)
    {
        throw new ArgumentException($"Test dataset size is greater than {MaxRatioOfTestData.ToString(CultureInfo.InvariantCulture)}", "TestDataSetSize");
    }
    int testDataSetLength = (int)Math.Round(idealOutputsCollection.Length * _settings.TestDataRatio, 0);
    if (testDataSetLength < MinLengthOfTestDataset)
    {
        throw new ArgumentException($"Num of test samples is less than {MinLengthOfTestDataset.ToString(CultureInfo.InvariantCulture)}", "TestDataSetSize");
    }
    //Number of folds
    int numOfFolds = _settings.NumOfFolds;
    if (numOfFolds <= 0)
    {
        //Auto setup
        numOfFolds = idealOutputsCollection.Length / testDataSetLength;
        if (numOfFolds > MaxNumOfFolds)
        {
            numOfFolds = MaxNumOfFolds;
        }
    }
    //Create shuffled copy of the data
    VectorBundle shuffledData = new VectorBundle(predictorsCollection, idealOutputsCollection);
    shuffledData.Shuffle(rand);
    //Data inspection, preparation of datasets and training of readout units
    //Clusters of readout units (one cluster per each output field)
    for (int clusterIdx = 0; clusterIdx < _settings.ReadoutUnitCfgCollection.Count; clusterIdx++)
    {
        _clusterCollection[clusterIdx] = new ReadoutUnit[numOfFolds];
        List<double[]> idealValueCollection = new List<double[]>(idealOutputsCollection.Length);
        BinDistribution refBinDistr = null;
        if (_settings.ReadoutUnitCfgCollection[clusterIdx].TaskType == ReadoutUnit.TaskType.Classification)
        {
            //Reference binary distribution is relevant only for the classification task
            refBinDistr = new BinDistribution(DataRange.Mid);
        }
        //Transformation to single-value vectors and data analysis
        foreach (double[] idealVector in shuffledData.OutputVectorCollection)
        {
            double[] value = new double[1];
            value[0] = idealVector[clusterIdx];
            idealValueCollection.Add(value);
            if (_settings.ReadoutUnitCfgCollection[clusterIdx].TaskType == ReadoutUnit.TaskType.Classification)
            {
                //Reference binary distribution is relevant only for the classification task
                refBinDistr.Update(value);
            }
        }
        List<VectorBundle> subBundleCollection = null;
        List<double[]> readoutUnitInputVectorCollection = _predictorsMapper.CreateVectorCollection(_settings.ReadoutUnitCfgCollection[clusterIdx].Name, shuffledData.InputVectorCollection);
        //Dataset preparation depends on the task type
        if (_settings.ReadoutUnitCfgCollection[clusterIdx].TaskType == ReadoutUnit.TaskType.Classification)
        {
            //Classification task
            subBundleCollection = DivideSamplesForClassificationTask(readoutUnitInputVectorCollection,
                                                                     idealValueCollection,
                                                                     refBinDistr,
                                                                     testDataSetLength
                                                                     );
        }
        else
        {
            //Forecast task
            subBundleCollection = DivideSamplesForForecastTask(readoutUnitInputVectorCollection,
                                                               idealValueCollection,
                                                               testDataSetLength
                                                               );
        }
        //Find the best unit per each fold in the cluster.
        ClusterErrStatistics ces = new ClusterErrStatistics(_settings.ReadoutUnitCfgCollection[clusterIdx].TaskType, numOfFolds, refBinDistr);
        int arrayPos = 0;
        for (int foldIdx = 0; foldIdx < numOfFolds; foldIdx++)
        {
            //Build training samples
            List<double[]> trainingPredictorsCollection = new List<double[]>();
            List<double[]> trainingIdealValueCollection = new List<double[]>();
            for (int bundleIdx = 0; bundleIdx < subBundleCollection.Count; bundleIdx++)
            {
                if (bundleIdx != foldIdx)
                {
                    trainingPredictorsCollection.AddRange(subBundleCollection[bundleIdx].InputVectorCollection);
                    trainingIdealValueCollection.AddRange(subBundleCollection[bundleIdx].OutputVectorCollection);
                }
            }
            //Call the training regression to get the fold's best readout unit.
            //The best unit becomes the predicting cluster member.
            _clusterCollection[clusterIdx][foldIdx] = ReadoutUnit.CreateTrained(_settings.ReadoutUnitCfgCollection[clusterIdx].TaskType,
                                                                                clusterIdx,
                                                                                foldIdx + 1,
                                                                                numOfFolds,
                                                                                refBinDistr,
                                                                                trainingPredictorsCollection,
                                                                                trainingIdealValueCollection,
                                                                                subBundleCollection[foldIdx].InputVectorCollection,
                                                                                subBundleCollection[foldIdx].OutputVectorCollection,
                                                                                rand,
                                                                                _settings.ReadoutUnitCfgCollection[clusterIdx],
                                                                                regressionController,
                                                                                regressionControllerData
                                                                                );
            //Cluster error statistics & data for the validation bundle (pessimistic approach)
            for (int sampleIdx = 0; sampleIdx < subBundleCollection[foldIdx].OutputVectorCollection.Count; sampleIdx++)
            {
                double nrmComputedValue = _clusterCollection[clusterIdx][foldIdx].Network.Compute(subBundleCollection[foldIdx].InputVectorCollection[sampleIdx])[0];
                double natComputedValue = _outputFeatureFilterCollection[clusterIdx].ApplyReverse(nrmComputedValue);
                double natIdealValue = _outputFeatureFilterCollection[clusterIdx].ApplyReverse(subBundleCollection[foldIdx].OutputVectorCollection[sampleIdx][0]);
                ces.Update(nrmComputedValue, subBundleCollection[foldIdx].OutputVectorCollection[sampleIdx][0], natComputedValue, natIdealValue);
                validationIdealVectorCollection[arrayPos][clusterIdx] = natIdealValue;
                validationComputedVectorCollection[arrayPos][clusterIdx] = natComputedValue;
                ++arrayPos;
            }
        }//foldIdx
        _clusterErrStatisticsCollection.Add(ces);
    }//clusterIdx
    //The validation bundle is returned.
    return new ResultBundle(validationComputedVectorCollection, validationIdealVectorCollection);
}
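//A minimal sketch of consuming the returned validation bundle (hypothetical caller code;
//the ComputedVectorCollection and IdealVectorCollection property names are assumptions
//inferred from the constructor arguments above):
private static void ExampleInspectValidation(ResultBundle validation)
{
    //Computed and ideal vectors are aligned index by index (fold by fold, unit by unit)
    for (int i = 0; i < validation.ComputedVectorCollection.Count; i++)
    {
        double[] computed = validation.ComputedVectorCollection[i];
        double[] ideal = validation.IdealVectorCollection[i];
        //...compare computed vs. ideal values here...
    }
}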