ToFannTrainData() private method

private SWIGTYPE_p_fann_train_data ToFannTrainData()
Returns: SWIGTYPE_p_fann_train_data, the SWIG-generated handle to the native fann_train_data structure.
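
ToFannTrainData() is not called by user code directly: the public training and testing methods shown in the examples below use it to fetch the SWIG handle and pass that handle to the generated fannfloat functions. The sketch below shows the typical calling pattern; the NeuralNet/TrainingData construction details (the NetworkType.LAYER constructor, ReadTrainFromFile, the FANNCSharp.Float namespace) are assumptions about the wrapper, not something documented on this page.

        using System;
        using FANNCSharp;          // NetworkType (assumed namespace layout)
        using FANNCSharp.Float;    // NeuralNet, TrainingData in the float build (assumed)

        class ToFannTrainDataDemo
        {
            static void Main()
            {
                // Assumed constructor: a fully connected 2-3-1 layered network.
                var net = new NeuralNet(NetworkType.LAYER, 3, 2, 3, 1);

                // Assumed loader: training data in FANN's text format.
                var data = new TrainingData();
                data.ReadTrainFromFile("xor.data");

                // Each wrapper call below converts `data` internally via ToFannTrainData().
                float mse = net.TrainEpochIncrementalMod(data);
                Console.WriteLine("MSE after one incremental epoch: " + mse);
            }
        }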
Example #1
        /* Method: TrainEpochSarpropParallel

            Parameters:
                data - the data to train on
                threadNumb - the number of threads to use for training
                predictedOutputs - receives the network's outputs for each training pattern

        */
        public float TrainEpochSarpropParallel(TrainingData data, uint threadNumb, List<List<float>> predictedOutputs)
        {
            using (floatVectorVector predicted_out = new floatVectorVector(predictedOutputs.Count))
            {
                // Pre-size the native vector-of-vectors to mirror the managed output list.
                for (int i = 0; i < predictedOutputs.Count; i++)
                {
                    predicted_out[i] = new floatVector(predictedOutputs[i].Count);
                }
                float result = fannfloat.train_epoch_sarprop_parallel(net.to_fann(), data.ToFannTrainData(), threadNumb, predicted_out);

                // Copy the native predictions back into the managed list.
                predictedOutputs.Clear();
                for (int i = 0; i < predicted_out.Count; i++)
                {
                    List<float> list = new List<float>();
                    for (int j = 0; j < predicted_out[i].Count; j++)
                    {
                        list.Add(predicted_out[i][j]);
                    }
                    predictedOutputs.Add(list);
                }
                return result;
            }
        }
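
A hedged sketch of calling this overload: the managed list only needs the right shape going in (one inner list per training pattern), and the wrapper fills it with the network's outputs on the way out. TrainDataLength is an assumed TrainingData property for the number of patterns; net and data are set up as in the sketch above.

        using System;
        using System.Collections.Generic;
        using FANNCSharp.Float;    // NeuralNet, TrainingData (assumed namespace)

        static class SarpropDemo
        {
            // Runs one parallel SARPROP epoch and prints the outputs for the first pattern.
            public static float OneEpoch(NeuralNet net, TrainingData data, uint threads)
            {
                // One inner list per training pattern, so the wrapper can size its
                // native floatVectorVector (TrainDataLength is an assumed property name).
                var predicted = new List<List<float>>();
                for (uint i = 0; i < data.TrainDataLength; i++)
                    predicted.Add(new List<float>());

                // The wrapper clears and refills `predicted` with the network's outputs
                // for every training pattern (see the copy-back loop above).
                float mse = net.TrainEpochSarpropParallel(data, threads, predicted);
                Console.WriteLine("Pattern 0 outputs: " + string.Join(", ", predicted[0]));
                return mse;
            }
        }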
Example #2
        /* Method: TrainEpochSarpropParallel

            Parameters:
                data - the data to train on
                threadNumb - the number of threads to use for training

        */
        public float TrainEpochSarpropParallel(TrainingData data, uint threadNumb)
        {
            return fannfloat.train_epoch_sarprop_parallel(net.to_fann(), data.ToFannTrainData(), threadNumb);
        }
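
This overload makes the same native call without the predicted-outputs parameter: no outputs are copied back to managed code, so use it when only the epoch's MSE is needed.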
Example #3
        /* Method: TrainEpochIncrementalMod

            Parameters:
                data - the data to train on

        */
        public float TrainEpochIncrementalMod(TrainingData data)
        {
            return fannfloat.train_epoch_incremental_mod(net.to_fann(), data.ToFannTrainData());
        }
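
Because each call runs exactly one epoch and returns its mean squared error, a basic training loop just repeats it until a target error is reached. A minimal sketch, continuing the net/data setup from the first example above; the target error and epoch cap are illustrative values:

        const float desiredError = 0.001f;
        const int maxEpochs = 1000;

        float mse = float.MaxValue;
        for (int epoch = 0; epoch < maxEpochs && mse > desiredError; epoch++)
        {
            // One pass over `data` with incremental (online) weight updates.
            mse = net.TrainEpochIncrementalMod(data);
        }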
Example #4
        /* Method: TrainEpochIncrementalMod

            Parameters:
                data - the data to train on
                predictedOutputs - receives the network's outputs for each training pattern

        */
        public float TrainEpochIncrementalMod(TrainingData data, List<List<float>> predictedOutputs)
        {
            using (floatVectorVector predicted_out = new floatVectorVector(predictedOutputs.Count))
            {
                // Pre-size the native vector-of-vectors to mirror the managed output list.
                for (int i = 0; i < predictedOutputs.Count; i++)
                {
                    predicted_out[i] = new floatVector(predictedOutputs[i].Count);
                }
                float result = fannfloat.train_epoch_incremental_mod(net.to_fann(), data.ToFannTrainData(), predicted_out);

                // Copy the native predictions back into the managed list.
                predictedOutputs.Clear();
                for (int i = 0; i < predicted_out.Count; i++)
                {
                    List<float> list = new List<float>();
                    for (int j = 0; j < predicted_out[i].Count; j++)
                    {
                        list.Add(predicted_out[i][j]);
                    }
                    predictedOutputs.Add(list);
                }
                return result;
            }
        }
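
Note that both predicted-output overloads on this page follow the same pattern: the managed List<List<float>> is read only for its shape on the way in, then cleared and repopulated from the native floatVectorVector on the way out, so the same list instance can be reused across successive epochs.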
Example #5
        /* Method: TestDataParallel

            Parameters:
                data - the data to test on
                threadNumb - the number of threads to use for testing

        */
        public float TestDataParallel(TrainingData data, uint threadNumb)
        {
            return fannfloat.test_data_parallel(net.to_fann(), data.ToFannTrainData(), threadNumb);
        }
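
Like FANN's plain test routine, TestDataParallel evaluates the MSE over the data without updating the network's weights, so a common pattern is to train on one set and measure error on a held-out set. A sketch under the same assumptions as the earlier examples; the second data file is illustrative:

        // Evaluate on a held-out set with 4 threads; the weights are not modified.
        var testData = new TrainingData();
        testData.ReadTrainFromFile("xor_test.data");   // assumed FANN-format file

        float trainMse = net.TrainEpochSarpropParallel(data, 4);
        float testMse = net.TestDataParallel(testData, 4);
        Console.WriteLine("train MSE = " + trainMse + ", test MSE = " + testMse);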