public ITransformer TrainModel(TrainingAlgorithm algorithm, DataControl.TrainingOptions options)
{
    // Dispatch to the trainer that matches the selected algorithm.
    switch (algorithm)
    {
        case TrainingAlgorithm.LOGISTIC_REGRESSION:
            return LogisticRegression(options);
        case TrainingAlgorithm.NAIVE_BAYES:
            return NaiveBayes(options);
        case TrainingAlgorithm.BINARY_STOCHASTIC_DUAL_COORDINATE_ASCENT:
            return BinaryStochasticDualCoordinateAscent(options);
        case TrainingAlgorithm.FAST_TREE:
            return FastTree(options);
        case TrainingAlgorithm.STOCHASTIC_DUAL_COORDINATE_ASCENT:
            return StochasticDualCoordinateAscent(options);
        case TrainingAlgorithm.STOCHASTIC_GRADIENT_DESCENT:
            return StochasticGradientDescent(options);
        default:
            return null;
    }
}
public void Evaluate(ITransformer model, TrainingAlgorithm algorithm, string labelColumn)
{
    // Multiclass trainers and binary trainers require different evaluators.
    switch (algorithm)
    {
        case TrainingAlgorithm.STOCHASTIC_DUAL_COORDINATE_ASCENT:
        case TrainingAlgorithm.LOGISTIC_REGRESSION:
        case TrainingAlgorithm.NAIVE_BAYES:
            EvaluateMulticlass(model, labelColumn);
            break;
        case TrainingAlgorithm.BINARY_STOCHASTIC_DUAL_COORDINATE_ASCENT:
        case TrainingAlgorithm.FAST_TREE:
        case TrainingAlgorithm.STOCHASTIC_GRADIENT_DESCENT:
            EvaluateBinary(model, labelColumn);
            break;
    }
}
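The two methods above pair up: the algorithm passed to TrainModel determines which evaluator Evaluate must run. Below is a minimal driver sketch under the assumption that both methods live on a hypothetical ModelBuilder class and that DataControl.TrainingOptions is default-constructible; neither assumption comes from the snippets themselves.

// Hypothetical driver; ModelBuilder and the TrainingOptions construction are assumptions, not part of the original code.
var builder = new ModelBuilder();
var options = new DataControl.TrainingOptions();

// Pick one algorithm and reuse the same value for evaluation so the matching metric set is computed.
const TrainingAlgorithm chosen = TrainingAlgorithm.FAST_TREE;
ITransformer model = builder.TrainModel(chosen, options);
builder.Evaluate(model, chosen, labelColumn: "Label");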
static void Main(string[] args)
{
    // words to be tested
    List<int> wordsToBeTested = new List<int>();
    for (int i = 1; i <= 100; i++)
        wordsToBeTested.Add(i);
    //wordsToBeTested.Add(1);

    // samples to be tested
    List<int> samplesToBeTested = new List<int>();
    samplesToBeTested.Add(1);
    samplesToBeTested.Add(2);
    samplesToBeTested.Add(3);
    samplesToBeTested.Add(4);
    samplesToBeTested.Add(5);

    // audio directory
    List<string> audioDirectory = new List<string>();
    audioDirectory.Add("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\audio\\092910_123758_Hebrew");
    audioDirectory.Add("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\audio\\101410_140344_Hebrew");
    audioDirectory.Add("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\audio\\101510_111237_Hebrew");

    // audio file names
    List<string> audioFileNames = new List<string>();
    audioFileNames.Add("[Wed_(Sep_29_2010)_12-37-58]_4124143701_");
    audioFileNames.Add("[Thu_(Oct_14_2010)_14-03-44]_4122688595_");
    audioFileNames.Add("[Fri_(Oct_15_2010)_11-12-37]_4126203298_");

    for (int i = 0; i < 3; i++)
    {
        List<int> speakersToBeTested = new List<int>();
        speakersToBeTested.Add(i);

        // set up data
        data = new Data(audioDirectory, audioFileNames, 5, speakersToBeTested, wordsToBeTested, samplesToBeTested,
            "C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\config_files\\config.txt.100.english");

        // set up grammar
        GrammarCreator gc = new GrammarCreator("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data", "Hebrew", "allcombinations");

        // set up training
        TrainingAlgorithm ta = new TrainingAlgorithm(gc, data, 10, 15);
        ta.LearnAllWords();
    }
}
static void Main(string[] args)
{
    // speakers to be tested
    List<int> speakersToBeTested = new List<int>();
    speakersToBeTested.Add(0);
    speakersToBeTested.Add(1);
    speakersToBeTested.Add(2);

    // words to be tested
    List<int> wordsToBeTested = new List<int>();
    for (int i = 1; i <= 50; i += 5)
        wordsToBeTested.Add(i);
    //wordsToBeTested.Add(25);

    // samples to be tested
    List<int> samplesToBeTested = new List<int>();
    samplesToBeTested.Add(1);
    samplesToBeTested.Add(2);
    samplesToBeTested.Add(3);
    samplesToBeTested.Add(4);
    samplesToBeTested.Add(5);

    // audio directory
    List<string> audioDirectory = new List<string>();
    audioDirectory.Add("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\audio\\092910_123758_Hebrew");
    audioDirectory.Add("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\audio\\101410_140344_Hebrew");
    audioDirectory.Add("C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\audio\\101510_111237_Hebrew");

    // audio file names
    List<string> audioFileNames = new List<string>();
    audioFileNames.Add("[Wed_(Sep_29_2010)_12-37-58]_4124143701_");
    audioFileNames.Add("[Thu_(Oct_14_2010)_14-03-44]_4122688595_");
    audioFileNames.Add("[Fri_(Oct_15_2010)_11-12-37]_4126203298_");

    //GrammarReader gr = new GrammarReader("Hebrew", "C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data", "Allphone");

    // set up data
    data = new Data(audioDirectory, audioFileNames, 5, speakersToBeTested, wordsToBeTested, samplesToBeTested,
        "C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data\\config_files\\config.txt.100.english");

    TrainingAlgorithm ta = new TrainingAlgorithm(10, data, directory, MWordTypes, MSamples, MSpeakers,
        "C:\\Users\\Administrator\\Documents\\Visual Studio 2008\\Projects\\cmuspeechrecognition_cmuspeechmain\\test_data", 1, "eliminative");
    ta.trainWordsDiscriminativeEliminative();
}
/// <summary>
/// Given a list of speakers and the samples per speaker to train on, carries out training and produces the pronunciations for each word.
/// </summary>
/// <param name="speakerList">list of speakers for training</param>
/// <param name="sampleList">list of samples for training</param>
/// <returns>dictionary mapping each word to its list of pronunciations</returns>
private Dictionary<string, List<string>> trainAllWords(List<int> speakerList, List<int> sampleList)
{
    string speaker = "speaker-";
    for (int i = 0; i < speakerList.Count; i++)
    {
        speaker += speakerList[i].ToString() + "_";
    }

    string sample = "sample-";
    List<int> trueSampleList = new List<int>();
    trueSampleList.AddRange(sampleList);
    for (int i = 0; i < trueSampleList.Count; i++)
    {
        trueSampleList[i]++;
        sample += trueSampleList[i].ToString() + "_";
    }
    System.Diagnostics.Debug.WriteLine("training " + speaker + sample);

    // words to be tested
    List<int> wordsToBeTested = new List<int>();
    for (int i = 1; i <= MWordTypes; i++)
        wordsToBeTested.Add(i);

    // set up data
    Data tempData = new Data(data.audioDirectory, data.audioFileName, data.numberOfSamplesPerWord, speakerList, wordsToBeTested, trueSampleList, data.wordListPath);

    // set up grammar
    GrammarCreator gc = new GrammarCreator(grammarDirectory, data.language + speaker + sample, "allcombinations");

    // set up training
    TrainingAlgorithm ta = new TrainingAlgorithm(gc, tempData, numberOfAlternates, 15);
    return ta.LearnAllWords();
}
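A hedged example of calling trainAllWords follows; the specific speaker and sample indices are illustrative only, and note that the method increments each sample index by one before building the Data object.

// Illustrative call; the indices are assumptions, not taken from the original code.
List<int> speakers = new List<int> { 0, 1, 2 };
List<int> samples = new List<int> { 0, 1, 2, 3 };   // shifted to 1-4 inside trainAllWords

Dictionary<string, List<string>> pronunciations = trainAllWords(speakers, samples);
foreach (KeyValuePair<string, List<string>> entry in pronunciations)
{
    System.Diagnostics.Debug.WriteLine(entry.Key + ": " + string.Join(", ", entry.Value.ToArray()));
}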
/// <summary>
/// Training callback, invoked at each iteration.
/// </summary>
/// <param name="epoch">Epoch number</param>
/// <param name="error">Current error</param>
/// <param name="algorithm">Training algorithm</param>
private void TrainingCallback(int epoch, double error, TrainingAlgorithm algorithm)
{
    // Marshal the update onto the UI thread before touching the results grid.
    Invoke(addAction, new object[] { epoch, error, algorithm, _dgvTrainingResults });
}
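The addAction delegate invoked above is not shown in the snippet. One plausible way it could be declared and initialized in a WinForms form, offered purely as an assumption, is sketched here; the delegate name and the grid columns are hypothetical.

// Hypothetical declaration of addAction; the signature mirrors the object[] passed to Invoke above.
private delegate void AddTrainingRow(int epoch, double error, TrainingAlgorithm algorithm, DataGridView grid);
private AddTrainingRow addAction;

private void InitializeTrainingGrid()
{
    // Runs on the UI thread once marshalled by Invoke: append one row per epoch.
    addAction = (epoch, error, algorithm, grid) => grid.Rows.Add(epoch, error, algorithm.ToString());
}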
public void set_training_algorithm(TrainingAlgorithm training_algorithm)
{
    fannfixedPINVOKE.neural_net_set_training_algorithm(swigCPtr, (int)training_algorithm);
}
public TrainingAlgorithm get_training_algorithm()
{
    TrainingAlgorithm ret = (TrainingAlgorithm)fannfixedPINVOKE.neural_net_get_training_algorithm(swigCPtr);
    return ret;
}
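A short sketch of how the two SWIG-generated accessors above might be used. The helper name is hypothetical, the neural_net wrapper type name is assumed from typical SWIG output for FANN, and construction of the net is left outside the sketch.

// Hypothetical helper; only set_training_algorithm/get_training_algorithm come from the snippets above.
static void SwitchToRprop(neural_net net)
{
    TrainingAlgorithm before = net.get_training_algorithm();
    Console.WriteLine("Training algorithm before: " + before);

    // TRAIN_RPROP is the same enum member used in the cascade-training example below.
    net.set_training_algorithm(TrainingAlgorithm.TRAIN_RPROP);
    Console.WriteLine("Training algorithm after: " + net.get_training_algorithm());
}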
void Start()
{
    ga = GetComponent<TrainingAlgorithm>();
    spawnWave(1);
    timer = 30.0f;
}
static void Main()
{
    const float desired_error = 0.0F;
    uint max_neurons = 30;
    uint neurons_between_reports = 1;
    uint bit_fail_train, bit_fail_test;
    float mse_train, mse_test;
    DataType[] output;
    DataType[] steepness = new DataType[1];
    int multi = 0;
    ActivationFunction[] activation = new ActivationFunction[1];
    TrainingAlgorithm training_algorithm = TrainingAlgorithm.TRAIN_RPROP;

    Console.WriteLine("Reading data.");
    using (TrainingData trainData = new TrainingData("..\\..\\..\\datasets\\parity8.train"))
    using (TrainingData testData = new TrainingData("..\\..\\..\\datasets\\parity8.test"))
    {
        trainData.ScaleTrainData(-1, 1);
        testData.ScaleTrainData(-1, 1);

        Console.WriteLine("Creating network.");
        using (NeuralNet net = new NeuralNet(NetworkType.SHORTCUT, 2, trainData.InputCount, trainData.OutputCount))
        {
            net.TrainingAlgorithm = training_algorithm;
            net.ActivationFunctionHidden = ActivationFunction.SIGMOID_SYMMETRIC;
            net.ActivationFunctionOutput = ActivationFunction.LINEAR;
            net.TrainErrorFunction = ErrorFunction.ERRORFUNC_LINEAR;

            if (multi == 0)
            {
                steepness[0] = 1;
                net.CascadeActivationSteepnesses = steepness;
                activation[0] = ActivationFunction.SIGMOID_SYMMETRIC;
                net.CascadeActivationFunctions = activation;
                net.CascadeCandidateGroupsCount = 8;
            }

            if (training_algorithm == TrainingAlgorithm.TRAIN_QUICKPROP)
            {
                net.LearningRate = 0.35F;
                net.RandomizeWeights(-2.0F, 2.0F);
            }

            net.BitFailLimit = (DataType)0.9;
            net.TrainStopFunction = StopFunction.STOPFUNC_BIT;
            net.PrintParameters();

            net.Save("..\\..\\..\\examples\\cascade_train2.net");

            Console.WriteLine("Training network.");
            net.CascadetrainOnData(trainData, max_neurons, neurons_between_reports, desired_error);
            net.PrintConnections();

            mse_train = net.TestData(trainData);
            bit_fail_train = net.BitFail;
            mse_test = net.TestData(testData);
            bit_fail_test = net.BitFail;

            Console.WriteLine("\nTrain error: {0}, Train bit-fail: {1}, Test error: {2}, Test bit-fail: {3}\n",
                mse_train, bit_fail_train, mse_test, bit_fail_test);

            for (int i = 0; i < trainData.TrainDataLength; i++)
            {
                output = net.Run(trainData.GetTrainInput((uint)i));
                if ((trainData.GetTrainOutput((uint)i)[0] >= 0 && output[0] <= 0) ||
                    (trainData.GetTrainOutput((uint)i)[0] <= 0 && output[0] >= 0))
                {
                    Console.WriteLine("ERROR: {0} does not match {1}", trainData.GetTrainOutput((uint)i)[0], output[0]);
                }
            }

            Console.WriteLine("Saving network.");
            net.Save("..\\..\\..\\examples\\cascade_train.net");
            Console.ReadKey();
        }
    }
}
public void set_training_algorithm(TrainingAlgorithm training_algorithm)
{
    fanndoublePINVOKE.neural_net_set_training_algorithm(swigCPtr, (int)training_algorithm);
}