/// <summary>
/// Applies one gradient-descent step to every weight in the network.
/// Layer 0 consumes the sample's phrase features minus the trailing element
/// (which holds the label slot); deeper layers consume the previous layer's outputs.
/// </summary>
/// <param name="Feature">Training sample whose PhraseFeatures feed the first layer.</param>
/// <param name="LRate">Learning rate multiplied into each delta.</param>
void UpdateWeights(NLPFeatureDataModel Feature, double LRate)
{
    for (int i = 0; i < layers.Count; i++)
    {
        // FIX: the original built the feature array on EVERY layer iteration
        // and then threw it away whenever i != 0. Build only what is used.
        double[] inputs;
        if (i == 0)
        {
            // First layer: copy all features except the last (label) slot.
            inputs = new double[Feature.PhraseFeatures.Length - 1];
            for (int j = 0; j < inputs.Length; j++)
            {
                inputs[j] = Feature.PhraseFeatures[j];
            }
        }
        else
        {
            // Subsequent layers: inputs are the previous layer's outputs.
            inputs = new double[layers[i - 1].Length];
            for (int j = 0; j < inputs.Length; j++)
            {
                inputs[j] = layers[i - 1][j].output;
            }
        }

        for (int j = 0; j < layers[i].Length; j++)
        {
            // FIX: 'neuron' was declared but unused while the loop re-indexed
            // layers[i][j] repeatedly; use the local instead.
            Neuron neuron = layers[i][j];
            for (int k = 0; k < inputs.Length; k++)
            {
                neuron.weight[k] += LRate * neuron.delta * inputs[k];
            }
            // Final weight slot acts as the bias term (implicit input of 1).
            neuron.weight[neuron.weight.Length - 1] += LRate * neuron.delta;
        }
    }
}
/// <summary>
/// Loads the bag-of-words training CSV, seeds the tokenizer's keyword list
/// from the CSV headings, and converts each row into a numeric feature
/// vector appended to trainingData.
/// </summary>
public void GetInitilizationData()
{
    FileProcessing fileProcessing = new FileProcessing();
    tokenizer = new Tokenizer();
    trainingData.Clear();

    List<NLPDataModel> rawTrainingData =
        fileProcessing.ReadCSVLines<NLPDataModel>("NLPBagofWords.csv", true).ToList();
    tokenizer.Keywords = fileProcessing.Headings;

    // FIX: reflect over the property set once — the original called
    // GetProperties() for every row.
    var properties = typeof(NLPDataModel).GetProperties();

    foreach (NLPDataModel data in rawTrainingData)
    {
        NLPFeatureDataModel feature = new NLPFeatureDataModel();
        feature.PhraseFeatures = new double[properties.Length];

        for (int i = 0; i < properties.Length; i++)
        {
            // FIX: removed dead code that allocated a throwaway NLPDataModel
            // and reflected GetValue on it, discarding the result, per cell.
            string token = (string)properties[i].GetValue(data);

            // NOTE: TryParse writes 0 to the out argument on failure, so
            // unparseable cells become 0. (The original's "-999" sentinel
            // initializer was dead for the same reason.)
            double f;
            Double.TryParse(token, out f);
            feature.PhraseFeatures[i] = f;
        }

        // PhraseType is stored separately as the sample's label.
        int phaseType;
        int.TryParse(data.PhraseType, out phaseType);
        feature.PhaseType = phaseType;

        trainingData.Add(feature);
    }
}
/// <summary>
/// End-to-end smoke test: initializes and trains the network from the CSV
/// data, then runs a sample phrase through forward propagation.
/// </summary>
public void GuessSentence()
{
    network.GetInitilizationData();
    network.InitializeNeuralNetwork();

    // Train at a 0.3 learning rate; the remaining arguments appear to be
    // sample count and epoch/layer count — TODO confirm against TrainNetwork.
    network.TrainNetwork(network.TrainingData, 0.3, (network.TrainingData.Count - 1), 3);

    NLPFeatureDataModel inputData = network.NormalizeInput("Please create an SOW");

    // FIX: the original discarded the propagation result and asserted
    // Assert.IsTrue(true), which can never fail. Capture the output and
    // require at least a non-null result. (Commented-out feature overrides
    // were also deleted.)
    double[] guesses = network.ForwardPropigation(inputData.PhraseFeatures);
    Assert.IsNotNull(guesses);
}
/// <summary>
/// Handles an incoming chat message: classifies the text with the network,
/// reports the per-intent confidence back to the user, and launches the SOW
/// form dialog when the "Create an SOW" intent exceeds 90% confidence.
/// </summary>
/// <param name="context">Dialog context used to reply and start child dialogs.</param>
/// <param name="result">The awaited incoming message activity.</param>
public virtual async Task MessageReceivedAsync(IDialogContext context, IAwaitable<IMessageActivity> result)
{
    var message = await result;

    NLPFeatureDataModel inputData = network.NormalizeInput(message.Text);
    double[] guesses = network.ForwardPropigation(inputData.PhraseFeatures);

    string r = "";
    r += "I do not know what you are asking: " + (guesses[0] * 100) + "%";
    r += ", Create an SOW: " + (guesses[1] * 100) + "%";
    r += ", Create a new Client: " + (guesses[2] * 100) + "%";

    // FIX: the reply text was built but never sent — presumably the intended
    // behavior; confirm against the dialog's design.
    await context.PostAsync(r);

    // Only start the SOW form when the network is more than 90% confident.
    if ((guesses[1] * 100) > 90)
    {
        context.Call(new SOWForm(), ResumeAfterOptionDialog);
    }
}
/// <summary>
/// Encodes a raw phrase as a one-hot keyword vector: slot i is set to 1 when
/// keyword i occurs among the phrase's tokens, and left at 0 otherwise.
/// </summary>
/// <param name="InputString">The raw phrase to tokenize and encode.</param>
/// <returns>A feature model whose PhraseFeatures flags the matched keywords.</returns>
public NLPFeatureDataModel NormalizeInput(string InputString)
{
    // One slot per data-model property, excluding the trailing label column.
    int featureCount = typeof(NLPDataModel).GetProperties().Length - 1;

    NLPFeatureDataModel feature = new NLPFeatureDataModel();
    feature.PhraseFeatures = new double[featureCount];

    List<string> tokens = tokenizer.TokenizeString(InputString);

    // Mark every keyword slot whose keyword appears in the token list.
    for (int keywordIndex = 0; keywordIndex < tokenizer.Keywords.Count; keywordIndex++)
    {
        if (tokens.Contains(tokenizer.Keywords[keywordIndex]))
        {
            feature.PhraseFeatures[keywordIndex] = 1;
        }
    }

    return feature;
}