/**
 * o(i): Recurrent Neural Network / Part 5
 * Neural Network Output: the probability that a possible insurance client opts for a specific product. A function
 * computing an importance measure of each dataset property, as specified by the information gain of each
 * variable. It constitutes an advanced method for estimating the category probabilities of a probable insurance
 * client, given the a priori and a posteriori probabilities, as well as the entropy values converted to
 * probabilities through softmax.
 */
public void RNN_Compute_o()
{
    o = new MyList<IList<Double>>();
    MyList<Double>.InitializeLists(o, dataset.Count, categories);
    GetAdditiveGains();

    for (int n = 0; n < dataset.Count; n++)
    {
        TrainSample t = dataset[n];
        double sum = 0;

        // Exponentiate the last history value weighted by each category gain (softmax numerators).
        for (int j = 0; j <= categories; j++)
        {
            o[n][j] += Math.Exp(h[n][collectedPoints.Count - 1] * categoryGains[j]);
            sum += o[n][j];
        }

        // Normalize so the category scores of each sample sum to 1 (softmax denominator).
        for (int j = 0; j <= categories; j++)
        {
            o[n][j] /= sum;
        }
    }

    for (int n = 0; n < dataset.Count; n++)
    {
        Console.WriteLine("\nInsured ID: " + dataset[n].getID());
        MyList<Double>.NormalizeListValues(o[n], MyList<Double>.FindBoundaries(o[n]));

        for (int j = 0; j <= categories; j++)
        {
            Console.WriteLine("Insurance Program: " + j + " selected with probability: " + o[n][j]);
        }
    }
}
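/*
 * Illustration only (not part of the class above): a standalone sketch of the softmax step used in
 * RNN_Compute_o(). The class and method names (SoftmaxSketch, Softmax) and the sample scores are
 * assumptions made for this demonstration; only the exponentiate-and-normalize logic mirrors the
 * method above. Subtracting the maximum score first is a common guard against overflow in Math.Exp.
 */
using System;
using System.Linq;

public static class SoftmaxSketch
{
    // Turns arbitrary real-valued scores into probabilities that sum to 1.
    public static double[] Softmax(double[] scores)
    {
        double max = scores.Max();
        double[] exps = scores.Select(s => Math.Exp(s - max)).ToArray();
        double total = exps.Sum();
        return exps.Select(e => e / total).ToArray();
    }

    public static void Main()
    {
        // Hypothetical weighted activations h * categoryGains[j] for three insurance programs.
        double[] scores = { 0.7, 1.3, -0.2 };
        double[] probs = Softmax(scores);

        for (int j = 0; j < probs.Length; j++)
            Console.WriteLine("Insurance Program: " + j + " selected with probability: " + probs[j]);
    }
}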
/**
 * A method reading a given train file of the specific format described in the comments below; if reading
 * succeeds, its parsed contents are added to the dataset container, otherwise a FileLoadException
 * describing the error is thrown.
 * @param filename The name of the given text file.
 */
public void ReadTrainFile(String filename)
{
    var lines = File.ReadAllLines(filename);

    for (var i = 0; i < lines.Length; i += 1)
    {
        String line = lines[i];
        String[] lineTokens = line.Split(' ');

        // Every line must consist of exactly four space-separated tokens.
        if (lineTokens.Length != 4)
            throw new FileLoadException("Invalid file format!");

        TrainSample trainData = new TrainSample(lineTokens[0],
                                                Convert.ToDouble(lineTokens[1]),
                                                Convert.ToDouble(lineTokens[2]),
                                                Convert.ToInt32(lineTokens[3]));

        // Count how many samples fall into each classification decision (category).
        classificationDecisions[Convert.ToInt32(lineTokens[3])] += 1;
        dataset.Add(trainData);
    }
}
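/*
 * Illustration only: a hypothetical train file matching the four-token-per-line format that
 * ReadTrainFile() expects (ID, two numeric properties, integer category index). The file name,
 * the values and the instance name "rnn" are made up for the example.
 */
// Hypothetical contents of "train.txt":
//
//   C1001 35.0 1200.50 2
//   C1002 42.0 850.00 0
//
// Reading the file fills the dataset and the per-category decision counters:
rnn.ReadTrainFile("train.txt");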
/**
 * Classification scores: pairs each insured ID with the scaled absolute value of its history at every
 * collected point after the first, producing the list of classification scores of the dataset.
 */
public IList<Vec2<String, Double>> Classify()
{
    IList<Vec2<String, Double>> classified = new MyList<Vec2<String, Double>>();

    for (int n = 0; n < dataset.Count; n++)
    {
        TrainSample t = dataset[n];

        // Scale each history value and keep its magnitude as the sample's score for step k.
        for (int k = 1; k < collectedPoints.Count; k++)
        {
            classified.Add(new Vec2<String, Double>(t.getID(), Math.Abs(10 * h[n][k])));
        }
    }

    return classified;
}
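/*
 * Illustration only: a possible call sequence for Classify(). Since it reads the history matrix h,
 * RNN_Compute_h() has to run first. The instance name "rnn" and the Vec2 member names used below
 * (First for the insured ID, Second for the score) are assumptions made for this sketch.
 */
rnn.ReadTrainFile("train.txt");
rnn.RNN_Compute_h();

foreach (Vec2<String, Double> entry in rnn.Classify())
{
    Console.WriteLine("Insured ID: " + entry.First + " scored: " + entry.Second);
}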
/**
 * h(i): Recurrent Neural Network / Part 4
 * Data history computations: A method computing the dataset history, based on the piecewise linear border of
 * correlation between the instance variables.
 */
public void RNN_Compute_h()
{
    h = new MyList<IList<Double>>();
    MyList<Double>.InitializeLists(h, dataset.Count, collectedPoints.Count);
    GetCorrelationBorders();

    for (int n = 0; n < dataset.Count; n++)
    {
        TrainSample t = dataset[n];

        // First history value: tanh of the first normalized property weighted by its gain.
        IList<Double> thisReg = normalizeProperty(0);
        h[n][0] = Math.Tanh(propertyGains[0] * thisReg[n]);

        // Each following value mixes the previous history (weighted by the correlation border)
        // with the current normalized property (weighted by its gain).
        for (int k = 1; k < collectedPoints.Count; k++)
        {
            thisReg = normalizeProperty(k);
            h[n][k] = Math.Tanh(correlations[k - 1] * h[n][k - 1] + propertyGains[k] * thisReg[n]);
            Console.WriteLine(h[n][k]);
        }
    }
}
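/*
 * Illustration only: a toy, self-contained version of the history recurrence used in RNN_Compute_h(),
 *   h[0] = tanh(gain[0] * x[0])
 *   h[k] = tanh(correlation[k - 1] * h[k - 1] + gain[k] * x[k])
 * The class name and every number below are made up; in the real method the inputs come from
 * normalizeProperty(), propertyGains and the correlation borders.
 */
using System;

public static class HistorySketch
{
    public static double[] ComputeHistory(double[] x, double[] gains, double[] correlations)
    {
        double[] h = new double[x.Length];

        // First step depends only on the first normalized property.
        h[0] = Math.Tanh(gains[0] * x[0]);

        // Every following step mixes the previous history value with the current property.
        for (int k = 1; k < x.Length; k++)
            h[k] = Math.Tanh(correlations[k - 1] * h[k - 1] + gains[k] * x[k]);

        return h;
    }

    public static void Main()
    {
        double[] x = { 0.3, 0.8, 0.5 };          // normalized property values of one sample (made up)
        double[] gains = { 1.2, 0.7, 0.9 };      // per-property information gains (made up)
        double[] correlations = { 0.4, 0.6 };    // correlation borders between consecutive properties (made up)

        foreach (double value in ComputeHistory(x, gains, correlations))
            Console.WriteLine(value);
    }
}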