/// <summary>
/// Writes this model out as a .pat file chosen via a save dialog.
/// Logs the line count and extents before prompting for a destination.
/// </summary>
/// <param name="write">Callback used to echo progress to the command line.</param>
public void CreatePattern(WriteToCMDLine write)
{
    write("Line Count : " + this.Lines.Count());
    write("Extents : " + Extents[0] + "," + Extents[1] + "," + Extents[2] + "," + Extents[3]);
    SaveFileDialog sfd = new SaveFileDialog()
    {
        FileName = "Create a pattern file",
        Filter = "PAT files (*.pat)|*.pat",
        Title = "Create a pat file"
    };
    if (sfd.ShowDialog() == DialogResult.OK)
    {
        var fp = sfd.FileName;
        // BUG FIX: string.Replace returns a NEW string; the original discarded
        // the result, so a ".txt" name was never actually rewritten to ".pat".
        if (fp.EndsWith(".txt")) { fp = fp.Replace(".txt", ".pat"); }
        if (!fp.EndsWith(".pat")) { fp += ".pat"; }
        File.WriteAllLines(fp, GetText(fp, write));
    }
}
/// <summary>
/// Loads (or creates) the context network for the requested slot:
/// 0 = primary, 1 = secondary, 2 = tertiary (any other value falls back to primary).
/// </summary>
internal AlphaContext(Datatype dt, WriteToCMDLine write, int numb = 0)
{
    datatype = dt;
    // Resolve the slot number to its backing Datatype once, then reuse it
    // for both the load attempt and the fallback construction.
    Datatype slot;
    switch (numb)
    {
        case 1: slot = Datatype.AlphaContextSecondary; break;
        case 2: slot = Datatype.AlphaContextTertiary; break;
        default: slot = Datatype.AlphaContextPrimary; break;
    }
    Network = slot.LoadNetwork(write, dt);
    if (Network.Datatype == Datatype.None)
    {
        // Nothing saved on disk: start a fresh single-layer linear network.
        Network = new NeuralNetwork(slot);
        Network.Layers.Add(new Layer(1, CharSet.CharCount * (1 + (2 * SearchRange)), Activation.Linear));
    }
}
/// <summary>
/// Prompts for a .bin file, then dumps every sample in that file's folder
/// to the command line (one per second) for manual inspection.
/// </summary>
public static void Read(WriteToCMDLine write)
{
    OpenFileDialog ofd = new OpenFileDialog()
    {
        FileName = "Select a binary file",
        Filter = "BIN files (*.bin)|*.bin",
        Title = "Open bin file"
    };
    if (ofd.ShowDialog() != DialogResult.OK)
    {
        return;
    }
    var folder = Path.GetDirectoryName(ofd.FileName);
    int fileNumber = 0;
    foreach (string path in Directory.GetFiles(folder))
    {
        try
        {
            fileNumber++;
            write("");
            Sample sample = path.ReadFromBinaryFile<Sample>();
            write("File Number : " + fileNumber);
            write("GUID : " + sample.GUID);
            write("Input : " + sample.TextInput);
            write("Output : " + sample.DesiredOutput.ToList().IndexOf(sample.DesiredOutput.Max()));
            write("");
            System.Threading.Thread.Sleep(1000); // pace the console output
        }
        catch (Exception e)
        {
            e.OutputError();
        }
    }
}
/// <summary>
/// Runs <paramref name="input"/> through every layer, returning one 2-row matrix
/// per stage: rank 0 holds the raw layer output, rank 1 the values fed forward.
/// (With dropout disabled both ranks carry the same values.)
/// </summary>
/// <param name="input">Activation vector for the first layer.</param>
/// <param name="write">Callback used to report NaN values during the pass.</param>
/// <returns>Per-stage results; element 0 is the (duplicated) input.</returns>
public List<double[,]> Forward(double[] input,/* double dropout, */WriteToCMDLine write)
{
    List<double[,]> Results = new List<double[,]>();
    double[,] resultinput = new double[2, input.Count()];
    resultinput.SetRank(input, 0);
    resultinput.SetRank(input, 1);
    Results.Add(resultinput);
    for (int k = 0; k < Layers.Count(); k++)
    {
        // BUG FIX: the previous active line added the raw double[] from
        // Layer.Output straight into the List<double[,]> (and was missing a
        // closing parenthesis), which does not compile. Rebuild the 2-row
        // matrix the way the commented-out dropout version did, minus the
        // dropout scaling.
        double[,] output = new double[2, Layers[k].Biases.Count()];
        var rank = Layers[k].Output(Results.Last().GetRank(1));
        if (rank.Any(x => double.IsNaN(x)))
        {
            write("Layer " + k + " in " + Datatype.ToString() + " Network has NaN Values");
        }
        output.SetRank(rank, 0);
        output.SetRank(rank, 1); // no dropout: forward the same values
        Results.Add(output);
    }
    return Results;
}
/// <summary>
/// Builds a one-hot training sample for <paramref name="dt"/> and hands it to
/// the matching INetworkPredUpdater implementation for a single propagation pass.
/// </summary>
public static void PropogateSingle(this Datatype dt, int correct, WriteToCMDLine write, string s, string s2 = null, double[] other = null, double[] img = null)
{
    Sample entry = new Sample(dt);
    entry.TextInput = s;
    if (s2 != null) { entry.SecondaryText = s2; }
    if (other != null) { entry.ValInput = other; }
    if (img != null) { entry.ImgInput = img; }
    // Scan the assembly for the concrete updater whose datatype matches.
    var contract = typeof(INetworkPredUpdater);
    var candidates = contract.Assembly.GetTypes().Where(y => !y.IsInterface).Where(x => contract.IsAssignableFrom(x)).ToList();
    foreach (var candidate in candidates)
    {
        var updater = (INetworkPredUpdater)Activator.CreateInstance(candidate, entry);
        if (updater.datatype != dt)
        {
            continue;
        }
        entry.DesiredOutput = new double[updater.Network.Layers.Last().Biases.Count()];
        entry.DesiredOutput[correct] = 1; // one-hot target at the correct index
        updater.Propogate(entry, write);
        break;
    }
}
/// <summary>
/// Loads the saved Alpha2 network: first from My Documents, then from an
/// embedded assembly resource, finally falling back to a freshly built one.
/// </summary>
public static Alpha2 LoadAlpha(this Datatype datatype, WriteToCMDLine write)
{
    const string fn = "AlphaNetwork.bin";
    string Folder = "NeuralNets".GetMyDocs();
    if (Directory.Exists(Folder))
    {
        var doc = Directory.GetFiles(Folder).Where(x => x.Contains(fn)).FirstOrDefault();
        if (doc != null)
        {
            write("Alpha read from My Docs");
            return ReadFromBinaryFile<Alpha2>(doc);
        }
    }
    var assembly = typeof(ReadWriteNeuralNetwork).GetTypeInfo().Assembly;
    var name = assembly.GetManifestResourceNames().Where(x => x.Contains(fn)).FirstOrDefault();
    if (name != null)
    {
        using (Stream stream = assembly.GetManifestResourceStream(name))
        {
            // NOTE(review): BinaryFormatter is obsolete and unsafe for
            // untrusted data; the saved-network format depends on it, so it
            // is kept as-is here.
            var binaryFormatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
            write("Alpha Read from Assembly");
            return (Alpha2)binaryFormatter.Deserialize(stream);
        }
    }
    write("Alpha Not Found. New Network Created");
    return new Alpha2(write);
}
/// <summary>
/// Builds the attention/value network pair for this filter.
/// </summary>
internal AlphaFilter3(WriteToCMDLine write)
{
    int window = 1 + (2 * Radius); // characters covered by the sliding window
    AttentionNetwork = new NeuralNetwork(Datatype.Alpha);
    ValueNetwork = new NeuralNetwork(Datatype.Alpha);
    AttentionNetwork.Layers.Add(new Layer(1, window, Activation.Linear));
    ValueNetwork.Layers.Add(new Layer(Size, window * CharSet.CharCount, Activation.LRelu, 1e-5, 1e-5));
    ValueNetwork.Layers.Add(new Layer(Size, ValueNetwork.Layers.Last().Weights.GetLength(0), Activation.LRelu, 1e-5, 1e-5));
}
/// <summary>
/// Builds the My Documents path for <paramref name="Subdir"/>, echoes it via
/// <paramref name="wo"/>, and returns it (the folder is not created here).
/// </summary>
public static string GetMyDocs(this string Subdir, WriteToCMDLine wo)
{
    string myDocs = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments);
    string subdir = $"{myDocs}\\{Subdir}";
    wo(subdir);
    return subdir;
}
/// <summary>
/// Builds the attention/value network pair for the short-term word filter.
/// </summary>
internal ShortTermWordFilter(WriteToCMDLine write)
{
    int window = 1 + (2 * Radius); // characters covered by the sliding window
    AttentionNetwork = new NeuralNetwork(Datatype.Alpha);
    ValueNetwork = new NeuralNetwork(Datatype.Alpha);
    AttentionNetwork.Layers.Add(new Layer(1, CharSet.LetterCount * window, Activation.Linear));
    ValueNetwork.Layers.Add(new Layer(Size, CharSet.CharCount * window, Activation.LRelu, 1e-5, 1e-5));
    ValueNetwork.Layers.Add(new Layer(Size, ValueNetwork.Layers.Last().Weights.GetLength(0), Activation.LRelu, 1e-5, 1e-5));
}
/// <summary>
/// Writes the current error statistics to the command line, then resets the
/// current-error accumulator (the other counters are left untouched).
/// </summary>
public void Show(WriteToCMDLine write)
{
    write($"Current Error {CurError}");
    write($"Run Error {RunError}");
    write($"Previous Error {PrevError}");
    write($"Travel {Travel}");
    CurError = 0;
}
/// <summary>
/// Assembles the full filter stack in evaluation order.
/// </summary>
internal Alpha2(WriteToCMDLine write)
{
    this.Filters = new List<IAlphaFilter>
    {
        new AlphaFilter1(write),
        new AlphaFilter2(write),
        new AlphaFilter3(write),
        new LongTermWordFilter(write),
        new ShortTermWordFilter(write),
    };
}
/// <summary>
/// Writes "label : v0, v1, ..." to the command line.
/// </summary>
/// <remarks>
/// FIX: the old hand-rolled loop seeded the string with FirstOrDefault, which
/// prints default(t) (e.g. "0") for an empty value-type array; string.Join
/// prints nothing for an empty array instead.
/// </remarks>
public static void WriteArray <t>(this t[] values, string label, WriteToCMDLine write)
{
    write(label + " : " + string.Join(", ", values));
}
/// <summary>
/// Writes "label : v0, v1, ..." to the command line.
/// </summary>
/// <remarks>
/// FIX: the old loop appended ", " after every value, leaving a dangling
/// separator after the last one; string.Join drops it and makes the output
/// format consistent with WriteArray.
/// </remarks>
public static void show(this string label, double[] values, WriteToCMDLine write)
{
    write(label + " : " + string.Join(", ", values));
}
/// <summary>
/// Inverted dropout: zeroes entries via a random binomial mask and rescales
/// the survivors by 1/(1-rate) so the expected activation is unchanged.
/// NOTE(review): rate == 1 divides by zero — callers appear to pass fractional
/// rates; confirm. <paramref name="write"/> is unused but kept for signature
/// compatibility.
/// </summary>
private static double[] DropOut(double[] input, double rate, WriteToCMDLine write)
{
    var mask = input.RandomBinomial(rate);
    double scale = 1 - rate;
    double[] kept = new double[input.Count()];
    for (int i = 0; i < kept.Count(); i++)
    {
        kept[i] = input[i] * mask[i] / scale;
    }
    return kept;
}
/// <summary>
/// Resolves this datatype's My Documents folder, creating it if absent.
/// </summary>
/// <returns>The full folder path.</returns>
public static string CreateFolder(this Datatype dt, WriteToCMDLine wo)
{
    string folder = dt.ToString().GetMyDocs(wo);
    // Directory.CreateDirectory is a no-op when the folder already exists,
    // so the separate Exists check was redundant (and racy).
    Directory.CreateDirectory(folder);
    return folder;
}
/// <summary>
/// Scores <paramref name="val"/> through the stonk + context networks and
/// returns the index of the strongest output class.
/// <paramref name="write"/> is unused but kept for signature compatibility.
/// </summary>
public int Predict(ValueSet val, WriteToCMDLine write)
{
    var stonk = new Stonk();
    var context = new StonkContext(Datatype.AAPL);
    var comparisons = Comparison.GenerateComparisons(val);
    var outputs = Network.Forward(stonk.Forward(comparisons, context));
    return outputs.ToList().IndexOf(outputs.Max());
}
/// <summary>
/// Loads the Alpha network from disk, or builds the default three-layer
/// LRelu/LRelu/Linear topology when no saved network exists.
/// </summary>
internal Alpha(WriteToCMDLine write)
{
    Network = Datatype.Alpha.LoadNetwork(write);
    if (Network.Datatype != Datatype.None)
    {
        return; // saved network loaded successfully
    }
    Network = new NeuralNetwork(Datatype.Alpha);
    int window = ((2 * SearchRange) + 1) * CharSet.CharCount;
    Network.Layers.Add(new Layer(DictSize, window, Activation.LRelu, 1e-5, 1e-5));
    Network.Layers.Add(new Layer(DictSize, Network.Layers.Last().Weights.GetLength(0), Activation.LRelu, 1e-5, 1e-5));
    Network.Layers.Add(new Layer(DictSize, Network.Layers.Last().Weights.GetLength(0), Activation.Linear, 1e-5, 1e-5));
}
/// <summary>
/// Serializes this model to .pat text: a title line, a type header, then one
/// text line per model line. <paramref name="write"/> is unused but kept for
/// signature compatibility.
/// </summary>
public List <string> GetText(string title, WriteToCMDLine write)
{
    this.Reframe();
    var text = new List<string>
    {
        // Title is the file name without directory or extension.
        "*" + title.Split('\\').Last().Split('.').First() + ", Scalar is " + scalar,
        ";%TYPE=MODEL,",
    };
    foreach (var line in this.Lines)
    {
        text.Add(GetLineText(line));
    }
    return text;
}
/// <summary>
/// Encodes <paramref name="s"/> through the Alpha embedding and runs the
/// result through this datatype's prediction network.
/// </summary>
/// <returns>The raw output activations of the final layer.</returns>
public static double[] Predict(string s, WriteToCMDLine write)
{
    NeuralNetwork net = GetNetwork(write);
    Alpha alpha = new Alpha(write);
    AlphaContext context = new AlphaContext(datatype, write);
    double[] values = alpha.Forward(s, context);
    values.WriteArray("Alpha Results : ", write);
    foreach (var layer in net.Layers)
    {
        values = layer.Output(values);
    }
    return values;
}
/// <summary>
/// Loads the saved network for this datatype, or builds the default topology
/// sized to the Uniformat category count when nothing is on disk.
/// </summary>
public static NeuralNetwork GetNetwork(WriteToCMDLine write)
{
    var size = Enum.GetNames(typeof(Uniformat)).Length;
    NeuralNetwork net = datatype.LoadNetwork(write);
    if (net.Datatype != Datatype.None)
    {
        return net; // saved network found
    }
    net = new NeuralNetwork(datatype);
    net.Layers.Add(new Layer(Alpha.DictSize, Alpha.DictSize, Activation.LRelu, 1e-5, 1e-5));
    net.Layers.Add(new Layer(Alpha.DictSize, net.Layers.Last().Weights.GetLength(0), Activation.LRelu, 1e-5, 1e-5));
    net.Layers.Add(new Layer(size, net.Layers.Last().Weights.GetLength(0), Activation.CombinedCrossEntropySoftmax));
    return net;
}
/// <summary>
/// Loads the saved network for this datatype, or builds the default topology
/// sized from the Alpha2 filter stack when nothing is on disk.
/// </summary>
public static NeuralNetwork GetNetwork(WriteToCMDLine write)
{
    Alpha2 alpha = new Alpha2(CMDLibrary.WriteNull);
    NeuralNetwork net = datatype.LoadNetwork(write);
    if (net.Datatype != Datatype.None)
    {
        return net; // saved network found
    }
    net = new NeuralNetwork(datatype);
    net.Layers.Add(new Layer(100, alpha.GetSize(), Activation.LRelu, 1e-5, 1e-5));
    net.Layers.Add(new Layer(100, net.Layers.Last().Weights.GetLength(0), Activation.LRelu, 1e-5, 1e-5));
    net.Layers.Add(new Layer(40, net.Layers.Last().Weights.GetLength(0), Activation.CombinedCrossEntropySoftmax));
    return net;
}
/// <summary>
/// Re-trains the network on a batch of samples when the current prediction for
/// <paramref name="s"/> is wrong (or when <paramref name="tf"/> forces it),
/// reporting pre- and post-training error.
/// </summary>
/// <returns>The post-training error total (0 when no training ran).</returns>
public static double Propogate (Sample s, WriteToCMDLine write, bool tf = false)
{
    double error = 0;
    // BUG FIX: "error +=" from inside Parallel.For raced; double addition is
    // not atomic, so contributions could be silently lost. Guard with a lock.
    object errorGate = new object();
    var Pred = Predict(s.TextInput, CMDLibrary.WriteNull);
    if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != Pred.ToList().IndexOf(Pred.Max()) || tf)
    {
        NeuralNetwork net = GetNetwork(write);
        var Samples = s.ReadSamples(24);
        Alpha a = new Alpha(write);
        AlphaContext ctxt = new AlphaContext(datatype, write);
        NetworkMem NetMem = new NetworkMem(net);
        NetworkMem AlphaMem = new NetworkMem(a.Network);
        NetworkMem CtxtMem = new NetworkMem(ctxt.Network);
        Parallel.For(0, Samples.Count(), j =>
        {
            AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
            var output = a.Forward(Samples[j].TextInput, ctxt, am);
            var F = net.Forward(output, dropout, write);
            var sampleError = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
            lock (errorGate) { error += sampleError; }
            var DValues = net.Backward(F, Samples[j].DesiredOutput, NetMem, write);
            a.Backward(Samples[j].TextInput, DValues, ctxt, am, AlphaMem, CtxtMem);
        });
        NetMem.Update(Samples.Count(), 0.00001, net);
        AlphaMem.Update(Samples.Count(), 0.00001, a.Network);
        CtxtMem.Update(Samples.Count(), 0.00001, ctxt.Network);
        write("Pre Training Error : " + error);
        net.Save();
        a.Network.Save();
        ctxt.Network.Save(datatype);
        error = 0;
        // Second pass: forward-only re-evaluation after the weight update.
        Parallel.For(0, Samples.Count(), j =>
        {
            AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
            var output = a.Forward(Samples[j].TextInput, ctxt, am);
            var F = net.Forward(output, dropout, write);
            var sampleError = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
            lock (errorGate) { error += sampleError; }
        });
        write("Post Training Error : " + error);
        s.Save();
    }
    return(error);
}
/// <summary>
/// Trains this network (plus the shared Alpha embedding and both context
/// networks) for five epochs when the current prediction for
/// <paramref name="s"/> disagrees with its desired output, then saves all of
/// them and the sample.
/// </summary>
public void Propogate (Sample s, WriteToCMDLine write)
{
    var check = Predict(s);
    if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != check.ToList().IndexOf(check.Max()))
    {
        Alpha a = new Alpha(write);
        AlphaContext ctxt1 = new AlphaContext(datatype, write);
        AlphaContext ctxt2 = new AlphaContext(datatype, write, 1);
        var Samples = s.ReadSamples();
        List <string> lines = new List <string>();
        // BUG FIX: List<string> is not thread-safe; calling AddRange from
        // inside Parallel.For could corrupt the list. Serialize with a lock.
        object linesGate = new object();
        for (int i = 0; i < 5; i++)
        {
            NetworkMem ObjMem = new NetworkMem(Network);
            NetworkMem AlphaMem = new NetworkMem(a.Network);
            NetworkMem CtxtMem1 = new NetworkMem(ctxt1.Network);
            NetworkMem CtxtMem2 = new NetworkMem(ctxt2.Network);
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                Samples[j].TextOutput = a.Forward(Samples[j].TextInput, ctxt1, am);
                AlphaMem am2 = new AlphaMem(Samples[j].SecondaryText.ToCharArray());
                Samples[j].SecondaryTextOutput = a.Forward(Samples[j].SecondaryText, ctxt2, am2);
                var F = Forward(Samples[j]);
                var sampleLines = Samples[j].OutputError(CategoricalCrossEntropy.Forward(F.Last(), Samples[j].DesiredOutput));
                lock (linesGate) { lines.AddRange(sampleLines); }
                var DValues = Backward(Samples[j], F, ObjMem);
                // First DictSize entries feed the primary context; the last
                // DictSize entries (kept in original order) feed the secondary.
                var DV1 = DValues.ToList().Take(Alpha.DictSize).ToArray();
                var DV2 = Enumerable.Reverse(DValues).Take(Alpha.DictSize).Reverse().ToArray();
                a.Backward(Samples[j].TextInput, DV1, ctxt1, am, AlphaMem, CtxtMem1);
                a.Backward(Samples[j].SecondaryText, DV2, ctxt2, am2, AlphaMem, CtxtMem2);
            });
            ObjMem.Update(1, 0.0001, Network);
            AlphaMem.Update(1, 0.00001, a.Network);
            CtxtMem1.Update(1, 0.0001, ctxt1.Network);
            CtxtMem2.Update(1, 0.0001, ctxt2.Network);
        }
        lines.ShowErrorOutput();
        Network.Save();
        a.Network.Save();
        ctxt1.Save();
        ctxt2.Save();
        s.Save();
    }
}
/// <summary>
/// Picks 1000 random saved samples from the chosen folder and re-propagates
/// each through the Masterformat network, reporting a running average of the
/// nonzero errors.
/// </summary>
public static void RunPredictions(WriteToCMDLine write)
{
    OpenFileDialog ofd = new OpenFileDialog()
    {
        FileName = "Select a binary file",
        Filter = "BIN files (*.bin)|*.bin",
        Title = "Open bin file"
    };
    if (ofd.ShowDialog() == DialogResult.OK)
    {
        int runs = 0;
        double er = 0;
        var dir = Path.GetDirectoryName(ofd.FileName);
        var Files = Directory.GetFiles(dir);
        Random random = new Random();
        for (int i = 0; i < 1000; i++)
        {
            // Sampling with replacement: the same file may be picked twice.
            string f = Files[random.Next(Files.Length)];
            try
            {
                Sample s = f.ReadFromBinaryFile <Sample>();
                // FIX: removed the unused local "string datatype = s.Datatype;".
                var error = MasterformatNetwork.Propogate(s, write, true);
                if (error > 0)
                {
                    runs++;
                    er += error;
                    write("Total Error : " + er);
                    write("Total Runs : " + runs);
                    write("Error : " + er / runs);
                    write("");
                }
                else
                {
                    write("");
                    write("error was 0");
                    write("");
                }
            }
            catch (Exception e)
            {
                e.OutputError();
            }
        }
    }
}
/// <summary>
/// Finds the INetworkPredUpdater whose datatype name matches this sample's
/// and runs a single propagation pass with it.
/// </summary>
public static void PropogateSingle(this Sample entry, WriteToCMDLine write)
{
    var contract = typeof(INetworkPredUpdater);
    var candidates = contract.Assembly.GetTypes().Where(y => !y.IsInterface).Where(x => contract.IsAssignableFrom(x)).ToList();
    foreach (var candidate in candidates)
    {
        var updater = (INetworkPredUpdater)Activator.CreateInstance(candidate, entry);
        if (updater.datatype.ToString() == entry.Datatype)
        {
            updater.Propogate(entry, write);
            break;
        }
    }
}
/// <summary>
/// Runs a one-off prediction of <paramref name="phrase"/> for the named
/// datatype (only Masterformat is explicitly handled) and echoes the result.
/// </summary>
public static void TestPredictions(string phrase, string dt, WriteToCMDLine write)
{
    // Exact-name membership check (not TryParse) so numeric strings are rejected.
    if (!Enum.GetNames(typeof(Datatype)).Any(x => dt == x))
    {
        return;
    }
    Datatype dtype = (Datatype)Enum.Parse(typeof(Datatype), dt);
    switch (dtype)
    {
        default:
        case Datatype.Masterformat:
            var outputs = MasterformatNetwork.Predict(phrase, write);
            var division = outputs.ToList().IndexOf(outputs.Max());
            try
            {
                outputs.WriteArray("Values", write);
            }
            catch (Exception e)
            {
                e.OutputError();
            }
            write(phrase + " : Division Number : " + division.ToString());
            break;
    }
}
/// <summary>
/// Trains the AAPL network (plus the stonk and context networks) on a batch
/// of value sets, reporting sample count and loss, then saves all three.
/// </summary>
public void Propogate (ValueSet val, WriteToCMDLine write)
{
    Stonk stk = new Stonk();
    StonkContext ctxt = new StonkContext(datatype);
    var vals = val.ReadValues(Datatypes.Datatype.AAPL, 24);
    NetworkMem AAPLMem = new NetworkMem(Network);
    NetworkMem StkMem = new NetworkMem(stk.Network);
    NetworkMem CtxtMem = new NetworkMem(ctxt.Network);
    double e = 0;
    // BUG FIX: "e +=" from inside Parallel.For raced (double addition is not
    // atomic); accumulate under a lock so no loss contribution is dropped.
    object lossGate = new object();
    Parallel.For(0, vals.Count(), j =>
    {
        try
        {
            List <Comparison> comps = Comparison.GenerateComparisons(vals[j]);
            if (j == 0 || j == 1) { write("Comparisons : " + comps.Count()); }
            StonkMem sm = new StonkMem(comps.Count());
            var MktOutput = stk.Forward(comps, ctxt, sm);
            var F = Network.Forward(MktOutput, dropout, write);
            var output = new double[2];
            int opnumb = vals[j].Increase ? 1 : 0; // class 1 = price increase
            output[opnumb] = 1;
            var Error = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), output);
            lock (lossGate) { e += Error.Max(); }
            var D = Network.Backward(F, output, AAPLMem, write);
            stk.Backward(D, ctxt, sm, StkMem, CtxtMem);
        }
        // NOTE(review): deliberate best-effort — failing samples are skipped
        // silently; consider logging the exception.
        catch { }
    });
    write("Samples : " + vals.Count());
    write("Loss : " + e);
    AAPLMem.Update(vals.Count(), 1e-4, Network);
    StkMem.Update(vals.Count(), 1e-4, stk.Network);
    CtxtMem.Update(vals.Count(), 1e-4, ctxt.Network);
    Network.Save();
    stk.Network.Save();
    ctxt.Save();
}
/// <summary>
/// Trains the command network against the sample dictionary, reporting pre-
/// and post-training error and saving every updated network.
/// </summary>
/// <returns>The post-training error total.</returns>
public static double Propogate (WriteToCMDLine write)
{
    double error = 0;
    // BUG FIX: "error +=" from inside Parallel.For raced; guard with a lock.
    object errorGate = new object();
    NeuralNetwork net = GetNetwork(write);
    var Samples = ReadVals(24);
    // PERF FIX: Keys.ToList()/Values.ToList() were rebuilt on EVERY access
    // inside the parallel loops; materialize once up front (same order, since
    // both come from the same unmodified dictionary).
    var keys = Samples.Keys.ToList();
    var targets = Samples.Values.ToList();
    Alpha a = new Alpha(write);
    AlphaContext ctxt = new AlphaContext(datatype, write);
    NetworkMem OLFMem = new NetworkMem(net);
    NetworkMem AlphaMem = new NetworkMem(a.Network);
    NetworkMem CtxtMem = new NetworkMem(ctxt.Network);
    Parallel.For(0, Samples.Count(), j =>
    {
        AlphaMem am = new AlphaMem(keys[j].ToCharArray());
        var output = a.Forward(keys[j], ctxt, am);
        var F = net.Forward(output, dropout, write);
        var desired = new double[Enum.GetNames(typeof(Command)).Length];
        desired[targets[j]] = 1; // one-hot target
        var sampleError = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), desired).Max();
        lock (errorGate) { error += sampleError; }
        var DValues = net.Backward(F, desired, OLFMem, write);
        a.Backward(keys[j], DValues, ctxt, am, AlphaMem, CtxtMem);
    });
    OLFMem.Update(Samples.Count(), 0.0001, net);
    AlphaMem.Update(Samples.Count(), 0.0001, a.Network);
    CtxtMem.Update(Samples.Count(), 0.0001, ctxt.Network);
    write("Pre Training Error : " + error);
    net.Save();
    a.Network.Save();
    ctxt.Network.Save(datatype);
    error = 0;
    // Second pass: forward-only re-evaluation after the weight update.
    Parallel.For(0, Samples.Count(), j =>
    {
        AlphaMem am = new AlphaMem(keys[j].ToCharArray());
        var output = a.Forward(keys[j], ctxt, am);
        var F = net.Forward(output, dropout, write);
        var desired = new double[Enum.GetNames(typeof(Command)).Length];
        desired[targets[j]] = 1;
        var sampleError = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), desired).Max();
        lock (errorGate) { error += sampleError; }
    });
    write("Post Training Error : " + error);
    return(error);
}
/// <summary>
/// Trains this datatype's network against a 24-sample batch, reporting pre-
/// and post-training error and saving every updated network.
/// </summary>
/// <param name="tf">Unused here; kept for signature compatibility.</param>
/// <returns>The post-training error total.</returns>
public static double Propogate (WriteToCMDLine write, bool tf = false)
{
    double error = 0;
    // BUG FIX: "error +=" from inside Parallel.For raced; double addition is
    // not atomic, so contributions could be silently lost. Guard with a lock.
    object errorGate = new object();
    NeuralNetwork net = GetNetwork(write);
    var Samples = datatype.ReadSamples(24);
    Alpha a = new Alpha(write);
    AlphaContext ctxt = new AlphaContext(datatype, write);
    NetworkMem OLFMem = new NetworkMem(net);
    NetworkMem AlphaMem = new NetworkMem(a.Network);
    NetworkMem CtxtMem = new NetworkMem(ctxt.Network);
    Parallel.For(0, Samples.Count(), j =>
    {
        AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
        var output = a.Forward(Samples[j].TextInput, ctxt, am);
        var F = net.Forward(output, dropout, write);
        var sampleError = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
        lock (errorGate) { error += sampleError; }
        var DValues = net.Backward(F, Samples[j].DesiredOutput, OLFMem, write);
        a.Backward(Samples[j].TextInput, DValues, ctxt, am, AlphaMem, CtxtMem);
    });
    OLFMem.Update(Samples.Count(), 0.0001, net);
    AlphaMem.Update(Samples.Count(), 0.0001, a.Network);
    CtxtMem.Update(Samples.Count(), 0.0001, ctxt.Network);
    write("Pre Training Error : " + error);
    net.Save();
    a.Network.Save();
    // NOTE(review): saves under Datatype.OccupantLoadFactor although the
    // context was built from "datatype" — correct only if they coincide; verify.
    ctxt.Network.Save(Datatype.OccupantLoadFactor);
    error = 0;
    // Second pass: forward-only re-evaluation after the weight update.
    Parallel.For(0, Samples.Count(), j =>
    {
        AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
        var output = a.Forward(Samples[j].TextInput, ctxt, am);
        var F = net.Forward(output, dropout, write);
        var sampleError = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
        lock (errorGate) { error += sampleError; }
    });
    write("Post Training Error : " + error);
    return(error);
}
/// <summary>
/// Builds a value-input sample and returns the prediction from the updater
/// registered for <paramref name="dt"/>, or null when none matches.
/// </summary>
public static double[] PredictMulti(this Datatype dt, double[] input, WriteToCMDLine write)
{
    Sample entry = new Sample(dt);
    entry.ValInput = input;
    var contract = typeof(INetworkPredUpdater);
    var candidates = contract.Assembly.GetTypes().Where(y => !y.IsInterface).Where(x => contract.IsAssignableFrom(x)).ToList();
    foreach (var candidate in candidates)
    {
        var updater = (INetworkPredUpdater)Activator.CreateInstance(candidate, entry);
        if (updater.datatype == dt)
        {
            return updater.Predict(entry);
        }
    }
    return null;
}