        public void Backward
            (string s, double[] DValues, AlphaMem am, NetworkMem ValMem, NetworkMem FocMem)
        {
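            // am.DLocation splits the incoming gradient into per-character gradients for the value network;
            // am.DGlobalContext extracts the gradient routed back through the softmax attention weights.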
            var LocDValues = am.DLocation(DValues);

            DValues = am.DGlobalContext(DValues);
            DValues = Activations.InverseSoftMax(DValues, am.GlobalContextOutputs);
            Parallel.For(0, s.Length, j =>
            {
                var ldv      = LocDValues[j];
                double[] cdv = new double[1] { DValues[j] / s.Length };
                for (int i = ValueNetwork.Layers.Count() - 1; i >= 0; i--)
                {
                    ldv = ValMem.Layers[i].DActivation(ldv, am.LocationOutputs[j][i + 1]);
                    ValMem.Layers[i].DBiases(ldv, ValueNetwork.Layers[i], s.Length);
                    ValMem.Layers[i].DWeights(ldv, am.LocationOutputs[j][i], ValueNetwork.Layers[i], s.Length);
                    ldv = ValMem.Layers[i].DInputs(ldv, ValueNetwork.Layers[i]);
                }
                for (int i = AttentionNetwork.Layers.Count() - 1; i >= 0; i--)
                {
                    try
                    {
                        cdv = FocMem.Layers[i].DActivation(cdv, am.LocalContextOutputs[j][i + 1]);
                        FocMem.Layers[i].DBiases(cdv, AttentionNetwork.Layers[i], s.Length);
                        FocMem.Layers[i].DWeights(cdv, am.LocalContextOutputs[j][i], AttentionNetwork.Layers[i], s.Length);
                        cdv = FocMem.Layers[i].DInputs(cdv, AttentionNetwork.Layers[i]);
                    }
                    catch (Exception e) { e.OutputError(); }
                }
            });
        }
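The Forward/Backward pair above leans on Activations.SoftMax and Activations.InverseSoftMax, which none of these examples define. A minimal sketch of what the pair has to compute, assuming the standard softmax and its Jacobian-vector product (hypothetical class name; the project's actual implementation may differ):

using System;
using System.Linq;

public static class SoftMaxSketch
{
    // Numerically stable softmax over the raw attention scores.
    public static double[] SoftMax(double[] x)
    {
        double max = x.Max();                                   // guard against overflow in Exp
        double[] e = x.Select(v => Math.Exp(v - max)).ToArray();
        double sum = e.Sum();
        return e.Select(v => v / sum).ToArray();
    }

    // Gradient through softmax: dL/dx_i = y_i * (dL/dy_i - sum_k y_k * dL/dy_k).
    public static double[] InverseSoftMax(double[] dvalues, double[] scores)
    {
        double[] y = SoftMax(scores);
        double dot = 0;
        for (int k = 0; k < y.Length; k++) { dot += y[k] * dvalues[k]; }
        return y.Select((yi, i) => yi * (dvalues[i] - dot)).ToArray();
    }
}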
        public double[] Forward(string s, AlphaMem am)
        {
            try
            {
                double[,] loc = new double[s.Length, Size];
                Parallel.For(0, s.Length, j =>
                {
                    am.LocalContextOutputs[j].Add(s.LocatePhrase(j, Radius));
                    for (int i = 0; i < AttentionNetwork.Layers.Count(); i++)
                    {
                        am.LocalContextOutputs[j].Add
                            (AttentionNetwork.Layers[i].Output(am.LocalContextOutputs[j].Last()));
                    }

                    am.LocationOutputs[j].Add(s.Locate(j, Radius));
                    for (int i = 0; i < ValueNetwork.Layers.Count(); i++)
                    {
                        am.LocationOutputs[j].Add
                            (ValueNetwork.Layers[i].Output(am.LocationOutputs[j].Last()));
                    }

                    loc.SetRank(am.LocationOutputs[j].Last(), j);
                    am.GlobalContextOutputs[j] = am.LocalContextOutputs[j].Last().First();
                });
                return(loc.Multiply(Activations.SoftMax(am.GlobalContextOutputs)));
            }
            catch (Exception e) { e.OutputError(); }
            return(null);
        }
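Forward also uses two matrix extensions, SetRank and Multiply, whose definitions never appear in these examples. Under assumed signatures matching the call sites, they reduce to a row write and an attention-weighted row sum:

public static class MatrixSketch
{
    // Copy a vector into row j of the matrix (assumed meaning of SetRank).
    public static void SetRank(this double[,] m, double[] row, int j)
    {
        for (int k = 0; k < row.Length; k++) { m[j, k] = row[k]; }
    }

    // Weighted sum of rows: result[k] = sum over j of m[j, k] * weights[j],
    // collapsing the per-character vectors into a single Size-length output.
    public static double[] Multiply(this double[,] m, double[] weights)
    {
        var result = new double[m.GetLength(1)];
        for (int j = 0; j < weights.Length; j++)
        {
            for (int k = 0; k < result.Length; k++) { result[k] += m[j, k] * weights[j]; }
        }
        return result;
    }
}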
Example #3
 public double Contextualize(string s, int c, AlphaMem am)
 {
     am.LocalContextOutputs[c].Add(CharSet.Locate(s, c, SearchRange));
     for (int i = 0; i < Network.Layers.Count(); i++)
     {
         am.LocalContextOutputs[c].Add(Network.Layers[i].Output(am.LocalContextOutputs[c].Last()));
     }
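     // the final layer presumably has a single neuron; First() reads that scalar context score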
     return(am.LocalContextOutputs[c].Last().First());
 }
Example #4
 private double[] Locate(double[] values, int j, AlphaMem am)
 {
     for (int i = 0; i < ValueNetwork.Layers.Count(); i++)
     {
         values = ValueNetwork.Layers[i].Output(values);
         am.LocationOutputs[j].Add(values);
     }
     return(values);
 }
Example #5
 private double ScoreAttention(double[] values, int j, AlphaMem am)
 {
     for (int i = 0; i < AttentionNetwork.Layers.Count(); i++)
     {
         values = AttentionNetwork.Layers[i].Output(values);
         am.LocalContextOutputs[j].Add(values);
     }
     return(values.First());
 }
Example #6
        public static double Propogate
            (Sample s, WriteToCMDLine write, bool tf = false)
        {
            double error = 0;
            var    Pred  = Predict(s.TextInput, CMDLibrary.WriteNull);

            if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != Pred.ToList().IndexOf(Pred.Max()) || tf)
            {
                NeuralNetwork net      = GetNetwork(write);
                var           Samples  = s.ReadSamples(24);
                Alpha         a        = new Alpha(write);
                AlphaContext  ctxt     = new AlphaContext(datatype, write);
                NetworkMem    NetMem   = new NetworkMem(net);
                NetworkMem    AlphaMem = new NetworkMem(a.Network);
                NetworkMem    CtxtMem  = new NetworkMem(ctxt.Network);

                Parallel.For(0, Samples.Count(), j =>
                {
                    AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                    var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                    var F       = net.Forward(output, dropout, write);
                    error      += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();

                    var DValues = net.Backward(F, Samples[j].DesiredOutput, NetMem, write);
                    a.Backward(Samples[j].TextInput, DValues, ctxt, am, AlphaMem, CtxtMem);
                });
                NetMem.Update(Samples.Count(), 0.00001, net);
                AlphaMem.Update(Samples.Count(), 0.00001, a.Network);
                CtxtMem.Update(Samples.Count(), 0.00001, ctxt.Network);
                write("Pre Training Error : " + error);

                net.Save();
                a.Network.Save();
                ctxt.Network.Save(datatype);

                error = 0;
                Parallel.For(0, Samples.Count(), j =>
                {
                    AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                    var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                    var F       = net.Forward(output, dropout, write);
                    error      += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                });
                write("Post Training Error : " + error);

                s.Save();
            }
            return(error);
        }
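One caveat with this example (and the later Propogate variants that repeat the pattern): `error += ...` inside Parallel.For races on the shared double, so the reported totals can drop updates. A thread-safe version of the evaluation loop, sketched with Parallel.For's localInit/localFinally overload and the same names as above:

                object gate = new object();
                Parallel.For(0, Samples.Count(),
                    () => 0.0,                                   // per-thread partial error
                    (j, state, local) =>
                    {
                        AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                        var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                        var F       = net.Forward(output, dropout, write);
                        return local + CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                    },
                    local => { lock (gate) { error += local; } }); // merge partials once per thread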
Example #7
 public double[] Forward(string s, AlphaMem am)
 {
     double[] ctxt = new double[s.Length];
     double[,] loc = new double[s.Length, Size];
     Parallel.For(0, s.Length, j =>
     {
         var vals = s.Locate(j, Radius);
         am.LocationOutputs[j].Add(vals);
         am.LocalContextOutputs[j].Add(vals);
         ctxt[j]    = ScoreAttention(vals, j, am);
         var output = Locate(vals, j, am);
         loc.SetRank(output, j);
     });
     return(loc.Multiply(Activations.SoftMax(ctxt)));
 }
Example #8
        public void Propogate
            (Sample s, WriteToCMDLine write)
        {
            var check = Predict(s);

            if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != check.ToList().IndexOf(check.Max()))
            {
                Alpha         a       = new Alpha(write);
                AlphaContext  ctxt1   = new AlphaContext(datatype, write);
                AlphaContext  ctxt2   = new AlphaContext(datatype, write, 1);
                var           Samples = s.ReadSamples();
                List <string> lines   = new List <string>();
                for (int i = 0; i < 5; i++)
                {
                    NetworkMem ObjMem   = new NetworkMem(Network);
                    NetworkMem AlphaMem = new NetworkMem(a.Network);
                    NetworkMem CtxtMem1 = new NetworkMem(ctxt1.Network);
                    NetworkMem CtxtMem2 = new NetworkMem(ctxt2.Network);

                    Parallel.For(0, Samples.Count(), j =>
                    {
                        AlphaMem am                    = new AlphaMem(Samples[j].TextInput.ToCharArray());
                        Samples[j].TextOutput          = a.Forward(Samples[j].TextInput, ctxt1, am);
                        AlphaMem am2                   = new AlphaMem(Samples[j].SecondaryText.ToCharArray());
                        Samples[j].SecondaryTextOutput = a.Forward(Samples[j].SecondaryText, ctxt2, am2);
                        var F = Forward(Samples[j]);
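                        // note: List<string>.AddRange is not thread-safe; concurrent calls from Parallel.For can corrupt 'lines'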
                        lines.AddRange(Samples[j].OutputError(CategoricalCrossEntropy.Forward(F.Last(), Samples[j].DesiredOutput)));

                        var DValues = Backward(Samples[j], F, ObjMem);
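                        // split the output gradient: the first DictSize entries belong to TextInput, the last DictSize to SecondaryText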
                        var DV1     = DValues.ToList().Take(Alpha.DictSize).ToArray();
                        var DV2     = Enumerable.Reverse(DValues).Take(Alpha.DictSize).Reverse().ToArray();
                        a.Backward(Samples[j].TextInput, DV1, ctxt1, am, AlphaMem, CtxtMem1);
                        a.Backward(Samples[j].SecondaryText, DV2, ctxt2, am2, AlphaMem, CtxtMem2);
                    });
                    ObjMem.Update(1, 0.0001, Network);
                    AlphaMem.Update(1, 0.00001, a.Network);
                    CtxtMem1.Update(1, 0.0001, ctxt1.Network);
                    CtxtMem2.Update(1, 0.0001, ctxt2.Network);
                }
                lines.ShowErrorOutput();
                Network.Save();
                a.Network.Save();
                ctxt1.Save();
                ctxt2.Save();

                s.Save();
            }
        }
Example #9
        public static double Propogate
            (WriteToCMDLine write)
        {
            double        error    = 0;
            NeuralNetwork net      = GetNetwork(write);
            var           Samples  = ReadVals(24);
            Alpha         a        = new Alpha(write);
            AlphaContext  ctxt     = new AlphaContext(datatype, write);
            NetworkMem    OLFMem   = new NetworkMem(net);
            NetworkMem    AlphaMem = new NetworkMem(a.Network);
            NetworkMem    CtxtMem  = new NetworkMem(ctxt.Network);

            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples.Keys.ToList()[j].ToCharArray());
                var output  = a.Forward(Samples.Keys.ToList()[j], ctxt, am);
                var F       = net.Forward(output, dropout, write);
                var desired = new double[Enum.GetNames(typeof(Command)).Length];
                desired[Samples.Values.ToList()[j]] = 1;
                error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), desired).Max();

                var DValues = net.Backward(F, desired, OLFMem, write);
                a.Backward(Samples.Keys.ToList()[j], DValues, ctxt, am, AlphaMem, CtxtMem);
            });
            OLFMem.Update(Samples.Count(), 0.0001, net);
            AlphaMem.Update(Samples.Count(), 0.0001, a.Network);
            CtxtMem.Update(Samples.Count(), 0.0001, ctxt.Network);
            write("Pre Training Error : " + error);

            net.Save();
            a.Network.Save();
            ctxt.Network.Save(datatype);

            error = 0;
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples.Keys.ToList()[j].ToCharArray());
                var output  = a.Forward(Samples.Keys.ToList()[j], ctxt, am);
                var F       = net.Forward(output, dropout, write);
                var desired = new double[Enum.GetNames(typeof(Command)).Length];
                desired[Samples.Values.ToList()[j]] = 1;
                error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), desired).Max();
            });
            write("Post Training Error : " + error);
            return(error);
        }
Example #10
        public double[] Forward(string s, AlphaContext context, AlphaMem am)
        {
            double[,] loc = new double[s.Length, DictSize];

            Parallel.For(0, s.Length, j =>
            {
                double[] a = s.Locate(j, SearchRange);
                am.LocationOutputs[j].Add(a);
                for (int i = 0; i < Network.Layers.Count(); i++)
                {
                    a = Network.Layers[i].Output(a);
                    am.LocationOutputs[j].Add(a);
                }
                loc.SetRank(a, j);
                am.GlobalContextOutputs[j] = context.Contextualize(s, j, am);
            });
            return(loc.Multiply(Activations.SoftMax(am.GlobalContextOutputs)));
        }
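None of the examples show how the Locate / LocatePhrase extensions encode text. Purely as an illustration of the expected shape (a fixed-width numeric window of 2 * range + 1 values centered on position j, zero-padded at the edges), the hypothetical encoding below is an assumption, not the project's:

public static class LocateSketch
{
    // Hypothetical stand-in for the Locate extension used throughout;
    // the project's real character encoding is not visible in these examples.
    public static double[] Locate(this string s, int j, int range)
    {
        var window = new double[(2 * range) + 1];
        for (int k = -range; k <= range; k++)
        {
            int idx = j + k;
            window[k + range] = (idx >= 0 && idx < s.Length) ? s[idx] : 0.0;
        }
        return window;
    }
}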
Example #11
        public void Backward(string s, double[] DValues, AlphaContext context, AlphaMem am, NetworkMem mem, NetworkMem CtxtMem)
        {
            var LocDValues = am.DLocation(DValues);

            DValues = am.DGlobalContext(DValues);
            DValues = Activations.InverseSoftMax(DValues, am.GlobalContextOutputs);
            context.Backward(DValues, s.Length, am, CtxtMem);
            Parallel.For(0, s.Length, j =>
            {
                var ldv = LocDValues[j];
                for (int i = Network.Layers.Count() - 1; i >= 0; i--)
                {
                    ldv = mem.Layers[i].DActivation(ldv, am.LocationOutputs[j][i + 1]);
                    mem.Layers[i].DBiases(ldv, Network.Layers[i], s.Length);
                    mem.Layers[i].DWeights(ldv, am.LocationOutputs[j][i], Network.Layers[i], s.Length);
                    ldv = mem.Layers[i].DInputs(ldv, Network.Layers[i]);
                }
            });
        }
Example #12
        public static double Propogate
            (WriteToCMDLine write, bool tf = false)
        {
            double        error    = 0;
            NeuralNetwork net      = GetNetwork(write);
            var           Samples  = datatype.ReadSamples(24);
            Alpha         a        = new Alpha(write);
            AlphaContext  ctxt     = new AlphaContext(datatype, write);
            NetworkMem    OLFMem   = new NetworkMem(net);
            NetworkMem    AlphaMem = new NetworkMem(a.Network);
            NetworkMem    CtxtMem  = new NetworkMem(ctxt.Network);

            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                var F       = net.Forward(output, dropout, write);
                error      += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();

                var DValues = net.Backward(F, Samples[j].DesiredOutput, OLFMem, write);
                a.Backward(Samples[j].TextInput, DValues, ctxt, am, AlphaMem, CtxtMem);
            });
            OLFMem.Update(Samples.Count(), 0.0001, net);
            AlphaMem.Update(Samples.Count(), 0.0001, a.Network);
            CtxtMem.Update(Samples.Count(), 0.0001, ctxt.Network);
            write("Pre Training Error : " + error);

            net.Save();
            a.Network.Save();
            ctxt.Network.Save(Datatype.OccupantLoadFactor);

            error = 0;
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                var F       = net.Forward(output, dropout, write);
                error      += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
            });
            write("Post Training Error : " + error);

            return(error);
        }
Example #13
 public void Backward(double[] DValues, int runs, AlphaMem am, NetworkMem mem)
 {
     Parallel.For(0, runs, j =>
     {
         double[] cdv = new double[1] { DValues[j] / runs };
         for (int i = Network.Layers.Count() - 1; i >= 0; i--)
         {
             try
             {
                 cdv = mem.Layers[i].DActivation(cdv, am.LocalContextOutputs[j][i + 1]);
                 mem.Layers[i].DBiases(cdv, Network.Layers[i], runs);
                 mem.Layers[i].DWeights(cdv, am.LocalContextOutputs[j][i], Network.Layers[i], runs);
                 cdv = mem.Layers[i].DInputs(cdv, Network.Layers[i]);
             }
             catch (Exception e) { e.OutputError(); }
         }
     });
 }
Example #14
 public AlphaMem[] CreateAlphaMemory(string s)
 {
     AlphaMem[] mem = new AlphaMem[Filters.Count];
     Parallel.For(0, Filters.Count, j => mem[j] = new AlphaMem(s.ToCharArray()));
     return(mem);
 }
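A hypothetical call site for CreateAlphaMemory, assuming each entry in Filters exposes the two-argument Forward shown in Example #7:

     var mems = CreateAlphaMemory(s);
     Parallel.For(0, Filters.Count, j => Filters[j].Forward(s, mems[j]));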