Example #1
        public static double Propogate
            (Sample s, WriteToCMDLine write, bool tf = false)
        {
            double error = 0;
            var    Pred  = Predict(s.TextInput, CMDLibrary.WriteNull);

            // Retrain only when the predicted class (argmax) differs from the desired class, or when forced via tf.
            if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != Pred.ToList().IndexOf(Pred.Max()) || tf)
            {
                NeuralNetwork net      = GetNetwork(write);
                var           Samples  = s.ReadSamples(24);
                Alpha         a        = new Alpha(write);
                AlphaContext  ctxt     = new AlphaContext(datatype, write);
                NetworkMem    NetMem   = new NetworkMem(net);
                NetworkMem    AlphaMem = new NetworkMem(a.Network);
                NetworkMem    CtxtMem  = new NetworkMem(ctxt.Network);

                // A lock guards the shared error sum; '+=' on a double is not atomic across Parallel.For workers.
                object errLock = new object();
                Parallel.For(0, Samples.Count(), j =>
                {
                    AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                    var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                    var F       = net.Forward(output, dropout, write);
                    lock (errLock)
                    {
                        error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                    }

                    var DValues = net.Backward(F, Samples[j].DesiredOutput, NetMem, write);
                    a.Backward(Samples[j].TextInput, DValues, ctxt, am, AlphaMem, CtxtMem);
                });
                NetMem.Update(Samples.Count(), 0.00001, net);
                AlphaMem.Update(Samples.Count(), 0.00001, a.Network);
                CtxtMem.Update(Samples.Count(), 0.00001, ctxt.Network);
                write("Pre Training Error : " + error);

                net.Save();
                a.Network.Save();
                ctxt.Network.Save(datatype);

                error = 0;
                Parallel.For(0, Samples.Count(), j =>
                {
                    AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                    var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                    var F       = net.Forward(output, dropout, write);
                    lock (errLock)
                    {
                        error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                    }
                });
                write("Post Training Error : " + error);

                s.Save();
            }
            return error;
        }
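
Note on the pattern above: every Parallel.For worker adds into the shared error variable, which is why the lock was added. An alternative that avoids taking the lock on every iteration is the Parallel.For overload with thread-local state. The sketch below is a self-contained illustration of that pattern; the losses array is invented stand-in data, not part of the original codebase.

        using System;
        using System.Threading.Tasks;

        class LocalStateSum
        {
            static void Main()
            {
                // Invented stand-in for per-sample cross-entropy losses.
                double[] losses = { 0.12, 0.53, 0.08, 0.91, 0.27 };
                double   total     = 0;
                object   totalLock = new object();

                Parallel.For(0, losses.Length,
                    () => 0.0,                                    // localInit: one subtotal per worker
                    (j, state, subtotal) => subtotal + losses[j], // body: accumulate without contention
                    subtotal => { lock (totalLock) { total += subtotal; } }); // localFinally: merge once per worker

                Console.WriteLine("Total error: " + total);
            }
        }

Each worker touches the shared total once, in localFinally, rather than once per sample.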
Example #2
        public void Propogate
            (Sample s, WriteToCMDLine write)
        {
            var check = Predict(s);

            if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != check.ToList().IndexOf(check.Max()))
            {
                Alpha         a       = new Alpha(write);
                AlphaContext  ctxt1   = new AlphaContext(datatype, write);
                AlphaContext  ctxt2   = new AlphaContext(datatype, write, 1);
                var           Samples = s.ReadSamples();
                List <string> lines   = new List <string>();
                // Five training passes over the sample set.
                for (int i = 0; i < 5; i++)
                {
                    NetworkMem ObjMem   = new NetworkMem(Network);
                    NetworkMem AlphaMem = new NetworkMem(a.Network);
                    NetworkMem CtxtMem1 = new NetworkMem(ctxt1.Network);
                    NetworkMem CtxtMem2 = new NetworkMem(ctxt2.Network);

                    // List<T>.AddRange is not thread-safe, so a lock guards the shared 'lines' collector.
                    object lineLock = new object();
                    Parallel.For(0, Samples.Count(), j =>
                    {
                        AlphaMem am                    = new AlphaMem(Samples[j].TextInput.ToCharArray());
                        Samples[j].TextOutput          = a.Forward(Samples[j].TextInput, ctxt1, am);
                        AlphaMem am2                   = new AlphaMem(Samples[j].SecondaryText.ToCharArray());
                        Samples[j].SecondaryTextOutput = a.Forward(Samples[j].SecondaryText, ctxt2, am2);
                        var F = Forward(Samples[j]);
                        lock (lineLock)
                        {
                            lines.AddRange(Samples[j].OutputError(CategoricalCrossEntropy.Forward(F.Last(), Samples[j].DesiredOutput)));
                        }

                        // The first DictSize gradients belong to the primary text, the last DictSize to the secondary text.
                        var DValues = Backward(Samples[j], F, ObjMem);
                        var DV1     = DValues.ToList().Take(Alpha.DictSize).ToArray();
                        var DV2     = Enumerable.Reverse(DValues).Take(Alpha.DictSize).Reverse().ToArray();
                        a.Backward(Samples[j].TextInput, DV1, ctxt1, am, AlphaMem, CtxtMem1);
                        a.Backward(Samples[j].SecondaryText, DV2, ctxt2, am2, AlphaMem, CtxtMem2);
                    });
                    ObjMem.Update(1, 0.0001, Network);
                    AlphaMem.Update(1, 0.00001, a.Network);
                    CtxtMem1.Update(1, 0.0001, ctxt1.Network);
                    CtxtMem2.Update(1, 0.0001, ctxt2.Network);
                }
                lines.ShowErrorOutput();
                Network.Save();
                a.Network.Save();
                ctxt1.Save();
                ctxt2.Save();

                s.Save();
            }
        }
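
In this example the shared lines list is filled from inside Parallel.For, which needs the lock shown above because List<T> is not thread-safe. ConcurrentBag<T> collects results from parallel workers without explicit locking; the sketch below is self-contained, and the messages in it are invented, not from the original codebase.

        using System;
        using System.Collections.Concurrent;
        using System.Linq;
        using System.Threading.Tasks;

        class ConcurrentCollector
        {
            static void Main()
            {
                // Thread-safe collector standing in for the 'lines' list above.
                var bag = new ConcurrentBag<string>();

                Parallel.For(0, 8, j =>
                {
                    // Add is safe to call from any worker; no lock required.
                    bag.Add("sample " + j + ": error " + (j * 0.1).ToString("F2"));
                });

                foreach (var line in bag.OrderBy(l => l))
                {
                    Console.WriteLine(line);
                }
            }
        }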
Example #3
        public static double Propogate
            (WriteToCMDLine write)
        {
            double        error    = 0;
            NeuralNetwork net      = GetNetwork(write);
            var           Samples  = ReadVals(24);
            Alpha         a        = new Alpha(write);
            AlphaContext  ctxt     = new AlphaContext(datatype, write);
            NetworkMem    OLFMem   = new NetworkMem(net);
            NetworkMem    AlphaMem = new NetworkMem(a.Network);
            NetworkMem    CtxtMem  = new NetworkMem(ctxt.Network);

            // Hoist the key/value lists once: Samples.Keys.ToList() inside the loop would rebuild
            // the list on every access. The lock guards the shared error sum across workers.
            var    keys    = Samples.Keys.ToList();
            var    values  = Samples.Values.ToList();
            object errLock = new object();
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(keys[j].ToCharArray());
                var output  = a.Forward(keys[j], ctxt, am);
                var F       = net.Forward(output, dropout, write);
                // One-hot encode the desired Command for the cross-entropy loss.
                var desired = new double[Enum.GetNames(typeof(Command)).Length];
                desired[values[j]] = 1;
                lock (errLock)
                {
                    error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), desired).Max();
                }

                var DValues = net.Backward(F, desired, OLFMem, write);
                a.Backward(keys[j], DValues, ctxt, am, AlphaMem, CtxtMem);
            });
            OLFMem.Update(Samples.Count(), 0.0001, net);
            AlphaMem.Update(Samples.Count(), 0.0001, a.Network);
            CtxtMem.Update(Samples.Count(), 0.0001, ctxt.Network);
            write("Pre Training Error : " + error);

            net.Save();
            a.Network.Save();
            ctxt.Network.Save(datatype);

            error = 0;
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(keys[j].ToCharArray());
                var output  = a.Forward(keys[j], ctxt, am);
                var F       = net.Forward(output, dropout, write);
                var desired = new double[Enum.GetNames(typeof(Command)).Length];
                desired[values[j]] = 1;
                lock (errLock)
                {
                    error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), desired).Max();
                }
            });
            write("Post Training Error : " + error);
            return error;
        }
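
The desired array in this example one-hot encodes a Command enum value as the target for the cross-entropy loss. The sketch below isolates that encoding step; the Command members shown are invented stand-ins, since the codebase's actual enum is not part of this listing.

        using System;

        class OneHotFromEnum
        {
            // Invented stand-in for the codebase's Command enum.
            enum Command { Open, Close, Save }

            static void Main()
            {
                var target = Command.Close;

                // One slot per enum member; the target's slot is set to 1.
                var desired = new double[Enum.GetNames(typeof(Command)).Length];
                desired[(int)target] = 1;

                Console.WriteLine(string.Join(", ", desired)); // prints 0, 1, 0
            }
        }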
Example #4
        public void Propogate
            (ValueSet val, WriteToCMDLine write)
        {
            Stonk        stk  = new Stonk();
            StonkContext ctxt = new StonkContext(datatype);
            var          vals = val.ReadValues(Datatypes.Datatype.AAPL, 24);

            NetworkMem AAPLMem = new NetworkMem(Network);
            NetworkMem StkMem  = new NetworkMem(stk.Network);
            NetworkMem CtxtMem = new NetworkMem(ctxt.Network);
            double     e       = 0;
            object     eLock   = new object();

            Parallel.For(0, vals.Count(), j =>
            {
                try
                {
                    List <Comparison> comps = Comparison.GenerateComparisons(vals[j]);
                    // Log the comparison count for the first two samples only.
                    if (j == 0 || j == 1)
                    {
                        write("Comparisons : " + comps.Count());
                    }
                    StonkMem sm = new StonkMem(comps.Count());

                    var MktOutput  = stk.Forward(comps, ctxt, sm);
                    var F          = Network.Forward(MktOutput, dropout, write);
                    // Two-class one-hot target: index 1 = price increase, index 0 = no increase.
                    var output     = new double[2];
                    int opnumb     = vals[j].Increase ? 1 : 0;
                    output[opnumb] = 1;

                    var Error = CategoricalCrossEntropy.Forward(F.Last().GetRank(0), output);
                    // '+=' on the shared loss is not atomic, so take the lock.
                    lock (eLock) { e += Error.Max(); }
                    var D = Network.Backward(F, output, AAPLMem, write);
                    stk.Backward(D, ctxt, sm, StkMem, CtxtMem);
                }
                // Report failures instead of silently swallowing them.
                catch (Exception ex) { write("Sample " + j + " failed: " + ex.Message); }
            });
            write("Samples : " + vals.Count());
            write("Loss : " + e);
            AAPLMem.Update(vals.Count(), 1e-4, Network);
            StkMem.Update(vals.Count(), 1e-4, stk.Network);
            CtxtMem.Update(vals.Count(), 1e-4, ctxt.Network);

            Network.Save();
            stk.Network.Save();
            ctxt.Save();
        }
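
The per-sample try/catch in this example matters: without it, a single failing sample would stop the loop, and the exception would surface from Parallel.For wrapped in an AggregateException. The self-contained sketch below shows what handling that at the call site looks like; the failing index is invented for illustration.

        using System;
        using System.Threading.Tasks;

        class ParallelExceptionHandling
        {
            static void Main()
            {
                try
                {
                    Parallel.For(0, 10, j =>
                    {
                        // Invented failure on one sample.
                        if (j == 3)
                        {
                            throw new InvalidOperationException("bad sample " + j);
                        }
                    });
                }
                catch (AggregateException ae)
                {
                    // Parallel.For gathers body exceptions into a single AggregateException.
                    foreach (var ex in ae.InnerExceptions)
                    {
                        Console.WriteLine("Worker failed: " + ex.Message);
                    }
                }
            }
        }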
Example #5
        public static double Propogate
            (WriteToCMDLine write, bool tf = false)
        {
            double        error    = 0;
            NeuralNetwork net      = GetNetwork(write);
            var           Samples  = datatype.ReadSamples(24);
            Alpha         a        = new Alpha(write);
            AlphaContext  ctxt     = new AlphaContext(datatype, write);
            NetworkMem    OLFMem   = new NetworkMem(net);
            NetworkMem    AlphaMem = new NetworkMem(a.Network);
            NetworkMem    CtxtMem  = new NetworkMem(ctxt.Network);

            // A lock guards the shared error sum across Parallel.For workers.
            object errLock = new object();
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                var F       = net.Forward(output, dropout, write);
                lock (errLock)
                {
                    error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                }

                var DValues = net.Backward(F, Samples[j].DesiredOutput, OLFMem, write);
                a.Backward(Samples[j].TextInput, DValues, ctxt, am, AlphaMem, CtxtMem);
            });
            OLFMem.Update(Samples.Count(), 0.0001, net);
            AlphaMem.Update(Samples.Count(), 0.0001, a.Network);
            CtxtMem.Update(Samples.Count(), 0.0001, ctxt.Network);
            write("Pre Training Error : " + error);

            net.Save();
            a.Network.Save();
            ctxt.Network.Save(Datatype.OccupantLoadFactor);

            error = 0;
            Parallel.For(0, Samples.Count(), j =>
            {
                AlphaMem am = new AlphaMem(Samples[j].TextInput.ToCharArray());
                var output  = a.Forward(Samples[j].TextInput, ctxt, am);
                var F       = net.Forward(output, dropout, write);
                lock (errLock)
                {
                    error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                }
            });
            write("Post Training Error : " + error);

            return error;
        }
        public static double Propogate
            (Sample s, WriteToCMDLine write, bool tf = false)
        {
            double error = 0;

            // The mis-prediction guard below is commented out, so this overload always retrains.
            //var Pred = Predict(s.TextInput, new WriteToCMDLine(CMDLibrary.WriteNull));
            //if (s.DesiredOutput.ToList().IndexOf(s.DesiredOutput.Max()) != Pred.ToList().IndexOf(Pred.Max()) || tf)
            {
                NeuralNetwork net     = GetNetwork(write);
                var           Samples = s.ReadSamples(24);
                // Alpha2 replaces the Alpha/AlphaContext pair used by the other overloads.
                Alpha2        a       = datatype.LoadAlpha(write);
                var           am      = a.CreateMemory();
                NetworkMem    MFMem   = new NetworkMem(net);
                // Lock guards the shared error accumulator in both Parallel.For loops below.
                object        errLock = new object();

                try
                {
                    Parallel.For(0, Samples.Count(), j =>
                    {
                        var AMem   = a.CreateAlphaMemory(Samples[j].TextInput);
                        var output = a.Forward(Samples[j].TextInput, AMem, write);
                        var F      = net.Forward(output, dropout, write);
                        lock (errLock)
                        {
                            error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                        }

                        var DValues = net.Backward(F, Samples[j].DesiredOutput, MFMem, write);
                        a.Backward(Samples[j].TextInput, DValues, AMem, am, write);
                    });
                }
                catch (Exception e) { e.OutputError(); }
                MFMem.Update(Samples.Count(), 0.00001, net);
                a.Update(am, Samples.Count());
                write("Pre Training Error : " + error);

                net.Save();
                a.Save();

                error = 0;
                Parallel.For(0, Samples.Count(), j =>
                {
                    var AMem   = a.CreateAlphaMemory(Samples[j].TextInput);
                    var output = a.Forward(Samples[j].TextInput, AMem, write);
                    var F      = net.Forward(output, 0, write); // dropout disabled for the evaluation pass
                    lock (errLock)
                    {
                        error += CategoricalCrossEntropy.Forward(F.Last().GetRank(0), Samples[j].DesiredOutput).Max();
                    }
                });
                write("Post Training Error : " + error);

                //s.Save();
            }
            return error;
        }
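
All of these overloads follow the same shape: run forward and backward passes over a batch in parallel, apply the accumulated weight deltas, save the networks, and report pre- and post-training error. As a hypothetical illustration of driving the Sample-based overload from Example #1, the sketch below assumes the codebase's Sample, WriteToCMDLine, and Propogate members; the iteration cap and stopping threshold are invented.

        // Hypothetical driver; Sample, WriteToCMDLine, and Propogate come from the codebase.
        // The 50-iteration cap and 0.1 threshold are invented for illustration.
        public static void Train(Sample s, WriteToCMDLine write)
        {
            for (int i = 0; i < 50; i++)
            {
                // tf: true forces a training pass even when the prediction is already correct.
                double error = Propogate(s, write, tf: true);
                write("Iteration " + i + " error: " + error);
                if (error < 0.1)
                {
                    break; // good enough for this sketch
                }
            }
        }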