public override double[] TrainBatchContinue(IDataEnumerator data, int batch, int count)
        {
            // Runs `count` additional training iterations. Each iteration draws
            // `batch` random samples in parallel, averages their gradients and
            // applies one weight update. Gradient slot b holds sample b's
            // gradient; slot `batch` is the shared accumulator.
            // Returns the mean error of each iteration.
            // (Removed dead locals: max_found / loc_found were never written by
            // the parallel loop, and the DateTime was never read.)
            double[] errors = new double[count];
            for (int c = 0; c < count; c++)
            {
                double[] loc_errors = new double[batch];
                double   err        = 0;

                // Each worker writes only its own gradient slot and array cell,
                // so no locking is needed here.
                // NOTE(review): assumes data.GetRandom(ref network) tolerates
                // concurrent calls — confirm.
                Parallel.For(0, batch, b =>
                {
                    var pair      = data.GetRandom(ref network);
                    var res       = network.ParallelWriteG(b, pair.Key, pair.Value, k);
                    loc_errors[b] = res.Key;
                });

                // Sum the per-sample gradients into the accumulator slot.
                for (int b = 0; b < batch; b++)
                {
                    network.ActionTwoG(b, batch, batch, (v1, v2) => v1 + v2);
                    err += loc_errors[b];
                }

                network.ActionG(batch, batch, val => val / batch); // mean gradient
                network.GradWeights(batch);                        // apply update
                network.ActionG(batch, batch, val => 0);           // clear accumulator
                errors[c] = err / batch;
            }

            return(errors);
        }
Esempio n. 2
0
 /// <summary>
 /// Wires the process up with its logger, generator factory, source
 /// enumerator and emit repository (stored for later use).
 /// </summary>
 public Process(Logger logger, Func <IGenerator <TData> > generator, IDataEnumerator <TData> sourceEnumerator, IEmitRepository emitRepository)
     : base(logger)
 {
     _generator        = generator;
     _emitRepository   = emitRepository;
     _sourceEnumerator = sourceEnumerator;
 }
Esempio n. 3
0
        /// <summary>
        /// Returns the mean of every metric in <paramref name="metrics"/> over
        /// the whole enumerator, evaluating samples in parallel.
        /// </summary>
        /// <param name="data">Samples to evaluate; Reset before and after.</param>
        /// <param name="metrics">Metrics applied to each (output, target) pair.</param>
        /// <returns>One mean per metric, in the same order as <paramref name="metrics"/>.</returns>
        public virtual double[] GetMeanMetrics(IDataEnumerator data, List <IMetric> metrics)
        {
            double[] vals = new double[metrics.Count];
            data.Reset();
            int count = 0;

            object lock_target = new object();

            Parallel.ForEach(data, (needed) =>
            {
                double[,,] output = GetOutput(needed.input);

                // Compute all metrics outside the lock; only merge under it.
                double[] local = new double[metrics.Count];
                for (int i = 0; i < metrics.Count; i++)
                {
                    local[i] = metrics[i].Metric(output, needed.output);
                }

                lock (lock_target)
                {
                    for (int i = 0; i < metrics.Count; i++)
                    {
                        vals[i] += local[i];
                    }
                    // BUGFIX: count exactly one per sample. Previously count was
                    // incremented once per metric per sample, so every mean was
                    // divided by samples * metrics.Count (cf. GetMeanMetric,
                    // which counts once per sample).
                    count++;
                }
            });
            data.Reset();
            for (int i = 0; i < vals.Length; i++)
            {
                vals[i] /= count;
            }
            return(vals);
        }
Esempio n. 4
0
        /// <summary>
        /// Continues autoencoder training for a single batch: data is routed
        /// through the background network and then mirrored before being fed
        /// to the optimizer. Returns the resulting error.
        /// </summary>
        public double TrainContinue(Optimizer optimizer, IDataEnumerator data, int batch)
        {
            var mirrored = new MirrorEnumerator(new NetworkEnumerator(background, data));

            optimizer.network = autoencoder;
            return optimizer.TrainBatchContinue(mirrored, batch, 1).Last();
        }
Esempio n. 5
0
        /// <summary>
        /// Trains for `count` iterations of `batch` sequentially processed
        /// samples, also counting how often the output argmax matches the
        /// target argmax (top-1 accuracy).
        /// </summary>
        /// <returns>Mean error per iteration.</returns>
        public override double[] TrainBatch(IDataEnumerator data, int batch, int count)
        {
            double[] errors = new double[count];
            for (int c = 0; c < count; c++)
            {
                int    max_found = 0; // NOTE(review): counted but never reported
                double max       = 0;
                double err       = 0;
                for (int j = 0; j < batch; j++)
                {
                    // Argmax of the target vector.
                    max = -1;
                    var pair    = data.GetRandom(ref network);
                    int max_arg = -1;
                    pair.Value.ForEach((val, z, y, x) => {
                        if (val > max)
                        {
                            max_arg = x;
                            max     = val;
                        }
                    });

                    // Argmax of the network output.
                    // BUGFIX: reset to -1 (was 0), matching every other
                    // TrainBatch in this file — with 0, non-positive outputs
                    // could never win and accuracy was undercounted.
                    max = -1;
                    int max_arg2 = -2;

                    var res = network.WriteG(0, pair.Key, pair.Value, k);
                    res.Value.ForEach((val, z, y, x) =>
                    {
                        if (val > max)
                        {
                            max_arg2 = x;
                            max      = val;
                        }
                    });

                    err += res.Key;
                    // Accumulate this sample's gradient (slot 0) into slot 1.
                    network.ActionTwoG(0, 1, 1, (x, y) => x + y);

                    if (max_arg == max_arg2)
                    {
                        max_found++;
                    }
                }

                network.ActionG(1, 1, val => val / batch); // mean gradient
                network.GradWeights(1);                    // apply update
                network.ActionG(1, 1, x => 0);             // clear accumulator

                errors[c] = err / batch;
                Console.WriteLine(err / batch);
            }

            return(errors);
        }
        /// <summary>
        /// Continues autoencoder training for one batch. The data is routed
        /// through the background network only when it actually has layers;
        /// otherwise the raw enumerator is used directly.
        /// </summary>
        public double TrainContinue(Optimizer optimizer, IDataEnumerator data, int batch)
        {
            optimizer.network = autoencoder;

            IDataEnumerator source = background.layers.Count != 0
                ? new NetworkEnumerator(background, data)
                : data;

            return optimizer.TrainBatchContinue(source, batch, 1).Last();
        }
Esempio n. 7
0
        /// <summary>
        /// Trains for `count` iterations of `batch` samples with an
        /// AdaGrad-like step: the mean squared gradient is accumulated into
        /// `r`, and the step size 1e-1 / (d + sqrt(r)) shrinks over time.
        /// NOTE(review): r and d are fields declared elsewhere — confirm
        /// their initialization.
        /// </summary>
        /// <returns>Mean error per iteration.</returns>
        public override double[] TrainBatch(IDataEnumerator data, int batch, int count)
        {
            double[] errors = new double[count];
            for (int c = 0; c < count; c++)
            {
                double err = 0;
                for (int j = 0; j < batch; j++)
                {
                    var pair = data.GetRandom(ref network);


                    var res = network.WriteG(0, pair.Key, pair.Value, 1);

                    err += res.Key;
                    // Accumulate this sample's gradient (slot 0) into slot 1.
                    network.ActionTwoG(0, 1, 1, (x, y) => x + y);
                }

                network.ActionG(1, 1, val => val / batch);

                // Mean squared gradient component of the averaged gradient.
                double norm       = 0;
                int    norm_count = 0;

                network.ActionG(1, 1, val => {
                    norm += val * val;
                    norm_count++;
                    return(val);
                });

                norm /= norm_count;

                // Accumulate squared-gradient history (AdaGrad style).
                r += norm;

                double k = 1e-1 / (d + Math.Sqrt(r));

                // Scale the averaged gradient by the adaptive step size.
                network.ActionG(1, 1, val => k * val);

                network.GradWeights(1);
                network.ActionG(1, 1, x => 0); // clear accumulator

                errors[c] = err / batch;
                Console.WriteLine(err / batch);
            }

            return(errors);
        }
        /// <summary>
        /// Runs `count` optimizer iterations of the autoencoder over data
        /// pre-processed by the background network, autosaving the full
        /// network at most every ten minutes.
        /// </summary>
        /// <returns>The autoencoder's error/accuracy pair on <paramref name="val"/>.</returns>
        public KeyValuePair <double, double> TrainBatch(Optimizer optimizer, IDataEnumerator data, IDataEnumerator val, int count, int batch = 32)
        {
            var lastSave = DateTime.Now;
            var source   = new NetworkEnumerator(background, data);

            optimizer.network = autoencoder;
            optimizer.TrainBatch(source, batch, 1).Last();

            for (int k = 1; k < count; k++)
            {
                optimizer.TrainBatchContinue(source, batch, 1);

                // Periodic autosave so long runs survive interruption.
                if ((DateTime.Now - lastSave).TotalMinutes >= 10)
                {
                    System.IO.File.WriteAllText("autosave_" + k + ".neural", network.SaveJSON());
                    lastSave = DateTime.Now;
                }
            }

            return autoencoder.GetError(val);
        }
Esempio n. 9
0
        /// <summary>
        /// Returns the mean value of <paramref name="metric"/> over the whole
        /// DataEnumerator, evaluating samples in parallel.
        /// </summary>
        /// <param name="data">Samples to evaluate; Reset before and after.</param>
        /// <param name="metric">Metric applied to each (output, target) pair.</param>
        /// <returns>The metric averaged over all samples.</returns>
        public virtual double GetMeanMetric(IDataEnumerator data, IMetric metric)
        {
            data.Reset();

            double total = 0;
            int    seen  = 0;
            var    gate  = new object();

            Parallel.ForEach(data, (needed) =>
            {
                double[,,] output = GetOutput(needed.input);
                var m             = metric.Metric(output, needed.output);

                // Only the shared accumulators need the lock.
                lock (gate)
                {
                    total += m;
                    seen++;
                }
            });

            data.Reset();
            return total / seen;
        }
Esempio n. 10
0
        /// <summary>
        /// Computes mean error and mean win rate over every sample in the
        /// enumerator, evaluating samples in parallel.
        /// </summary>
        /// <returns>Key = mean error, Value = mean wins per sample.</returns>
        public virtual KeyValuePair <double, double> GetError(IDataEnumerator data)
        {
            data.Reset();

            double errorSum = 0;
            double winSum   = 0;
            int    samples  = 0;
            var    gate     = new object();

            Parallel.ForEach(data, (needed) =>
            {
                var pair = GetErrorPair(needed.input, needed.output);

                // Merge this sample's contribution under the lock.
                lock (gate)
                {
                    errorSum += pair.Key;
                    winSum   += pair.Value;
                    samples++;
                }
            });

            data.Reset();
            return new KeyValuePair <double, double>(errorSum / samples, winSum / samples);
        }
        /// <summary>
        /// Validates the autoencoder on data pre-processed by the background
        /// network; returns its error/accuracy pair.
        /// </summary>
        public KeyValuePair <double, double> Validation(IDataEnumerator data)
        {
            return autoencoder.GetError(new NetworkEnumerator(background, data));
        }
Esempio n. 12
0
 /// <summary>
 /// Stores the enumerator this mirror wraps.
 /// NOTE(review): the mirroring semantics live in the rest of
 /// MirrorEnumerator, not visible here.
 /// </summary>
 public MirrorEnumerator(IDataEnumerator enumer)
 {
     this.enumer = enumer;
 }
Esempio n. 13
0
 /// <summary>
 /// Stores the network/enumerator pair this enumerator is built from.
 /// </summary>
 public NetworkEnumerator(Network network, IDataEnumerator enumer)
 {
     this.enumer  = enumer;
     this.network = network;
 }
Esempio n. 14
0
        /// <summary>
        /// Greedy layer-wise autoencoder pre-training: for each eligible layer
        /// (except the last), builds a "background" network of the preceding
        /// layers and a two-layer autoencoder (the layer plus its mirror),
        /// trains it for `count` iterations, and autosaves every ten minutes.
        /// </summary>
        /// <param name="network">Network whose layers are pre-trained in place.</param>
        /// <param name="optimizer">Optimizer reused for each autoencoder.</param>
        /// <param name="data">Training data.</param>
        /// <param name="val">Validation data.</param>
        /// <param name="count">Training iterations per layer.</param>
        /// <param name="batch">Batch size.</param>
        /// <returns>Error/accuracy pair of the full network on <paramref name="val"/>.</returns>
        public static KeyValuePair <double, double> Action(Network network, Optimizer optimizer, IDataEnumerator data, IDataEnumerator val, int count, int batch = 32)
        {
            DateTime start;

            start = DateTime.Now;
            int layers = network.layers.Count;

            // The last layer is never pre-trained here (i < layers - 1).
            for (int i = 0; i < layers - 1; i++)
            {
                Network background;
                Network autoencoder;

                // NOTE(review): ITrained presumably marks layers that take part
                // in pre-training — confirm the flag's intended polarity.
                if (!network.layers[i].ITrained)
                {
                    continue;
                }

                // Background = all layers before i; transforms data for layer i.
                background = new Network();
                for (int j = 0; j < i; j++)
                {
                    background.layers.Add(network.layers[j]);
                }

                // Autoencoder = layer i (encoder) plus its mirror (decoder).
                autoencoder = new Network();
                autoencoder.layers.Add(network.layers[i]);
                autoencoder.layers.Add(network.layers[i].Mirror);

                background.CompileOnlyError();
                autoencoder.CompileOnlyError();

                NetworkEnumerator nEm = new NetworkEnumerator(background, data);
                MirrorEnumerator  Mem = new MirrorEnumerator(nEm);

                optimizer.network = autoencoder;


                double err = optimizer.TrainBatch(Mem, batch, 1).Last();


                for (int k = 1; k < count; k++)
                {
                    Console.Write("{0} ", k);
                    optimizer.TrainBatchContinue(Mem, batch, 1);
                    // Autosave at most once every ten minutes.
                    if ((DateTime.Now - start).TotalMinutes >= 10)
                    {
                        System.IO.File.WriteAllText("autosave_" + i + "_" + k + ".neural", network.SaveJSON());
                        Console.WriteLine("Save to " + "autosave_" + i + "_" + k + ".neural");
                        start = DateTime.Now;
                    }
                }



                // NOTE(review): err is the first iteration's error only; the
                // continue-iterations above never update it.
                Console.WriteLine("Trained {0} with error {1}", i, (float)err);
            }

            return(network.GetError(val));
        }
Esempio n. 15
0
 /// <summary>
 /// Delegates batch continuation directly to the base optimizer.
 /// </summary>
 public override double[] TrainBatchContinue(IDataEnumerator data, int batch, int count)
 {
     return base.TrainBatchContinue(data, batch, count);
 }
        /// <summary>
        /// Parallel batch training with momentum. Gradient-slot layout
        /// (hence CreateGradients(batch + 2)): slot b = sample b's gradient,
        /// slot `batch` = averaged gradient, slot `batch + 1` = momentum
        /// accumulator. Stops early once the mean error stays below 1e-7 for
        /// more than 10 consecutive iterations.
        /// </summary>
        /// <returns>Mean error per iteration (possibly truncated by early stop).</returns>
        public override double[] TrainBatch(IDataEnumerator data, int batch, int count)
        {
            double[] errors = new double[count];
            network.CreateGradients(batch + 2);
            for (int c = 0; c < count; c++)
            {
                int max_found = 0;

                double[] loc_errors = new double[batch];
                int[]    loc_found  = new int[batch];

                DateTime start = DateTime.Now;
                double   err   = 0;
                //for(int b = 0;b < batch;b++)
                // Each worker writes only its own gradient slot and array
                // cells, so no locking is needed.
                Parallel.For(0, batch, b =>
                {
                    var pair = data.GetRandom(ref network);
                    var res  = network.ParallelWriteG(b, pair.Key, pair.Value, k, 0);

                    loc_errors[b] = res.Key;

                    // Optional top-1 accuracy: output argmax vs target argmax.
                    if (need_max)
                    {
                        double max  = -1;
                        int max_arg = -1;
                        pair.Value.ForEach((val, z, y, x) =>
                        {
                            if (val > max)
                            {
                                max_arg = x;
                                max     = val;
                            }
                        });

                        max          = -1;
                        int max_arg2 = -2;

                        res.Value.ForEach((val, z, y, x) =>
                        {
                            if (val > max)
                            {
                                max_arg2 = x;
                                max      = val;
                            }
                        });

                        if (max_arg == max_arg2)
                        {
                            loc_found[b] = 1;
                        }
                    }
                });

                // Sum per-sample gradients into the averaging slot.
                for (int b = 0; b < batch; b++)
                {
                    network.ActionTwoG(b, batch, batch, (v1, v2) => v1 + v2);
                    err       += loc_errors[b];
                    max_found += loc_found[b];
                }

                network.ActionG(batch, batch, val => val / batch);

                // Momentum: v = a * v + g, except the very first update which
                // just copies the gradient into the momentum slot.
                if (firstSet)
                {
                    network.ActionTwoG(batch, batch + 1, batch + 1, (g, v) => g);
                    firstSet = false;
                }
                else
                {
                    network.ActionTwoG(batch, batch + 1, batch + 1, (g, v) => a * v + g);
                }

                network.GradWeights(batch + 1);

                network.ActionG(batch, batch, val => 0); // clear averaging slot

                errors[c] = err / batch;

                // dw / w_count only feed the commented-out diagnostics below.
                double dw      = 0;
                int    w_count = 0;
                //network.ActionG(batch + 1, batch + 1, x =>
                //{
                //    dw += x * x;
                //    w_count++;
                //    return x;
                //});
                //Console.WriteLine();
                //Console.WriteLine("{2}: {0} : {1}", err / batch, Math.Sqrt(dw / w_count), c);
                //Console.WriteLine("A: {0} ", (float)(DateTime.Now - start).TotalSeconds);
                //if(need_max)
                //    Console.WriteLine("Found {0} of {1} {2}%", max_found, batch, 100.0f * max_found / batch);
                //if (c % 10 == 0)
                //    File.WriteAllText(c + "_" + err / batch, network.SaveJSON());
                // Early stop once converged for 10+ consecutive iterations.
                if (err / batch <= 1e-7)
                {
                    acc++;
                    if (acc > 10)
                    {
                        Console.WriteLine("Stopped at {0}", c);
                        return(errors);
                    }
                }
                else
                {
                    acc = 0;
                }


                //Console.WriteLine("Norm: {0}", network.layers[0].NormG(2));
            }

            return(errors);
        }
        /// <summary>
        /// RMSProp-style batch training: r = gamma * r + (1 - gamma) * mean(g^2)
        /// and step k2 = k / (sqrt(r) + d) / batch, with a momentum buffer in
        /// gradient slot 2. Stops early once the mean error stays below 1e-7
        /// for more than 10 consecutive iterations.
        /// NOTE(review): k, gamma, d, a, r, acc are fields declared elsewhere.
        /// </summary>
        /// <returns>Mean error per iteration (possibly truncated by early stop).</returns>
        public override double[] TrainBatch(IDataEnumerator data, int batch, int count)
        {
            double[] errors = new double[count];
            for (int c = 0; c < count; c++)
            {
                DateTime start = DateTime.Now;
                double   err   = 0;
                for (int b = 0; b < batch; b++)
                {
                    var pair = data.GetRandom(ref network);
                    var res  = network.WriteG(0, pair.Key, pair.Value, 1);
                    err += res.Key;
                    // Accumulate this sample's gradient (slot 0) into slot 1.
                    network.ActionTwoG(0, 1, 1, (v1, v2) => v1 + v2);
                }

                // Mean squared component of the summed gradient.
                double norm       = 0;
                int    norm_count = 0;

                network.ActionG(1, 1, val => {
                    norm += val * val;
                    norm_count++;
                    return(val);
                });

                norm = norm / norm_count;

                // Exponential moving average of the squared-gradient norm.
                r = r * gamma + (1 - gamma) * norm;

                // Adaptive step; /batch averages the summed gradient.
                double k2 = k / (Math.Sqrt(r) + d) / batch;

                network.ActionG(1, 1, val => val * k2);
                // Momentum: v = a * v + g (slot 2).
                network.ActionTwoG(1, 2, 2, (g, v) => a * v + g);

                network.GradWeights(2);

                network.ActionG(1, 1, val => 0); // clear accumulator

                errors[c] = err / batch;


                Console.WriteLine("{0} : {1}", err / batch, Math.Sqrt(norm));
                //Console.WriteLine("A: {0}", (DateTime.Now - start).TotalSeconds);

                // Early stop once converged for 10+ consecutive iterations.
                if (err / batch <= 1e-7)
                {
                    acc++;
                    if (acc > 10)
                    {
                        Console.WriteLine("Stopped at {0}", c);
                        return(errors);
                    }
                }
                else
                {
                    acc = 0;
                }

                //Console.WriteLine("Norm: {0}", network.layers[0].NormG(2));
            }

            return(errors);
        }
Esempio n. 18
0
        /// <summary>
        /// Sequential batch training with EMA momentum (v = a*v + g*(1-a)) in
        /// gradient slot 2, printing per-phase timing and top-1 accuracy
        /// (output argmax vs target argmax) every iteration. Stops early once
        /// the mean error stays below 1e-7 for more than 10 consecutive
        /// iterations.
        /// </summary>
        /// <returns>Mean error per iteration (possibly truncated by early stop).</returns>
        public override double[] TrainBatch(IDataEnumerator data, int batch, int count)
        {
            double[] errors = new double[count];
            for (int c = 0; c < count; c++)
            {
                int    max_found = 0;
                double max       = 0;

                // Wall-clock time spent in gradient writes vs gradient actions.
                var      g_write = new TimeSpan();
                var      a_time  = new TimeSpan();
                DateTime start;
                double   err = 0;
                for (int b = 0; b < batch; b++)
                {
                    var pair = data.GetRandom(ref network);
                    start = DateTime.Now;
                    var res = network.ParallelWriteG(0, pair.Key, pair.Value, k);
                    g_write += (DateTime.Now - start);
                    err     += res.Key;
                    start    = DateTime.Now;
                    // Accumulate this sample's gradient (slot 0) into slot 1.
                    network.ActionTwoG(0, 1, 1, (v1, v2) => v1 + v2);
                    a_time += (DateTime.Now - start);


                    // Target argmax.
                    max = -1;
                    int max_arg = -1;
                    pair.Value.ForEach((val, z, y, x) => {
                        if (val > max)
                        {
                            max_arg = x;
                            max     = val;
                        }
                    });

                    // Output argmax.
                    max = -1;
                    int max_arg2 = -2;

                    res.Value.ForEach((val, z, y, x) =>
                    {
                        if (val > max)
                        {
                            max_arg2 = x;
                            max      = val;
                        }
                    });

                    if (max_arg == max_arg2)
                    {
                        max_found++;
                    }
                }

                start = DateTime.Now;
                network.ActionG(1, 1, val => val / batch); // mean gradient
                // EMA momentum update into slot 2.
                network.ActionTwoG(1, 2, 2, (g, v) => a * v + g * (1 - a));
                a_time += (DateTime.Now - start);

                network.GradWeights(2);

                start = DateTime.Now;
                network.ActionG(1, 1, val => 0); // clear accumulator
                a_time += (DateTime.Now - start);

                errors[c] = err / batch;

                // Mean-square of the momentum buffer, diagnostics only.
                double dw      = 0;
                int    w_count = 0;
                network.ActionG(2, 2, x =>
                {
                    dw += x * x;
                    w_count++;
                    return(x);
                });
                Console.WriteLine();
                Console.WriteLine("{2}: {0} : {1}", err / batch, Math.Sqrt(dw / w_count), c);
                Console.WriteLine("A: {0} G:{1}", a_time.TotalSeconds, g_write.TotalSeconds);
                Console.WriteLine("Found {0} of {1} {2}%", max_found, batch, 100.0f * max_found / batch);

                // Early stop once converged for 10+ consecutive iterations.
                if (err / batch <= 1e-7)
                {
                    acc++;
                    if (acc > 10)
                    {
                        Console.WriteLine("Stopped at {0}", c);
                        return(errors);
                    }
                }
                else
                {
                    acc = 0;
                }

                //Console.WriteLine("Norm: {0}", network.layers[0].NormG(2));
            }

            return(errors);
        }
Esempio n. 19
0
        /// <summary>
        /// Sequential batch training with a smoothed adaptive learning rate:
        /// k = ro * k + (1 - ro) * e / sqrt(1 + 1000 * mean(g^2)), combined
        /// with momentum in slot batch + 1 (hence CreateGradients(batch + 2)).
        /// Prints per-iteration error, rate, timing and top-1 accuracy.
        /// NOTE(review): k, ro, e, a are fields declared elsewhere.
        /// </summary>
        /// <returns>Mean error per iteration.</returns>
        public override double[] TrainBatch(IDataEnumerator data, int batch, int count)
        {
            double[] errors = new double[count];
            network.CreateGradients(batch + 2);
            for (int c = 0; c < count; c++)
            {
                int max_found = 0;

                double[] loc_errors = new double[batch];
                int[]    loc_found  = new int[batch];

                DateTime start = DateTime.Now;
                double   err   = 0;
                //Parallel.For(0, batch, b =>
                for (int b = 0; b < batch; b++)
                {
                    var pair = data.GetRandom();
                    var res  = network.ParallelWriteG(b, pair.Key, pair.Value, 1);

                    loc_errors[b] = res.Key;

                    // Target argmax.
                    double max     = -1;
                    int    max_arg = -1;
                    pair.Value.ForEach((val, z, y, x) =>
                    {
                        if (val > max)
                        {
                            max_arg = x;
                            max     = val;
                        }
                    });

                    // Output argmax.
                    max = -1;
                    int max_arg2 = -2;

                    res.Value.ForEach((val, z, y, x) =>
                    {
                        if (val > max)
                        {
                            max_arg2 = x;
                            max      = val;
                        }
                    });

                    if (max_arg == max_arg2)
                    {
                        loc_found[b] = 1;
                    }
                }//);

                // Sum per-sample gradients into the averaging slot.
                for (int b = 0; b < batch; b++)
                {
                    network.ActionTwoG(b, batch, batch, (v1, v2) => v1 + v2);
                    err       += loc_errors[b];
                    max_found += loc_found[b];
                }

                network.ActionG(batch, batch, val => val / batch);
                // Mean squared gradient (scaled by 1000 below) drives the rate.
                double dw      = 0;
                int    w_count = 0;
                network.ActionG(batch, batch, x =>
                {
                    dw += x * x;
                    w_count++;
                    return(x);
                });
                dw /= w_count;
                dw *= 1000;
                k   = ro * k + (1 - ro) * e / Math.Sqrt(1 + dw);
                // Momentum: v = a * v + g * k (slot batch + 1).
                network.ActionTwoG(batch, batch + 1, batch + 1, (g, v) => a * v + g * k);

                network.GradWeights(batch + 1);

                network.ActionG(batch, batch, val => 0); // clear averaging slot

                errors[c] = err / batch;


                Console.WriteLine();
                Console.WriteLine("{2}: {0} : {1}", err / batch, k, c);
                Console.WriteLine("A: {0} ", (float)(DateTime.Now - start).TotalSeconds);
                Console.WriteLine("Found {0} of {1} {2}%", max_found, batch, 100.0f * max_found / batch);

                //Console.WriteLine("Norm: {0}", network.layers[0].NormG(2));
            }

            return(errors);
        }
Esempio n. 20
0
 /// <summary>
 /// Base-class placeholder; concrete optimizers must override this.
 /// The Russian message reads "an abstract class was invoked".
 /// </summary>
 /// <exception cref="NotImplementedException">Always thrown.</exception>
 public virtual double[] TrainBatchContinue(IDataEnumerator data, int batch, int count)
 {
     throw new NotImplementedException("Вызван абстрактный класс");
 }
Esempio n. 21
0
 /// <summary>
 /// Builds a disposable enumerable bound to the given context and data
 /// source.
 /// </summary>
 public IDisposableEnumerable <T> Create <T>(IContext context, IDataEnumerator <T> data) where T : class
 {
     var enumerable = new EnumerableData <T>();

     return enumerable.Initialize(context, data);
 }
        /// <summary>
        /// Parallel batch training that returns, per iteration, the pair
        /// (mean error, top-1 accuracy in percent). Gradient-slot layout
        /// (hence CreateGradients(batch + 2)): slot b = sample b's gradient,
        /// slot `batch` = averaged gradient, slot `batch + 1` = momentum.
        /// </summary>
        /// <returns>One (error, percent-correct) pair per iteration.</returns>
        public List <KeyValuePair <double, double> > TrainBatchPercent(IDataEnumerator data, int batch, int count)
        {
            List <KeyValuePair <double, double> > pairs = new List <KeyValuePair <double, double> >();

            network.CreateGradients(batch + 2);
            for (int c = 0; c < count; c++)
            {
                int max_found = 0;

                double[] loc_errors = new double[batch];
                int[]    loc_found  = new int[batch];

                DateTime start = DateTime.Now;
                double   err   = 0;
                // Each worker writes only its own gradient slot / array cells.
                Parallel.For(0, batch, b =>
                {
                    var pair = data.GetRandom(ref network);
                    var res  = network.ParallelWriteG(b, pair.Key, pair.Value, k);

                    loc_errors[b] = res.Key;

                    // Target argmax.
                    double max  = -1;
                    int max_arg = -1;
                    pair.Value.ForEach((val, z, y, x) =>
                    {
                        if (val > max)
                        {
                            max_arg = x;
                            max     = val;
                        }
                    });

                    // Output argmax.
                    max          = -1;
                    int max_arg2 = -2;

                    res.Value.ForEach((val, z, y, x) =>
                    {
                        if (val > max)
                        {
                            max_arg2 = x;
                            max      = val;
                        }
                    });

                    if (max_arg == max_arg2)
                    {
                        loc_found[b] = 1;
                    }
                });

                // Sum per-sample gradients into the averaging slot.
                for (int b = 0; b < batch; b++)
                {
                    network.ActionTwoG(b, batch, batch, (v1, v2) => v1 + v2);
                    err       += loc_errors[b];
                    max_found += loc_found[b];
                }

                network.ActionG(batch, batch, val => val / batch);
                // Momentum: v = a * v + g (slot batch + 1).
                network.ActionTwoG(batch, batch + 1, batch + 1, (g, v) => a * v + g);

                network.GradWeights(batch + 1);

                network.ActionG(batch, batch, val => 0); // clear averaging slot

                pairs.Add(new KeyValuePair <double, double>(err / batch, 100.0f * max_found / batch));

                //double dw = 0;
                //int w_count = 0;
                //network.ActionG(batch + 1, batch + 1, x =>
                //{
                //    dw += x * x;
                //    w_count++;
                //    return x;
                //});
                //Console.WriteLine();
                //Console.WriteLine("{2}: {0} : {1}", err / batch, Math.Sqrt(dw / w_count), c);
                //Console.WriteLine("A: {0} ", (float)(DateTime.Now - start).TotalSeconds);
                //Console.WriteLine("Found {0} of {1} {2}%", max_found, batch, 100.0f * max_found / batch);


                //Console.WriteLine("Norm: {0}", network.layers[0].NormG(2));
            }

            return(pairs);
        }
Esempio n. 23
0
 /// <summary>
 /// Binds this enumerable to a context and data source and returns
 /// itself, enabling fluent construction.
 /// </summary>
 public IDisposableEnumerable <T> Initialize(IContext context, IDataEnumerator <T> data)
 {
     _data    = data;
     _context = context;
     return this;
 }