Example No. 1
        public sealed override void Optimize(IOptimizer optimizer)
        {
            if (!IsTrainable)
            {
                return;
            }

            // Accumulate the optimizer's update for every neuron and for each
            // of its incoming synapses.
            Neurons.ForEach(neuron =>
            {
                neuron.BatchDelta += optimizer.Optimize(neuron);
                neuron.InSynapses.ForEach(synapse => synapse.BatchDelta += optimizer.Optimize(synapse));
            });
        }
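For context, a minimal optimizer this override could consume might look like the sketch below. The IOptimizer members and the Gradient properties are assumptions inferred from the call sites above, not the project's actual definitions.

        // Hypothetical SGD-style optimizer matching the calls above: returns
        // the delta to accumulate into BatchDelta. Member names are assumptions.
        public sealed class SgdOptimizer : IOptimizer
        {
            private readonly double learningRate;

            public SgdOptimizer(double learningRate = 0.01)
            {
                this.learningRate = learningRate;
            }

            public double Optimize(INeuron neuron) => -this.learningRate * neuron.Gradient;

            public double Optimize(ISynapse synapse) => -this.learningRate * synapse.Gradient;
        }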
        private static bool OptimizationMakesDifferenceInSinglePattern(IOptimizer without, IOptimizer with, Operator op)
        {
            var p = new Pattern {
                Data = op
            };
            var unoptimized = new Compiler().Compile(p);
            var withoutOptimizationRunner = without.Optimize(unoptimized);
            var withOptimizationRunner    = with.Optimize(unoptimized);

            Console.WriteLine($"Optimization results:\n{string.Join("\n", withoutOptimizationRunner.Methods[0].Instructions)}\n\nvs\n{string.Join("\n", withOptimizationRunner.Methods[0].Instructions)}\n\n");

            // The optimization made a difference if the two instruction streams differ.
            return !AreEqual(withoutOptimizationRunner.Methods[0].Instructions, withOptimizationRunner.Methods[0].Instructions);
        }
Example No. 3
        OptimizerResult[] FindNextCandidates(RegressionForestModel model, double bestScore)
        {
            Func<double[], OptimizerResult> minimize = param =>
            {
                // Use the model to predict the expected performance, mean and variance, of the parameter set.
                var p = model.PredictCertainty(param);

                return new OptimizerResult(param,
                                           // Negative, since we want to "maximize" the acquisition function.
                                           -m_acquisitionFunc(bestScore, p.Prediction, p.Variance));
            };

            return m_maximizer.Optimize(minimize).Take(m_numberOfCandidatesEvaluatedPrIteration).ToArray();
        }
        OptimizerResult[] FindNextCandidates(RegressionForestModel model, double bestScore)
        {
            OptimizerResult minimize(double[] param)
            {
                // Use the model to predict the expected performance, mean and variance, of the parameter set.
                var p = model.PredictCertainty(param);

                return new OptimizerResult(param,
                                           // Negative, since we want to "maximize" the acquisition function.
                                           -m_acquisitionFunc(bestScore, p.Prediction, p.Variance));
            }

            return m_maximizer.Optimize(minimize)
                   .Where(v => !double.IsNaN(v.Error)).OrderBy(r => r.Error)
                   .Take(m_functionEvaluationsPerIteration).ToArray();
        }
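Both variants negate the acquisition value so that maximizing it becomes a minimization problem for m_maximizer; the second variant additionally drops NaN results and keeps the lowest-error candidates. A common choice for m_acquisitionFunc is expected improvement. The sketch below is an assumption, not this repository's implementation; the Erf helper uses a standard Abramowitz & Stegun approximation.

        // Hypothetical expected-improvement acquisition with the
        // (bestScore, mean, variance) shape used above; assumes minimization,
        // so the improvement over the incumbent is bestScore - mean.
        static double ExpectedImprovement(double best, double mean, double variance)
        {
            var stdDev = Math.Sqrt(variance);

            if (stdDev <= 0.0)
            {
                return 0.0;
            }

            var z = (best - mean) / stdDev;

            return (best - mean) * CumulativeNormal(z) + stdDev * NormalDensity(z);
        }

        static double NormalDensity(double z) => Math.Exp(-0.5 * z * z) / Math.Sqrt(2.0 * Math.PI);

        static double CumulativeNormal(double z) => 0.5 * (1.0 + Erf(z / Math.Sqrt(2.0)));

        // Abramowitz & Stegun 7.1.26 approximation of erf; absolute error below 1.5e-7.
        static double Erf(double x)
        {
            double t = 1.0 / (1.0 + 0.3275911 * Math.Abs(x));
            double y = 1.0 - (((((1.061405429 * t - 1.453152027) * t + 1.421413741) * t - 0.284496736) * t + 0.254829592) * t) * Math.Exp(-x * x);

            return x >= 0.0 ? y : -y;
        }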
Example No. 5
            public QueryTree Optimize(QueryTree queryTree)
            {
                var history = new HashSet<QueryTree>(new QueryExpressionEqualityComparator());

                var current = queryTree;
                var reduced = default(QueryTree);

                var i = 0;

                // Apply the inner optimizer repeatedly until it reaches a fixed
                // point, revisits a previously seen tree (a cycle), or hits the
                // iteration cap.
                while (current != reduced)
                {
                    if (!history.Add(current))
                    {
                        if (_throwOnCycle)
                        {
                            throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "Irreducible recursive query tree detected: '{0}'.", current));
                        }
                        else
                        {
                            break;
                        }
                    }

                    if (i == _maxIterations)
                    {
                        break;
                    }

                    reduced = current;
                    current = _optimizer.Optimize(current);

                    i++;
                }

                return current;
            }
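A hypothetical usage of this fixed-point wrapper; the constructor shape is assumed from the fields referenced above (_optimizer, _maxIterations, _throwOnCycle), and FixedPointOptimizer is an illustrative name, not the repository's.

            // Run the inner optimizer to a fixed point, giving up after ten
            // passes or when a tree recurs (a cycle) instead of throwing.
            var fixedPoint = new FixedPointOptimizer(innerOptimizer, maxIterations: 10, throwOnCycle: false);
            var optimized  = fixedPoint.Optimize(tree);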
Example No. 6
        public void Train(IEnumerable<Tuple<double[], double[]>> collection, int epochs, int batchSize = 32)
        {
            // Backpropagation
            int dataSize   = collection.Count();
            int maxThreads = 2 * Environment.ProcessorCount;
            int t          = 0;

            // Stochastic gradient descent (SGD)
            while (t < epochs)
            {
                // Mini-batch
                int remaining = dataSize;

                do
                {
                    var batchDataQueue = new Queue<Tuple<Layer, double[], double[]>>(collection.Sample<Tuple<double[], double[]>>(this.random, Math.Min(remaining, batchSize)).Aggregate<Tuple<double[], double[]>, List<Tuple<Layer, double[], double[]>>>(new List<Tuple<Layer, double[], double[]>>(), (list, tuple) =>
                    {
                        list.Add(Tuple.Create<Layer, double[], double[]>(Copy(this.inputLayer), tuple.Item1, tuple.Item2));

                        return list;
                    }));
                    var batchTaskList    = new List<Task<IEnumerable<Tuple<double[], double[]>>>>();
                    var batchedDataList  = new LinkedList<IEnumerable<Tuple<double[], double[]>>>();
                    var mergedDataTuples = new Tuple<double[], double[]>[this.layerCollection.Count];
                    int index            = 0;

                    do
                    {
                        do
                        {
                            var task = new Task<IEnumerable<Tuple<double[], double[]>>>(delegate(object state)
                            {
                                var tuple = (Tuple<Layer, double[], double[]>)state;

                                return BackwardPropagate(ForwardPropagate(true, tuple.Item1, tuple.Item2), tuple.Item3);
                            }, batchDataQueue.Dequeue());

                            batchTaskList.Add(task);
                            task.Start();
                        } while (batchDataQueue.Count > 0 && batchTaskList.Count < maxThreads);

                        var tasks = batchTaskList.ToArray();
                        var i     = Task.WaitAny(tasks);

                        for (int j = 0; j < tasks.Length; j++)
                        {
                            if (i == j)
                            {
                                if (tasks[j].Exception == null)
                                {
                                    batchedDataList.AddLast(tasks[j].Result);
                                }

                                batchTaskList.RemoveAt(i);

                                break;
                            }
                        }
                    } while (batchDataQueue.Count > 0);

                    if (batchTaskList.Count > 0)
                    {
                        var tasks = batchTaskList.ToArray();

                        Task.WaitAll(tasks);

                        foreach (var task in tasks)
                        {
                            if (task.Exception == null)
                            {
                                batchedDataList.AddLast(task.Result);
                            }
                        }
                    }

                    foreach (var (gradients, deltas) in batchedDataList.First.Value)
                    {
                        mergedDataTuples[index] = Tuple.Create<double[], double[]>(new double[gradients.Length], new double[deltas.Length]);

                        for (int j = 0; j < gradients.Length; j++)
                        {
                            mergedDataTuples[index].Item1[j] = gradients[j];
                        }

                        for (int j = 0; j < deltas.Length; j++)
                        {
                            mergedDataTuples[index].Item2[j] = deltas[j];
                        }

                        index++;
                    }

                    for (var tuplesNode = batchedDataList.First.Next; tuplesNode != null; tuplesNode = tuplesNode.Next)
                    {
                        index = 0;

                        foreach (var (gradients, deltas) in tuplesNode.Value)
                        {
                            for (int j = 0; j < gradients.Length; j++)
                            {
                                mergedDataTuples[index].Item1[j] += gradients[j];
                            }

                            for (int j = 0; j < deltas.Length; j++)
                            {
                                mergedDataTuples[index].Item2[j] += deltas[j];
                            }

                            index++;
                        }
                    }

                    for (int i = 0, j = 0; i < this.layerCollection.Count; i++)
                    {
                        // Average the accumulated gradients and deltas over the mini-batch.
                        for (int k = 0; k < mergedDataTuples[i].Item1.Length; k++)
                        {
                            mergedDataTuples[i].Item1[k] = mergedDataTuples[i].Item1[k] / batchedDataList.Count;
                        }

                        for (int k = 0; k < mergedDataTuples[i].Item2.Length; k++)
                        {
                            mergedDataTuples[i].Item2[k] = mergedDataTuples[i].Item2[k] / batchedDataList.Count;
                        }

                        this.layerCollection[i].Update(mergedDataTuples[i].Item1, mergedDataTuples[i].Item2, (weight, gradient) => optimizer.Optimize(j++, weight, gradient));
                    }

                    remaining -= batchSize;
                } while (remaining > 0);

                this.loss = GetLoss(this.inputLayer, collection);

                this.Stepped?.Invoke(this, EventArgs.Empty);

                t++;
            }
        }
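The update callback passes a running parameter index alongside each weight and gradient, which lets stateful optimizers keep per-parameter history. A minimal sketch of what optimizer.Optimize could look like, assuming plain SGD rather than this repository's actual implementation:

        // Hypothetical optimizer for the (index, weight, gradient) callback
        // above: the index identifies the parameter so stateful variants
        // (momentum, Adam) can keep per-parameter state; the return value is
        // the new weight.
        public class SgdOptimizer
        {
            private readonly double learningRate;

            public SgdOptimizer(double learningRate = 0.1)
            {
                this.learningRate = learningRate;
            }

            public double Optimize(int index, double weight, double gradient)
            {
                return weight - this.learningRate * gradient;
            }
        }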
Example No. 7
        public void Train(IEnumerable<Tuple<double[], double[]>> collection, int epochs, int batchSize = 32)
        {
            // Backpropagation
            int dataSize = collection.Count();
            int t        = 0;

            // Stochastic gradient descent (SGD)
            while (t < epochs)
            {
                // Mini-batch
                int remaining = dataSize;

                do
                {
                    var dataTuple = collection.Sample<Tuple<double[], double[]>>(this.random, Math.Min(remaining, batchSize)).Aggregate<Tuple<double[], double[]>, Tuple<List<double[]>, List<double[]>>>(Tuple.Create<List<double[]>, List<double[]>>(new List<double[]>(), new List<double[]>()), (tuple1, tuple2) =>
                    {
                        tuple1.Item1.Add(tuple2.Item1);
                        tuple1.Item2.Add(tuple2.Item2);

                        return tuple1;
                    });
                    int index      = 0;
                    int identifier = 0;

                    foreach (var gradients in BackwardPropagate(ForwardPropagate(new Batch<double[]>(dataTuple.Item1), true), new Batch<double[]>(dataTuple.Item2)))
                    {
                        this.layerCollection[index].Update(gradients, (weight, gradient) => optimizer.Optimize(identifier++, weight, gradient));
                        index++;
                    }

                    remaining -= batchSize;
                } while (remaining > 0);

                this.loss = GetLoss(collection);

                this.Stepped?.Invoke(this, EventArgs.Empty);

                t++;
            }
        }
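Compared with Example No. 6, this variant pushes the entire mini-batch through one batched forward/backward pass (Batch<double[]>) instead of spawning a task per sample and merging per-sample gradients afterwards, which keeps the update loop much shorter.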
Example No. 8
        public void Fit(IEnumerable<Tuple<double[], double[]>> collection, int epochs, int batchSize, Func<IEnumerable<Tuple<double[], double[]>>, int, IEnumerable<Tuple<double[], double[]>>> func)
        {
            // Backpropagation
            int dataSize = collection.Count();
            int t        = 0;

            // Stochastic gradient descent (SGD)
            while (t < epochs)
            {
                // Mini-batch
                int remaining = dataSize;

                do
                {
                    var dataTuple = func(collection, Math.Min(remaining, batchSize)).Aggregate<Tuple<double[], double[]>, Tuple<List<double[]>, List<double[]>>>(Tuple.Create<List<double[]>, List<double[]>>(new List<double[]>(), new List<double[]>()), (tuple1, tuple2) =>
                    {
                        tuple1.Item1.Add(tuple2.Item1);
                        tuple1.Item2.Add(tuple2.Item2);

                        return tuple1;
                    });
                    int index      = 0;
                    int identifier = 0;
                    var tuples     = Backward(Forward(new Batch<double[]>(dataTuple.Item1), true), new Batch<double[]>(dataTuple.Item2));

                    // Weight decay
                    foreach (var tuple in tuples)
                    {
                        tuple.SetGradients((x, y, z) => x ? y + this.weightDecayRate * tuple.Weights[z] : y);
                    }

                    if (this.maxGradient.HasValue)
                    {
                        // Gradients clipping
                        var    vectors = from tuple in tuples let batch = tuple.GetGradients() from vector in batch select vector;
                        double sum     = 0.0;

                        foreach (var gradient in from vector in vectors from gradient in vector select gradient)
                        {
                            sum += gradient * gradient;
                        }

                        double rate = this.maxGradient.Value / (Math.Sqrt(sum) + Math.Pow(10, -6));

                        if (rate < 1)
                        {
                            foreach (var vector in vectors)
                            {
                                for (int i = 0; i < vector.Length; i++)
                                {
                                    vector[i] *= rate;
                                }
                            }
                        }
                    }

                    foreach (var tuple in tuples)
                    {
                        tuple.Update(tuple.GetGradients(), (weight, gradient) => optimizer.Optimize(identifier++, weight, gradient));
                        index++;
                    }

                    remaining -= batchSize;
                } while (remaining > 0);

                this.loss = GetLoss(collection);

                this.Stepped?.Invoke(this, EventArgs.Empty);

                t++;
            }
        }
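The clipping block above scales every gradient by maxGradient / (||g|| + 1e-6) whenever the global L2 norm ||g|| exceeds maxGradient. The same step in isolation, as a self-contained sketch (the helper name and the concrete list type are assumptions):

        // Global-norm gradient clipping as used above: compute the L2 norm
        // over all gradient vectors, then rescale them in place when the norm
        // exceeds the cap.
        static void ClipByGlobalNorm(IReadOnlyList<double[]> gradients, double maxGradient)
        {
            double sum = 0.0;

            foreach (var vector in gradients)
            {
                foreach (var gradient in vector)
                {
                    sum += gradient * gradient;
                }
            }

            double rate = maxGradient / (Math.Sqrt(sum) + 1e-6);

            if (rate < 1)
            {
                foreach (var vector in gradients)
                {
                    for (int i = 0; i < vector.Length; i++)
                    {
                        vector[i] *= rate;
                    }
                }
            }
        }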
Example No. 9
 public QueryTree Optimize(QueryTree queryTree) => _second.Optimize(_first.Optimize(queryTree));
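This one-liner chains two optimizers: the first runs, and its result feeds the second. A hypothetical wrapper around it (the class name and constructor are illustrative, not the repository's):

 // Hypothetical composite: applies _first, then feeds its result to _second.
 public sealed class CompositeOptimizer : IOptimizer
 {
     private readonly IOptimizer _first;
     private readonly IOptimizer _second;

     public CompositeOptimizer(IOptimizer first, IOptimizer second)
     {
         _first  = first;
         _second = second;
     }

     public QueryTree Optimize(QueryTree queryTree) => _second.Optimize(_first.Optimize(queryTree));
 }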
Example No. 10
 /// <summary>
 /// Applies the optimizer to the filter weights and biases using their
 /// accumulated gradients.
 /// </summary>
 public void Optimize(IOptimizer optimizer)
 {
     optimizer.Optimize(Filters, FiltersGradient);
     optimizer.Optimize(Bias, BiasGradient);
 }
Example No. 11
 /// <summary>
 /// Optimizes the weights and biases.
 /// </summary>
 /// <param name="optimizer">The optimizer to apply to the weight and bias gradients.</param>
 public void Optimize(IOptimizer optimizer)
 {
     optimizer.Optimize(Weights, WeightsGradient);
     optimizer.Optimize(Bias, BiasGradient);
 }
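Examples No. 10 and No. 11 share the same parameters-plus-gradients shape. A minimal optimizer for that shape, with the double[] element type and the interface member assumed from the call sites rather than taken from the repository:

 // Hypothetical in-place SGD update for the Optimize(parameters, gradients)
 // pattern above; the double[] element type is an assumption.
 public class SgdOptimizer : IOptimizer
 {
     private readonly double learningRate;

     public SgdOptimizer(double learningRate = 0.01)
     {
         this.learningRate = learningRate;
     }

     public void Optimize(double[] parameters, double[] gradients)
     {
         for (int i = 0; i < parameters.Length; i++)
         {
             parameters[i] -= this.learningRate * gradients[i];
         }
     }
 }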