/// <summary>
/// Delegates optimization to every layer in the network, in order.
/// </summary>
/// <param name="optimizer">Optimizer applied to each layer's parameters.</param>
public override void Optimize(OptimizerBase<T> optimizer)
{
    foreach (var layer in _layers)
    {
        layer.Optimize(optimizer);
    }
}
/// <summary>
/// Checks that the given optimizer can minimize the 2-D Rosenbrock function:
/// after 10000 gradient steps the function value must be at most 6e-5.
/// </summary>
/// <param name="optimizer">Optimizer under test.</param>
protected void CanOptimizeRosenbrock(OptimizerBase<float> optimizer)
{
    var weight = new NeuroWeight<float>(Matrix<float>.Build.Dense(2, 1));

    var watch = Stopwatch.StartNew();
    const int steps = 10000;
    for (var step = 0; step < steps; step++)
    {
        RosenbrockGrad(weight.Weight, weight.Gradient);
        optimizer.Optimize(weight);
    }
    watch.Stop();

    double result = Rosenbrock(weight.Weight);
    result.ShouldBeLessThanOrEqualTo(6e-5);

    _output.WriteLine($"Rosenbrock: {result}");
    _output.WriteLine($"Optimized in {watch.Elapsed}");
}
/// <summary>
/// Adds an optimizer to the model. The new optimizer is appended to the optimizers list.
/// </summary>
/// <param name="allWeights">All weights that this optimizer needs to optimize.</param>
/// <param name="loss">Loss tensor.</param>
/// <param name="optimizer">The optimizer to append.</param>
/// <returns>The update ops produced by the optimizer for the given weights and loss.</returns>
public List<List<Tensor>> AddOptimizer(List<Tensor> allWeights, Tensor loss, OptimizerBase optimizer)
{
    // Lazily create the optimizer list on first use.
    // NOTE(review): the field name "optimiers" (sic) is declared elsewhere and kept as-is.
    if (optimiers == null)
    {
        optimiers = new List<OptimizerBase>();
    }
    optimiers.Add(optimizer);
    // Fixed: removed the stray double semicolon after the return statement.
    return optimizer.get_updates(allWeights, null, loss);
}
/// <summary>
/// Creates a trainer that optimizes <paramref name="network"/> with
/// <paramref name="optimizer"/> over <paramref name="trainingSet"/>.
/// </summary>
public OptimizingTrainer(NeuralNet<T> network, OptimizerBase<T> optimizer, IDataSet<T> trainingSet, OptimizingTrainerOptions options, OptimizingSession session)
    : base(options, session)
{
    _network = network;
    _optimizer = optimizer;
    TrainingSet = trainingSet;

    // TODO: This is not very good — the session is mutated here as a side effect.
    session.Optimizer = optimizer;
    session.Network = network;
}
/// <summary>
/// Replays a batch of stored transitions and fits the model on the resulting
/// Q-value targets. For non-terminal transitions the action is selected with the
/// online model and evaluated with the target model (double-DQN-style update —
/// inferred from the use of both Model and TargetModel; confirm with the class docs).
/// </summary>
/// <param name="batchSize">Number of transitions to replay.</param>
/// <param name="optimizer">Optimizer used for the fit step.</param>
/// <param name="lossMetric">Loss metric used for the fit step.</param>
/// <param name="shuffle">True to draw a shuffled batch, false to draw a random batch.</param>
protected override void AgentReplay(
    int batchSize,
    OptimizerBase optimizer,
    MetricFunction lossMetric,
    bool shuffle)
{
    // Transitions are tuples of (state, action, reward, nextState-or-null).
    var batch = shuffle
        ? ReplayMemory.ToShuffledBatch(batchSize)
        : ReplayMemory.ToRandomBatch(batchSize);

    // Build input frames; terminal transitions (null next state) get a zero frame.
    var states = new DataFrame<float>(StateShape);
    var statesTarget = new DataFrame<float>(StateShape);
    foreach (var sample in batch)
    {
        states.Add(sample.Item1);
        statesTarget.Add(sample.Item4 ?? new float[StateShape.TotalSize]);
    }

    // Online-model predictions for current and next states, target-model for next states.
    var prediction = Model.Predict(states);
    var predictionOfTargetStates = Model.Predict(statesTarget);
    var predictionTarget = TargetModel.Predict(statesTarget);

    var data = new DataFrameList<float>(StateShape, ActionShape);
    for (var i = 0; i < batch.Length; i++)
    {
        var sample = batch[i];
        var target = prediction[i];

        if (sample.Item4 == null)
        {
            // Terminal transition: the target for the taken action is the reward alone.
            target[sample.Item2] = sample.Item3;
        }
        else
        {
            // Argmax over the online model's next-state values
            // (MinValue sentinel kept so ties/NaNs resolve exactly as before).
            var nextValues = predictionOfTargetStates[i];
            var bestValue = float.MinValue;
            var bestIndex = 0;
            for (var j = 0; j < nextValues.Length; j++)
            {
                if (nextValues[j] > bestValue)
                {
                    bestValue = nextValues[j];
                    bestIndex = j;
                }
            }

            // Evaluate the chosen action with the target model and discount it.
            target[sample.Item2] = (float)(sample.Item3 + DiscountFactor * predictionTarget[i][bestIndex]);
        }

        data.AddFrame(sample.Item1, target);
    }

    Model.Fit(data, 1, batch.Length, optimizer, lossMetric);
}
/// <summary>
/// Runs one optimizer step on every parameter of this recurrent cell.
/// The r/z/h suffixes presumably denote GRU-style gates — confirm with the cell's definition.
/// </summary>
/// <param name="optimizer">Optimizer applied to each weight and bias.</param>
public override void Optimize(OptimizerBase<T> optimizer)
{
    // Input-side biases.
    optimizer.Optimize(_bxr);
    optimizer.Optimize(_bxz);
    optimizer.Optimize(_bxh);

    // Hidden-side biases.
    optimizer.Optimize(_bhr);
    optimizer.Optimize(_bhz);
    optimizer.Optimize(_bhh);

    // Input-to-hidden weight matrices.
    optimizer.Optimize(_wxr);
    optimizer.Optimize(_wxz);
    optimizer.Optimize(_wxh);

    // Hidden-to-hidden (recurrent) weight matrices.
    optimizer.Optimize(_whr);
    optimizer.Optimize(_whz);
    optimizer.Optimize(_whh);
}
/// <summary>
/// Verifies that the optimizer drives a randomly initialized tensor to the
/// minimum of the square function (every element of SquareFunc near 0)
/// within 10000 steps.
/// </summary>
/// <param name="optimizer">Optimizer under test.</param>
public void TestOptimizer(OptimizerBase optimizer)
{
    Tensor input = new Tensor(new Shape(2, 2, 2, 2));
    input.FillWithRand(10);

    for (int i = 0; i < 10000; ++i)
    {
        optimizer.Step(new List<ParametersAndGradients>()
        {
            new ParametersAndGradients() { Parameters = input, Gradients = SquareFuncGradient(input) }
        }, 1);
    }

    var minimum = SquareFunc(input);
    for (int i = 0; i < input.Shape.Length; ++i)
    {
        // Fixed: Assert.AreEqual(expected, actual, delta) — the expected value (0)
        // must come first; the original passed the actual value as "expected",
        // which produces misleading failure messages.
        Assert.AreEqual(0, minimum.GetFlat(i), 1e-5);
    }
}
/// <summary>
/// Intentionally a no-op: this layer performs no parameter updates —
/// presumably it has no trainable weights (TODO confirm against the layer's fields).
/// </summary>
public override void Optimize(OptimizerBase <T> optimizer) { }
/// <summary>
/// Handles the Optimize button: resolves the optimizer for the selected language,
/// runs it against the uploaded config (cancellable via <c>TokenSource</c>),
/// stores the chart data, and reports the best cost/parameters and elapsed time.
/// </summary>
protected async Task OptimizeClick()
{
    await Wait.Show();
    _stopWatch.Reset();
    _stopWatch.Start();
    IterationResult result = null;
    try
    {
        OptimizerBase optimizer = null;
        if (MinimizeFunctionCode.Language == "javascript")
        {
            optimizer = (JavascriptOptimizer)ServiceProvider.GetService(typeof(JavascriptOptimizer));
        }
        else if (MinimizeFunctionCode.Language == "csharp")
        {
            optimizer = (CSharpThreadedOptimizer)ServiceProvider.GetService(typeof(CSharpThreadedOptimizer));
        }

        // Fixed: guard against an unsupported language, which previously fell through
        // to a NullReferenceException on optimizer.Initialize below.
        if (optimizer == null)
        {
            ToastService.ShowError("Unsupported language: " + MinimizeFunctionCode.Language);
            return;
        }

        if (_config == null)
        {
            ToastService.ShowError("No config was uploaded or created.");
            return;
        }

        var fitness = string.IsNullOrEmpty(_config.Fitness?.OptimizerTypeName)
            ? _config.FitnessTypeName
            : _config.Fitness.OptimizerTypeName;

        ActivityLogger.ResetLog();
        ActivityLogger.Add("Starting " + fitness);
        optimizer.Initialize(MinimizeFunctionCode.Code, ActivityLogger);

        TokenSource = new CancellationTokenSource();
        var task = Task.Run(() => optimizer.Start(_config, TokenSource.Token), TokenSource.Token);
        try
        {
            result = await task;
        }
        catch (TaskCanceledException)
        {
            CodeEditorBase.TokenSource = null;
            ToastService.ShowInfo("Optimization was cancelled.");
            TokenSource = null;
            return;
        }
        TokenSource = null;

        await JSRuntime.InvokeVoidAsync("ClientStorage.storeChartData", ActivityLogger.Log);
        ToastService.ShowSuccess("Chart data was saved.");
    }
    catch (Exception ex)
    {
        ToastService.ShowError(ex.Message);
        // Fixed: was "throw ex;", which resets the stack trace; "throw;" preserves it.
        // The redundant Wait.Hide() here was removed — the finally block always hides it.
        throw;
    }
    finally
    {
        await Wait.Hide();
    }

    _stopWatch.Stop();
    ToastService.ShowSuccess("Best Cost: " + result.Cost.ToString("N"));
    ToastService.ShowSuccess("Best Parameters: " + string.Join(",", result.ParameterSet.Select(s => s.ToString("N"))));

    ActivityLogger.Add("Best Cost: ", result.Cost);
    ActivityLogger.Add("Best Parameters: ", result.ParameterSet);
    ActivityLogger.Add("Total Time (s): ", _stopWatch.ElapsedMilliseconds / 1000);
}
/// <summary>
/// Forwards the optimizer to both sub-layers: the linear layer first,
/// then the activation layer.
/// </summary>
/// <param name="optimizer">Optimizer applied to each sub-layer's parameters.</param>
public override void Optimize(OptimizerBase<T> optimizer)
{
    _linearLayer.Optimize(optimizer);
    _activationLayer.Optimize(optimizer);
}
/// <summary>
/// Applies one optimization step to this layer's parameters.
/// Each concrete layer decides which of its weights the optimizer updates.
/// </summary>
/// <param name="optimizer">Optimizer used to update the layer's parameters.</param>
public abstract void Optimize(OptimizerBase <T> optimizer);
/// <summary>
/// Runs one optimizer step on this layer's two parameter tensors:
/// the weight matrix, then the bias.
/// </summary>
/// <param name="optimizer">Optimizer applied to the parameters.</param>
public override void Optimize(OptimizerBase<T> optimizer)
{
    optimizer.Optimize(_weights);
    optimizer.Optimize(_bias);
}