/// <summary>
/// Requests a stop of the whole experiment: every optimizer that is currently
/// Started or Paused is told to stop. Allowed only while the experiment itself
/// is Started or Paused.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown when called in any other execution state.</exception>
public void Stop() {
  if ((ExecutionState != ExecutionState.Started) && (ExecutionState != ExecutionState.Paused)) {
    throw new InvalidOperationException(string.Format("Stop not allowed in execution state \"{0}\".", ExecutionState));
  }
  if (Optimizers.Count == 0) return;

  experimentStarted = false;
  experimentStopped = true;

  // Predicate shared by the emptiness check and the iteration below.
  Func<IOptimizer, bool> isActive =
      o => (o.ExecutionState == ExecutionState.Started) || (o.ExecutionState == ExecutionState.Paused);

  if (!Optimizers.Any(isActive)) {
    // Nothing is running or paused, so the experiment is already effectively stopped.
    OnStopped();
    return;
  }

  foreach (var active in Optimizers.Where(isActive)) {
    // A race condition may occur when the optimizer has changed its state by itself in the meantime.
    try {
      active.Stop();
    } catch (InvalidOperationException) { }
  }
}
// Handler invoked whenever a child optimizer reaches the Stopped state.
// Under the shared lock it decides, from the combined optimizer states, whether
// the experiment is finished, should advance to the next optimizer, or is paused.
private void optimizer_Stopped(object sender, EventArgs e) {
  lock (locker) {
    if (experimentStopped) {
      // A stop was requested (see Stop()): the experiment counts as stopped as soon
      // as no optimizer is active anymore; remaining Prepared optimizers are not started.
      if (Optimizers.All(x => (x.ExecutionState == ExecutionState.Stopped) || (x.ExecutionState == ExecutionState.Prepared))) {
        OnStopped();
      }
    } else {
      if (experimentStarted && Optimizers.Any(x => (x.ExecutionState == ExecutionState.Prepared) || (x.ExecutionState == ExecutionState.Paused))) {
        // Batch execution is still in progress: start the first optimizer that can run next.
        Optimizers.First(x => (x.ExecutionState == ExecutionState.Prepared) || (x.ExecutionState == ExecutionState.Paused)).Start();
      } else if (Optimizers.All(x => x.ExecutionState == ExecutionState.Stopped)) {
        // Every optimizer finished on its own => the experiment is done.
        OnStopped();
      } else if (Optimizers.Any(x => (x.ExecutionState == ExecutionState.Prepared) || (x.ExecutionState == ExecutionState.Paused)) && Optimizers.All(o => o.ExecutionState != ExecutionState.Started)) {
        // Startable optimizers remain but nothing is running and the experiment
        // was not (re)started => report the experiment as paused.
        OnPaused();
      }
    }
  }
}
/// <summary>
/// Trains a single sigmoid unit (fixed weight initialization, gradient descent,
/// one epoch) on a 2x1 input and verifies the prediction against the expected
/// value rounded to 8 decimal places.
/// </summary>
public void LogisticRegression() {
  var X = Matrix<double>.Build.DenseOfArray(new double[,] { { 1 }, { 2 } });
  var Y = Matrix<double>.Build.DenseOfArray(new double[,] { { 1 } });

  var model = new DeepCat.DeepCat();
  model.Add(new Dense(1, Activations.Sigmoid(), weightInitializer: Initializations.Fixed()));
  model.Compile(X.RowCount, LossFunctions.CrossEntropy(), Optimizers.GradientDescent(0.02));
  model.Fit(X, Y, 1);

  var a = model.Predict(X);
  // Round so the comparison is stable against floating-point noise.
  a[0, 0] = Math.Round(a[0, 0], 8);

  var expectedResult = Matrix<double>.Build.DenseOfArray(new double[,] { { 0.59859297 } });
  // Assert.AreEqual takes (expected, actual); the original call had the arguments
  // swapped, which makes failure messages report the values the wrong way around.
  Assert.AreEqual(expectedResult, a);
}
/// <summary>
/// Resets the experiment and all optimizers that are not currently running
/// back to the Prepared state, optionally clearing collected runs.
/// </summary>
/// <param name="clearRuns">When true, previously collected runs are discarded.</param>
/// <exception cref="InvalidOperationException">Thrown when the experiment is Started.</exception>
public void Prepare(bool clearRuns) {
  bool prepareAllowed =
      (ExecutionState == ExecutionState.Prepared) ||
      (ExecutionState == ExecutionState.Paused) ||
      (ExecutionState == ExecutionState.Stopped);
  if (!prepareAllowed) {
    throw new InvalidOperationException(string.Format("Prepare not allowed in execution state \"{0}\".", ExecutionState));
  }
  if (Optimizers.Count == 0) return;

  if (clearRuns) runs.Clear();

  experimentStarted = false;
  experimentStopped = false;

  // Started optimizers are skipped; preparing them would throw anyway.
  foreach (IOptimizer opt in Optimizers.Where(x => x.ExecutionState != ExecutionState.Started)) {
    // A race condition may occur when the optimizer has changed its state by itself in the meantime.
    try {
      opt.Prepare(clearRuns);
    } catch (InvalidOperationException) { }
  }
}
// Handler for a child optimizer's Prepared event: once every optimizer reports
// Prepared, the whole experiment is considered Prepared.
private void optimizer_Prepared(object sender, EventArgs e) {
  lock (locker) {
    bool allPrepared = Optimizers.All(opt => opt.ExecutionState == ExecutionState.Prepared);
    if (allPrepared) {
      OnPrepared();
    }
  }
}
// Handler for a child optimizer's Paused event: the experiment counts as paused
// as soon as no optimizer is executing anymore.
private void optimizer_Paused(object sender, EventArgs e) {
  lock (locker) {
    bool anyRunning = Optimizers.Any(opt => opt.ExecutionState == ExecutionState.Started);
    if (!anyRunning) {
      OnPaused();
    }
  }
}
// Derives the experiment's execution state from the states of its optimizers.
// Basic rules, evaluated in order:
//   1. experiment running and further startable optimizers exist => keep running
//   2. any optimizer Started                                     => keep running
//   3. any optimizer Paused                                      => Paused
//   4. stop pending (even with Prepared optimizers left)         => Stopped
//   5. any optimizer Prepared                                    => Prepared
//   6. otherwise (all Stopped)                                   => Stopped
private void UpdateExecutionState() {
  lock (locker) {
    // Rule 1: the experiment is in progress and can still start more optimizers.
    if (experimentStarted && StartableOptimizers.Any()) return;

    // Rule 2: some optimizer is still executing.
    if (Optimizers.Any(o => o.ExecutionState == ExecutionState.Started)) return;

    experimentStarted = false;

    if (Optimizers.Any(o => o.ExecutionState == ExecutionState.Paused)) {
      // Rule 3.
      OnPaused();
    } else if (experimentStopped) {
      // Rule 4: stop pending; remaining optimizers are stopped or prepared.
      OnStopped();
    } else if (Optimizers.Any(o => o.ExecutionState == ExecutionState.Prepared)) {
      // Rule 5.
      OnPrepared();
    } else {
      // Rule 6: every optimizer has stopped.
      OnStopped();
    }
  }
}
// Recomputes the experiment's total execution time as the sum over all optimizers.
// Waits at most 100 ms for the lock; if it cannot be acquired in time the update
// is simply skipped so frequent time ticks never block.
private void optimizer_ExecutionTimeChanged(object sender, EventArgs e) {
  if (!Monitor.TryEnter(locker, 100)) return;
  try {
    var total = TimeSpan.Zero;
    foreach (var optimizer in Optimizers) {
      total += optimizer.ExecutionTime;
    }
    ExecutionTime = total;
  } finally {
    Monitor.Exit(locker);
  }
}
/// <summary>
/// Starts (or resumes) the experiment by starting the first optimizer that is
/// Prepared or Paused; subsequent optimizers are chained via the Stopped handler.
/// </summary>
/// <exception cref="InvalidOperationException">Thrown unless the experiment is Prepared or Paused.</exception>
public void Start() {
  if ((ExecutionState != ExecutionState.Prepared) && (ExecutionState != ExecutionState.Paused)) {
    throw new InvalidOperationException(string.Format("Start not allowed in execution state \"{0}\".", ExecutionState));
  }
  if (Optimizers.Count == 0) return;

  experimentStarted = true;
  experimentStopped = false;

  var next = Optimizers.FirstOrDefault(
      o => (o.ExecutionState == ExecutionState.Prepared) || (o.ExecutionState == ExecutionState.Paused));
  if (next == null) return;

  // A race condition may occur when the optimizer has changed its state by itself in the meantime.
  try {
    next.Start();
  } catch (InvalidOperationException) { }
}
// Smoke run: builds a 3-layer network (5-5-1, ReLU/ReLU/sigmoid), fits it for
// 100 epochs on random data and runs a single prediction.
static void Main(string[] args) {
  var X = Matrix<double>.Build.Random(5, 100);
  var Y = Matrix<double>.Build.Random(1, 100);
  var test = Matrix<double>.Build.Random(5, 1);

  var model = new DeepCat();
  model.Add(new Dense(5, Activations.Relu(), weightInitializer: Initializations.RandomNormal()));
  model.Add(new Dense(5, Activations.Relu(), weightInitializer: Initializations.RandomNormal()));
  model.Add(new Dense(1, Activations.Sigmoid()));
  model.Compile(X.RowCount, LossFunctions.CrossEntropy(), Optimizers.GradientDescent(0.002));

  model.Fit(X, Y, 100);
  // Result intentionally discarded; this is only a smoke run. The original
  // trailing `var x = 1;` was an unused local (CS0219) and has been removed.
  model.Predict(test);
}
/// <summary>Removes the given optimizer from the experiment's optimizer collection.</summary>
public void Remove(Optimizer optimizer) => Optimizers.Remove(optimizer);
/// <summary>Adds the given optimizer to the experiment's optimizer collection.</summary>
public void Add(Optimizer optimizer) => Optimizers.Add(optimizer);