public void LogisticRegression()
{
    // Two-feature, single-example logistic regression: one Dense(1, sigmoid) layer,
    // fixed weight initialization, and one epoch of gradient descent on cross-entropy.
    var X = Matrix<double>.Build.DenseOfArray(new double[,] { { 1 }, { 2 } });
    var Y = Matrix<double>.Build.DenseOfArray(new double[,] { { 1 } });

    var model = new DeepCat.DeepCat();
    model.Add(new Dense(1, Activations.Sigmoid(), weightInitializer: Initializations.Fixed()));
    model.Compile(X.RowCount, LossFunctions.CrossEntropy(), Optimizers.GradientDescent(0.02));
    model.Fit(X, Y, 1);

    var a = model.Predict(X);
    a[0, 0] = Math.Round(a[0, 0], 8);

    var expectedResult = Matrix<double>.Build.DenseOfArray(new double[,] { { 0.59859297 } });
    Assert.AreEqual(expectedResult, a);
}
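// A minimal sketch (not part of the library) that reproduces the expected value in the test above
// by hand. It assumes Fixed() fills the 1x2 weight row with { 0.11, 0.12 } as in
// TestFixedInitialization, that the bias starts at zero, and that Fit performs one
// gradient-descent step on cross-entropy with learning rate 0.02.
static void VerifyLogisticRegressionByHand()
{
    double w1 = 0.11, w2 = 0.12, b = 0.0;   // assumed initial parameters
    double x1 = 1.0, x2 = 2.0, y = 1.0;     // the single training example from the test
    const double lr = 0.02;

    // Forward pass: z = w·x + b, a = sigmoid(z) ≈ 0.5866
    double z = w1 * x1 + w2 * x2 + b;
    double a = 1.0 / (1.0 + Math.Exp(-z));

    // Backward pass for sigmoid + cross-entropy: dz = a - y, then one parameter update
    double dz = a - y;
    w1 -= lr * dz * x1;
    w2 -= lr * dz * x2;
    b  -= lr * dz;

    // Prediction after the update ≈ 0.59859297, consistent with the assertion above
    double zNew = w1 * x1 + w2 * x2 + b;
    Console.WriteLine(Math.Round(1.0 / (1.0 + Math.Exp(-zNew)), 8));
}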
/// <summary>
/// Create a fully connected layer with the given number of units and activation.
/// Weight and bias initializers default to <see cref="Initializations.Zero"/> when not supplied.
/// </summary>
public Dense(int units, IActivation activation, bool useBias = true,
    IInitialization weightInitializer = null, IInitialization biasInitializer = null)
{
    LayerSize = units;
    _activation = activation;
    _weightInitializer = weightInitializer ?? Initializations.Zero();
    _biasInitializer = biasInitializer ?? Initializations.Zero();
    _useBias = useBias;
}
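// Usage sketch for the constructor above (illustrative values only): omitting the optional
// initializers falls back to Initializations.Zero() for both weights and bias, while
// useBias: false drops the bias term.
var zeroInitLayer   = new Dense(4, Activations.Relu());
var randomInitLayer = new Dense(4, Activations.Relu(), weightInitializer: Initializations.RandomNormal());
var noBiasLayer     = new Dense(1, Activations.Sigmoid(), useBias: false);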
public void TestZeroInitialization()
{
    var initialization = Initializations.Zero();
    var initializedMatrix = initialization.Initialize(2, 2);

    // Matrix<double>.Build.Dense(2, 2) is all zeros, which is what the Zero initializer should produce.
    var expectedMatrix = Matrix<double>.Build.Dense(2, 2);
    Assert.AreEqual(expectedMatrix, initializedMatrix);
}
public void TestFixedInitialization()
{
    var initialization = Initializations.Fixed();
    var initializedMatrix = initialization.Initialize(2, 2);

    // The fixed initializer is expected to produce deterministic, position-dependent values.
    var expectedMatrix = Matrix<double>.Build.DenseOfArray(new double[,] { { 0.11, 0.12 }, { 0.21, 0.22 } });
    Assert.AreEqual(expectedMatrix, initializedMatrix);
}
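// A minimal sketch (an assumption inferred from the two tests above, not the library's actual code)
// of a fixed initializer consistent with the expected matrix: each entry encodes its 1-based
// row/column position as 0.1 * row + 0.01 * column, giving 0.11, 0.12, 0.21, 0.22 for a 2x2 matrix.
// A Zero() counterpart would simply return Matrix<double>.Build.Dense(rows, columns).
// Uses MathNet.Numerics.LinearAlgebra, as elsewhere in these snippets.
public class FixedInitializationSketch // hypothetical name for illustration only
{
    public Matrix<double> Initialize(int rows, int columns)
    {
        return Matrix<double>.Build.Dense(rows, columns,
            (i, j) => 0.1 * (i + 1) + 0.01 * (j + 1));
    }
}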
/// <summary>
/// Create a <see cref="For"/> with the specified initialization, conditional, and iteration expressions.
/// </summary>
public For(Expression initialization, Expression conditional, Expression iteration)
{
    if (initialization != null)
    {
        Initializations.Add(initialization);
    }
    Conditional = conditional;
    if (iteration != null)
    {
        Iterations.Add(iteration);
    }
}
/// <summary>
/// Create a <see cref="For"/> with the specified initialization, conditional, and iteration
/// expressions, and the specified <see cref="CodeObject"/> in the body.
/// </summary>
public For(Expression initialization, Expression conditional, Expression iteration, CodeObject body)
    : base(body, true)
{
    if (initialization != null)
    {
        Initializations.Add(initialization);
    }
    Conditional = conditional;
    if (iteration != null)
    {
        Iterations.Add(iteration);
    }
}
public void TestRandomNormalInitialization()
{
    var initialization = Initializations.RandomNormal();
    initialization.SetSeed(0);
    var initializedMatrix = initialization.Initialize(2, 2);

    // With the same seed, the initializer should reproduce MathNet's normally distributed random matrix.
    var expectedMatrix = Matrix<double>.Build.Random(2, 2, new Normal(new Random(0)));

    Console.WriteLine(initializedMatrix);
    Console.WriteLine(expectedMatrix);
    Assert.AreEqual(expectedMatrix, initializedMatrix);
}
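// A minimal sketch (an assumption inferred from the test above, not the library's actual code) of a
// seedable normal initializer: SetSeed stores the seed, and Initialize draws from MathNet's Normal
// distribution using a System.Random built from that seed, mirroring how expectedMatrix is constructed.
// Uses MathNet.Numerics.LinearAlgebra and MathNet.Numerics.Distributions, as in the test.
public class RandomNormalInitializationSketch // hypothetical name for illustration only
{
    private int _seed = Environment.TickCount;

    public void SetSeed(int seed) => _seed = seed;

    public Matrix<double> Initialize(int rows, int columns)
    {
        return Matrix<double>.Build.Random(rows, columns, new Normal(new Random(_seed)));
    }
}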
static void Main(string[] args)
{
    // Toy training run: 100 random 5-feature examples with random labels,
    // a 5-5-1 network, and 100 epochs of gradient descent on cross-entropy.
    var X = Matrix<double>.Build.Random(5, 100);
    var Y = Matrix<double>.Build.Random(1, 100);
    var test = Matrix<double>.Build.Random(5, 1);

    var model = new DeepCat();
    model.Add(new Dense(5, Activations.Relu(), weightInitializer: Initializations.RandomNormal()));
    model.Add(new Dense(5, Activations.Relu(), weightInitializer: Initializations.RandomNormal()));
    model.Add(new Dense(1, Activations.Sigmoid()));
    model.Compile(X.RowCount, LossFunctions.CrossEntropy(), Optimizers.GradientDescent(0.002));
    model.Fit(X, Y, 100);

    model.Predict(test);
    var x = 1;
}
public void Initialize(VirtualDataWindowFactoryContext factoryContext)
{
    Initializations.Add(factoryContext);
}