/// <summary>
/// Verifies that compiling a dense layer yields an output shape of
/// [-1, units]: an unbound batch dimension followed by the unit count.
/// </summary>
public void OutputShapeIsEqualToKernelSize()
{
    // Arrange: a 10-feature input feeding a 2-unit dense layer.
    var graph = new TFGraph();
    var inputLayer = new Input(new[] { 10L });
    inputLayer.Compile(new ModelCompilationContext(graph));

    var denseLayer = new Dense(2, inputLayer, name: "Dense0");

    // Act
    denseLayer.Compile(new ModelCompilationContext(graph));

    // Assert: batch dimension is left open (-1), second dimension equals the unit count.
    denseLayer.OutputShape.Should().BeEquivalentTo(new long[] { -1, 2 });
}
/// <summary>
/// Verifies that compiling a dense layer populates its configuration:
/// two initializers, two trainable parameters (weights + bias), and a
/// non-null output node.
/// </summary>
public void CreatesLayerConfigurationDuringCompilation()
{
    // Arrange: dense layer wired to a 10-feature input.
    var graph = new TFGraph();
    var inputLayer = new Input(new[] { 10L });
    var denseLayer = new Dense(2, inputLayer, name: "Dense0");

    // Act
    denseLayer.Compile(new ModelCompilationContext(graph));

    // Assert: one initializer and one parameter each for weights and bias.
    var configuration = denseLayer.Configuration;
    configuration.Initializers.Count().Should().Be(2);
    configuration.Parameters.Count().Should().Be(2);
    configuration.Output.Should().NotBeNull();
}
/// <summary>
/// Verifies that gradients of a negative-log-likelihood loss can be derived
/// with respect to the parameters of a compiled dense layer.
/// </summary>
public void CanBeOptimized()
{
    // Arrange: a 10-feature input into a 2-unit dense layer.
    // NOTE(review): the input layer is not compiled explicitly here —
    // presumably Dense.Compile compiles its upstream layers; confirm.
    var graph = new TFGraph();
    var context = new ModelCompilationContext(graph);

    var input = new Input(new long[] { 10 }, name: "Input0");
    var output = new Dense(2, input, name: "Dense0");

    var compiledOutput = output.Compile(context);

    var loss = new NegativeLogLikelihood();
    var compiledLoss = loss.Compile(context, compiledOutput,
        context.Graph.Placeholder(TFDataType.Double, new TFShape(-1, 2)));

    // Act: derive gradients of the loss with respect to every trainable parameter.
    var gradients = graph.AddGradients(
        new[] { compiledLoss },
        context.Parameters.ToArray());

    // Assert: previously the result was discarded, so the test passed trivially.
    // Expect one gradient per trainable parameter.
    gradients.Should().NotBeNull();
    gradients.Length.Should().Be(context.Parameters.Count());
}
/// <summary>
/// Verifies that gradients of a categorical cross-entropy loss can be derived
/// with respect to the parameters of a compiled dense layer.
/// </summary>
public void ShouldBeOptimizable()
{
    // Arrange: a 10-feature input into a 2-unit dense layer.
    var graph = new TFGraph();
    var context = new ModelCompilationContext(graph);

    var input = new Input(new long[] { 10 }, name: "Input0");
    var output = new Dense(2, input, name: "Dense0");

    // Compile the input for its registration side effects; the returned
    // operation itself is not needed (previously held in an unused local).
    input.Compile(context);
    var compiledOutput = output.Compile(context);

    var loss = new CategoricalCrossEntropy();
    var compiledLoss = loss.Compile(context, compiledOutput,
        context.Graph.Placeholder(TFDataType.Double, new TFShape(-1, 2)));

    // Act: derive gradients of the loss with respect to every trainable parameter.
    var gradients = graph.AddGradients(
        new[] { compiledLoss },
        context.Parameters.ToArray());

    // Assert: previously the result was discarded, so the test passed trivially.
    // Expect one gradient per trainable parameter.
    gradients.Should().NotBeNull();
    gradients.Length.Should().Be(context.Parameters.Count());
}