/// <summary>
/// Smoke test for the backend + optimizer + graph export path: builds a tiny
/// linear model (y = x · w) with an MSE loss directly against the backend API,
/// trains it with SGD for 10 steps while printing the loss and optimizer
/// weights, then exports the resulting graph definition to disk.
/// </summary>
public void TestBasicBackendAndOptimizerAndExportGraph()
{
    // --- Build the model: y = x · w with a mean-squared-error loss. ---
    var input = K.placeholder(new int?[] { -1, 3 });   // batch of 3-feature rows
    var target = K.placeholder(new int?[] { -1, 1 });  // batch of scalar targets
    var weight = K.variable((new Constant(1)).Call(new int[] { 3, 1 }, DataType.Float));

    var output = K.dot(input, weight);
    output = K.reshape(output, new int[] { -1 });
    target = K.reshape(target, new int[] { -1 });

    var lossM = new MeanSquareError();
    var loss = lossM.Call(target, output);
    // Multiply by a constant 1 to also exercise scalar-tensor arithmetic.
    loss = K.constant(1.0f) * loss;

    // --- Wire up training: SGD updates over the single trainable weight. ---
    var weights = new List<Tensor> { weight };
    var optimizer = new SGD();
    var updates = optimizer.get_updates(weights, new Dictionary<Tensor, IWeightConstraint>(), loss);

    var inputs = new List<Tensor> { input, target };
    var outputs = new List<Tensor> { loss };
    var function = K.function(inputs, outputs, updates, "Train");

    // Fixed training batch: two samples — inputs flattened 2x3, targets 2x1.
    var inputData = new List<Array>
    {
        new float[] { 1.2f, 3.3f, 4.3f, 5, 5, 5 },
        new float[] { 2, 10 },
    };

    for (int i = 0; i < 10; ++i)
    {
        var functionResult = function.Call(inputData);
        float resultLoss = (float)functionResult[0].eval();
        print(resultLoss);

        foreach (var w in optimizer.get_weights())
        {
            // Optimizer state arrays may be rank-1 (float[]) or rank-2;
            // GetValue needs the matching number of indices. The rank is
            // invariant per array, so test it once outside the element loop.
            bool isRank1 = w is float[];
            var toPrint = new System.Text.StringBuilder();
            for (int j = 0; j < w.Length; ++j)
            {
                toPrint.Append(' ');
                toPrint.Append(isRank1 ? w.GetValue(j) : w.GetValue(j, 0));
            }
            print("Weight:" + toPrint);
        }
    }

    // Persist the trained graph so it can be inspected/reloaded outside Unity.
    ((UnityTFBackend)K).ExportGraphDef("SavedGraph/test.pb");
}