public void EvalManagedConstantNetworkTest()
{
    string modelDefinition = @"precision = ""float""
        traceLevel = 1
        run=NDLNetworkBuilder
        NDLNetworkBuilder=[
        v1 = Constant(1)
        v2 = Constant(2, tag=""output"")
        ol = Plus(v1, v2, tag=""output"")
        FeatureNodes = (v1)
        ]";

    using (var model = new ModelEvaluationExtendedF())
    {
        model.CreateNetwork(modelDefinition);

        VariableSchema outputSchema = model.GetOutputSchema();
        model.StartForwardEvaluation(outputSchema.Select(s => s.Name).ToList<string>());

        var outputBuffer = outputSchema.CreateBuffers<float>();
        var inputBuffer = new ValueBuffer<float>[0];

        // We can call the evaluate method and get back the results...
        model.ForwardPass(inputBuffer, outputBuffer);

        float[][] expected = { new float[] { 2 }, new float[] { 3 } };

        Assert.AreEqual(expected.Length, outputBuffer.Length);
        for (int idx = 0; idx < expected.Length; idx++)
        {
            CollectionAssert.AreEqual(expected[idx], outputBuffer[idx].Buffer);
        }
    }
}
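// A minimal sketch, not part of the original test: ForwardPass fills the output buffers
// in the same order as the output schema, so results can be paired with their node names.
// PrintNamedOutputs is a hypothetical helper; it assumes only the VariableSchema and
// ValueBuffer<float> types used above plus System.Linq.
private static void PrintNamedOutputs(VariableSchema outputSchema, ValueBuffer<float>[] outputBuffer)
{
    var named = outputSchema.Zip(outputBuffer, (v, b) => new { v.Name, b.Buffer });
    foreach (var o in named)
    {
        // For the constant network above this prints "v2: 2" and "ol: 3".
        Console.WriteLine("{0}: {1}", o.Name, string.Join(", ", o.Buffer));
    }
}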
public void EvalManagedRNNTest()
{
    string modelDefinition = @"deviceId = -1
        precision = ""float""
        traceLevel = 1
        run=NDLNetworkBuilder
        NDLNetworkBuilder = [
        LSTMComponent(inputDim, outputDim, cellDim, inputx, cellDimX2, cellDimX3, cellDimX4) = [
            wx = Parameter(cellDimX4, 0, init = ""uniform"", initValueScale = 1);
            b = Parameter(cellDimX4, 1, init = ""fixedValue"", value = 0.0);
            Wh = Parameter(cellDimX4, 0, init = ""uniform"", initValueScale = 1);

            Wci = Parameter(cellDim, init = ""uniform"", initValueScale = 1);
            Wcf = Parameter(cellDim, init = ""uniform"", initValueScale = 1);
            Wco = Parameter(cellDim, init = ""uniform"", initValueScale = 1);

            dh = PastValue(outputDim, output, timeStep = 1);
            dc = PastValue(cellDim, ct, timeStep = 1);

            wxx = Times(wx, inputx);
            wxxpb = Plus(wxx, b);
            whh = Times(wh, dh);
            wxxpbpwhh = Plus(wxxpb, whh)

            G1 = RowSlice(0, cellDim, wxxpbpwhh)
            G2 = RowSlice(cellDim, cellDim, wxxpbpwhh)
            G3 = RowSlice(cellDimX2, cellDim, wxxpbpwhh);
            G4 = RowSlice(cellDimX3, cellDim, wxxpbpwhh);

            Wcidc = DiagTimes(Wci, dc);
            it = Sigmoid(Plus(G1, Wcidc));
            bit = ElementTimes(it, Tanh(G2));

            Wcfdc = DiagTimes(Wcf, dc);
            ft = Sigmoid(Plus(G3, Wcfdc));
            bft = ElementTimes(ft, dc);

            ct = Plus(bft, bit);

            Wcoct = DiagTimes(Wco, ct);
            ot = Sigmoid(Plus(G4, Wcoct));
            mt = ElementTimes(ot, Tanh(ct));

            Wmr = Parameter(outputDim, cellDim, init = ""uniform"", initValueScale = 1);
            output = Times(Wmr, mt);
        ]
        i1 = Input(4)
        o1 = LSTMComponent(4, 4, 1, i1, 2, 3, 4)
        FeatureNodes = (i1)
        outputNodes = (o1)
        ]";

    using (var model = new ModelEvaluationExtendedF())
    {
        int featDim = 4;
        int labelDim = 4;

        model.CreateNetwork(modelDefinition);

        VariableSchema inputSchema = model.GetInputSchema();
        VariableSchema outputSchema = model.GetOutputSchema();
        model.StartForwardEvaluation(outputSchema.Select(s => s.Name).ToList<string>());

        // Allocate the output values layer
        var outputBuffer = outputSchema.CreateBuffers<float>();
        var inputBuffer = inputSchema.CreateBuffers<float>();
        for (var i = 0; i < featDim; i++)
        {
            inputBuffer[0].Buffer[i] = (float)i;
        }

        int scaler = 100000;
        var result = new int[labelDim];
        int[] expected = { 50, 10, 54, 55 };

        // the first pass with reset
        model.ForwardPass(inputBuffer, outputBuffer);
        for (var i = 0; i < labelDim; i++)
        {
            result[i] = (int)(outputBuffer[0].Buffer[i] * scaler);
        }
        CollectionAssert.AreEqual(expected, result);

        // the second pass with reset
        model.ForwardPass(inputBuffer, outputBuffer);
        for (var i = 0; i < labelDim; i++)
        {
            result[i] = (int)(outputBuffer[0].Buffer[i] * scaler);
        }
        CollectionAssert.AreEqual(expected, result);

        // another pass with reset
        model.ForwardPass(inputBuffer, outputBuffer, true);
        for (var i = 0; i < labelDim; i++)
        {
            result[i] = (int)(outputBuffer[0].Buffer[i] * scaler);
        }
        CollectionAssert.AreEqual(expected, result);

        // pass w/o reset
        model.ForwardPass(inputBuffer, outputBuffer, false);
        for (var i = 0; i < labelDim; i++)
        {
            result[i] = (int)(outputBuffer[0].Buffer[i] * scaler);
        }
        expected = new int[] { 13, 2, 14, 14 };
        CollectionAssert.AreEqual(expected, result);

        // another pass w/o reset
        model.ForwardPass(inputBuffer, outputBuffer, false);
        for (var i = 0; i < labelDim; i++)
        {
            result[i] = (int)(outputBuffer[0].Buffer[i] * scaler);
        }
        expected = new int[] { -4, 0, -4, -4 };
        CollectionAssert.AreEqual(expected, result);
    }
}
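// A minimal sketch, not part of the original test, of how the reset flag drives the
// recurrent state: ForwardPass(input, output) and ForwardPass(input, output, true)
// clear the PastValue history before evaluating (hence the first three passes above
// agree), while passing false carries the state over (hence the two no-reset passes
// return different values for the same input). EvaluateSequence is a hypothetical
// helper built only on the ForwardPass overloads used above.
private static float[] EvaluateSequence(ModelEvaluationExtendedF model,
    ValueBuffer<float>[][] steps, ValueBuffer<float>[] outputBuffer)
{
    for (int t = 0; t < steps.Length; t++)
    {
        // Reset the recurrent state only for the first step of the sequence.
        model.ForwardPass(steps[t], outputBuffer, t == 0);
    }

    // The buffers now hold the outputs for the final step.
    return outputBuffer[0].Buffer;
}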
/// <summary>
/// Evaluates an extended network (without a model file and without input) and obtains a single layer output
/// </summary>
private static void EvaluateExtendedNetworkSingleLayerNoInput()
{
    const string modelDefinition = @"precision = ""float""
        traceLevel = 1
        run=NDLNetworkBuilder
        NDLNetworkBuilder=[
        v1 = Constant(1)
        v2 = Constant(2, tag=""output"")
        ol = Plus(v1, v2, tag=""output"")
        FeatureNodes = (v1)
        ]";

    try
    {
        using (var model = new ModelEvaluationExtendedF())
        {
            // Create the network. It is defined inline above and consists of a single
            // binary operator (Plus) over two constants, so no input is necessary.
            model.CreateNetwork(modelDefinition);

            VariableSchema outputSchema = model.GetOutputSchema();
            var outputNodeNames = outputSchema.Select(s => s.Name).ToList<string>();
            model.StartForwardEvaluation(outputNodeNames);

            var outputBuffer = outputSchema.CreateBuffers<float>();
            var inputBuffer = new ValueBuffer<float>[0];

            // We can call the evaluate method and get back the results...
            model.ForwardPass(inputBuffer, outputBuffer);

            // We expect two outputs: the v2 constant, and the ol Plus result
            var expected = new float[][] { new float[] { 2 }, new float[] { 3 } };

            Console.WriteLine("Expected values: {0}", string.Join(" - ", expected.Select(b => string.Join(", ", b)).ToList<string>()));
            Console.WriteLine("Actual Values : {0}", string.Join(" - ", outputBuffer.Select(b => string.Join(", ", b.Buffer)).ToList<string>()));
        }
    }
    catch (CNTKException ex)
    {
        Console.WriteLine("Error: {0}\nNative CallStack: {1}\nInner Exception: {2}",
            ex.Message, ex.NativeCallStack, ex.InnerException != null ? ex.InnerException.Message : "No Inner Exception");
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error: {0}\nCallStack: {1}\nInner Exception: {2}",
            ex.Message, ex.StackTrace, ex.InnerException != null ? ex.InnerException.Message : "No Inner Exception");
    }
}
public void EvalManagedScalarTimesDualOutputTest()
{
    string modelDefinition = @"deviceId = -1
        precision = ""float""
        traceLevel = 1
        run=NDLNetworkBuilder
        NDLNetworkBuilder=[
        i1 = Input(1)
        i2 = Input(1)
        o1 = Times(Constant(3), i1, tag=""output"")
        o2 = Times(Constant(5), i1, tag=""output"")
        FeatureNodes = (i1)
        ]";

    using (var model = new ModelEvaluationExtendedF())
    {
        model.CreateNetwork(modelDefinition);

        VariableSchema outputSchema = model.GetOutputSchema();
        VariableSchema inputSchema = model.GetInputSchema();
        model.StartForwardEvaluation(outputSchema.Select(s => s.Name).ToList<string>());

        var outputBuffer = outputSchema.CreateBuffers<float>();
        var inputBuffer = inputSchema.CreateBuffers<float>();
        inputBuffer[0].Buffer[0] = 2;

        // We can call the evaluate method and get back the results...
        model.ForwardPass(inputBuffer, outputBuffer);

        float[][] expected = { new float[] { 6 }, new float[] { 10 } };

        Assert.AreEqual(expected.Length, outputBuffer.Length);
        for (int idx = 0; idx < expected.Length; idx++)
        {
            CollectionAssert.AreEqual(expected[idx], outputBuffer[idx].Buffer);
        }
    }
}
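// Note on the expected values above, derived from the model definition: with the single
// scalar input i1 = 2, the two "output"-tagged nodes evaluate to o1 = 3 * 2 = 6 and
// o2 = 5 * 2 = 10, and ForwardPass fills the buffers in output-schema order. The declared
// node i2 is not referenced by either output and is not listed in FeatureNodes.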
public void EvalManagedSparseTimesTest()
{
    string modelDefinition = @"deviceId = -1
        precision = ""float""
        traceLevel = 1
        run=NDLNetworkBuilder
        NDLNetworkBuilder=[
        i1 = SparseInput(3)
        o1 = Times(Constant(2, rows=1, cols=3), i1, tag=""output"")
        FeatureNodes = (i1)
        ]";

    using (var model = new ModelEvaluationExtendedF())
    {
        model.CreateNetwork(modelDefinition);

        VariableSchema outputSchema = model.GetOutputSchema();
        model.StartForwardEvaluation(outputSchema.Select(s => s.Name).ToList<string>());

        var outputBuffer = new[]
        {
            new ValueBuffer<float>()
            {
                Buffer = new float[3],
                Size = 3
            }
        };

        var inputBuffer = new[]
        {
            new ValueBuffer<float>()
            {
                Buffer = new float[] { 1, 2, 3, 5, 6 },
                Indices = new[] { 0, 2, 2, 1, 2 },
                ColIndices = new[] { 0, 2, 2, 5 },
                Size = 4
            }
        };

        // We can call the evaluate method and get back the results...
        model.ForwardPass(inputBuffer, outputBuffer);

        float[][] expected = { new float[] { 6, 0, 28 } };

        Assert.AreEqual(expected.Length, outputBuffer.Length);
        for (int idx = 0; idx < expected.Length; idx++)
        {
            CollectionAssert.AreEqual(expected[idx], outputBuffer[idx].Buffer);
        }
    }
}
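// Worked example, derived from the test data above: the input ValueBuffer describes the
// sparse samples in CSC (compressed sparse column) form. ColIndices { 0, 2, 2, 5 } marks
// where each column's nonzeros begin and end within Buffer/Indices, and here Size (4)
// matches the length of ColIndices:
//   column 0 -> values { 1, 2 } at rows { 0, 2 }
//   column 1 -> empty
//   column 2 -> values { 3, 5, 6 } at rows { 2, 1, 2 }
// Each column is one sample, and Times with the 1x3 all-twos constant doubles each
// column's sum: 2*(1+2) = 6, 2*0 = 0, 2*(3+5+6) = 28, matching the expected { 6, 0, 28 }.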