Function Discriminator(Function input, Func<CNTKDictionary> weightInit, CNTKDictionary biasInit,
    DeviceDescriptor device, DataType dataType)
{
    var discriminatorNetwork = input
        .Reshape(NDShape.CreateNDShape(new int[] { 28, 28, 1 }))
        .Conv2D((5, 5), 1, (2, 2), Padding.None, weightInit(), biasInit, device, dataType)
        .BatchNorm(BatchNorm.Spatial, device, dataType)
        .LeakyReLU(0.2)
        .Conv2D((5, 5), 64, (2, 2), Padding.None, weightInit(), biasInit, device, dataType)
        .BatchNorm(BatchNorm.Spatial, device, dataType)
        .LeakyReLU(0.2)
        .Dense(1024, weightInit(), biasInit, device, dataType)
        .BatchNorm(BatchNorm.Regular, device, dataType)
        .LeakyReLU(0.2)
        .Dense(1, weightInit(), biasInit, device, dataType)
        .Sigmoid();

    Trace.Write(Model.Summary(discriminatorNetwork));

    return discriminatorNetwork;
}
public static Function ConvTranspose1D(this Function input,
    int filterShape, int filterCount, int filterStride, Padding padding, int outputShape,
    CNTKDictionary weightInitializer, CNTKDictionary biasInitializer,
    DeviceDescriptor device, DataType dataType)
{
    // Notice that the order of the filter arguments
    // is different compared to conventional convolution.
    var filterSizes = new int[]
    {
        filterShape,
        filterCount,
        NDShape.InferredDimension // Infer number of channels in input.
    };

    var filterStrides = new int[] { filterStride };

    var outputSizes = new int[]
    {
        outputShape,
        filterCount,
    };

    return ConvTranspose(input, filterSizes, filterCount, filterStrides,
        padding, outputSizes, weightInitializer, biasInitializer, device, dataType);
}
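A minimal usage sketch for the 1D transpose convolution above, assuming a (50, 16) input being upsampled to 100 steps; all sizes, the initializers, and the implicit Variable-to-Function conversion from the C# bindings are illustrative assumptions, not values from the original.

// Hedged sketch: upsample a 50-step, 16-channel sequence to 100 steps
// using the ConvTranspose1D extension above.
var device = DeviceDescriptor.CPUDevice;
Function sequence = Variable.InputVariable(NDShape.CreateNDShape(new int[] { 50, 16 }), DataType.Float);
var upsampled = sequence.ConvTranspose1D(5, 16, 2, Padding.Zeros, 100,
    CNTKLib.GlorotUniformInitializer(), CNTKLib.ConstantInitializer(0),
    device, DataType.Float);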
public Parameter(NDShape shape, DataType dataType, CNTKDictionary initializer, DeviceDescriptor device)
    : this(CNTKLibPINVOKE.new_Parameter__SWIG_9(NDShape.getCPtr(shape), (int)dataType,
        CNTKDictionary.getCPtr(initializer), DeviceDescriptor.getCPtr(device)), true)
{
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
}
public static Function Conv2D(this Function input,
    ValueTuple<int, int> filterShape, int filterCount, ValueTuple<int, int> filterStride,
    Padding padding, CNTKDictionary weightInitializer, CNTKDictionary biasInitializer,
    DeviceDescriptor device, DataType dataType)
{
    var filterSizes = new int[]
    {
        filterShape.Item1,
        filterShape.Item2,
        NDShape.InferredDimension, // Infer number of channels in input.
        filterCount
    };

    var filterStrides = new int[]
    {
        filterStride.Item1,
        filterStride.Item2,
    };

    return Conv(input, filterSizes, filterCount, filterStrides,
        padding, weightInitializer, biasInitializer, device, dataType);
}
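A minimal usage sketch for the Conv2D extension above on a 28x28 grayscale input; the filter count (32), stride, and initializers are illustrative assumptions rather than values from the original.

// Hedged sketch: a single strided convolution over a 28x28x1 image.
var device = DeviceDescriptor.CPUDevice;
Function image = Variable.InputVariable(NDShape.CreateNDShape(new int[] { 28, 28, 1 }), DataType.Float);
var conv = image.Conv2D((5, 5), 32, (2, 2), Padding.Zeros,
    CNTKLib.GlorotUniformInitializer(), CNTKLib.ConstantInitializer(0),
    device, DataType.Float);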
public CNTKDictionary(CNTKDictionary arg0)
    : this(CNTKLibPINVOKE.new_CNTKDictionary__SWIG_1(CNTKDictionary.getCPtr(arg0)), true)
{
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
}
static Function CreateOptimizedRNNStack(Function input, string recurrentOperator,
    int units, int layerCount, CNTKDictionary weightInitializer, bool bidirectional,
    DeviceDescriptor device, DataType dataType, string name)
{
    if (device.Type != DeviceKind.GPU)
    {
        throw new NotSupportedException($"OptimizedRNNStack only supports GPU. Device was: {device.Type}");
    }

    // TODO: Investigate initialization:
    // All weights are contained in a single matrix that should have hiddenDims rows
    // and as many columns as needed to hold all parameters. Since this can be cumbersome to determine,
    // you can have the dimension inferred automatically.
    // To make sure that random initialization uses the correct fan-in, specify initOutputRank=-1:
    var weightShape = new int[] { units, NDShape.InferredDimension };
    var weights = new Parameter(weightShape, dataType, weightInitializer, device);

    return CNTKLib.OptimizedRNNStack(input, weights, (uint)units, (uint)layerCount,
        bidirectional, recurrentOperator, name);
}
public void SaveCheckpoint(string filePath, CNTKDictionary externalState)
{
    CNTKLibPINVOKE.Trainer_SaveCheckpoint__SWIG_0(swigCPtr, filePath, CNTKDictionary.getCPtr(externalState));
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
}
public void Add(CNTKDictionary other)
{
    CNTKLibPINVOKE.CNTKDictionary_Add__SWIG_0(swigCPtr, CNTKDictionary.getCPtr(other));
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
}
public virtual void RestoreFromCheckpoint(CNTKDictionary arg0)
{
    CNTKLibPINVOKE.Learner_RestoreFromCheckpoint(swigCPtr, CNTKDictionary.getCPtr(arg0));
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
}
/// <summary>
/// Based on Dense from: https://github.com/Microsoft/CNTK/blob/master/bindings/python/cntk/layers/layers.py
/// </summary>
public static Function Dense(this Function input,
    int units, CNTKDictionary weightInitializer, CNTKDictionary biasInitializer,
    DeviceDescriptor device, DataType dataType,
    int inputRank = 0, int mapRank = 0)
{
    if (inputRank != 0 && mapRank != 0)
    {
        throw new ArgumentException("Dense: inputRank and mapRank cannot be specified at the same time.");
    }

    var outputShape = NDShape.CreateNDShape(new int[] { units });
    var outputRank = outputShape.Dimensions.Count;

    var inputRanks = (inputRank != 0) ? inputRank : 1;
    var dimensions = Enumerable.Range(0, inputRanks)
        .Select(v => NDShape.InferredDimension).ToArray(); // Infer all dimensions.
    var inputShape = NDShape.CreateNDShape(dimensions);

    int inferInputRankToMap;
    if (inputRank != 0)
    {
        inferInputRankToMap = -1; // Means map_rank is not specified; input_rank rules.
    }
    else if (mapRank == 0)
    {
        inferInputRankToMap = 0; // Neither given: default to 'infer W to use all input dims'.
    }
    else
    {
        inferInputRankToMap = mapRank; // Infer W to use all input dims except the first static 'map_rank' ones.
    }

    var weightsDimensions = outputShape.Dimensions.ToList();
    weightsDimensions.AddRange(inputShape.Dimensions);
    var weightsShape = NDShape.CreateNDShape(weightsDimensions);

    var weights = new Parameter(weightsShape, dataType, weightInitializer, device, "w");

    // Weights and input are in reversed order compared to the original Python code.
    // The same goes for the dimensions. This is because the Python API reverses the dimensions internally.
    // The Python API was made this way to be similar to other deep learning toolkits.
    // The C# and C++ APIs share the same column-major layout.
    var r = CNTKLib.Times(weights, input, (uint)outputRank, inferInputRankToMap);

    if (biasInitializer != null)
    {
        var biasParameter = new Parameter(outputShape, dataType, biasInitializer, device, "b");
        r = r + biasParameter;
    }

    return r;
}
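A minimal usage sketch for the Dense extension above, chained with the ReLU extension seen in the GAN examples further down; the layer sizes and initializers are illustrative assumptions, as is the Function-typed local that relies on the bindings' implicit Variable-to-Function conversion.

// Hedged sketch: a small two-layer head built with the Dense extension above.
var device = DeviceDescriptor.CPUDevice;
Function features = Variable.InputVariable(NDShape.CreateNDShape(new int[] { 784 }), DataType.Float);
var network = features
    .Dense(128, CNTKLib.GlorotUniformInitializer(), CNTKLib.ConstantInitializer(0), device, DataType.Float)
    .ReLU()
    .Dense(10, CNTKLib.GlorotUniformInitializer(), CNTKLib.ConstantInitializer(0), device, DataType.Float);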
internal static Function Conv(this Function input,
    int[] filterShape, int filterCount, int[] filterStride,
    Padding padding, // TODO: Consider if padding should be decided per dimension.
    CNTKDictionary weightInitializer, CNTKDictionary biasInitializer,
    DeviceDescriptor device, DataType dataType)
{
    var weights = new Parameter(NDShape.CreateNDShape(filterShape), dataType, weightInitializer, device);

    var strideShape = NDShape.CreateNDShape(filterStride);

    // Currently, only sharing=true is supported by CNTK, so these are hardcoded.
    // Sharing dimensions follow stride dimensions: 1D, 2D, 3D, etc.
    var sharing = CntkUtilities.CreateFilledBoolVector(filterStride.Length, true);

    // Padding dimensions follow stride dimensions: 1D, 2D, 3D, etc.
    var usePadding = padding.ToBoolean();
    var autoPadding = CntkUtilities.CreateFilledBoolVector(filterStride.Length, usePadding);

    // TODO: Consider if we want to surface the additional options for Convolution:
    // - dilation
    // - reductionRank
    // - groups
    // - maxTempMemSizeInSamples

    // Default for dilation seems to be a shape of size (1) with value 1.
    var dilation = NDShape.CreateNDShape(new[] { 1 });

    // The following are defaults extrapolated from CNTK code.
    var reductionRank = 1u;
    var groups = 1u;
    var maxTempMemSizeInSamples = 0u;
    var sequential = false;

    var result = CNTKLib.Convolution(weights, input, strideShape, sharing, autoPadding,
        dilation, reductionRank, groups, maxTempMemSizeInSamples, sequential);

    if (biasInitializer != null)
    {
        // Bias dimensions should be defined for filter dimensions.
        // For instance, for the 2D case: (1, 1, filterChannels).
        var biasShape = filterStride.Select(s => 1).ToList();
        biasShape.Add(filterCount);
        var bias = new Parameter(NDShape.CreateNDShape(biasShape.ToArray()), dataType, biasInitializer, device);
        result = CNTKLib.Plus(result, bias);
    }

    return result;
}
/// <summary>
/// Based on the Embedding from: https://github.com/Microsoft/CNTK/blob/master/bindings/python/cntk/layers/layers.py
/// </summary>
public static Function Embedding(this Function input, int shape, CNTKDictionary initializer,
    DataType dataType, DeviceDescriptor device)
{
    var weightsShape = new int[] { shape, CNTK.NDShape.InferredDimension };
    var weights = new Parameter(weightsShape, dataType, initializer, device);
    var result = CNTKLib.Times(weights, input);

    return result;
}
/// <summary>
/// Gets the specified initializer based on the function name.
/// </summary>
/// <param name="initializers">The initializer name.</param>
/// <param name="scale">The initializer scale.</param>
/// <returns>Initializer instance as CNTKDictionary.</returns>
public static CNTKDictionary Get(string initializers, double scale = 0.1)
{
    CNTKDictionary result = null;
    switch (initializers.Trim().ToLower())
    {
        case OptInitializers.Uniform:
            result = CNTKLib.UniformInitializer(scale);
            break;
        case OptInitializers.Normal:
            result = CNTKLib.NormalInitializer(scale);
            break;
        case OptInitializers.TruncatedNormal:
            result = CNTKLib.TruncatedNormalInitializer(scale);
            break;
        case OptInitializers.Zeros:
            result = CNTKLib.ConstantInitializer(0);
            break;
        case OptInitializers.Ones:
            result = CNTKLib.ConstantInitializer(1);
            break;
        case OptInitializers.Constant:
            result = CNTKLib.ConstantInitializer(scale);
            break;
        case OptInitializers.Xavier:
            result = CNTKLib.XavierInitializer(scale);
            break;
        case OptInitializers.GlorotNormal:
            result = CNTKLib.GlorotNormalInitializer(scale);
            break;
        case OptInitializers.GlorotUniform:
            result = CNTKLib.GlorotUniformInitializer(scale);
            break;
        case OptInitializers.HeNormal:
            result = CNTKLib.HeNormalInitializer(scale);
            break;
        case OptInitializers.HeUniform:
            result = CNTKLib.HeUniformInitializer(scale);
            break;
        default:
            break;
    }

    return result;
}
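A quick usage sketch for the lookup above; the static class that declares Get is not shown, so the name Initializers below is a hypothetical stand-in, while the OptInitializers constants come from the source.

// Hedged sketch: 'Initializers' is a hypothetical name for the static class
// that holds the Get(...) method above.
var weightInit = Initializers.Get(OptInitializers.GlorotUniform, scale: 1.0);
var biasInit = Initializers.Get(OptInitializers.Zeros);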
public virtual CNTKDictionary CreateCheckpoint()
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.Learner_CreateCheckpoint(swigCPtr), true);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
public CNTKDictionary Equal(CNTKDictionary arg0)
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.CNTKDictionary_Equal(swigCPtr, CNTKDictionary.getCPtr(arg0)), false);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
public static Function SimpleTanhRNNStack(this Function input, int units, int layerCount,
    CNTKDictionary weightInitializer, bool bidirectional,
    DeviceDescriptor device, DataType dataType, string name = "")
{
    return CreateOptimizedRNNStack(input, RecurrentOperators.RNN_Tanh, units, layerCount,
        weightInitializer, bidirectional, device, dataType, name);
}
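A minimal usage sketch for the RNN stack wrapper above; note the GPU-only guard in CreateOptimizedRNNStack. The feature size, unit count, layer count, and initializer are illustrative assumptions.

// Hedged sketch: OptimizedRNNStack requires a GPU device (see the guard above).
var device = DeviceDescriptor.GPUDevice(0);
Function sequence = Variable.InputVariable(NDShape.CreateNDShape(new int[] { 80 }), DataType.Float);
var rnn = sequence.SimpleTanhRNNStack(64, 2, CNTKLib.GlorotUniformInitializer(),
    false, // unidirectional
    device, DataType.Float);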
public CNTKDictionary RestoreFromCheckpoint(string filePath)
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.Trainer_RestoreFromCheckpoint(swigCPtr, filePath), true);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
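A hedged round-trip sketch combining the Trainer checkpoint wrappers above; 'trainer' is assumed to be an already constructed CNTK Trainer, and the file name is illustrative.

// Hedged sketch: 'trainer' is assumed to be an existing Trainer instance.
trainer.SaveCheckpoint("model.checkpoint", new CNTKDictionary()); // No external state to persist.
// Later, possibly in a new process, restore the trainer and read back the external state.
CNTKDictionary externalState = trainer.RestoreFromCheckpoint("model.checkpoint");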
public virtual CNTKDictionary GetCheckpointState()
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.MinibatchSource_GetCheckpointState(swigCPtr), true);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
public static CNTKDictionary Load(string filename)
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.CNTKDictionary_Load(filename), true);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
public virtual CNTKDictionary Serialize()
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.TrainingParameterScheduleDouble_Serialize(swigCPtr), true);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
public static TrainingParameterScheduleDouble Deserialize(CNTKDictionary dictionary)
{
    TrainingParameterScheduleDouble ret = new TrainingParameterScheduleDouble(
        CNTKLibPINVOKE.TrainingParameterScheduleDouble_Deserialize(CNTKDictionary.getCPtr(dictionary)), true);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
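A small round-trip sketch for the schedule Serialize/Deserialize pair above; the learning-rate value is an illustrative assumption.

// Hedged sketch: serialize a learning-rate schedule and restore it again.
var schedule = new TrainingParameterScheduleDouble(0.01);
CNTKDictionary state = schedule.Serialize();
var restored = TrainingParameterScheduleDouble.Deserialize(state);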
public bool AreNotEqual(CNTKDictionary other)
{
    bool ret = CNTKLibPINVOKE.CNTKDictionary_AreNotEqual(swigCPtr, CNTKDictionary.getCPtr(other));
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
public CNTKDictionary GetCustomAttributes()
{
    CNTKDictionary ret = new CNTKDictionary(CNTKLibPINVOKE.Function_GetCustomAttributes(swigCPtr), false);
    if (CNTKLibPINVOKE.SWIGPendingException.Pending)
    {
        throw CNTKLibPINVOKE.SWIGPendingException.Retrieve();
    }
    return ret;
}
Function Generator(Function input, Func<CNTKDictionary> weightInit, CNTKDictionary biasInit,
    DeviceDescriptor device, DataType dataType)
{
    var generatorNetwork = input
        .Dense(128, weightInit(), biasInit, device, dataType)
        .ReLU()
        .Dense(784, weightInit(), biasInit, device, dataType) // Output corresponds to input shape: 28 * 28 = 784.
        .Tanh();

    return generatorNetwork;
}
Function Discriminator(Function input, Func<CNTKDictionary> weightInit, CNTKDictionary biasInit,
    DeviceDescriptor device, DataType dataType)
{
    var discriminatorNetwork = input
        .Dense(128, weightInit(), biasInit, device, dataType)
        .ReLU()
        .Dense(1, weightInit(), biasInit, device, dataType)
        .Sigmoid();

    return discriminatorNetwork;
}
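A hedged sketch of how the Generator and Discriminator above could be wired together; the latent dimension, initializers, and device choice are illustrative assumptions, and the sketch assumes it runs inside the class that declares the two methods.

// Hedged sketch: compose the generator and discriminator above.
var device = DeviceDescriptor.CPUDevice;
Func<CNTKDictionary> weightInit = () => CNTKLib.GlorotUniformInitializer();
var biasInit = CNTKLib.ConstantInitializer(0);
Function latent = Variable.InputVariable(NDShape.CreateNDShape(new int[] { 100 }), DataType.Float);
var fake = Generator(latent, weightInit, biasInit, device, DataType.Float);
var decision = Discriminator(fake, weightInit, biasInit, device, DataType.Float);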
public static void TestElementTimes()
{
    var device = DeviceDescriptor.GPUDevice(0);

    // TODO: put in different values.
    CNTKDictionary testInitializer = new CNTKDictionary();

    Parameter leftOperand = new Parameter(new int[] { 2, 2 }, 1f, device, "left");
    NDArrayView initValues = new NDArrayView(new int[] { 2, 2 }, new float[] { 0f, 1f, 2f, 3f }, device);
    leftOperand.SetValue(initValues);
    // leftOperand looks like:
    // 0 1
    // 2 3

    Parameter rightOperand = new Parameter(new int[] { 2, 2, 2 }, 1f, device, "right");
    initValues = new NDArrayView(new int[] { 2, 2, 2 }, new float[] { 0f, 1f, 2f, 3f, 4f, 5f, 6f, 7f }, device);
    rightOperand.SetValue(initValues);
    // rightOperand looks like:
    // 0 1 | 4 5
    // 2 3 | 6 7

    Function model = CNTKLib.ElementTimes(leftOperand, rightOperand);

    var inputVariable = model.Inputs.First();
    var inputMap = new Dictionary<Variable, Value>();
    var outputVariable = model.Output;
    var outputDataMap = new Dictionary<Variable, Value>() { { outputVariable, null } };

    model.Evaluate(inputMap, outputDataMap, device);
    var output = outputDataMap[outputVariable];
    var outputArray = output.GetDenseData<float>(outputVariable).First();
    // output looks like:
    // 0 1 | 0  5
    // 4 9 | 12 21

    // Conclusion of this test: CNTKLib.ElementTimes works as expected :-)
}
Function Generator(Function input, Func<CNTKDictionary> weightInit, CNTKDictionary biasInit,
    DeviceDescriptor device, DataType dataType)
{
    var generatorNetwork = input
        .Dense(1024, weightInit(), biasInit, device, dataType)
        .BatchNorm(BatchNorm.Regular, device, dataType)
        .ReLU()
        .Dense(7 * 7 * 128, weightInit(), biasInit, device, dataType)
        .BatchNorm(BatchNorm.Regular, device, dataType)
        .ReLU()
        .Reshape(NDShape.CreateNDShape(new int[] { 7, 7, 128 }))
        .ConvTranspose2D((5, 5), 128, (2, 2), Padding.Zeros, (14, 14), weightInit(), biasInit, device, dataType)
        .BatchNorm(BatchNorm.Spatial, device, dataType)
        .ReLU()
        .ConvTranspose2D((5, 5), 1, (2, 2), Padding.Zeros, (28, 28), weightInit(), biasInit, device, dataType)
        .Tanh();

    Trace.Write(Model.Summary(generatorNetwork));

    return generatorNetwork.Reshape(NDShape.CreateNDShape(new int[] { 784 }));
}
internal CNTKDictionary Get()
{
    CNTKDictionary result = null;
    switch (Name.Trim().ToLower())
    {
        case OptInitializers.Uniform:
            result = Seed.HasValue
                ? CNTKLib.UniformInitializer(Scale, Seed.Value)
                : CNTKLib.UniformInitializer(Scale);
            break;
        case OptInitializers.Normal:
            if (Seed.HasValue && !OutputRank.HasValue)
            {
                throw new ArithmeticException("Missing rank value when Seed is defined for Normal Initializer");
            }
            result = Seed.HasValue
                ? CNTKLib.NormalInitializer(Scale, OutputRank.Value, FilterRank.Value, Seed.Value)
                : CNTKLib.NormalInitializer(Scale);
            break;
        case OptInitializers.TruncatedNormal:
            result = Seed.HasValue
                ? CNTKLib.TruncatedNormalInitializer(Scale, Seed.Value)
                : CNTKLib.TruncatedNormalInitializer(Scale);
            break;
        case OptInitializers.Zeros:
            result = CNTKLib.ConstantInitializer(0);
            break;
        case OptInitializers.Ones:
            result = CNTKLib.ConstantInitializer(1);
            break;
        case OptInitializers.Constant:
            result = CNTKLib.ConstantInitializer(Scale);
            break;
        case OptInitializers.Xavier:
            if (Seed.HasValue && !OutputRank.HasValue)
            {
                throw new ArithmeticException("Missing rank value when Seed is defined for Xavier Initializer");
            }
            result = Seed.HasValue
                ? CNTKLib.XavierInitializer(Scale, OutputRank.Value, FilterRank.Value, Seed.Value)
                : CNTKLib.XavierInitializer(Scale);
            break;
        case OptInitializers.GlorotNormal:
            if (Seed.HasValue && !OutputRank.HasValue)
            {
                throw new ArithmeticException("Missing rank value when Seed is defined for Glorot Normal Initializer");
            }
            result = Seed.HasValue
                ? CNTKLib.GlorotNormalInitializer(Scale, OutputRank.Value, FilterRank.Value, Seed.Value)
                : CNTKLib.GlorotNormalInitializer(Scale);
            break;
        case OptInitializers.GlorotUniform:
            if (Seed.HasValue && !OutputRank.HasValue)
            {
                throw new ArithmeticException("Missing rank value when Seed is defined for Glorot Uniform Initializer");
            }
            result = Seed.HasValue
                ? CNTKLib.GlorotUniformInitializer(Scale, OutputRank.Value, FilterRank.Value, Seed.Value)
                : CNTKLib.GlorotUniformInitializer(Scale);
            break;
        case OptInitializers.HeNormal:
            if (Seed.HasValue && !OutputRank.HasValue)
            {
                throw new ArithmeticException("Missing rank value when Seed is defined for He Normal Initializer");
            }
            result = Seed.HasValue
                ? CNTKLib.HeNormalInitializer(Scale, OutputRank.Value, FilterRank.Value, Seed.Value)
                : CNTKLib.HeNormalInitializer(Scale);
            break;
        case OptInitializers.HeUniform:
            if (Seed.HasValue && !OutputRank.HasValue)
            {
                throw new ArithmeticException("Missing rank value when Seed is defined for He Uniform Initializer");
            }
            result = Seed.HasValue
                ? CNTKLib.HeUniformInitializer(Scale, OutputRank.Value, FilterRank.Value, Seed.Value)
                : CNTKLib.HeUniformInitializer(Scale);
            break;
        default:
            break;
    }

    return result;
}
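A usage sketch for the instance-based builder above; the declaring type is not shown, so InitializerSpec is a hypothetical name, and the sketch assumes its Name, Scale, Seed, OutputRank, and FilterRank members (all read in Get() above) are settable properties.

// Hedged sketch: 'InitializerSpec' is a hypothetical name for the type
// that declares the internal Get() method above.
var spec = new InitializerSpec
{
    Name = OptInitializers.GlorotUniform,
    Scale = 1.0,
    Seed = 42,
    OutputRank = 2,
    FilterRank = 2,
};
CNTKDictionary init = spec.Get(); // Internal call; usable within the declaring assembly.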
// Assume input shape is such as (x [, y [, z]], channels).
public static Function ConvolutionTranspose(Variable input, int[] filterShape, int numFilters,
    string activation, CNTKDictionary initializer, bool[] padding, int[] strides, bool useBias,
    CNTKDictionary biasInitializer, int[] outputShape, int[] dilation, int reductionRank,
    int maxTempMemSizeInSamples, string name)
{
    try
    {
        NodeGroup.EnterNewGroup(name);

        // Initializers
        if (initializer == null)
        {
            initializer = CNTKLib.GlorotUniformInitializer();
        }
        if (useBias && biasInitializer == null)
        {
            biasInitializer = CNTKLib.ConstantInitializer(0);
        }

        // Convolution map
        // (kernelWidth, kernelHeight, featureMapCount, kernelChannel)
        var convDims = new int[filterShape.Length + 2];
        filterShape.CopyTo(convDims, 0);
        convDims[convDims.Length - 2] = numFilters;
        convDims[convDims.Length - 1] = input.Shape.Dimensions[filterShape.Length]; // Input channel.

        var convolutionMap = new Parameter(convDims, DataType.Float, initializer,
            DeviceDescriptor.UseDefaultDevice(), name + "/weight");
        Register(convolutionMap);

        var conv = CNTKLib.ConvolutionTranspose(
            convolutionMap,                      // CNTK.Variable convolutionMap
            input,                               // CNTK.Variable operand
            strides,                             // CNTK.NDShape strides
            new BoolVector(new bool[] { true }), // CNTK.BoolVector sharing
            new BoolVector(padding),             // CNTK.BoolVector autoPadding
            outputShape,                         // CNTK.NDShape outputShape
            dilation,                            // CNTK.NDShape dilation
            (uint)reductionRank,                 // uint reductionRank
            (uint)maxTempMemSizeInSamples,       // uint maxTempMemSizeInSamples
            ""                                   // string name
        );
        Register(conv);

        if (useBias)
        {
            var bias = new Parameter(conv.Output.Shape, DataType.Float, biasInitializer,
                DeviceDescriptor.UseDefaultDevice(), name + "/bias");
            Register(bias);
            conv = CNTKLib.Plus(conv, bias);
            Register(conv);
        }

        conv = ApplyActivation(conv, activation);
        conv.RootFunction.SetName(name);

        return conv;
    }
    finally
    {
        NodeGroup.LeaveGroup();
    }
}
public static Function ConvolutionTransposexD(int numDimensions, bool channelFirst, Variable input,
    int[] filterShape, int numFilters, string activation, CNTKDictionary initializer, bool[] padding,
    int[] strides, bool useBias, CNTKDictionary biasInitializer, int[] outputShape, int[] dilation,
    int reductionRank, int maxTempMemSizeInSamples, string name)
{
    if (filterShape.Length > numDimensions)
    {
        throw new ArgumentException("Dimensions of filterShape should be <= " + numDimensions);
    }
    if (strides.Length > numDimensions)
    {
        throw new ArgumentException("Dimensions of strides should be <= " + numDimensions);
    }

    var st = FillShapeArray(strides, numDimensions, input, channelFirst);

    return ConvolutionTranspose(input, filterShape, numFilters, activation, initializer, padding,
        st, useBias, biasInitializer, outputShape, dilation, reductionRank,
        maxTempMemSizeInSamples, name);
}