/// <summary>
/// Trains the network with plain SGD for EPOCH_COUNT epochs, minimizing an
/// L1 loss (sum over all axes of |y - yt|) between the model output and
/// the dataset targets, and logs the per-sample loss after each epoch.
/// </summary>
public void Train()
{
    // Target placeholder matching the network's output dimensionality.
    var yt = Variable.InputVariable(new int[] { _dataSet.OutputSize }, DataType.Float);

    // L1 loss node: sum of absolute differences over all axes.
    var y_yt = CNTKLib.Abs(CNTKLib.Minus(_y, yt));
    var loss = CNTKLib.ReduceSum(y_yt, Axis.AllAxes());

    var learner = CNTKLib.SGDLearner(
        new ParameterVector(_y.Parameters().ToArray()),
        new TrainingParameterScheduleDouble(1.0, BATCH_SIZE));
    var trainer = Trainer.CreateTrainer(_y, loss, null, new List<Learner> { learner });

    for (int i = 0; i < EPOCH_COUNT; i++)
    {
        var sumLoss = 0.0;
        var samplesSeen = 0.0;

        // NOTE(review): the loop runs Count / BATCH_SIZE - 1 iterations and
        // therefore never trains on the tail of the dataset — kept as-is to
        // preserve behavior, but the logged average now divides by the
        // samples actually processed instead of the full dataset size.
        for (int j = 0; j < _dataSet.Count / BATCH_SIZE - 1; j++)
        {
            var x_value = Value.CreateBatch(
                _x.Shape,
                _dataSet.Input.GetRange(j * BATCH_SIZE * _dataSet.InputSize, BATCH_SIZE * _dataSet.InputSize),
                DeviceDescriptor.CPUDevice);
            var yt_value = Value.CreateBatch(
                yt.Shape,
                _dataSet.Output.GetRange(j * BATCH_SIZE * _dataSet.OutputSize, BATCH_SIZE * _dataSet.OutputSize),
                DeviceDescriptor.CPUDevice);

            var inputDataMap = new Dictionary<Variable, Value>()
            {
                { _x, x_value },
                { yt, yt_value }
            };

            trainer.TrainMinibatch(inputDataMap, false, DeviceDescriptor.CPUDevice);

            // PreviousMinibatchLossAverage is per-sample; weight it back to a
            // sum so the epoch average is exact even if batch sizes vary.
            sumLoss += trainer.PreviousMinibatchLossAverage() * trainer.PreviousMinibatchSampleCount();
            samplesSeen += trainer.PreviousMinibatchSampleCount();
        }

        // Guard against division by zero when the dataset is smaller than two batches.
        var avgLoss = samplesSeen > 0 ? sumLoss / samplesSeen : 0.0;
        Console.WriteLine($"Iter: {i}\tLoss: {avgLoss}");
    }
}
/// <summary>
/// Builds a CNTK graph node that evaluates the Pearson correlation
/// coefficient between two sets of values.
/// </summary>
/// <param name="prediction">Predicted values.</param>
/// <param name="target">Ground-truth values.</param>
/// <param name="name">Optional name assigned to the resulting node.</param>
/// <returns>scalar of zero rank</returns>
public static Function CorrelationC(Variable prediction, Variable target, string name = null)
{
    // Deviations of each set from its own mean.
    var targetDev = CNTKLib.Minus(target, CNTKLib.ReduceMean(target, Axis.AllAxes()));
    var predictionDev = CNTKLib.Minus(prediction, CNTKLib.ReduceMean(prediction, Axis.AllAxes()));

    // Numerator: sum of products of paired deviations.
    var numerator = CNTKLib.ReduceSum(CNTKLib.ElementTimes(targetDev, predictionDev), Axis.AllAxes());

    // Denominator: product of the root sums of squared deviations.
    var targetNorm = CNTKLib.Sqrt(CNTKLib.ReduceSum(CNTKLib.Square(targetDev), Axis.AllAxes()));
    var predictionNorm = CNTKLib.Sqrt(CNTKLib.ReduceSum(CNTKLib.Square(predictionDev), Axis.AllAxes()));
    var denominator = CNTKLib.ElementTimes(targetNorm, predictionNorm);

    var result = CNTKLib.ElementDivide(numerator, denominator);
    if (name != null)
    {
        result.SetName(name);
    }
    return result;
}
/// <summary>
/// Calculates the Sum of Squared Error between two sets.
/// </summary>
/// <param name="prediction">Predicted values.</param>
/// <param name="actual">Ground-truth values.</param>
/// <param name="name">Optional name assigned to the resulting node.</param>
/// <returns>scalar of zero rank</returns>
public static Function SError(Variable prediction, Variable actual, string name = null)
{
    var remainder = CNTKLib.Minus(actual, prediction);
    var squared = CNTKLib.Square(remainder);
    var sum = CNTKLib.ReduceSum(squared, Axis.AllAxes());
    // Bug fix: the name parameter was previously accepted but never applied,
    // unlike the sibling MSError/CorrelationC helpers.
    if (name != null)
    {
        sum.SetName(name);
    }
    return sum;
}
/// <summary>
/// Calculates the Weighted Squared Error between two sets: each squared
/// residual is element-wise scaled by <paramref name="weights"/> before summing.
/// </summary>
/// <param name="prediction">Predicted values.</param>
/// <param name="actual">Ground-truth values.</param>
/// <param name="weights">Per-element weights applied to the squared residuals.</param>
/// <param name="name">Optional name assigned to the resulting node.</param>
/// <returns>scalar of zero rank</returns>
public static Function WeightedSE(Variable prediction, Variable actual, Constant weights, string name = null)
{
    var remainder = CNTKLib.Minus(actual, prediction);
    var squared = CNTKLib.Square(remainder);
    var ret = CNTKLib.ElementTimes(squared, weights);
    var sum = CNTKLib.ReduceSum(ret, Axis.AllAxes());
    // Bug fix: the name parameter was previously accepted but never applied,
    // unlike the sibling MSError/CorrelationC helpers.
    if (name != null)
    {
        sum.SetName(name);
    }
    return sum;
}
/// <summary>
/// Calculates the Mean Squared Error between two sets.
/// </summary>
/// <param name="prediction">Predicted values.</param>
/// <param name="actual">Ground-truth values.</param>
/// <param name="name">Optional name assigned to the resulting node.</param>
/// <returns>scalar of zero rank</returns>
public static Function MSError(Variable prediction, Variable actual, string name = null)
{
    // Mean over all axes of the squared residuals (actual - prediction)^2.
    var result = CNTKLib.ReduceMean(
        CNTKLib.Square(CNTKLib.Minus(actual, prediction)),
        Axis.AllAxes());
    if (name != null)
    {
        result.SetName(name);
    }
    return result;
}
/// <summary>
/// Builds a CNTK graph node computing the sample covariance between two datasets.
/// </summary>
/// <param name="prediction">Predicted values.</param>
/// <param name="target">Ground-truth values.</param>
/// <param name="name">Optional name assigned to the resulting node.</param>
/// <returns>scalar of zero rank</returns>
public static Function Covariance(Variable prediction, Variable target, string name = null)
{
    // Deviations of each set from its own mean.
    var targetDev = CNTKLib.Minus(target, CNTKLib.ReduceMean(target, Axis.AllAxes()));
    var predictionDev = CNTKLib.Minus(prediction, CNTKLib.ReduceMean(prediction, Axis.AllAxes()));

    // Sum of products of paired deviations.
    var devProductSum = CNTKLib.ReduceSum(
        CNTKLib.ElementTimes(targetDev, predictionDev),
        Axis.AllAxes());

    // The last dimension indicates the number of samples; divide by n - 1
    // (Bessel's correction) for the sample covariance.
    var n = new Constant(new NDShape(0), DataType.Float, target.Shape.Dimensions.Last() - 1.0);
    var result = CNTKLib.ElementDivide(devProductSum, n);
    if (name != null)
    {
        result.SetName(name);
    }
    return result;
}
/// <summary>
/// Applies the given CNTK reduction <paramref name="func"/> to tensor
/// <paramref name="x"/> over the requested axes, then drops the reduced
/// dimensions unless <paramref name="keepdims"/> is set.
/// </summary>
/// <param name="x">Tensor to reduce.</param>
/// <param name="axis">Axes to reduce over; null means reduce over all axes.</param>
/// <param name="keepdims">When true, reduced dimensions are kept with size 1.</param>
/// <param name="func">Reduction operation (e.g. ReduceSum, ReduceMean).</param>
/// <returns>The reduced tensor.</returns>
private Tensor _reduce(Tensor x, int[] axis, bool keepdims, Func<Variable, AxisVector, CNTK.Function> func)
{
    var _x = In(x);

    // Bug fix: the null branch previously assigned AllAxes but was then
    // unconditionally overwritten by axis.Select(...), which threw a
    // NullReferenceException whenever axis was null.
    Axis[] _axis;
    if (axis == null)
    {
        _axis = new[] { Axis.AllAxes() };
    }
    else
    {
        // Axes in reduce operations are 1-based (?)
        _axis = axis.Select(a => new Axis(a)).ToArray();
    }

    CNTK.Function f = _x;
    if (axis == null || axis.Length > 0)
    {
        f = func(_x, new AxisVector(_axis));
    }

    // NOTE(review): _remove_dims is opaque from here; assumed to tolerate a
    // null axis array when reducing over all axes — TODO confirm.
    f = _remove_dims(f, axis, keepdims);
    return Out(f);
}
/// <summary>
/// Emits an Axis value representing all axes to the output pipeline
/// when processing ends.
/// </summary>
protected override void EndProcessing()
{
    WriteObject(Axis.AllAxes());
}
/// <summary>
/// Reduce-sums every element of <paramref name="x"/> over all axes and
/// returns the result as a scalar.
/// </summary>
/// <param name="x">Tensor to sum.</param>
/// <returns>The total of all elements.</returns>
public float Sum(Tensor x)
{
    var total = C.ReduceSum(In(x), Axis.AllAxes());
    return Out(total).ToScalar();
}
/// <summary>
/// Wires up a CNTK trainer for the given model: binary cross-entropy loss,
/// a Keras-style accuracy metric, and a plain SGD learner with a fixed
/// per-sample learning rate of 0.1.
/// </summary>
/// <param name="model">The model function to train.</param>
/// <param name="inputs">Input variable of the model (unused here but kept for the signature).</param>
/// <param name="labels">Ground-truth label variable.</param>
/// <returns>A configured Trainer instance.</returns>
static Trainer CreateModelTrainer(Function model, Variable inputs, Variable labels)
{
    var lossFunction = CNTKLib.BinaryCrossEntropy(new Variable(model), labels, "lossFunction");

    // Keras accuracy metric: mean of (round(output) == label).
    var accuracyMetric = CNTKLib.ReduceMean(
        CNTKLib.Equal(labels, CNTKLib.Round(new Variable(model))),
        Axis.AllAxes());

    // Plain SGD with a fixed per-sample learning rate.
    CNTK.TrainingParameterScheduleDouble learningRatePerSample =
        new CNTK.TrainingParameterScheduleDouble(0.1, 1);
    IList<Learner> parameterLearners = new List<Learner>()
    {
        Learner.SGDLearner(model.Parameters(), learningRatePerSample)
    };

    return Trainer.CreateTrainer(model, lossFunction, accuracyMetric, parameterLearners);
}