Example #1
0
    /// <summary>
    /// Maps the configured <see cref="TensorFlowOptimizer"/> to a concrete TensorFlow.NET
    /// optimizer, passing the learning rate (and, for Adam, the epsilon) from the settings.
    /// </summary>
    /// <param name="s">Settings entity carrying the optimizer choice and hyper-parameters.</param>
    /// <returns>The constructed optimizer.</returns>
    /// <exception cref="InvalidOperationException">
    /// Thrown when <paramref name="s"/>.Optimizer is not a supported value.
    /// </exception>
    internal static Optimizer GetOptimizer(NeuralNetworkSettingsEntity s)
    {
        switch (s.Optimizer)
        {
        case TensorFlowOptimizer.Adam:
            return tf.train.AdamOptimizer((float)s.LearningRate, (float)s.LearningEpsilon);

        case TensorFlowOptimizer.GradientDescentOptimizer:
            return tf.train.GradientDescentOptimizer((float)s.LearningRate);

        default:
            // Fixed message: it previously said "Unexpected Learner" (copy-pasted from the
            // CNTK GetInitializer method) even though this switch is over s.Optimizer.
            throw new InvalidOperationException("Unexpected Optimizer");
        }
    }
Example #2
0
        /// <summary>
        /// Creates the CNTK <see cref="Learner"/> selected by <paramref name="s"/>.Learner,
        /// wiring in the learning rate and — where the chosen learner accepts them — the
        /// momentum, unit-gain flag and variance momentum from the settings.
        /// </summary>
        /// <param name="parameters">The network parameters the learner will update.</param>
        /// <param name="s">Settings entity carrying the learner choice and hyper-parameters.</param>
        /// <returns>The configured learner.</returns>
        /// <exception cref="InvalidOperationException">
        /// Thrown when the learner value is not one of the handled enum members.
        /// </exception>
        internal static Learner GetInitializer(IList <Parameter> parameters, NeuralNetworkSettingsEntity s)
        {
            // CNTK's factory methods take a native ParameterVector, not the managed list.
            var paramVector = new ParameterVector((ICollection)parameters);

            return s.Learner switch
            {
                NeuralNetworkLearner.Adam => CNTKLib.AdamLearner(paramVector,
                    s.LearningRate.ToTrainParam(),
                    s.LearningMomentum?.ToTrainParam(),
                    s.LearningUnitGain ?? false,
                    s.LearningVarianceMomentum?.ToTrainParam()),

                NeuralNetworkLearner.AdaDelta => CNTKLib.AdaDeltaLearner(paramVector,
                    s.LearningRate.ToTrainParam()),

                NeuralNetworkLearner.AdaGrad => CNTKLib.AdaGradLearner(paramVector,
                    s.LearningRate.ToTrainParam()),

                NeuralNetworkLearner.FSAdaGrad => CNTKLib.FSAdaGradLearner(paramVector,
                    s.LearningRate.ToTrainParam(),
                    s.LearningMomentum?.ToTrainParam(),
                    s.LearningUnitGain ?? false,
                    s.LearningVarianceMomentum?.ToTrainParam()),

                // NOTE(review): this arm calls FSAdaGradLearner, exactly like the FSAdaGrad case
                // above — it looks like a copy-paste slip and CNTKLib.RMSPropLearner may have been
                // intended. Not changed here because RMSPropLearner requires gamma/inc/dec/min/max
                // arguments the settings entity does not appear to provide — confirm with the author.
                NeuralNetworkLearner.RMSProp => CNTKLib.FSAdaGradLearner(paramVector,
                    s.LearningRate.ToTrainParam(),
                    s.LearningMomentum?.ToTrainParam(),
                    s.LearningUnitGain ?? false,
                    s.LearningVarianceMomentum?.ToTrainParam()),

                NeuralNetworkLearner.MomentumSGD => CNTKLib.MomentumSGDLearner(paramVector,
                    s.LearningRate.ToTrainParam(),
                    s.LearningMomentum?.ToTrainParam(),
                    s.LearningUnitGain ?? false),

                NeuralNetworkLearner.SGD => CNTKLib.SGDLearner(paramVector,
                    s.LearningRate.ToTrainParam()),

                _ => throw new InvalidOperationException("Unexpected Learner"),
            };
        }
        /// <summary>
        /// Resolves the CNTK device whose string representation matches the name configured in
        /// the settings, falling back to the default device when no name is configured or no
        /// device with that exact <c>AsString()</c> value exists.
        /// </summary>
        /// <param name="nnSettings">Settings entity that may carry a device name.</param>
        /// <returns>The matching device, or the default device as a fallback.</returns>
        private DeviceDescriptor GetDevice(NeuralNetworkSettingsEntity nnSettings)
        {
            if (nnSettings.Device.HasText())
            {
                var match = DeviceDescriptor.AllDevices().FirstOrDefault(d => d.AsString() == nnSettings.Device);
                if (match != null)
                    return match;
            }

            return DeviceDescriptor.UseDefaultDevice();
        }