/// <summary>
/// Builds a 2-hidden-1 XOR training setup whose weight setter is mocked so that
/// initial weights are dealt deterministically from a fixed seed sequence.
/// </summary>
/// <param name="learningRate">Learning rate passed to the builder.</param>
/// <param name="bias">Bias passed to the builder.</param>
/// <param name="momentum">Momentum passed to the builder.</param>
/// <param name="slopeMultiplier">Slope multiplier passed to the builder.</param>
/// <returns>A training instance ready to run against <c>_trainingPatterns</c>.</returns>
public IErrorBackPropagationTraining SetUpXOrTraining(double learningRate = 0.5d, double bias = 0d, double momentum = 0d, double slopeMultiplier = 1d)
{
    // Partial mock of the steps factory: real behavior everywhere except
    // CreateWeightSetter, which is intercepted to return the mock below.
    var weightSetterMock = new Mock<IWeightSetter>();
    var stepsMock = new Mock<ErrorBackPropagationStepsDependencyFactory>().As<IErrorBackPropagationStepsDependencyFactory>();
    stepsMock.CallBase = true;
    stepsMock.Setup(s => s.CreateWeightSetter(It.IsAny<double>(), It.IsAny<double>())).Returns(weightSetterMock.Object);
    var dependencyFactory = new ErrorBackPropagationDependencyFactory(stepsMock.Object);

    var errorBackPropagationTraining = new ErrorBackPropagationBuilder()
        .With.ANewLayerOfInputUnits(2)
        .ConnectedTo.ANewLayerOfHiddenUnitsOptimizedForTrainingPatterns(_trainingPatterns)
        .With.UnitActivation<IdentityUnitActivationTraining>()
        .ConnectedTo.ANewLayerOfOutputUnits(1)
        .With.OutputUnitActivation<IdentityUnitActivationTraining>()
        .And.Bias(bias)
        .And.LearningRate(learningRate)
        .And.Momentum(momentum)
        // BUG FIX: a second ".And.SlopeMultiplier(1d)" call used to follow this
        // one and overrode the caller-supplied value, making the slopeMultiplier
        // parameter dead. The hard-coded call has been removed.
        .And.SlopeMultiplier(slopeMultiplier)
        .And.SetupNetwork()
        .And.NameEverything()
        .And.ReadyForTraining(dependencyFactory);

    // Deterministic "random" weights: dequeue, assign, re-enqueue so the
    // sequence cycles if there are more connections than seed values.
    var randomWeights = new Queue<double>(new[] { -1d, -0.5d, 0.5d, 1d, 0.3, 0.2, 0.1, 0.5, 0.2, 0.3, 0.4, -1, -0.9 });
    weightSetterMock
        .Setup(wi => wi.SetWeight(It.IsAny<IConnectionUnderTraining>()))
        .Callback<IConnectionUnderTraining>(c =>
        {
            var weight = randomWeights.Dequeue();
            c.Weight = weight;
            randomWeights.Enqueue(weight);
        });

    return errorBackPropagationTraining;
}
/// <summary>
/// Asserts that training yields a perceptron of the expected concrete type,
/// enabling one-hot encoding only when <c>OneHotPerceptron</c> is expected.
/// </summary>
/// <param name="expectedPerceptronType">The perceptron type the trained network should produce.</param>
public async Task ErrorBackPropagationCorrectPerceptronUsed(Type expectedPerceptronType)
{
    var useOneHot = expectedPerceptronType == typeof(OneHotPerceptron);

    // 3-2-1 topology with identity activations throughout.
    var builderChain = new ErrorBackPropagationBuilder()
        .With.ANewLayerOfInputUnits(3)
        .ConnectedTo.ANewLayerOfHiddenUnits(2)
        .With.UnitActivation<IdentityUnitActivationTraining>()
        .ConnectedTo.ANewLayerOfOutputUnits(1)
        .With.OutputUnitActivation<IdentityUnitActivationTraining>()
        .And.LearningRate(LearningRate)
        .And.Bias(Bias)
        .And.SlopeMultiplier(SlopeMultiplier)
        .And.Momentum(Momentum);

    if (useOneHot)
    {
        builderChain = builderChain.And.UseOneHotEncoding();
    }

    // Train on an empty pattern list: the result we care about is only the
    // concrete perceptron type, not any learning outcome.
    var perceptron = await builderChain
        .And.SetupNetwork(n => _network = n)
        .And.ReadyForTraining()
        .TrainAsync(new List<TrainingPattern>(), 999d, 1);

    perceptron.GetType().Should().Be(expectedPerceptronType);
}
/// <summary>
/// Convenience overload: builds a 2-2-1 XOR topology where the same single-fold
/// activation type drives both hidden and output layers, then delegates to the
/// shared <c>SetUpXOrTraining</c> setup.
/// </summary>
/// <typeparam name="TUnitActivation">Single-fold activation used for hidden and output units.</typeparam>
public static IErrorBackPropagationTraining SetUpXOrTrainingSingleFold<TUnitActivation>(double learningRate = 0.5d, double bias = 0d, double momentum = 0d, double slopeMultiplier = 1d, bool batch = false, Action<TNetwork> networkCallback = null)
    where TUnitActivation : IUnitActivationTrainingSingleFold, new()
{
    var builder = new ErrorBackPropagationBuilder()
        .With.ANewLayerOfInputUnits(2)
        .ConnectedTo.ANewLayerOfHiddenUnits(2).With.UnitActivation<TUnitActivation>()
        .ConnectedTo.ANewLayerOfOutputUnits(1).With.OutputUnitActivation<TUnitActivation>();

    return SetUpXOrTraining(builder, learningRate, bias, momentum, slopeMultiplier, batch, networkCallback);
}
/// <summary>
/// Builds a 2-3-2 softmax XOR training setup with a mocked weight setter that
/// assigns each connection a fixed weight keyed by its position in the network,
/// so weight initialization is deterministic even under asynchronous setting.
/// </summary>
/// <param name="learningRate">Learning rate passed to the builder.</param>
/// <param name="bias">Bias passed to the builder.</param>
/// <param name="momentum">Momentum passed to the builder.</param>
/// <param name="slopeMultiplier">Slope multiplier passed to the builder.</param>
/// <param name="oneHot">When true, one-hot encoding is enabled on the chain.</param>
/// <param name="networkCallback">Optional hook invoked with the network after setup.</param>
private static IErrorBackPropagationTraining SetUpXOrTraining(double learningRate = 0.5d, double bias = 0d, double momentum = 0d, double slopeMultiplier = 1d, bool oneHot = false, Action<TNetwork> networkCallback = null)
{
    // Partial mock of the steps factory: real behavior everywhere except
    // CreateWeightSetter, which is intercepted to return the mock below.
    var weightSetterMock = new Mock<IWeightSetter>();
    var stepsMock = new Mock<ErrorBackPropagationStepsDependencyFactory>().As<IErrorBackPropagationStepsDependencyFactory>();
    stepsMock.CallBase = true;
    stepsMock.Setup(s => s.CreateWeightSetter(It.IsAny<double>(), It.IsAny<double>())).Returns(weightSetterMock.Object);
    var dependencyFactory = new ErrorBackPropagationDependencyFactory(stepsMock.Object);

    TNetwork network = null;
    var chain = new ErrorBackPropagationBuilder()
        .With.ANewLayerOfInputUnits(2)
        .ConnectedTo.ANewLayerOfHiddenUnits(3).With.UnitActivationMultiFold<SoftmaxUnitActivationTraining>()
        .ConnectedTo.ANewLayerOfOutputUnits(2).With.OutputUnitActivationMultiFold<SoftmaxUnitActivationTraining>()
        // NOTE(review): this CrossEntropyErrorFunction selection is overridden
        // by the DifferenceErrorFunction call further down the chain — confirm
        // which error function is actually intended and delete the other.
        .And.NetworkErrorFunction<CrossEntropyErrorFunction>()
        .And.Bias(bias)
        .And.LearningRate(learningRate)
        .And.Momentum(momentum)
        .And.SlopeMultiplier(slopeMultiplier);

    if (oneHot)
    {
        chain = chain.And.UseOneHotEncoding();
    }

    var errorBackPropagationTraining = chain
        .And.NetworkErrorFunction<DifferenceErrorFunction>()
        .And.SetupNetwork(n =>
        {
            network = n;
            networkCallback?.Invoke(n);
        })
        .And.NameEverything()
        .And.ReadyForTraining(dependencyFactory);

    // This whole ceremony ensures weight setting is deterministic (same weight
    // to same connection every time, even if weights are set asynchronously).
    // Materialize the flattened connection list ONCE: previously a deferred
    // query was re-enumerated and re-ToList'ed inside the callback on every
    // SetWeight call, re-walking the network each time (O(n^2) overall).
    var orderedConnections = network.Skip(1)
        .SelectMany(units => units.SelectMany(u => u.IncomingConnections.Select(ic => ic.Properties)))
        .ToList();
    var randomWeights = new List<double>
    {
        -1d, -0.5d, 0.5d, 1d, 0.3, 0.2, 0.1, 0.5, 0.2, 0.3, 0.4, -1, -0.9, 0.1, 0.5, 1.2, 1.3, 1.1, 0.35, 1.1, 0.1, 0.2, 0.3,
        -1d, -0.5d, 0.5d, 1d, 0.3, 0.2, 0.1, 0.5, 0.2, 0.3, 0.4, -1, -0.9, 0.1, 0.5, 1.2, 1.3, 1.1, 0.35, 1.1, 0.1, 0.2, 0.3
    };
    weightSetterMock
        .Setup(wi => wi.SetWeight(It.IsAny<IConnectionUnderTraining>()))
        .Callback<IConnectionUnderTraining>(c =>
        {
            // The connection's position in the network determines its weight.
            c.Weight = randomWeights[orderedConnections.IndexOf(c)];
        });

    return errorBackPropagationTraining;
}
/// <summary>
/// Test fixture setup: prepares the four XOR truth-table patterns and a
/// 2-3-1 sigmoid/ReLU network handed to the shared XOR training setup.
/// </summary>
public void SetUp()
{
    // The full XOR truth table: inputs -> expected output.
    _trainingPatterns = new List<TrainingPattern>
    {
        new TrainingPattern(new[] { 0d, 0d }, new[] { 0d }),
        new TrainingPattern(new[] { 1d, 0d }, new[] { 1d }),
        new TrainingPattern(new[] { 0d, 1d }, new[] { 1d }),
        new TrainingPattern(new[] { 1d, 1d }, new[] { 0d }),
    };

    var builderChain = new ErrorBackPropagationBuilder()
        .With.ANewLayerOfInputUnits(2)
        .ConnectedTo.ANewLayerOfHiddenUnits(3).With.UnitActivation<SigmoidUnitActivationTraining>()
        .ConnectedTo.ANewLayerOfOutputUnits(1).With.OutputUnitActivation<ReluUnitActivationTraining>();

    _errorBackPropagationTraining = XOrSetUp.SetUpXOrTraining(builderChain, learningRate: 0.8d);
}