public IErrorBackPropagationTraining SetUpXOrTraining(double learningRate = 0.5d, double bias = 0d, double momentum = 0d, double slopeMultiplier = 1d) { var weightSetterMock = new Mock <IWeightSetter>(); var stepsMock = new Mock <ErrorBackPropagationStepsDependencyFactory>().As <IErrorBackPropagationStepsDependencyFactory>(); stepsMock.CallBase = true; stepsMock.Setup(s => s.CreateWeightSetter(It.IsAny <double>(), It.IsAny <double>())).Returns(weightSetterMock.Object); var dependencyFactory = new ErrorBackPropagationDependencyFactory(stepsMock.Object); var errorBackPropagationTraining = new ErrorBackPropagationBuilder() .With.ANewLayerOfInputUnits(2) .ConnectedTo.ANewLayerOfHiddenUnitsOptimizedForTrainingPatterns(_trainingPatterns) .With.UnitActivation <IdentityUnitActivationTraining>() .ConnectedTo.ANewLayerOfOutputUnits(1) .With.OutputUnitActivation <IdentityUnitActivationTraining>() .And.Bias(bias) .And.LearningRate(learningRate) .And.Momentum(momentum) .And.SlopeMultiplier(slopeMultiplier) .And.SlopeMultiplier(1d) .And.SetupNetwork() .And.NameEverything() .And.ReadyForTraining(dependencyFactory); var randomWeights = new Queue <double>(new[] { -1d, -0.5d, 0.5d, 1d, 0.3, 0.2, 0.1, 0.5, 0.2, 0.3, 0.4, -1, -0.9 }); weightSetterMock.Setup(wi => wi.SetWeight(It.IsAny <IConnectionUnderTraining>())).Callback <IConnectionUnderTraining>(c => { var weight = randomWeights.Dequeue(); c.Weight = weight; randomWeights.Enqueue(weight); }); return(errorBackPropagationTraining); }
private static IErrorBackPropagationTraining SetUpXOrTraining(double learningRate = 0.5d, double bias = 0d, double momentum = 0d, double slopeMultiplier = 1d, bool oneHot = false, Action <TNetwork> networkCallback = null) { var weightSetterMock = new Mock <IWeightSetter>(); var stepsMock = new Mock <ErrorBackPropagationStepsDependencyFactory>().As <IErrorBackPropagationStepsDependencyFactory>(); stepsMock.CallBase = true; stepsMock.Setup(s => s.CreateWeightSetter(It.IsAny <double>(), It.IsAny <double>())).Returns(weightSetterMock.Object); var dependencyFactory = new ErrorBackPropagationDependencyFactory(stepsMock.Object); TNetwork network = null; var chain = new ErrorBackPropagationBuilder() .With.ANewLayerOfInputUnits(2) .ConnectedTo.ANewLayerOfHiddenUnits(3).With.UnitActivationMultiFold <SoftmaxUnitActivationTraining>() .ConnectedTo.ANewLayerOfOutputUnits(2).With.OutputUnitActivationMultiFold <SoftmaxUnitActivationTraining>() .And.NetworkErrorFunction <CrossEntropyErrorFunction>() .And.Bias(bias) .And.LearningRate(learningRate) .And.Momentum(momentum) .And.SlopeMultiplier(slopeMultiplier); if (oneHot) { chain = chain.And.UseOneHotEncoding(); } var errorBackPropagationTraining = chain .And.NetworkErrorFunction <DifferenceErrorFunction>() .And.SetupNetwork(n => { network = n; if (networkCallback != null) { networkCallback(n); } }) .And.NameEverything() .And.ReadyForTraining(dependencyFactory); // This whole ceremony ensures weight setting is deterministic (same weight to same connection every time even if weights are set asynchronously) var connections = network.Skip(1).Select(units => units.SelectMany(u => u.IncomingConnections.Select(ic => ic.Properties))); var randomWeights = new List <double> { -1d, -0.5d, 0.5d, 1d, 0.3, 0.2, 0.1, 0.5, 0.2, 0.3, 0.4, -1, -0.9, 0.1, 0.5, 1.2, 1.3, 1.1, 0.35, 1.1, 0.1, 0.2, 0.3, -1d, -0.5d, 0.5d, 1d, 0.3, 0.2, 0.1, 0.5, 0.2, 0.3, 0.4, -1, -0.9, 0.1, 0.5, 1.2, 1.3, 1.1, 0.35, 1.1, 0.1, 0.2, 0.3 }; weightSetterMock.Setup(wi => wi.SetWeight(It.IsAny <IConnectionUnderTraining>())).Callback <IConnectionUnderTraining>(c => { var randomWeightIndex = connections.SelectMany(x => x).ToList().IndexOf(c); c.Weight = randomWeights[randomWeightIndex]; }); return(errorBackPropagationTraining); }