/// <summary>
/// Creates a training manager that coordinates bidirectional recurrent training,
/// persisting network state to the given data file between sessions.
/// </summary>
/// <param name="trainer">The bidirectional batch trainer to manage</param>
/// <param name="dataFile">Path used to load/save serialised network state</param>
/// <param name="testData">Sequential data used to score the network</param>
/// <param name="memorySize">Size of the recurrent memory buffers</param>
/// <param name="autoAdjustOnNoChangeCount">Optional epoch count after which training auto-adjusts when the score stops improving</param>
public IBidirectionalRecurrentTrainingManager CreateBidirectionalManager(INeuralNetworkBidirectionalBatchTrainer trainer, string dataFile, ISequentialTrainingDataProvider testData, int memorySize, int? autoAdjustOnNoChangeCount = null)
{
    var manager = new BidirectionalManager(_lap, trainer, dataFile, testData, memorySize, autoAdjustOnNoChangeCount);
    return manager;
}
/// <summary>
/// Restores previously serialised network state (when the file exists) and returns the
/// initial forward/backward memory buffers, zero-filled to the hidden layer size.
/// </summary>
/// <param name="network">Trainer whose NetworkInfo is overwritten when a saved model is found</param>
/// <param name="file">Path to the serialised BidirectionalNetwork; may not exist yet</param>
/// <param name="hiddenLayerSize">Length of each memory buffer</param>
/// <returns>A tuple of (forward memory, backward memory)</returns>
protected Tuple<float[], float[]> _Load(INeuralNetworkBidirectionalBatchTrainer network, string file, int hiddenLayerSize)
{
    // CLR float arrays are zero-initialised on allocation, so a plain new float[n]
    // replaces the original Enumerable.Range(0, n).Select(i => 0f).ToArray() with
    // identical contents and no LINQ overhead
    var forwardMemory = new float[hiddenLayerSize];
    var backwardMemory = new float[hiddenLayerSize];

    if (File.Exists(file)) {
        using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read)) {
            var networkInfo = Serializer.Deserialize<BidirectionalNetwork>(stream);
            network.NetworkInfo = networkInfo;

            // Math.Min guards against a saved model whose memory length differs from the
            // requested hidden layer size; the shorter length wins and the remainder stays zero.
            // NOTE(review): assumes ForwardMemory/BackwardMemory are non-null in any saved file — confirm against the save path
            Array.Copy(networkInfo.ForwardMemory.Data, forwardMemory, Math.Min(networkInfo.ForwardMemory.Data.Length, forwardMemory.Length));
            Array.Copy(networkInfo.BackwardMemory.Data, backwardMemory, Math.Min(networkInfo.BackwardMemory.Data.Length, backwardMemory.Length));
        }
    }
    return Tuple.Create(forwardMemory, backwardMemory);
}
/// <summary>
/// Creates the manager and restores any previously saved network state from the data file.
/// </summary>
/// <param name="lap">Linear algebra provider used during training</param>
/// <param name="trainer">The bidirectional batch trainer to manage</param>
/// <param name="dataFile">Path used to load/save serialised network state</param>
/// <param name="testData">Sequential data used to score the network</param>
/// <param name="memorySize">Size of the recurrent memory buffers</param>
/// <param name="autoAdjustOnNoChangeCount">Epoch count after which training auto-adjusts when the score stops improving</param>
public BidirectionalManager(ILinearAlgebraProvider lap, INeuralNetworkBidirectionalBatchTrainer trainer, string dataFile, ISequentialTrainingDataProvider testData, int memorySize, int? autoAdjustOnNoChangeCount = 5)
    : base(testData, autoAdjustOnNoChangeCount)
{
    _lap = lap;
    _trainer = trainer;
    _dataFile = dataFile;

    // restore persisted state (if any) and seed both recurrent memory buffers
    var initialMemory = _Load(trainer, dataFile, memorySize);
    _forwardMemory = initialMemory.Item1;
    _backwardMemory = initialMemory.Item2;
}
/// <summary>
/// Scores the network against the test data and, when the score beats the best seen so far,
/// captures the network state (including both memory buffers) as the new best output.
/// </summary>
/// <param name="context">Training context used to report the score</param>
/// <param name="forwardMemory">Current forward memory buffer</param>
/// <param name="backwardMemory">Current backward memory buffer</param>
/// <param name="data">Test data to score against</param>
/// <param name="network">The network being evaluated</param>
/// <param name="recurrentContext">Recurrent context carrying the error metric</param>
/// <param name="bestScore">Best score observed so far; updated on improvement</param>
/// <param name="output">Best network snapshot so far; replaced on improvement</param>
/// <returns>True when this evaluation improved on the best score</returns>
protected bool _CalculateTestScore(ITrainingContext context, float[] forwardMemory, float[] backwardMemory, ISequentialTrainingDataProvider data, INeuralNetworkBidirectionalBatchTrainer network, IRecurrentTrainingContext recurrentContext, ref double bestScore, ref BidirectionalNetwork output)
{
    var score = _GetScore(data, network, forwardMemory, backwardMemory, recurrentContext);
    var errorMetric = recurrentContext.TrainingContext.ErrorMetric;

    // an improvement is a move in the metric's preferred direction
    var improved = errorMetric.HigherIsBetter ? score > bestScore : score < bestScore;
    if (improved) {
        bestScore = score;
        output = network.NetworkInfo;
        // NOTE(review): the live memory arrays are stored by reference, not copied — confirm callers do not mutate them afterwards
        output.ForwardMemory = new FloatArray { Data = forwardMemory };
        output.BackwardMemory = new FloatArray { Data = backwardMemory };
    }
    context.WriteScore(score, errorMetric.DisplayAsPercentage, improved);
    return improved;
}
/// <summary>
/// Executes the network over the test data and returns the absolute value of the
/// average error-metric score across every output in every sequence.
/// </summary>
/// <param name="data">Test data to execute against</param>
/// <param name="network">The network to execute</param>
/// <param name="forwardMemory">Forward memory buffer passed to execution</param>
/// <param name="backwardMemory">Backward memory buffer passed to execution</param>
/// <param name="context">Recurrent context carrying the error metric</param>
/// <returns>The absolute mean error over all outputs</returns>
protected double _GetScore(ISequentialTrainingDataProvider data, INeuralNetworkBidirectionalBatchTrainer network, float[] forwardMemory, float[] backwardMemory, IRecurrentTrainingContext context)
{
    var errorMetric = context.TrainingContext.ErrorMetric;
    var executionResults = network.Execute(data, forwardMemory, backwardMemory, context);

    // flatten the per-sequence results, score each output, then average
    var averageError = executionResults
        .SelectMany(sequence => sequence)
        .Select(item => errorMetric.Compute(item.Output, item.ExpectedOutput))
        .Average();
    return Math.Abs(averageError);
}