/// <summary>
/// Prints the current iteration number and the total network error of the
/// backpropagation rule that raised this learning event.
/// </summary>
/// <param name="event">Learning event whose source is a <see cref="BackPropagation"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    var backprop = (BackPropagation)@event.Source;
    Console.WriteLine($"Current iteration: {backprop.CurrentIteration}");
    Console.WriteLine($"Error: {backprop.TotalNetworkError}");
}
/// <summary>
/// Persists a new <see cref="LearningEvent"/> and returns its database-generated id.
/// </summary>
/// <param name="learningEvent">The event to store; must not be null.</param>
/// <returns>The id assigned to the stored event.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="learningEvent"/> is null.</exception>
public async Task<long> AddLearningEventAsync(LearningEvent learningEvent)
{
    // Fail fast with a clear exception instead of letting EF Core surface a
    // confusing NullReferenceException deep inside AddAsync/SaveChangesAsync.
    if (learningEvent is null)
    {
        throw new ArgumentNullException(nameof(learningEvent));
    }

    await Context.AddAsync(learningEvent);
    await Context.SaveChangesAsync();

    // Id is populated by the provider during SaveChangesAsync for store-generated keys.
    return learningEvent.Id;
}
/// <summary>
/// Logs the iteration number and total network error for every learning event
/// except the final LEARNING_STOPPED notification.
/// </summary>
/// <param name="event">Learning event whose source is a <see cref="BackPropagation"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    // Cast first (as the original did) so a non-BackPropagation source still
    // fails the same way regardless of the event type.
    var rule = (BackPropagation)@event.Source;
    if (@event.EventType.Equals(LearningEvent.Type.LEARNING_STOPPED))
    {
        return;
    }
    Console.WriteLine($"{rule.CurrentIteration}. iteration : {rule.TotalNetworkError}");
}
// Notifies every registered LearningEventListener that a learning event occurred.
// NOTE(review): lock (this) is an anti-pattern — external code can lock the same
// instance and deadlock or serialize unexpectedly. A private readonly lock object
// would be safer, but other members of this class may already synchronize on the
// instance; confirm before changing. Listeners are invoked while the lock is held,
// so a slow or re-entrant listener blocks all other event delivery.
protected internal virtual void fireLearningEvent(LearningEvent evt) { lock (this) { foreach (LearningEventListener listener in listeners) { listener.handleLearningEvent(evt); } } }
/// <summary>
/// Logs the epoch number, the current total network error, and the wall-clock
/// time spent on the epoch that just finished, then restarts the epoch timer.
/// </summary>
/// <param name="event">Learning event whose source is a <see cref="BackPropagation"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    var backprop = (BackPropagation)@event.Source;

    LOG.info("Epoch no#: [{}]. Error [{}]", backprop.CurrentIteration, backprop.TotalNetworkError);
    LOG.info("Epoch execution time: {} sec", (DateTimeHelperClass.CurrentUnixTimeMillis() - start) / 1000.0);

    // Reset the timer for the next epoch.
    start = DateTimeHelperClass.CurrentUnixTimeMillis();
}
/// <summary>
/// Updates the stored learning event with the given id using the field values
/// from <paramref name="learningEvent"/>.
/// </summary>
/// <param name="id">Id of the event to update.</param>
/// <param name="learningEvent">Source of the new field values.</param>
/// <returns>
/// true when the event was found and saved; false when no event with
/// <paramref name="id"/> exists.
/// </returns>
public async Task<bool> Update(long id, LearningEvent learningEvent)
{
    var ev = await GetEvent(id);
    if (ev is null)
    {
        // Previously a missing id caused a NullReferenceException on the first
        // assignment below; report "not found" instead.
        // NOTE(review): assumes GetEvent returns null for a missing id — confirm;
        // if it throws instead, this guard is harmless.
        return false;
    }

    ev.Workers = learningEvent.Workers;
    ev.MaxScore = learningEvent.MaxScore;
    ev.Name = learningEvent.Name;
    ev.PlannedDate = learningEvent.PlannedDate;
    ev.Description = learningEvent.Description;
    ev.CompetencesId = learningEvent.CompetencesId;

    await Context.SaveChangesAsync();
    return true;
}
/// <summary>
/// Prints a per-iteration progress line while training is running, and a
/// completion summary (iteration count plus formatted final error) when the
/// LEARNING_STOPPED event arrives.
/// </summary>
/// <param name="event">Learning event whose source is a <see cref="BackPropagation"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    var backprop = (BackPropagation)@event.Source;

    if (!@event.EventType.Equals(LearningEvent.Type.LEARNING_STOPPED))
    {
        Console.WriteLine($"Iteration: {backprop.CurrentIteration} | Network error: {backprop.TotalNetworkError}");
        return;
    }

    double finalError = backprop.TotalNetworkError;
    Console.WriteLine("Training completed in " + backprop.CurrentIteration + " iterations, ");
    Console.WriteLine("With total error: " + formatDecimalNumber(finalError));
}
/// <summary>
/// Prints the current training epoch and total network error of the supervised
/// learning rule that raised this event.
/// </summary>
/// <param name="event">Learning event whose source is a <see cref="SupervisedLearning"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    // Use the Source property, consistent with the other listeners in this file;
    // the Java-style getSource() accessor is a leftover from the original port.
    SupervisedLearning rule = (SupervisedLearning)@event.Source;
    System.Console.WriteLine("Training, Network Epoch " + rule.CurrentIteration + ", Error:" + rule.TotalNetworkError);
}
/// <summary>
/// Cross-validation listener: adds this fold's share of the iteration error
/// (total error divided by fold size) to the running per-iteration totals.
/// </summary>
/// <param name="event">Learning event whose source is a <see cref="BackPropagation"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    // Use the Source property, consistent with the other listeners in this file;
    // the Java-style getSource() accessor is a leftover from the original port.
    BackPropagation bp = (BackPropagation)@event.Source;
    // CurrentIteration is 1-based; foldErrors is indexed from 0.
    foldErrors[bp.CurrentIteration - 1] += bp.TotalNetworkError / foldSize;
}
/// <summary>
/// Prints the iteration number and total network error of the LMS rule that
/// raised this learning event.
/// </summary>
/// <param name="event">Learning event whose source is an <see cref="LMS"/> rule.</param>
public virtual void handleLearningEvent(LearningEvent @event)
{
    var rule = (LMS)@event.Source;
    Console.WriteLine($"{rule.CurrentIteration}. iteration | Total network error: {rule.TotalNetworkError}");
}