/// <summary>
/// Builds one gradient-computation worker bound to a slice of the training data.
/// </summary>
/// <param name="mainWorker">Coordinating worker; kept so work queues can be re-balanced dynamically across threads.</param>
/// <param name="threadIdx">Index of this worker among its siblings.</param>
/// <param name="numThreads">Total number of gradient workers.</param>
/// <param name="queue">The batch of examples this worker is responsible for.</param>
/// <param name="fn">The differentiable function whose gradient is being accumulated.</param>
/// <param name="weights">Current weight vector; also used as the template for this worker's gradient buffer.</param>
public GradientWorker(AbstractBatchOptimizer.TrainingWorker<T> mainWorker, int threadIdx, int numThreads, IList<T> queue, AbstractDifferentiableFunction<T> fn, ConcatVector weights)
{
    this.fn = fn;
    this.queue = queue;
    this.weights = weights;
    this.numThreads = numThreads;
    this.threadIdx = threadIdx;
    // Reference back to the coordinator: this is what enables dynamic re-balancing of work queues.
    this.mainWorker = mainWorker;
    // Each worker accumulates into its own zeroed buffer, so no synchronization is needed while summing.
    localDerivative = weights.NewEmptyClone();
}
/// <summary>
/// Runs batch optimization of <paramref name="fn"/> over <paramref name="dataset"/> on a background
/// thread, and blocks until training converges (or, in interactive mode, until the user presses a key).
/// </summary>
/// <param name="dataset">Training examples handed to the background <c>TrainingWorker</c>.</param>
/// <param name="fn">The differentiable objective being optimized.</param>
/// <param name="initialWeights">Starting weight vector.</param>
/// <param name="l2regularization">L2 regularization coefficient passed through to the worker.</param>
/// <param name="convergenceDerivativeNorm">Derivative-norm threshold at which the worker declares convergence.</param>
/// <param name="quiet">If true, wait silently on the termination barrier instead of polling stdin.</param>
/// <returns>The weight vector held by the worker at the moment training stopped.</returns>
public virtual ConcatVector Optimize<T>(T[] dataset, AbstractDifferentiableFunction<T> fn, ConcatVector initialWeights, double l2regularization, double convergenceDerivativeNorm, bool quiet)
{
    if (!quiet)
    {
        log.Info("\n**************\nBeginning training\n");
    }
    else
    {
        log.Info("[Beginning quiet training]");
    }
    AbstractBatchOptimizer.TrainingWorker<T> mainWorker = new AbstractBatchOptimizer.TrainingWorker<T>(this, dataset, fn, initialWeights, l2regularization, convergenceDerivativeNorm, quiet);
    new Thread(mainWorker).Start();
    BufferedReader br = new BufferedReader(new InputStreamReader(Runtime.@in));
    if (!quiet)
    {
        log.Info("NOTE: you can press any key (and maybe ENTER afterwards to jog stdin) to terminate learning early.");
        log.Info("The convergence criteria are quite aggressive if left uninterrupted, and will run for a while");
        log.Info("if left to their own devices.\n");
        while (true)
        {
            if (mainWorker.isFinished)
            {
                log.Info("training completed without interruption");
                return mainWorker.weights;
            }
            try
            {
                if (br.Ready())
                {
                    log.Info("received quit command: quitting");
                    log.Info("training completed by interruption");
                    mainWorker.isFinished = true;
                    return mainWorker.weights;
                }
            }
            catch (IOException e)
            {
                Sharpen.Runtime.PrintStackTrace(e);
            }
            // FIX: the original loop busy-waited at 100% CPU between stdin polls, stealing cycles
            // from the very training threads it is monitoring. A short sleep keeps the poll
            // responsive to keypresses while leaving the CPU to the optimizer.
            try
            {
                Thread.Sleep(100);
            }
            catch (Exception e)
            {
                throw new RuntimeInterruptedException(e);
            }
        }
    }
    else
    {
        // FIX: the original checked isFinished OUTSIDE the monitor and only then locked and waited.
        // If the worker set the flag and signalled the barrier between that check and the Wait(),
        // the notification was lost and this thread hung forever. Checking the flag inside the
        // monitor, with the wait in a loop (which also absorbs spurious wakeups), closes that race.
        lock (mainWorker.naturalTerminationBarrier)
        {
            while (!mainWorker.isFinished)
            {
                try
                {
                    Sharpen.Runtime.Wait(mainWorker.naturalTerminationBarrier);
                }
                catch (Exception e)
                {
                    // Sharpen convention: surface interruption as a runtime exception.
                    throw new RuntimeInterruptedException(e);
                }
            }
        }
        log.Info("[Quiet training complete]");
        return mainWorker.weights;
    }
}