/// <summary>
/// Evaluate training throughput: construct an RPROP trainer (OpenCL-backed
/// when a profile is supplied, CPU otherwise) and count how many training
/// iterations complete within a fixed time window of 10 * MILIS
/// milliseconds (presumably 10 seconds if MILIS == 1000 — confirm the
/// constant's value).
/// </summary>
/// <param name="profile">The OpenCL training profile to use, or null to train on the CPU.</param>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to run through the network.</param>
/// <returns>The number of training iterations completed within the time window.</returns>
public static int EvaluateTrain(OpenCLTrainingProfile profile, BasicNetwork network, INeuralDataSet training)
{
    // train the neural network
    ITrain train;

    if (profile == null)
    {
        // CPU path: RPROP with its built-in default parameters.
        train = new ResilientPropagation(network, training);
    }
    else
    {
        // OpenCL path: same defaults, passed explicitly because this
        // overload requires them alongside the profile.
        train = new ResilientPropagation(
            network,
            training,
            profile,
            RPROPConst.DEFAULT_INITIAL_UPDATE,
            RPROPConst.DEFAULT_MAX_STEP);
    }

    int iterations = 0;

    // Count whole iterations until the wall-clock budget is exhausted.
    Stopwatch watch = new Stopwatch();
    watch.Start();
    while (watch.ElapsedMilliseconds < (10 * MILIS))
    {
        iterations++;
        train.Iteration();
    }

    return (iterations);
}
/// <summary>
/// Construct a backpropagation trainer. Trains on the CPU when no OpenCL
/// profile is given; otherwise delegates to an OpenCL flat trainer
/// configured for standard backpropagation.
/// </summary>
/// <param name="network">The network that is to be trained.</param>
/// <param name="training">The training set to use.</param>
/// <param name="profile">The OpenCL training profile, or null to train on the CPU.</param>
/// <param name="learnRate">The rate at which the weight matrix will be adjusted based on
/// learning.</param>
/// <param name="momentum">The influence that previous iteration's training deltas will
/// have on the current iteration.</param>
public Backpropagation(BasicNetwork network, INeuralDataSet training, OpenCLTrainingProfile profile, double learnRate, double momentum)
    : base(network, training)
{
    if (profile == null)
    {
        // CPU path: flat backpropagation trainer.
        TrainFlatNetworkBackPropagation backFlat = new TrainFlatNetworkBackPropagation(
            network.Structure.Flat,
            this.Training,
            learnRate,
            momentum);
        this.FlatTraining = backFlat;
    }
#if !SILVERLIGHT
    // OpenCL is unavailable under Silverlight; there a non-null profile is
    // silently ignored and FlatTraining is left unset.
    else
    {
        TrainFlatNetworkOpenCL rpropFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat,
            this.Training,
            profile);
        rpropFlat.LearnBPROP(learnRate, momentum);
        this.FlatTraining = rpropFlat;
    }
#endif
}
/// <summary>
/// Setup the kernel: size the error/gradient scratch arrays from the
/// profile's global workgroup size, record the network's layer geometry
/// in the parameter array, and allocate all OpenCL buffers (read-only for
/// inputs/weights/topology, write-only for outputs).
/// </summary>
/// <param name="profile">The training profile supplying the kernel workgroup sizing.</param>
public void Init(OpenCLTrainingProfile profile)
{
    // One error slot per global work item; gradients are per-item per-weight.
    int errorSize = profile.KernelGlobalWorkgroup;
    int gradientSize = profile.KernelGlobalWorkgroup * this.flat.Weights.Length;

    this.errors = new float[errorSize];

    // Kernel parameters: input count, output count, layer count.
    this.paramArray[0] = this.flat.InputCount;
    this.paramArray[1] = this.flat.OutputCount;
    this.paramArray[2] = this.flat.LayerCounts.Length;

    // create the buffers
    this.inputBuffer = CreateArrayReadOnly(this.inputArray);
    this.idealBuffer = CreateArrayReadOnly(this.idealArray);
    this.errorBuffer = CreateFloatArrayWriteOnly(errorSize);
    this.gradientOutBuffer = CreateFloatArrayWriteOnly(gradientSize);
    this.gradientInBuffer = CreateArrayReadOnly(this.gradients);
    this.paramBuffer = CreateArrayReadOnly(this.paramArray);
    this.layerIndexBuffer = CreateArrayReadOnly(this.flat.LayerIndex);
    this.layerCountBuffer = CreateArrayReadOnly(this.flat.LayerCounts);
    this.layerFeedCountBuffer = CreateArrayReadOnly(this.flat.LayerFeedCounts);
    this.weightInArrayBuffer = CreateArrayReadOnly(this.weightInArray);
    this.weightOutArrayBuffer = CreateFloatArrayWriteOnly(this.weightInArray.Length);
    this.weightIndexBuffer = CreateArrayReadOnly(this.flat.WeightIndex);
    // NOTE(review): the activation-type buffer is filled from LayerCounts,
    // not an activation-type array — looks like a copy-paste slip; confirm
    // whether this.flat exposes an ActivationType array that was intended.
    this.activationTypeBuffer = CreateArrayReadOnly(this.flat.LayerCounts);
    this.tempDataInBuffer = CreateArrayReadOnly(this.tempDataArray);
    this.tempDataOutBuffer = CreateFloatArrayWriteOnly(this.tempDataArray.Length);
}
/// <summary>
/// Create a backpropagation trainer for this job, attach all configured
/// strategies, and store it in <c>Train</c>.
/// </summary>
/// <param name="profile">The OpenCL training profile, or null for the CPU.</param>
/// <param name="singleThreaded">True to force a single training thread.</param>
public override void CreateTrainer(OpenCLTrainingProfile profile, Boolean singleThreaded)
{
    Propagation.Propagation train = new Backpropagation(Network, Training,
        profile, LearningRate, Momentum);

    if (singleThreaded)
    {
        train.NumThreads = 1;
    }
    else
    {
        // 0 = let the trainer pick the thread count; matches the RPROP
        // factory's CreateTrainer, which sets this branch explicitly.
        train.NumThreads = 0;
    }

    foreach (IStrategy strategy in Strategies)
    {
        train.AddStrategy(strategy);
    }

    Train = train;
}
/// <summary>
/// Benchmark training of a freshly generated feedforward network,
/// optionally on an OpenCL device.
/// </summary>
/// <param name="device">The OpenCL device, null for CPU.</param>
/// <param name="input">Input neurons.</param>
/// <param name="hidden1">Hidden 1 neurons.</param>
/// <param name="hidden2">Hidden 2 neurons.</param>
/// <param name="output">Output neurons.</param>
/// <returns>The result of the evaluation.</returns>
public static int EvaluateTrain(EncogCLDevice device, int input, int hidden1, int hidden2, int output)
{
    // Build the network and a random data set sized to it.
    BasicNetwork network = EncogUtility.SimpleFeedForward(
        input, hidden1, hidden2, output, true);
    INeuralDataSet training = RandomTrainingFactory.Generate(
        1000, 10000, input, output, -1, 1);

    // A null profile means "train on the CPU"; OpenCL profiles are not
    // available under Silverlight.
    OpenCLTrainingProfile profile = null;
#if !SILVERLIGHT
    if (device != null)
    {
        profile = new OpenCLTrainingProfile(device);
    }
#endif

    return EvaluateTrain(profile, network, training);
}
/// <summary>
/// Execute the current training job: build its trainer (OpenCL-backed when
/// this performer is an OpenCL performer), iterate until the job says to
/// stop, then report elapsed time back to the manager. Any exception is
/// captured on the job rather than propagated.
/// </summary>
public void Run()
{
    Stopwatch watch = new Stopwatch();
    try
    {
        watch.Start();

        OpenCLTrainingProfile profile = null;
#if !SILVERLIGHT
        if (this is ConcurrentTrainingPerformerOpenCL)
        {
            EncogCLDevice device = ((ConcurrentTrainingPerformerOpenCL)this).Device;
            profile = new OpenCLTrainingProfile(device,
                this.currentJob.LocalRatio,
                this.currentJob.GlobalRatio,
                this.currentJob.SegmentationRatio);
        }
#endif

        this.currentJob.CreateTrainer(profile, Manager.SingleThreaded);
        ITrain train = this.currentJob.Train;

        // Removed dead "interation" counter: it was incremented but never read.
        while (this.currentJob.ShouldContinue())
        {
            train.Iteration(this.currentJob.IterationsPer);
        }
        watch.Stop();
    }
    catch (Exception t)
    {
        // Record the failure on the job; the manager inspects Error later.
        this.currentJob.Error = t;
    }
    finally
    {
        // NOTE(review): locking on "this" is discouraged (external code can
        // take the same lock); a private lock object would be safer, but
        // changing it requires knowing every other lock site in the class.
        lock (this)
        {
            this.ready = true;
        }
        this.Manager.JobDone(watch.ElapsedMilliseconds, this);
    }
}
/// <summary>
/// Create a resilient-propagation trainer for this job, attach every
/// configured strategy, and publish it via <c>Train</c>.
/// </summary>
/// <param name="profile">The OpenCL training profile, or null for the CPU.</param>
/// <param name="singleThreaded">True to force a single training thread.</param>
public override void CreateTrainer(OpenCLTrainingProfile profile, bool singleThreaded)
{
    Propagation.Propagation trainer = new ResilientPropagation(
        Network, Training, profile, InitialUpdate, MaxStep);

    // 1 pins training to one thread; 0 lets the trainer choose.
    trainer.NumThreads = singleThreaded ? 1 : 0;

    foreach (IStrategy strategy in Strategies)
    {
        trainer.AddStrategy(strategy);
    }

    Train = trainer;
}
/// <summary>
/// Compile the kernel: prepend ACTIVATION/DERIVATIVE macro definitions
/// (taken from the network's first activation function) to the kernel
/// source loaded from <c>SourceName</c>, compile it, compute the kernel
/// parameters for the profile, and initialize the buffers.
/// </summary>
///
/// <param name="options">The compiler options.</param>
/// <param name="profile">The OpenCL training profile to use.</param>
/// <param name="network">The network to compile for.</param>
public void Compile(IDictionary<String, String> options, OpenCLTrainingProfile profile, FlatNetwork network)
{
    IActivationFunction activation = network.ActivationFunctions[0];

    // Build the full kernel source: two macro defines, then the template.
    StringBuilder source = new StringBuilder();
    source.Append("#define ACTIVATION(x,slope)")
          .Append(activation.GetOpenCLExpression(false))
          .Append("\r\n")
          .Append("#define DERIVATIVE(x,slope)")
          .Append(activation.GetOpenCLExpression(true))
          .Append("\r\n")
          .Append(ResourceLoader.LoadString(SourceName));
    CLSource = source.ToString();

    Compile(options);
    profile.CalculateKernelParams(this, training);
    // setup
    Init(profile);
}
/// <summary>
/// Construct a resilient training object with explicit training
/// parameters. The defaults are acceptable for nearly all problems, so
/// the simpler constructor is usually preferred over this one.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training set to use.</param>
/// <param name="profile">Optional EncogCL profile to execute on; null for CPU.</param>
/// <param name="initialUpdate">The initial update values, this is the amount that the deltas
/// are all initially set to.</param>
/// <param name="maxStep">The maximum that a delta can reach.</param>
public ResilientPropagation(BasicNetwork network, INeuralDataSet training, OpenCLTrainingProfile profile, double initialUpdate, double maxStep)
    : base(network, training)
{
    if (profile == null)
    {
        // CPU path: plain flat RPROP trainer.
        this.FlatTraining = new TrainFlatNetworkResilient(
            network.Structure.Flat, this.Training);
    }
#if !SILVERLIGHT
    // OpenCL is compiled out under Silverlight; there a non-null profile
    // leaves FlatTraining unset, matching the original behavior.
    else
    {
        TrainFlatNetworkOpenCL clTrainer = new TrainFlatNetworkOpenCL(
            network.Structure.Flat, this.Training, profile);
        clTrainer.LearnRPROP(initialUpdate, maxStep);
        this.FlatTraining = clTrainer;
    }
#endif
}
/// <summary>
/// Construct a Manhattan propagation training object. Trains on the CPU
/// when no OpenCL profile is given.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="profile">The OpenCL profile to use, null for CPU.</param>
/// <param name="learnRate">The learning rate.</param>
public ManhattanPropagation(BasicNetwork network, INeuralDataSet training, OpenCLTrainingProfile profile, double learnRate)
    : base(network, training)
{
    if (profile == null)
    {
        // CPU path: flat Manhattan trainer.
        FlatTraining = new TrainFlatNetworkManhattan(
            network.Structure.Flat,
            this.Training,
            learnRate);
    }
#if !SILVERLIGHT
    // OpenCL is unavailable under Silverlight; there a non-null profile is
    // silently ignored and FlatTraining is left unset.
    else
    {
        TrainFlatNetworkOpenCL rpropFlat = new TrainFlatNetworkOpenCL(
            network.Structure.Flat,
            this.Training,
            profile);
        rpropFlat.LearnManhattan(learnRate);
        this.FlatTraining = rpropFlat;
    }
#endif
}
/// <summary>
/// Construct an RPROP trainer using the default training parameters,
/// which the resilient algorithm is designed to make acceptable for
/// nearly all problems. Allows an OpenCL device to be specified via the
/// profile; this is usually the constructor to use.
/// </summary>
/// <param name="network">The network to train.</param>
/// <param name="training">The training data to use.</param>
/// <param name="profile">The profile to use, or null for CPU.</param>
public ResilientPropagation(BasicNetwork network, INeuralDataSet training, OpenCLTrainingProfile profile)
    : this(
        network,
        training,
        profile,
        RPROPConst.DEFAULT_INITIAL_UPDATE,
        RPROPConst.DEFAULT_MAX_STEP)
{
}
/// <summary>
/// Create a trainer to use.
/// </summary>
/// <param name="profile">The OpenCL training profile to use, or null for the CPU.</param>
/// <param name="singleThreaded">True, if single threaded.</param>
public abstract void CreateTrainer(OpenCLTrainingProfile profile, bool singleThreaded);