/// <summary>
/// Run this layer. Take relevant input values from inputs and put relevant output values in outputs registry. Each input and each output registry represents one connected layer.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations (duh).</param>
/// <param name="trainingPass">Indicate whether this is run is part of a training pass.</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	INDArray inputs = buffer.Inputs["default"].Get<INDArray>("activations");
	INDArray weights = buffer.Parameters.Get<INDArray>("weights");
	string activation = buffer.Parameters.Get<string>("activation");
	long batches = inputs.Shape[0];
	int inputSize = Parameters.Get<int>("default_input_size"), size = Parameters.Get<int>("size");

	// Stack the bias row once per batch sample so it can be added directly to each time slice's pre-activation below.
	INDArray biases = handler.StackRows((int) batches, buffer.Parameters.Get<INDArray>("biases"));

	// BatchTimeFeatures ordering by default, needs to be TimeBatchFeatures for layers operating on the time dimension
	INDArray activations = handler.PermuteBatchAndTime(inputs);

	// Collapse batch and feature dimensions into one so that RowWise iterates over time slices
	// (row = one time step containing all batches' features).
	activations = activations.Reshape(activations.Shape[0], activations.Shape[1] * ArrayUtils.Product(2, activations.Shape));

	activations = handler.RowWise(activations, timeSlice =>
	{
		// Per time step: restore [batches x inputSize] layout, then weights dot-product,
		// bias add and the configured activation function.
		// NOTE(review): assumes weights are [inputSize x size] — implied by the batches * size output reshape; confirm against parameter initialisation.
		timeSlice = timeSlice.Reshape(inputs.Shape[0], inputSize);
		timeSlice = handler.Dot(timeSlice, weights);
		timeSlice = handler.Add(timeSlice, biases);
		timeSlice = handler.Activation(activation, timeSlice);

		// Flatten back to a single row so RowWise can stitch the time steps together again.
		return (timeSlice.Reshape(1L, batches * size));
	});

	// Restore the [time x batches x size] layout, then permute back to BatchTimeFeatures for downstream layers.
	activations = activations.Reshape(activations.Shape[0], batches, size);

	buffer.Outputs["default"]["activations"] = handler.PermuteBatchAndTime(activations); // TODO are those the right dimensions? they should be...
}
/// <summary>
/// Run this layer. Take relevant input values from inputs and put relevant output values in outputs registry. Each input and each output registry represents one connected layer.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations (duh).</param>
/// <param name="trainingPass">Indicate whether this is run is part of a training pass.</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	// Resolve the externally configured aliases for the cost output registry and the target input.
	string costAlias = buffer.Parameters.Get<string>("external_cost_alias");
	string targetsAlias = buffer.Parameters.Get<string>("external_targets_alias");

	INDArray predictions = buffer.Inputs["default"].Get<INDArray>("activations");
	INDArray targets = buffer.Inputs[targetsAlias].Get<INDArray>("activations");

	// Cost is calculated on flattened 2D views (all leading dimensions collapsed against the last).
	INDArray flatPredictions = handler.FlattenAllButLast(predictions);
	INDArray flatTargets = handler.FlattenAllButLast(targets);

	IRegistry costOutput = buffer.Outputs[costAlias];

	costOutput["cost"] = CalculateCost(flatPredictions, flatTargets, buffer.Parameters, handler);
	costOutput["importance"] = buffer.Parameters["cost_importance"];
}
/// <summary>
/// Run this layer: apply an element-wise weighting plus a scalar bias to each row of the incoming activations.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations.</param>
/// <param name="trainingPass">Indicate whether this run is part of a training pass.</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	INDArray incoming = buffer.Inputs["default"].Get<INDArray>("activations");
	INDArray weights = buffer.Parameters.Get<INDArray>("weights");
	INumber bias = buffer.Parameters.Get<INumber>("bias");

	// Per row: multiply element-wise by the weights, then add the scalar bias.
	INDArray transformed = handler.RowWise(incoming, currentRow => handler.Add(handler.Multiply(currentRow, weights), bias));

	buffer.Outputs["default"]["activations"] = transformed;
}
/// <inheritdoc />
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	INDArray incoming = buffer.Inputs["default"].Get<INDArray>("activations");
	long batchSize = incoming.Shape[0], timeSteps = incoming.Shape[1];

	// Collapse time and feature dimensions so the entire sequence batch is a single matrix multiply.
	INDArray flattened = handler.FlattenTimeAndFeatures(incoming);
	INDArray weights = buffer.Parameters.Get<INDArray>("weights");

	// Stack the bias row once per (batch x time) sample so it can be added in one step.
	INDArray stackedBiases = handler.StackRows((int) (batchSize * timeSteps), buffer.Parameters.Get<INDArray>("biases"));

	INDArray preActivation = handler.Add(handler.Dot(flattened, weights), stackedBiases);
	INDArray activated = handler.Activation(buffer.Parameters.Get<string>("activation"), preActivation);

	// Restore the [batch x time x features] layout with this layer's configured output size.
	buffer.Outputs["default"]["activations"] = activated.Reshape(batchSize, timeSteps, Parameters.Get<int>("size"));
}
/// <summary>
/// Run this layer. Take relevant input values from inputs and put relevant output values in outputs registry. Each input and each output registry represents one connected layer.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations (duh).</param>
/// <param name="trainingPass">Indicate whether this is run is part of a training pass.</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	if (!trainingPass)
	{
		// Inference pass: dropout is disabled, activations pass through untouched.
		// NOTE(review): no 1/(1-p) rescaling happens here or in the training branch — presumably handled elsewhere; verify.
		buffer.Outputs["default"]["activations"] = buffer.Inputs["default"]["activations"];

		return;
	}

	INDArray incoming = buffer.Inputs["default"].Get<INDArray>("activations");

	// Bernoulli-style mask: each element survives with probability 1 - dropout_probability.
	INDArray mask = handler.NDArray((long[]) incoming.Shape.Clone());
	handler.FillWithProbabilityMask(mask, 1.0 - Parameters.Get<double>("dropout_probability"));

	INDArray masked = handler.Multiply(incoming, mask);

	buffer.Outputs["default"]["activations"] = masked.Reshape((long[]) incoming.Shape.Clone());
}
/// <summary>
/// Run this layer: forward the externally supplied activations unchanged into this layer's default output.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations.</param>
/// <param name="trainingPass">Indicate whether this run is part of a training pass.</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	string inputAlias = buffer.Parameters.Get<string>("external_input_alias");

	buffer.Outputs["default"]["activations"] = buffer.Inputs[inputAlias]["activations"];
}
/// <summary>
/// Run this layer. Intentionally a no-op: this layer performs no computation and produces no output values.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer (unused).</param>
/// <param name="handler">The computation handler to use for computations (unused).</param>
/// <param name="trainingPass">Indicate whether this run is part of a training pass (unused).</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	// Deliberately empty — presumably a placeholder/identity layer; confirm against the declaring type's purpose.
}
/// <summary>
/// Run this layer. Take relevant input values from the inputs registries and put relevant output values in the outputs registries.
/// Implementations read activations/parameters from <paramref name="buffer"/> and write their results back into it.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations.</param>
/// <param name="trainingPass">Indicate whether this run is part of a training pass.</param>
public abstract void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass);
/// <summary>
/// Run this layer: expose the incoming activations under the configured external output alias and also pass them through to the default output.
/// </summary>
/// <param name="buffer">The buffer containing the inputs, parameters and outputs respective to this layer.</param>
/// <param name="handler">The computation handler to use for computations.</param>
/// <param name="trainingPass">Indicate whether this run is part of a training pass.</param>
public override void Run(ILayerBuffer buffer, IComputationHandler handler, bool trainingPass)
{
	string outputAlias = buffer.Parameters.Get<string>("external_output_alias");

	buffer.Outputs[outputAlias]["activations"] = buffer.Inputs["default"]["activations"];

	// TODO create output layer without passthrough, maybe optional flag
	buffer.Outputs["default"]["activations"] = buffer.Inputs["default"]["activations"];
}