private void TestDenseNetwork(int inputs, int samples, int batchSize, int epochs)
{
    var net = new NeuralNetwork("deep_dense_test", 7);
    var model = new Sequential();
    model.AddLayer(new Dense(inputs, 5, Activation.Linear));
    model.AddLayer(new Dense(model.LastLayer, 4, Activation.Linear));
    model.AddLayer(new Dense(model.LastLayer, inputs, Activation.Linear));
    net.Model = model;

    var tData = new List<Data>();
    for (int i = 0; i < samples; ++i)
    {
        var input = new Tensor(model.Layer(0).InputShape);
        input.FillWithRand(10 * i, -2, 2);
        tData.Add(new Data(input, input.Mul(1.7f)));
    }

    net.Optimize(new SGD(0.02f), Loss.MeanSquareError);
    net.Fit(tData, batchSize, epochs, null, 2, Track.TrainError);

    for (int i = 0; i < tData.Count; ++i)
        Assert.IsTrue(tData[i].Output.Equals(net.Predict(tData[i].Input)[0], 0.01f));
}
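// Hypothetical invocation sketch (not part of the original suite; the test
// name and all argument values are illustrative assumptions): trains the
// three-layer linear stack above to recover the y = 1.7 * x mapping baked
// into its training data.
[TestMethod]
public void Train_DeepDenseNetwork()
{
    TestDenseNetwork(inputs: 5, samples: 50, batchSize: 10, epochs: 100);
}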
private Sequential CreateSequential(JToken model)
{
    var seq = new Sequential(controller);
    var layers = model.SelectToken("config").Children();

    foreach (var layer in layers)
    {
        var layerType = layer.SelectToken("class_name");
        switch (layerType.Value<String>())
        {
            case "Linear":
                // weight float tensor
                var weightData = layer.SelectToken("config.weights.data").ToObject<float[]>();
                var weightShape = layer.SelectToken("config.weights.shape").ToObject<int[]>();
                var weightTensor = controller.floatTensorFactory.Create(_data: weightData, _shape: weightShape, _autograd: true);

                // bias float tensor
                var biasData = layer.SelectToken("config.bias.data").ToObject<float[]>();
                var biasShape = layer.SelectToken("config.bias.shape").ToObject<int[]>();
                var biasTensor = controller.floatTensorFactory.Create(_data: biasData, _shape: biasShape, _autograd: true);

                var input = layer.SelectToken("config.input").ToObject<int>();
                var output = layer.SelectToken("config.output").ToObject<int>();

                var linear = new Linear(controller, input: input, output: output, weights: weightTensor, bias: biasTensor);
                seq.AddLayer(linear);
                break;
            case "ReLU":
                seq.AddLayer(new ReLU(controller));
                break;
            case "Log":
                seq.AddLayer(new OpenMined.Syft.Layer.Log(controller));
                break;
            case "Dropout":
                var rate = layer.SelectToken("config.rate").ToObject<float>();
                seq.AddLayer(new Dropout(controller, rate));
                break;
            case "Softmax":
                var dim = layer.SelectToken("config.dim").ToObject<int>();
                seq.AddLayer(new Softmax(controller, dim));
                break;
        }
    }

    return seq;
}
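// A minimal sketch of the JSON this parser expects, inferred from the
// SelectToken paths above; the concrete values are illustrative only:
//
// {
//   "class_name": "Sequential",
//   "config": [
//     {
//       "class_name": "Linear",
//       "config": {
//         "input": 2,
//         "output": 1,
//         "weights": { "data": [0.1, -0.3], "shape": [2, 1] },
//         "bias":    { "data": [0.0],       "shape": [1] }
//       }
//     },
//     { "class_name": "ReLU" }
//   ]
// }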
public DQN(Shape inputShape, int numberOfActions, int[] hiddenLayersNeurons, float learningRate, float discountFactor, int batchSize, BaseExperienceReplay memory)
    : this(inputShape, numberOfActions, learningRate, discountFactor, batchSize, memory)
{
    Net = new NeuralNetwork("dqn");
    var model = new Sequential();
    model.AddLayer(new Flatten(inputShape));
    for (int i = 0; i < hiddenLayersNeurons.Length; ++i)
        model.AddLayer(new Dense(model.LastLayer, hiddenLayersNeurons[i], Activation.ReLU));
    model.AddLayer(new Dense(model.LastLayer, numberOfActions, Activation.Linear));
    Net.Model = model;
    Net.Optimize(new Adam(learningRate), new CustomHuberLoss(ImportanceSamplingWeights));
}
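// Hedged usage sketch: how this constructor might be called for a small
// control task. `ExperienceReplay` stands in for some concrete
// BaseExperienceReplay implementation and is an assumption, as are all
// of the argument values below.
var agent = new DQN(
    inputShape: new Shape(4),
    numberOfActions: 2,
    hiddenLayersNeurons: new[] { 24, 24 },
    learningRate: 0.001f,
    discountFactor: 0.99f,
    batchSize: 32,
    memory: new ExperienceReplay(50000));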
private void TestConvolutionLayer(Shape inputShape, int kernelSize, int kernelsNum, int stride, int samples, int batchSize, int epochs, TrainDataFunc convFunc)
{
    var net = new NeuralNetwork("convolution_test", 7);
    var model = new Sequential();
    model.AddLayer(new Convolution(inputShape, kernelSize, kernelsNum, stride, Activation.Linear) { KernelInitializer = new Initializers.Constant(1) });
    net.Model = model;

    var expectedKernels = new Tensor(new Shape(kernelSize, kernelSize, inputShape.Depth, kernelsNum));
    expectedKernels.FillWithRand(17);

    var tData = GenerateTrainingData(samples, model.LastLayer.InputShape, expectedKernels, convFunc);

    net.Optimize(new SGD(0.02f), Loss.MeanSquareError);
    net.Fit(tData, batchSize, epochs, null, 0, Track.Nothing);

    var paramsAndGrads = model.LastLayer.GetParametersAndGradients();
    for (int i = 0; i < expectedKernels.Length; ++i)
        Assert.AreEqual(paramsAndGrads[0].Parameters.GetFlat(i), expectedKernels.GetFlat(i), 1e-2);
}
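// Hypothetical invocation sketch: exercises a single 3x3 convolution with
// two kernels on a 6x6x1 input. `Conv` stands for whichever TrainDataFunc
// computes the reference convolution; its name and all values here are
// illustrative assumptions.
[TestMethod]
public void Train_SingleConvolutionLayer()
{
    TestConvolutionLayer(new Shape(6, 6, 1), kernelSize: 3, kernelsNum: 2, stride: 1, samples: 30, batchSize: 10, epochs: 50, convFunc: Conv);
}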
static void Main(string[] args)
{
    Tensor.SetOpMode(Tensor.OpMode.GPU);

    var net = new NeuralNetwork("test");
    var inputShape = new Shape(64, 64, 4);
    var model = new Sequential();
    model.AddLayer(new Convolution(inputShape, 8, 32, 2, Activation.ELU));
    model.AddLayer(new Convolution(model.LastLayer, 4, 64, 2, Activation.ELU));
    model.AddLayer(new Convolution(model.LastLayer, 4, 128, 2, Activation.ELU));
    model.AddLayer(new Flatten(model.LastLayer));
    model.AddLayer(new Dense(model.LastLayer, 512, Activation.ELU));
    model.AddLayer(new Dense(model.LastLayer, 3, Activation.Softmax));
    net.Model = model;
    net.Optimize(new Adam(), Loss.Huber1);

    var input = new Tensor(new Shape(64, 64, 4, 32));
    input.FillWithRand();

    // One-hot encode a random class for each sample in the batch of 32.
    var output = new Tensor(new Shape(1, 3, 1, 32));
    for (int n = 0; n < output.BatchSize; ++n)
        output[0, Tools.Rng.Next(output.Height), 0, n] = 1.0f;

    var timer = new Stopwatch();
    timer.Start();
    net.FitBatched(input, output, 10, 1, Track.Nothing);
    timer.Stop();

    Trace.WriteLine($"{Math.Round(timer.ElapsedMilliseconds / 1000.0, 2)} seconds");
}
private NeuralNetwork CreateFitTestNet()
{
    var net = new NeuralNetwork("fit_test", 7);
    var model = new Sequential();
    model.AddLayer(new Dense(3, 2, Activation.Linear) { KernelInitializer = new Initializers.Constant(1), UseBias = false });
    net.Model = model;
    net.Optimize(new SGD(0.07f), Loss.MeanSquareError);
    return net;
}
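// Hedged usage sketch: fitting the helper net on a single hand-made sample.
// The tensor values, the shape layout (width, height), and the epoch count
// are illustrative assumptions.
var net = CreateFitTestNet();
var tData = new List<Data>
{
    new Data(new Tensor(new[] { 0.0f, 1.0f, 2.0f }, new Shape(1, 3)),
             new Tensor(new[] { 3.0f, 4.0f }, new Shape(1, 2))),
};
net.Fit(tData, 1, 50, null, 2, Track.TrainError);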
public void SoftCopyParameters()
{
    var net = new NeuralNetwork("test");
    var model = new Sequential();
    model.AddLayer(new Dense(2, 3, Activation.Linear));
    model.AddLayer(new Dense(3, 3, Activation.Linear));
    net.Model = model;
    net.ForceInitLayers();

    var net2 = net.Clone();
    net.SoftCopyParametersTo(net2, 0.1f);

    // After soft-copying into a fresh clone, both networks should hold
    // identical parameters.
    var netParams = net.GetParametersAndGradients();
    var net2Params = net2.GetParametersAndGradients();

    for (int i = 0; i < netParams.Count; ++i)
        Assert.IsTrue(netParams[i].Parameters.Equals(net2Params[i].Parameters));
}
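// Hedged sketch of the typical use of soft parameter copying: blending online
// network weights into a target network, as in DQN target updates. The 0.01f
// interpolation factor and the blend semantics noted below are assumptions.
var online = new NeuralNetwork("online");
var m = new Sequential();
m.AddLayer(new Dense(2, 3, Activation.Linear));
online.Model = m;
online.ForceInitLayers();
var target = online.Clone();
online.SoftCopyParametersTo(target, 0.01f); // assumed: target <- 0.01*online + 0.99*target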
public Sequential CreateSequential(JToken model)
{
    var seq = new Sequential(controller);
    var layers = model.SelectToken("config").Children();

    foreach (var layer in layers)
    {
        var layerType = layer.SelectToken("class_name");
        switch (layerType.Value<String>())
        {
            case "Linear":
                // weight float tensor
                var weightData = layer.SelectToken("config.weights.data").ToObject<float[]>();
                var input = layer.SelectToken("config.input").ToObject<int>();
                var output = layer.SelectToken("config.output").ToObject<int>();

                // bias is optional; only deserialize it when the token is present
                float[] biasData = null;
                if (layer.SelectToken("config.bias") != null)
                    biasData = layer.SelectToken("config.bias.data").ToObject<float[]>();

                var linear = new Linear(controller, input: input, output: output, weights: weightData, bias: biasData);
                seq.AddLayer(linear);
                break;
            case "ReLU":
                seq.AddLayer(new ReLU(controller));
                break;
            case "Log":
                seq.AddLayer(new OpenMined.Syft.Layer.Log(controller));
                break;
            case "Dropout":
                var rate = layer.SelectToken("config.rate").ToObject<float>();
                seq.AddLayer(new Dropout(controller, rate));
                break;
            case "Softmax":
                var dim = layer.SelectToken("config.dim").ToObject<int>();
                seq.AddLayer(new Softmax(controller, dim));
                break;
            case "Sigmoid":
                seq.AddLayer(new Sigmoid(controller));
                break;
        }
    }

    return seq;
}
private Sequential CreateSequential(List<String> model)
{
    // TODO: this assumes everything belongs to a Sequential model; the
    // Sequential wrapper itself should probably be described in the JSON.
    var seq = new Sequential(controller);

    foreach (var l in model)
    {
        var config = JObject.Parse(l);
        Layer layer = null;
        switch ((string)config["name"])
        {
            case "linear":
                layer = new Linear(controller, (int)config["input"], (int)config["output"]);
                break;
            case "softmax":
                layer = new Softmax(controller, (int)config["dim"]);
                break;
            case "relu":
                layer = new ReLU(controller);
                break;
            case "log":
                layer = new Log(controller);
                break;
            case "dropout":
                layer = new Dropout(controller, (float)config["rate"]);
                break;
        }

        // Skip unrecognized layer names instead of adding a null layer.
        if (layer != null)
            seq.AddLayer(layer);
    }

    return seq;
}
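// A hedged sketch of the per-layer JSON strings this overload consumes
// (field names inferred from the casts above; concrete values illustrative):
var layerJson = new List<String>
{
    "{ \"name\": \"linear\",  \"input\": 784, \"output\": 128 }",
    "{ \"name\": \"relu\" }",
    "{ \"name\": \"linear\",  \"input\": 128, \"output\": 10 }",
    "{ \"name\": \"softmax\", \"dim\": 1 }",
};
var seq = CreateSequential(layerJson);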
private void TestDenseLayer(int inputs, int outputs, int samples, int batchSize, int epochs)
{
    var net = new NeuralNetwork("dense_test", 7);
    var model = new Sequential();
    model.AddLayer(new Dense(inputs, outputs, Activation.Linear) { KernelInitializer = new Initializers.Constant(1), UseBias = false });
    net.Model = model;

    var expectedWeights = new Tensor(new[] { 1.1f, 0.1f, -1.3f, 0.2f, -0.9f, 0.7f }, new Shape(3, 2));
    var tData = GenerateTrainingData(samples, model.LastLayer.InputShape, expectedWeights, MatMult);

    net.Optimize(new SGD(0.07f), Loss.MeanSquareError);
    net.Fit(tData, batchSize, epochs, null, 2, Track.TrainError);

    var paramsAndGrads = model.LastLayer.GetParametersAndGradients();
    for (int i = 0; i < expectedWeights.Length; ++i)
        Assert.AreEqual(paramsAndGrads[0].Parameters.GetFlat(i), expectedWeights.GetFlat(i), 1e-2);
}
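// MatMult is referenced above but not shown in this section. A minimal sketch
// of what such a TrainDataFunc could look like, assuming the delegate takes
// (input, parameters) and that Tensor.Mul(Tensor) performs matrix
// multiplication; the signature and argument order are assumptions:
private Tensor MatMultSketch(Tensor input, Tensor weights)
{
    // Reference output the dense layer under test is expected to learn.
    return weights.Mul(input);
}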
public DQNConv(int[] inputSize, int numberOfActions, float learningRate, float discountFactor, int batchSize, BaseExperienceReplay memory)
    : base(null, numberOfActions, learningRate, discountFactor, batchSize, memory)
{
    Tensor.SetOpMode(Tensor.OpMode.GPU);

    InputSize = inputSize;
    Shape inputShape = new Shape(inputSize[0], inputSize[1], TemporalDataSize);

    Net = new NeuralNetwork("DQNConv");
    var model = new Sequential();
    model.AddLayer(new Convolution(inputShape, 8, 32, 2, Activation.ELU));
    model.AddLayer(new Convolution(model.LastLayer, 4, 64, 2, Activation.ELU));
    model.AddLayer(new Convolution(model.LastLayer, 4, 128, 2, Activation.ELU));
    model.AddLayer(new Flatten(model.LastLayer));
    model.AddLayer(new Dense(model.LastLayer, 512, Activation.ELU));
    model.AddLayer(new Dense(model.LastLayer, numberOfActions, Activation.Softmax));
    Net.Model = model;
    Net.Optimize(new Adam(learningRate), new CustomHuberLoss(ImportanceSamplingWeights));
}
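// Hedged usage sketch for the convolutional variant; the screen resolution,
// hyperparameters, and the ExperienceReplay capacity are illustrative
// assumptions, not values taken from the original code base.
var agent = new DQNConv(
    inputSize: new[] { 64, 64 },
    numberOfActions: 3,
    learningRate: 0.0001f,
    discountFactor: 0.99f,
    batchSize: 32,
    memory: new ExperienceReplay(100000));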
public void ProcessMessage(string json_message, MonoBehaviour owner, Action<string> response)
{
    Command msgObj = JsonUtility.FromJson<Command>(json_message);
    try
    {
        switch (msgObj.objectType)
        {
            case "Optimizer":
            {
                if (msgObj.functionCall == "create")
                {
                    string optimizer_type = msgObj.tensorIndexParams[0];

                    // Extract parameters
                    List<int> p = new List<int>();
                    for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                        p.Add(int.Parse(msgObj.tensorIndexParams[i]));

                    List<float> hp = new List<float>();
                    for (int i = 0; i < msgObj.hyperParams.Length; i++)
                        hp.Add(float.Parse(msgObj.hyperParams[i]));

                    Optimizer optim = null;
                    if (optimizer_type == "sgd")
                        optim = new SGD(this, p, hp[0], hp[1], hp[2]);
                    else if (optimizer_type == "rmsprop")
                        optim = new RMSProp(this, p, hp[0], hp[1], hp[2], hp[3]);
                    else if (optimizer_type == "adam")
                        optim = new Adam(this, p, hp[0], hp[1], hp[2], hp[3], hp[4]);

                    response(optim.Id.ToString());
                    return;
                }
                else
                {
                    Optimizer optim = this.GetOptimizer(msgObj.objectIndex);
                    response(optim.ProcessMessage(msgObj, this));
                    return;
                }
            }
            case "FloatTensor":
            {
                if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
                {
                    FloatTensor tensor = floatTensorFactory.Create(_shape: msgObj.shape, _data: msgObj.data, _shader: this.Shader);
                    response(tensor.Id.ToString());
                    return;
                }
                else
                {
                    FloatTensor tensor = floatTensorFactory.Get(msgObj.objectIndex);
                    // Process the message's function
                    response(tensor.ProcessMessage(msgObj, this));
                    return;
                }
            }
            case "IntTensor":
            {
                if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
                {
                    int[] data = new int[msgObj.data.Length];
                    for (int i = 0; i < msgObj.data.Length; i++)
                        data[i] = (int)msgObj.data[i];

                    IntTensor tensor = intTensorFactory.Create(_shape: msgObj.shape, _data: data);
                    response(tensor.Id.ToString());
                    return;
                }
                else
                {
                    IntTensor tensor = intTensorFactory.Get(msgObj.objectIndex);
                    // Process the message's function
                    response(tensor.ProcessMessage(msgObj, this));
                    return;
                }
            }
            case "agent":
            {
                if (msgObj.functionCall == "create")
                {
                    Layer model = (Layer)GetModel(int.Parse(msgObj.tensorIndexParams[0]));
                    Optimizer optimizer = optimizers[int.Parse(msgObj.tensorIndexParams[1])];
                    response(new Syft.NN.RL.Agent(this, model, optimizer).Id.ToString());
                    return;
                }

                //Debug.Log("Getting Model:" + msgObj.objectIndex);
                Syft.NN.RL.Agent agent = this.GetAgent(msgObj.objectIndex);
                response(agent.ProcessMessageLocal(msgObj, this));
                return;
            }
            case "model":
            {
                if (msgObj.functionCall == "create")
                {
                    string model_type = msgObj.tensorIndexParams[0];
                    Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);

                    if (model_type == "linear") { response(this.BuildLinear(msgObj.tensorIndexParams).Id.ToString()); return; }
                    else if (model_type == "relu") { response(this.BuildReLU().Id.ToString()); return; }
                    else if (model_type == "log") { response(this.BuildLog().Id.ToString()); return; }
                    else if (model_type == "dropout") { response(this.BuildDropout(msgObj.tensorIndexParams).Id.ToString()); return; }
                    else if (model_type == "sigmoid") { response(this.BuildSigmoid().Id.ToString()); return; }
                    else if (model_type == "sequential") { response(this.BuildSequential().Id.ToString()); return; }
                    else if (model_type == "softmax") { response(this.BuildSoftmax(msgObj.tensorIndexParams).Id.ToString()); return; }
                    else if (model_type == "logsoftmax") { response(this.BuildLogSoftmax(msgObj.tensorIndexParams).Id.ToString()); return; }
                    else if (model_type == "tanh") { response(new Tanh(this).Id.ToString()); return; }
                    else if (model_type == "crossentropyloss") { response(new CrossEntropyLoss(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString()); return; }
                    else if (model_type == "categorical_crossentropy") { response(new CategoricalCrossEntropyLoss(this).Id.ToString()); return; }
                    else if (model_type == "nllloss") { response(new NLLLoss(this).Id.ToString()); return; }
                    else if (model_type == "mseloss") { response(new MSELoss(this).Id.ToString()); return; }
                    else if (model_type == "embedding") { response(new Embedding(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2])).Id.ToString()); return; }
                    else { Debug.LogFormat("<color=red>Model Type Not Found:</color> {0}", model_type); }
                }
                else
                {
                    //Debug.Log("Getting Model:" + msgObj.objectIndex);
                    Model model = this.GetModel(msgObj.objectIndex);
                    response(model.ProcessMessage(msgObj, this));
                    return;
                }

                response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
                return;
            }
            case "controller":
            {
                if (msgObj.functionCall == "num_tensors")
                {
                    response(floatTensorFactory.Count() + "");
                    return;
                }
                else if (msgObj.functionCall == "num_models")
                {
                    response(models.Count + "");
                    return;
                }
                else if (msgObj.functionCall == "new_tensors_allowed")
                {
                    Debug.LogFormat("New Tensors Allowed:{0}", msgObj.tensorIndexParams[0]);
                    if (msgObj.tensorIndexParams[0] == "True")
                        allow_new_tensors = true;
                    else if (msgObj.tensorIndexParams[0] == "False")
                        allow_new_tensors = false;
                    else
                        throw new Exception("Invalid parameter for new_tensors_allowed. Did you mean true or false?");
                    response(allow_new_tensors + "");
                    return;
                }
                else if (msgObj.functionCall == "load_floattensor")
                {
                    FloatTensor tensor = floatTensorFactory.Create(filepath: msgObj.tensorIndexParams[0], _shader: this.Shader);
                    response(tensor.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "set_seed")
                {
                    Random.InitState(int.Parse(msgObj.tensorIndexParams[0]));
                    response("Random seed set!");
                    return;
                }
                else if (msgObj.functionCall == "concatenate")
                {
                    List<int> tensor_ids = new List<int>();
                    for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                        tensor_ids.Add(int.Parse(msgObj.tensorIndexParams[i]));
                    FloatTensor result = Functional.Concatenate(floatTensorFactory, tensor_ids, int.Parse(msgObj.tensorIndexParams[0]));
                    response(result.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "ones")
                {
                    int[] dims = new int[msgObj.tensorIndexParams.Length];
                    for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                    FloatTensor result = Functional.Ones(floatTensorFactory, dims);
                    response(result.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "randn")
                {
                    int[] dims = new int[msgObj.tensorIndexParams.Length];
                    for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                    FloatTensor result = Functional.Randn(floatTensorFactory, dims);
                    response(result.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "random")
                {
                    int[] dims = new int[msgObj.tensorIndexParams.Length];
                    for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                    FloatTensor result = Functional.Random(floatTensorFactory, dims);
                    response(result.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "zeros")
                {
                    int[] dims = new int[msgObj.tensorIndexParams.Length];
                    for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                        dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                    FloatTensor result = Functional.Zeros(floatTensorFactory, dims);
                    response(result.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "model_from_json")
                {
                    Debug.Log("Loading Model from JSON:");
                    var json_str = msgObj.tensorIndexParams[0];
                    var config = JObject.Parse(json_str);

                    Sequential model;
                    if ((string)config["class_name"] == "Sequential")
                    {
                        model = this.BuildSequential();
                    }
                    else
                    {
                        response("Unity Error: SyftController.processMessage: while Loading model, Class :" + config["class_name"] + " is not implemented");
                        return;
                    }

                    for (int i = 0; i < config["config"].ToList().Count; i++)
                    {
                        var layer_desc = config["config"][i];
                        var layer_config_desc = layer_desc["config"];

                        if ((string)layer_desc["class_name"] == "Linear")
                        {
                            // The first layer reads its input dimension from batch_input_shape;
                            // subsequent layers inherit the previous layer's output units.
                            int previous_output_dim;
                            if (i == 0)
                                previous_output_dim = (int)layer_config_desc["batch_input_shape"][layer_config_desc["batch_input_shape"].ToList().Count - 1];
                            else
                                previous_output_dim = (int)config["config"][i - 1]["config"]["units"];

                            string[] parameters = { "linear", previous_output_dim.ToString(), layer_config_desc["units"].ToString(), "Xavier" };
                            Layer layer = this.BuildLinear(parameters);
                            model.AddLayer(layer);

                            string activation_name = layer_config_desc["activation"].ToString();
                            if (activation_name != "linear")
                            {
                                Layer activation;
                                if (activation_name == "softmax")
                                {
                                    parameters = new string[] { activation_name, "1" };
                                    activation = this.BuildSoftmax(parameters);
                                }
                                else if (activation_name == "relu")
                                {
                                    activation = this.BuildReLU();
                                }
                                else
                                {
                                    response("Unity Error: SyftController.processMessage: while Loading activations, Activation :" + activation_name + " is not implemented");
                                    return;
                                }
                                model.AddLayer(activation);
                            }
                        }
                        else
                        {
                            response("Unity Error: SyftController.processMessage: while Loading layers, Layer :" + layer_desc["class_name"] + " is not implemented");
                            return;
                        }
                    }

                    response(model.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "from_proto")
                {
                    Debug.Log("Loading Model from ONNX:");
                    var filename = msgObj.tensorIndexParams[0];
                    var input = File.OpenRead(filename);
                    ModelProto modelProto = ModelProto.Parser.ParseFrom(input);

                    Sequential model = this.BuildSequential();
                    foreach (NodeProto node in modelProto.Graph.Node)
                    {
                        Layer layer;
                        GraphProto g = ONNXTools.GetSubGraphFromNodeAndMainGraph(node, modelProto.Graph);
                        if (node.OpType == "Gemm")
                            layer = new Linear(this, g);
                        else if (node.OpType == "Dropout")
                            layer = new Dropout(this, g);
                        else if (node.OpType == "Relu")
                            layer = new ReLU(this, g);
                        else if (node.OpType == "Softmax")
                            layer = new Softmax(this, g);
                        else
                        {
                            response("Unity Error: SyftController.processMessage: Layer not yet implemented for deserialization:");
                            return;
                        }
                        model.AddLayer(layer);
                    }

                    response(model.Id.ToString());
                    return;
                }
                else if (msgObj.functionCall == "to_proto")
                {
                    ModelProto model = this.ToProto(msgObj.tensorIndexParams);
                    string filename = msgObj.tensorIndexParams[2];
                    string type = msgObj.tensorIndexParams[3];
                    if (type == "json")
                    {
                        response(model.ToString());
                    }
                    else
                    {
                        using (var output = File.Create(filename))
                        {
                            model.WriteTo(output);
                        }
                        response(new FileInfo(filename).FullName);
                    }
                    return;
                }

                response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
                return;
            }
            case "Grid":
                if (msgObj.functionCall == "learn")
                {
                    var inputId = int.Parse(msgObj.tensorIndexParams[0]);
                    var targetId = int.Parse(msgObj.tensorIndexParams[1]);
                    response(this.grid.Run(inputId, targetId, msgObj.configurations, owner));
                    return;
                }

                if (msgObj.functionCall == "getResults")
                {
                    this.grid.GetResults(msgObj.experimentId, response);
                    return;
                }

                // Like getResults, but doesn't pause to wait for results; this
                // function returns right away, telling you whether or not the
                // experiment is done.
                if (msgObj.functionCall == "checkStatus")
                {
                    this.grid.CheckStatus(msgObj.experimentId, response);
                    return;
                }
                break;
            default:
                break;
        }
    }
    catch (Exception e)
    {
        Debug.LogFormat("<color=red>{0}</color>", e.ToString());
        response("Unity Error: " + e.ToString());
        return;
    }

    // If not executing createTensor or a tensor function, return the default error.
    response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
}
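// Hedged example of a message this dispatcher handles, inferred from the
// Command fields read above (exact field names follow JsonUtility's
// serialization; the values are illustrative): creating an SGD optimizer
// over parameter tensors 1 and 2 with three hyperparameters.
//
// {
//   "objectType": "Optimizer",
//   "functionCall": "create",
//   "objectIndex": 0,
//   "tensorIndexParams": ["sgd", "1", "2"],
//   "hyperParams": ["0.01", "0.9", "0.0001"]
// }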