/// <summary>
/// Builds (or reuses) the critic Q-network, wires it to an RMSProp optimizer,
/// and creates the DeepDoubleQLearning critic from the global SolverConfig.
/// </summary>
/// <param name="p_network">Optional pre-built network; when null a default
/// input -> hidden0 -> output feed-forward topology is created.</param>
public override void Init(NeuralNetwork p_network = null)
{
    NeuralNetwork network;

    if (p_network == null)
    {
        // Default topology: STATE_DIM inputs, one hidden ReLU layer sized from
        // SolverConfig, and ACTION_DIM linear outputs (one Q-value per action).
        network = new NeuralNetwork();
        network.AddLayer("input", new InputLayer(GetParam(STATE_DIM)), BaseLayer.TYPE.INPUT);
        network.AddLayer("hidden0", new CoreLayer(SolverConfig.GetInstance().hidden_layer, ACTIVATION.RELU, BaseLayer.TYPE.HIDDEN), BaseLayer.TYPE.HIDDEN);
        network.AddLayer("output", new CoreLayer(GetParam(ACTION_DIM), ACTIVATION.LINEAR, BaseLayer.TYPE.OUTPUT), BaseLayer.TYPE.OUTPUT);

        // feed-forward connections
        network.AddConnection("input", "hidden0", Connection.INIT.GLOROT_UNIFORM);
        network.AddConnection("hidden0", "output", Connection.INIT.GLOROT_UNIFORM);
    }
    else
    {
        network = p_network;
    }

    Optimizer optimizer = new RMSProp(network);

    // Double Q-learning critic; discount factor fixed at 0.99, remaining
    // hyper-parameters come from the SolverConfig singleton.
    _critic = new DeepDoubleQLearning(optimizer, network, 0.99f, SolverConfig.GetInstance().memory_size, SolverConfig.GetInstance().batch_size, SolverConfig.GetInstance().qtupdate_size);
    _critic.SetAlpha(SolverConfig.GetInstance().learning_rate);
}
// Resolves the training algorithm named in the task parameters; any
// unrecognized name falls back to Adadelta. The algorithm's public properties
// are then populated from the JSON parameter bag.
private ITrainingAlgorithm CreateAlgorithm()
{
    string name = this.TaskParameters.Algorithm.Name;
    ITrainingAlgorithm algorithm;

    if (name == "Adagrad")
    {
        algorithm = new Adagrad();
    }
    else if (name == "Adam")
    {
        algorithm = new Adam();
    }
    else if (name == "RMSProp")
    {
        algorithm = new RMSProp();
    }
    else if (name == "SGD")
    {
        algorithm = new SGD();
    }
    else
    {
        // "Adadelta" and any unknown name share the default.
        algorithm = new Adadelta();
    }

    // Copy the configured hyper-parameters onto the matching properties.
    JsonSerializer serializer = new JsonSerializer();
    using (JTokenReader reader = new JTokenReader(this.TaskParameters.Algorithm.Parameters))
    {
        serializer.Populate(reader, algorithm);
    }

    return(algorithm);
}
// Visualizes an RMSProp optimization trajectory on the test function:
// records 'steps' positions starting from (-4, 2), then plots each point and a
// segment from its predecessor (the first segment is intentionally degenerate,
// matching the original rendering).
private void DrawRMSProp(Graphics graphics, DrawF draw)
{
    const int steps = 30; // number of optimizer iterations to trace and draw

    // Typed list replaces the original non-generic ArrayList (no casts needed).
    List<PointF2D> history = new List<PointF2D>(steps);
    PointF2D point = new PointF2D(-4.0f, 2.0f);
    Function2 fun = new Function2();
    RMSProp optimizer = new RMSProp(0.5f, 0.9f);

    for (int index = 0; index < steps; index++)
    {
        // Record the current position (converted to block/screen coordinates)
        // before the optimizer step.
        history.Add(draw.getBlockPoint(point.X, point.Y));
        PointF2D diff = fun.DiffFormula(point.X, point.Y);
        // Update appears to advance 'point' in place — TODO confirm against RMSProp.Update.
        optimizer.Update(point, diff);
    }

    PointF2D prePoint = history[0];
    for (int index = 0; index < steps; index++)
    {
        draw.drawPoint(graphics, Brushes.Blue, history[index]);
        draw.drawLine(graphics, prePoint, history[index]);
        prePoint = history[index];
    }
}
// Dispatches one JSON-encoded Command from the client and returns the result
// as a string — typically the Id of a newly created object, or whatever the
// target object's own ProcessMessage returns. Routing is on
// msgObj.objectType, then msgObj.functionCall. Any exception thrown while
// handling the command is logged and returned as a "Unity Error: ..." string
// rather than propagated.
public string processMessage(string json_message)
{
    //Debug.LogFormat("<color=green>SyftController.processMessage {0}</color>", json_message);
    Command msgObj = JsonUtility.FromJson <Command> (json_message);
    try
    {
        switch (msgObj.objectType)
        {
        case "Optimizer":
        {
            if (msgObj.functionCall == "create")
            {
                string optimizer_type = msgObj.tensorIndexParams[0];
                // Extract parameters
                // tensorIndexParams[1..] are int parameters; hyperParams are floats.
                List <int> p = new List <int>();
                for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                {
                    p.Add(int.Parse(msgObj.tensorIndexParams[i]));
                }
                List <float> hp = new List <float>();
                for (int i = 0; i < msgObj.hyperParams.Length; i++)
                {
                    hp.Add(float.Parse(msgObj.hyperParams[i]));
                }
                Optimizer optim = null;
                if (optimizer_type == "sgd")
                {
                    optim = new SGD(this, p, hp[0], hp[1], hp[2]);
                }
                else if (optimizer_type == "rmsprop")
                {
                    optim = new RMSProp(this, p, hp[0], hp[1], hp[2], hp[3]);
                }
                else if (optimizer_type == "adam")
                {
                    optim = new Adam(this, p, hp[0], hp[1], hp[2], hp[3], hp[4]);
                }
                // NOTE(review): an unknown optimizer_type leaves optim null, so the
                // next line throws NRE, which the catch below turns into "Unity Error".
                return(optim.Id.ToString());
            }
            else
            {
                Optimizer optim = this.getOptimizer(msgObj.objectIndex);
                return(optim.ProcessMessage(msgObj, this));
            }
        }

        case "FloatTensor":
        {
            if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
            {
                FloatTensor tensor = floatTensorFactory.Create(_shape: msgObj.shape, _data: msgObj.data, _shader: this.Shader);
                return(tensor.Id.ToString());
            }
            else
            {
                FloatTensor tensor = floatTensorFactory.Get(msgObj.objectIndex);
                // Process message's function
                return(tensor.ProcessMessage(msgObj, this));
            }
        }

        case "IntTensor":
        {
            if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
            {
                // Wire payload is float[]; each element is narrowed (truncated) to int.
                int[] data = new int[msgObj.data.Length];
                for (int i = 0; i < msgObj.data.Length; i++)
                {
                    data[i] = (int)msgObj.data[i];
                }
                IntTensor tensor = intTensorFactory.Create(_shape: msgObj.shape, _data: data, _shader: this.Shader);
                return(tensor.Id.ToString());
            }
            else
            {
                IntTensor tensor = intTensorFactory.Get(msgObj.objectIndex);
                // Process message's function
                return(tensor.ProcessMessage(msgObj, this));
            }
        }

        case "agent":
        {
            if (msgObj.functionCall == "create")
            {
                // Agent is built from an existing model id and optimizer id.
                Layer model = (Layer)getModel(int.Parse(msgObj.tensorIndexParams[0]));
                Optimizer optimizer = optimizers[int.Parse(msgObj.tensorIndexParams[1])];
                return(new Syft.NN.RL.Agent(this, model, optimizer).Id.ToString());
            }
            //Debug.Log("Getting Model:" + msgObj.objectIndex);
            Syft.NN.RL.Agent agent = this.getAgent(msgObj.objectIndex);
            return(agent.ProcessMessageLocal(msgObj, this));
        }

        case "model":
        {
            if (msgObj.functionCall == "create")
            {
                // tensorIndexParams[0] selects the layer/loss type; further params
                // (dimensions, rates) are type-specific.
                string model_type = msgObj.tensorIndexParams[0];
                Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                if (model_type == "linear")
                {
                    return(new Linear(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2]), msgObj.tensorIndexParams[3]).Id.ToString());
                }
                else if (model_type == "relu")
                {
                    return(new ReLU(this).Id.ToString());
                }
                else if (model_type == "log")
                {
                    return(new Log(this).Id.ToString());
                }
                else if (model_type == "dropout")
                {
                    return(new Dropout(this, float.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                }
                else if (model_type == "sigmoid")
                {
                    return(new Sigmoid(this).Id.ToString());
                }
                else if (model_type == "sequential")
                {
                    return(new Sequential(this).Id.ToString());
                }
                else if (model_type == "softmax")
                {
                    return(new Softmax(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                }
                else if (model_type == "logsoftmax")
                {
                    return(new LogSoftmax(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                }
                else if (model_type == "tanh")
                {
                    return(new Tanh(this).Id.ToString());
                }
                else if (model_type == "crossentropyloss")
                {
                    return(new CrossEntropyLoss(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                }
                else if (model_type == "nllloss")
                {
                    return(new NLLLoss(this).Id.ToString());
                }
                else if (model_type == "mseloss")
                {
                    return(new MSELoss(this).Id.ToString());
                }
                else if (model_type == "embedding")
                {
                    return(new Embedding(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2])).Id.ToString());
                }
                else
                {
                    // Unknown model type: log and fall through to the error return below.
                    Debug.LogFormat("<color=red>Model Type Not Found:</color> {0}", model_type);
                }
            }
            else
            {
                //Debug.Log("Getting Model:" + msgObj.objectIndex);
                Model model = this.getModel(msgObj.objectIndex);
                return(model.ProcessMessage(msgObj, this));
            }
            return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
        }

        case "controller":
        {
            if (msgObj.functionCall == "num_tensors")
            {
                return(floatTensorFactory.Count() + "");
            }
            else if (msgObj.functionCall == "num_models")
            {
                return(models.Count + "");
            }
            else if (msgObj.functionCall == "new_tensors_allowed")
            {
                Debug.LogFormat("New Tensors Allowed:{0}", msgObj.tensorIndexParams[0]);
                if (msgObj.tensorIndexParams[0] == "True")
                {
                    allow_new_tensors = true;
                }
                else if (msgObj.tensorIndexParams[0] == "False")
                {
                    allow_new_tensors = false;
                }
                else
                {
                    throw new Exception("Invalid parameter for new_tensors_allowed. Did you mean true or false?");
                }
                return(allow_new_tensors + "");
            }
            else if (msgObj.functionCall == "load_floattensor")
            {
                FloatTensor tensor = floatTensorFactory.Create(filepath: msgObj.tensorIndexParams[0], _shader: this.Shader);
                return(tensor.Id.ToString());
            }
            else if (msgObj.functionCall == "set_seed")
            {
                Random.InitState(int.Parse(msgObj.tensorIndexParams[0]));
                return("Random seed set!");
            }
            else if (msgObj.functionCall == "concatenate")
            {
                // tensorIndexParams[0] is forwarded as the third argument (presumably
                // the concat axis — confirm); the remaining entries are tensor ids.
                List <int> tensor_ids = new List <int>();
                for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                {
                    tensor_ids.Add(int.Parse(msgObj.tensorIndexParams[i]));
                }
                FloatTensor result = Functional.Concatenate(floatTensorFactory, tensor_ids, int.Parse(msgObj.tensorIndexParams[0]));
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "ones")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Ones(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "randn")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Randn(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "random")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Random(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "zeros")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Zeros(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
        }

        default:
            break;
        }
    }
    catch (Exception e)
    {
        Debug.LogFormat("<color=red>{0}</color>", e.ToString());
        return("Unity Error: " + e.ToString());
    }
    // If not executing createTensor or tensor function, return default error.
    return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
}
// Demo: trains a 2-8-1 sigmoid network on the XOR truth table with RMSProp,
// prints the per-epoch error, the final activations, and the elapsed time.
public void Run()
{
    Stopwatch watch = Stopwatch.StartNew();

    // Network: 2 inputs -> 8 sigmoid hidden units -> 1 sigmoid output.
    NeuralNetwork network = new NeuralNetwork();
    network.AddLayer("input", new InputLayer(2), BaseLayer.TYPE.INPUT);
    network.AddLayer("hidden", new CoreLayer(8, ACTIVATION.SIGMOID, BaseLayer.TYPE.HIDDEN), BaseLayer.TYPE.HIDDEN);
    network.AddLayer("output", new CoreLayer(1, ACTIVATION.SIGMOID, BaseLayer.TYPE.OUTPUT), BaseLayer.TYPE.OUTPUT);
    network.AddConnection("input", "hidden", Connection.INIT.GLOROT_UNIFORM);
    network.AddConnection("hidden", "output", Connection.INIT.GLOROT_UNIFORM);

    Optimizer optimizer = new RMSProp(network)
    {
        Alpha = 0.1f
    };
    optimizer.InitBatchMode(4);

    // The four XOR patterns and their expected outputs.
    Vector[] input = new Vector[]
    {
        Vector.Build(2, new float[] { 0f, 0f }),
        Vector.Build(2, new float[] { 0f, 1f }),
        Vector.Build(2, new float[] { 1f, 0f }),
        Vector.Build(2, new float[] { 1f, 1f })
    };
    Vector[] target = new Vector[]
    {
        Vector.Build(1, new float[] { 0f }),
        Vector.Build(1, new float[] { 1f }),
        Vector.Build(1, new float[] { 1f }),
        Vector.Build(1, new float[] { 0f })
    };

    // Train for 200 epochs, reporting the summed error per epoch.
    for (int epoch = 0; epoch < 200; epoch++)
    {
        float err = 0f;
        for (int pattern = 0; pattern < 4; pattern++)
        {
            err += optimizer.Train(input[pattern], target[pattern]);
        }
        Console.WriteLine(err);
    }
    Console.WriteLine();

    // Show final activations and release the pooled vectors.
    for (int pattern = 0; pattern < 4; pattern++)
    {
        Console.WriteLine(network.Activate(input[pattern])[0]);
        Vector.Release(input[pattern]);
        Vector.Release(target[pattern]);
    }

    optimizer.Dispose();
    Console.Write("Finish ");
    BasePool.Instance.Check();
    watch.Stop();
    Console.WriteLine(watch.ElapsedMilliseconds);
}
// Variant of processMessage that also receives the owning MonoBehaviour (used
// only by the "Grid" case to run distributed learning). Dispatches one JSON
// Command on msgObj.objectType / functionCall and returns the result string;
// model creation goes through the Build* helpers, and "model_from_json" can
// rebuild a Sequential model from a Keras-style JSON description. Exceptions
// are logged and returned as "Unity Error: ..." strings.
public string processMessage(string json_message, MonoBehaviour owner)
{
    //Debug.LogFormat("<color=green>SyftController.processMessage {0}</color>", json_message);
    Command msgObj = JsonUtility.FromJson <Command> (json_message);
    try
    {
        switch (msgObj.objectType)
        {
        case "Optimizer":
        {
            if (msgObj.functionCall == "create")
            {
                string optimizer_type = msgObj.tensorIndexParams[0];
                // Extract parameters
                // tensorIndexParams[1..] are int parameters; hyperParams are floats.
                List <int> p = new List <int>();
                for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                {
                    p.Add(int.Parse(msgObj.tensorIndexParams[i]));
                }
                List <float> hp = new List <float>();
                for (int i = 0; i < msgObj.hyperParams.Length; i++)
                {
                    hp.Add(float.Parse(msgObj.hyperParams[i]));
                }
                Optimizer optim = null;
                if (optimizer_type == "sgd")
                {
                    optim = new SGD(this, p, hp[0], hp[1], hp[2]);
                }
                else if (optimizer_type == "rmsprop")
                {
                    optim = new RMSProp(this, p, hp[0], hp[1], hp[2], hp[3]);
                }
                else if (optimizer_type == "adam")
                {
                    optim = new Adam(this, p, hp[0], hp[1], hp[2], hp[3], hp[4]);
                }
                // NOTE(review): an unknown optimizer_type leaves optim null, so the
                // next line throws NRE, which the catch below turns into "Unity Error".
                return(optim.Id.ToString());
            }
            else
            {
                Optimizer optim = this.getOptimizer(msgObj.objectIndex);
                return(optim.ProcessMessage(msgObj, this));
            }
        }

        case "FloatTensor":
        {
            if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
            {
                FloatTensor tensor = floatTensorFactory.Create(_shape: msgObj.shape, _data: msgObj.data, _shader: this.Shader);
                return(tensor.Id.ToString());
            }
            else
            {
                FloatTensor tensor = floatTensorFactory.Get(msgObj.objectIndex);
                // Process message's function
                return(tensor.ProcessMessage(msgObj, this));
            }
        }

        case "IntTensor":
        {
            if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
            {
                // Wire payload is float[]; each element is narrowed (truncated) to int.
                int[] data = new int[msgObj.data.Length];
                for (int i = 0; i < msgObj.data.Length; i++)
                {
                    data[i] = (int)msgObj.data[i];
                }
                IntTensor tensor = intTensorFactory.Create(_shape: msgObj.shape, _data: data, _shader: this.Shader);
                return(tensor.Id.ToString());
            }
            else
            {
                IntTensor tensor = intTensorFactory.Get(msgObj.objectIndex);
                // Process message's function
                return(tensor.ProcessMessage(msgObj, this));
            }
        }

        case "agent":
        {
            if (msgObj.functionCall == "create")
            {
                // Agent is built from an existing model id and optimizer id.
                Layer model = (Layer)getModel(int.Parse(msgObj.tensorIndexParams[0]));
                Optimizer optimizer = optimizers[int.Parse(msgObj.tensorIndexParams[1])];
                return(new Syft.NN.RL.Agent(this, model, optimizer).Id.ToString());
            }
            //Debug.Log("Getting Model:" + msgObj.objectIndex);
            Syft.NN.RL.Agent agent = this.getAgent(msgObj.objectIndex);
            return(agent.ProcessMessageLocal(msgObj, this));
        }

        case "model":
        {
            if (msgObj.functionCall == "create")
            {
                // tensorIndexParams[0] selects the layer/loss type; construction is
                // delegated to the Build* helpers where available.
                string model_type = msgObj.tensorIndexParams[0];
                Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                if (model_type == "linear")
                {
                    return(this.BuildLinear(msgObj.tensorIndexParams).Id.ToString());
                }
                else if (model_type == "relu")
                {
                    return(this.BuildReLU().Id.ToString());
                }
                else if (model_type == "log")
                {
                    return(this.BuildLog().Id.ToString());
                }
                else if (model_type == "dropout")
                {
                    return(this.BuildDropout(msgObj.tensorIndexParams).Id.ToString());
                }
                else if (model_type == "sigmoid")
                {
                    return(this.BuildSigmoid().Id.ToString());
                }
                else if (model_type == "sequential")
                {
                    return(this.BuildSequential().Id.ToString());
                }
                else if (model_type == "softmax")
                {
                    return(this.BuildSoftmax(msgObj.tensorIndexParams).Id.ToString());
                }
                else if (model_type == "logsoftmax")
                {
                    return(this.BuildLogSoftmax(msgObj.tensorIndexParams).Id.ToString());
                }
                else if (model_type == "tanh")
                {
                    return(new Tanh(this).Id.ToString());
                }
                else if (model_type == "crossentropyloss")
                {
                    return(new CrossEntropyLoss(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                }
                else if (model_type == "categorical_crossentropy")
                {
                    return(new CategoricalCrossEntropyLoss(this).Id.ToString());
                }
                else if (model_type == "nllloss")
                {
                    return(new NLLLoss(this).Id.ToString());
                }
                else if (model_type == "mseloss")
                {
                    return(new MSELoss(this).Id.ToString());
                }
                else if (model_type == "embedding")
                {
                    return(new Embedding(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2])).Id.ToString());
                }
                else
                {
                    // Unknown model type: log and fall through to the error return below.
                    Debug.LogFormat("<color=red>Model Type Not Found:</color> {0}", model_type);
                }
            }
            else
            {
                //Debug.Log("Getting Model:" + msgObj.objectIndex);
                Model model = this.getModel(msgObj.objectIndex);
                return(model.ProcessMessage(msgObj, this));
            }
            return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
        }

        case "controller":
        {
            if (msgObj.functionCall == "num_tensors")
            {
                return(floatTensorFactory.Count() + "");
            }
            else if (msgObj.functionCall == "num_models")
            {
                return(models.Count + "");
            }
            else if (msgObj.functionCall == "new_tensors_allowed")
            {
                Debug.LogFormat("New Tensors Allowed:{0}", msgObj.tensorIndexParams[0]);
                if (msgObj.tensorIndexParams[0] == "True")
                {
                    allow_new_tensors = true;
                }
                else if (msgObj.tensorIndexParams[0] == "False")
                {
                    allow_new_tensors = false;
                }
                else
                {
                    throw new Exception("Invalid parameter for new_tensors_allowed. Did you mean true or false?");
                }
                return(allow_new_tensors + "");
            }
            else if (msgObj.functionCall == "load_floattensor")
            {
                FloatTensor tensor = floatTensorFactory.Create(filepath: msgObj.tensorIndexParams[0], _shader: this.Shader);
                return(tensor.Id.ToString());
            }
            else if (msgObj.functionCall == "set_seed")
            {
                Random.InitState(int.Parse(msgObj.tensorIndexParams[0]));
                return("Random seed set!");
            }
            else if (msgObj.functionCall == "concatenate")
            {
                // tensorIndexParams[0] is forwarded as the third argument (presumably
                // the concat axis — confirm); the remaining entries are tensor ids.
                List <int> tensor_ids = new List <int>();
                for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                {
                    tensor_ids.Add(int.Parse(msgObj.tensorIndexParams[i]));
                }
                FloatTensor result = Functional.Concatenate(floatTensorFactory, tensor_ids, int.Parse(msgObj.tensorIndexParams[0]));
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "ones")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Ones(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "randn")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Randn(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "random")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Random(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "zeros")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Zeros(floatTensorFactory, dims);
                return(result.Id.ToString());
            }
            else if (msgObj.functionCall == "model_from_json")
            {
                // Rebuild a Sequential model from a Keras-style JSON description.
                Debug.Log("Loading Model from JSON:");
                var json_str = msgObj.tensorIndexParams[0];
                var config = JObject.Parse(json_str);
                Sequential model;
                if ((string)config["class_name"] == "Sequential")
                {
                    model = this.BuildSequential();
                }
                else
                {
                    return("Unity Error: SyftController.processMessage: while Loading model, Class :" + config["class_name"] + " is not implemented");
                }
                for (int i = 0; i < config["config"].ToList().Count; i++)
                {
                    var layer_desc = config["config"][i];
                    var layer_config_desc = layer_desc["config"];
                    if ((string)layer_desc["class_name"] == "Linear")
                    {
                        // Input width of the first layer comes from batch_input_shape's
                        // last element; later layers use "units".
                        // NOTE(review): for i > 0 this reads the CURRENT layer's "units"
                        // rather than the previous layer's — confirm this is intended.
                        int previous_output_dim;
                        if (i == 0)
                        {
                            previous_output_dim = (int)layer_config_desc["batch_input_shape"][layer_config_desc["batch_input_shape"].ToList().Count - 1];
                        }
                        else
                        {
                            previous_output_dim = (int)layer_config_desc["units"];
                        }
                        string[] parameters = new string[] { "linear", previous_output_dim.ToString(), layer_config_desc["units"].ToString(), "Xavier" };
                        Layer layer = this.BuildLinear(parameters);
                        model.AddLayer(layer);
                        string activation_name = layer_config_desc["activation"].ToString();
                        if (activation_name != "linear")
                        {
                            // Non-linear activations become their own layers.
                            Layer activation;
                            if (activation_name == "softmax")
                            {
                                parameters = new string[] { activation_name, "1" };
                                activation = this.BuildSoftmax(parameters);
                            }
                            else if (activation_name == "relu")
                            {
                                activation = this.BuildReLU();
                            }
                            else
                            {
                                return("Unity Error: SyftController.processMessage: while Loading activations, Activation :" + activation_name + " is not implemented");
                            }
                            model.AddLayer(activation);
                        }
                    }
                    else
                    {
                        return("Unity Error: SyftController.processMessage: while Loading layers, Layer :" + layer_desc["class_name"] + " is not implemented");
                    }
                }
                return(model.Id.ToString());
            }
            return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
        }

        case "Grid":
            if (msgObj.functionCall == "learn")
            {
                // Distributed learning run; 'owner' hosts the coroutine/run context.
                var inputId = int.Parse(msgObj.tensorIndexParams[0]);
                var targetId = int.Parse(msgObj.tensorIndexParams[1]);
                var g = new Grid(this);
                g.Run(inputId, targetId, msgObj.configurations, owner);
                return("");
            }
            break;

        default:
            break;
        }
    }
    catch (Exception e)
    {
        Debug.LogFormat("<color=red>{0}</color>", e.ToString());
        return("Unity Error: " + e.ToString());
    }
    // If not executing createTensor or tensor function, return default error.
    return("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
}
// Allocates the activation ("a"), bias ("b") and weight ("W") matrices for every
// layer into Data.Data, records each layer's input/output/weight shapes via
// SetSettings, then instantiates the requested cost and optimizer functions
// from the enum + settings pairs. Throws ArgumentException when either enum is
// Invalid or unrecognized.
public void InitNetwork(ECostType costType, CostSettings costSettings, EOptimizerType optimizerType, OptimizerSettings optimizerSettings)
{
    Utility.Dims InShape;
    Utility.Dims OutShape;
    Utility.Dims WShape;

    // Layers 1..N-1: zeroed activations, Gaussian-initialized biases and weights;
    // weight i has shape (N(i-1), N(i)).
    for (int i = 1; i < Layers.Count; i++)
    {
        Data.Data["a" + i.ToString()] = new Matrix(Layers[i].NCount, 1);
        InShape = new Utility.Dims(Layers[i].NCount, 1);
        Data.Data["b" + i.ToString()] = Matrix.RandomMatrix(Layers[i].NCount, 1, 1, EDistrubution.Gaussian);
        OutShape = new Utility.Dims(Layers[i].NCount, 1);
        Data.Data["W" + i.ToString()] = Matrix.RandomMatrix(Layers[i - 1].NCount, Layers[i].NCount, 1, EDistrubution.Gaussian);
        WShape = new Utility.Dims(Layers[i - 1].NCount, Layers[i].NCount);
        Layers[i].SetSettings(new LayerSettings(InShape, OutShape, WShape));
    }

    // Input layer (index 0): all-zero matrices.
    // NOTE(review): W0 is shaped (N0*N1, N1), unlike the (N(i-1), N(i)) pattern
    // used above — looks inconsistent; confirm this is intentional.
    Data.Data["a0"] = new Matrix(Layers[0].NCount, 1);
    InShape = new Utility.Dims(Layers[0].NCount, 1);
    Data.Data["b0"] = new Matrix(Layers[0].NCount, 1);
    OutShape = new Utility.Dims(Layers[0].NCount, 1);
    Data.Data["W0"] = new Matrix(Layers[0].NCount * Layers[1].NCount, Layers[1].NCount);
    WShape = new Utility.Dims(Layers[0].NCount * Layers[1].NCount, Layers[1].NCount);
    Layers[0].SetSettings(new LayerSettings(InShape, OutShape, WShape));

    // Cost function selection; each settings object is downcast to its concrete type.
    switch (costType)
    {
    case ECostType.Invalid:
        throw new ArgumentException("Invalid Cost Function Selected!");

    case ECostType.CrossEntropyCost:
        CostFunction = new CrossEntropyCost((CrossEntropyCostSettings)costSettings);
        break;

    case ECostType.ExponentionalCost:
        CostFunction = new ExponentionalCost((ExponentionalCostSettings)costSettings);
        break;

    case ECostType.GeneralizedKullbackLeiblerDivergence:
        CostFunction = new GeneralizedKullbackLeiblerDivergence((GeneralizedKullbackLeiblerDivergenceSettings)costSettings);
        break;

    case ECostType.HellingerDistance:
        CostFunction = new HellingerDistance((HellingerDistanceSettings)costSettings);
        break;

    case ECostType.ItakuraSaitoDistance:
        CostFunction = new ItakuraSaitoDistance((ItakuraSaitoDistanceSettings)costSettings);
        break;

    case ECostType.KullbackLeiblerDivergence:
        CostFunction = new KullbackLeiblerDivergence((KullbackLeiblerDivergenceSettings)costSettings);
        break;

    case ECostType.QuadraticCost:
        CostFunction = new QuadraticCost((QuadraticCostSettings)costSettings);
        break;

    default:
        throw new ArgumentException("Invalid Cost Function Selected!");
    }

    // Optimizer selection; same downcast pattern.
    switch (optimizerType)
    {
    case EOptimizerType.Invalid:
        throw new ArgumentException("Invalid Optimizer Function Selected!");

    case EOptimizerType.AdaDelta:
        OptimizerFunction = new AdaDelta((AdaDeltaSettings)optimizerSettings);
        break;

    case EOptimizerType.AdaGrad:
        OptimizerFunction = new AdaGrad((AdaGradSettings)optimizerSettings);
        break;

    case EOptimizerType.Adam:
        OptimizerFunction = new Adam((AdamSettings)optimizerSettings);
        break;

    case EOptimizerType.Adamax:
        OptimizerFunction = new Adamax((AdamaxSettings)optimizerSettings);
        break;

    case EOptimizerType.GradientDescent:
        OptimizerFunction = new GradientDescent((GradientDescentSettings)optimizerSettings);
        break;

    case EOptimizerType.Momentum:
        OptimizerFunction = new Momentum((MomentumSettings)optimizerSettings);
        break;

    case EOptimizerType.Nadam:
        OptimizerFunction = new Nadam((NadamSettings)optimizerSettings);
        break;

    case EOptimizerType.NesterovMomentum:
        OptimizerFunction = new NesterovMomentum((NesterovMomentumSettings)optimizerSettings);
        break;

    case EOptimizerType.RMSProp:
        OptimizerFunction = new RMSProp((RMSPropSettings)optimizerSettings);
        break;

    default:
        throw new ArgumentException("Invalid Optimizer Function Selected!");
    }
}
// Callback-based command dispatcher: parses one JSON Command and delivers the
// result through the 'response' callback instead of a return value (this allows
// asynchronous replies, e.g. Grid getResults/checkStatus). Routing mirrors
// processMessage: objectType first, then functionCall. Any exception is logged
// and reported to the callback as a "Unity Error: ..." string.
public void ProcessMessage(string json_message, MonoBehaviour owner, Action <string> response)
{
    Command msgObj = JsonUtility.FromJson <Command> (json_message);
    try
    {
        switch (msgObj.objectType)
        {
        case "Optimizer":
        {
            if (msgObj.functionCall == "create")
            {
                string optimizer_type = msgObj.tensorIndexParams[0];
                // Extract parameters
                // tensorIndexParams[1..] are int parameters; hyperParams are floats.
                List <int> p = new List <int>();
                for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                {
                    p.Add(int.Parse(msgObj.tensorIndexParams[i]));
                }
                List <float> hp = new List <float>();
                for (int i = 0; i < msgObj.hyperParams.Length; i++)
                {
                    hp.Add(float.Parse(msgObj.hyperParams[i]));
                }
                Optimizer optim = null;
                if (optimizer_type == "sgd")
                {
                    optim = new SGD(this, p, hp[0], hp[1], hp[2]);
                }
                else if (optimizer_type == "rmsprop")
                {
                    optim = new RMSProp(this, p, hp[0], hp[1], hp[2], hp[3]);
                }
                else if (optimizer_type == "adam")
                {
                    optim = new Adam(this, p, hp[0], hp[1], hp[2], hp[3], hp[4]);
                }
                // NOTE(review): an unknown optimizer_type leaves optim null, so the
                // next line throws NRE, which the catch below reports to 'response'.
                response(optim.Id.ToString());
                return;
            }
            else
            {
                Optimizer optim = this.GetOptimizer(msgObj.objectIndex);
                response(optim.ProcessMessage(msgObj, this));
                return;
            }
        }

        case "FloatTensor":
        {
            if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
            {
                FloatTensor tensor = floatTensorFactory.Create(_shape: msgObj.shape, _data: msgObj.data, _shader: this.Shader);
                response(tensor.Id.ToString());
                return;
            }
            else
            {
                FloatTensor tensor = floatTensorFactory.Get(msgObj.objectIndex);
                // Process message's function
                response(tensor.ProcessMessage(msgObj, this));
                return;
            }
        }

        case "IntTensor":
        {
            if (msgObj.objectIndex == 0 && msgObj.functionCall == "create")
            {
                // Wire payload is float[]; each element is narrowed (truncated) to int.
                int[] data = new int[msgObj.data.Length];
                for (int i = 0; i < msgObj.data.Length; i++)
                {
                    data[i] = (int)msgObj.data[i];
                }
                IntTensor tensor = intTensorFactory.Create(_shape: msgObj.shape, _data: data);
                response(tensor.Id.ToString());
                return;
            }
            else
            {
                IntTensor tensor = intTensorFactory.Get(msgObj.objectIndex);
                // Process message's function
                response(tensor.ProcessMessage(msgObj, this));
                return;
            }
        }

        case "agent":
        {
            if (msgObj.functionCall == "create")
            {
                // Agent is built from an existing model id and optimizer id.
                Layer model = (Layer)GetModel(int.Parse(msgObj.tensorIndexParams[0]));
                Optimizer optimizer = optimizers[int.Parse(msgObj.tensorIndexParams[1])];
                response(new Syft.NN.RL.Agent(this, model, optimizer).Id.ToString());
                return;
            }
            //Debug.Log("Getting Model:" + msgObj.objectIndex);
            Syft.NN.RL.Agent agent = this.GetAgent(msgObj.objectIndex);
            response(agent.ProcessMessageLocal(msgObj, this));
            return;
        }

        case "model":
        {
            if (msgObj.functionCall == "create")
            {
                // tensorIndexParams[0] selects the layer/loss type; construction is
                // delegated to the Build* helpers where available.
                string model_type = msgObj.tensorIndexParams[0];
                Debug.LogFormat("<color=magenta>createModel:</color> {0}", model_type);
                if (model_type == "linear")
                {
                    response(this.BuildLinear(msgObj.tensorIndexParams).Id.ToString());
                    return;
                }
                else if (model_type == "relu")
                {
                    response(this.BuildReLU().Id.ToString());
                    return;
                }
                else if (model_type == "log")
                {
                    response(this.BuildLog().Id.ToString());
                    return;
                }
                else if (model_type == "dropout")
                {
                    response(this.BuildDropout(msgObj.tensorIndexParams).Id.ToString());
                    return;
                }
                else if (model_type == "sigmoid")
                {
                    response(this.BuildSigmoid().Id.ToString());
                    return;
                }
                else if (model_type == "sequential")
                {
                    response(this.BuildSequential().Id.ToString());
                    return;
                }
                else if (model_type == "softmax")
                {
                    response(this.BuildSoftmax(msgObj.tensorIndexParams).Id.ToString());
                    return;
                }
                else if (model_type == "logsoftmax")
                {
                    response(this.BuildLogSoftmax(msgObj.tensorIndexParams).Id.ToString());
                    return;
                }
                else if (model_type == "tanh")
                {
                    response(new Tanh(this).Id.ToString());
                    return;
                }
                else if (model_type == "crossentropyloss")
                {
                    response(new CrossEntropyLoss(this, int.Parse(msgObj.tensorIndexParams[1])).Id.ToString());
                    return;
                }
                else if (model_type == "categorical_crossentropy")
                {
                    response(new CategoricalCrossEntropyLoss(this).Id.ToString());
                    return;
                }
                else if (model_type == "nllloss")
                {
                    response(new NLLLoss(this).Id.ToString());
                    return;
                }
                else if (model_type == "mseloss")
                {
                    response(new MSELoss(this).Id.ToString());
                    return;
                }
                else if (model_type == "embedding")
                {
                    response(new Embedding(this, int.Parse(msgObj.tensorIndexParams[1]), int.Parse(msgObj.tensorIndexParams[2])).Id.ToString());
                    return;
                }
                else
                {
                    // Unknown model type: log and fall through to the error response below.
                    Debug.LogFormat("<color=red>Model Type Not Found:</color> {0}", model_type);
                }
            }
            else
            {
                //Debug.Log("Getting Model:" + msgObj.objectIndex);
                Model model = this.GetModel(msgObj.objectIndex);
                response(model.ProcessMessage(msgObj, this));
                return;
            }
            response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
            return;
        }

        case "controller":
        {
            if (msgObj.functionCall == "num_tensors")
            {
                response(floatTensorFactory.Count() + "");
                return;
            }
            else if (msgObj.functionCall == "num_models")
            {
                response(models.Count + "");
                return;
            }
            else if (msgObj.functionCall == "new_tensors_allowed")
            {
                Debug.LogFormat("New Tensors Allowed:{0}", msgObj.tensorIndexParams[0]);
                if (msgObj.tensorIndexParams[0] == "True")
                {
                    allow_new_tensors = true;
                }
                else if (msgObj.tensorIndexParams[0] == "False")
                {
                    allow_new_tensors = false;
                }
                else
                {
                    throw new Exception("Invalid parameter for new_tensors_allowed. Did you mean true or false?");
                }
                response(allow_new_tensors + "");
                return;
            }
            else if (msgObj.functionCall == "load_floattensor")
            {
                FloatTensor tensor = floatTensorFactory.Create(filepath: msgObj.tensorIndexParams[0], _shader: this.Shader);
                response(tensor.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "set_seed")
            {
                Random.InitState(int.Parse(msgObj.tensorIndexParams[0]));
                response("Random seed set!");
                return;
            }
            else if (msgObj.functionCall == "concatenate")
            {
                // tensorIndexParams[0] is forwarded as the third argument (presumably
                // the concat axis — confirm); the remaining entries are tensor ids.
                List <int> tensor_ids = new List <int>();
                for (int i = 1; i < msgObj.tensorIndexParams.Length; i++)
                {
                    tensor_ids.Add(int.Parse(msgObj.tensorIndexParams[i]));
                }
                FloatTensor result = Functional.Concatenate(floatTensorFactory, tensor_ids, int.Parse(msgObj.tensorIndexParams[0]));
                response(result.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "ones")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Ones(floatTensorFactory, dims);
                response(result.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "randn")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Randn(floatTensorFactory, dims);
                response(result.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "random")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Random(floatTensorFactory, dims);
                response(result.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "zeros")
            {
                int[] dims = new int[msgObj.tensorIndexParams.Length];
                for (int i = 0; i < msgObj.tensorIndexParams.Length; i++)
                {
                    dims[i] = int.Parse(msgObj.tensorIndexParams[i]);
                }
                FloatTensor result = Functional.Zeros(floatTensorFactory, dims);
                response(result.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "model_from_json")
            {
                // Rebuild a Sequential model from a Keras-style JSON description.
                Debug.Log("Loading Model from JSON:");
                var json_str = msgObj.tensorIndexParams[0];
                var config = JObject.Parse(json_str);
                Sequential model;
                if ((string)config["class_name"] == "Sequential")
                {
                    model = this.BuildSequential();
                }
                else
                {
                    response("Unity Error: SyftController.processMessage: while Loading model, Class :" + config["class_name"] + " is not implemented");
                    return;
                }
                for (int i = 0; i < config["config"].ToList().Count; i++)
                {
                    var layer_desc = config["config"][i];
                    var layer_config_desc = layer_desc["config"];
                    if ((string)layer_desc["class_name"] == "Linear")
                    {
                        // Input width of the first layer comes from batch_input_shape's
                        // last element; later layers use "units".
                        // NOTE(review): for i > 0 this reads the CURRENT layer's "units"
                        // rather than the previous layer's — confirm this is intended.
                        int previous_output_dim;
                        if (i == 0)
                        {
                            previous_output_dim = (int)layer_config_desc["batch_input_shape"][layer_config_desc["batch_input_shape"].ToList().Count - 1];
                        }
                        else
                        {
                            previous_output_dim = (int)layer_config_desc["units"];
                        }
                        string[] parameters = { "linear", previous_output_dim.ToString(), layer_config_desc["units"].ToString(), "Xavier" };
                        Layer layer = this.BuildLinear(parameters);
                        model.AddLayer(layer);
                        string activation_name = layer_config_desc["activation"].ToString();
                        if (activation_name != "linear")
                        {
                            // Non-linear activations become their own layers.
                            Layer activation;
                            if (activation_name == "softmax")
                            {
                                parameters = new string[] { activation_name, "1" };
                                activation = this.BuildSoftmax(parameters);
                            }
                            else if (activation_name == "relu")
                            {
                                activation = this.BuildReLU();
                            }
                            else
                            {
                                response("Unity Error: SyftController.processMessage: while Loading activations, Activation :" + activation_name + " is not implemented");
                                return;
                            }
                            model.AddLayer(activation);
                        }
                    }
                    else
                    {
                        response("Unity Error: SyftController.processMessage: while Loading layers, Layer :" + layer_desc["class_name"] + " is not implemented");
                        return;
                    }
                }
                response(model.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "from_proto")
            {
                // Deserialize a model from an ONNX protobuf file.
                Debug.Log("Loading Model from ONNX:");
                var filename = msgObj.tensorIndexParams[0];
                // NOTE(review): this stream is never disposed — consider a using block.
                var input = File.OpenRead(filename);
                ModelProto modelProto = ModelProto.Parser.ParseFrom(input);
                Sequential model = this.BuildSequential();
                foreach (NodeProto node in modelProto.Graph.Node)
                {
                    Layer layer;
                    GraphProto g = ONNXTools.GetSubGraphFromNodeAndMainGraph(node, modelProto.Graph);
                    if (node.OpType == "Gemm")
                    {
                        layer = new Linear(this, g);
                    }
                    else if (node.OpType == "Dropout")
                    {
                        layer = new Dropout(this, g);
                    }
                    else if (node.OpType == "Relu")
                    {
                        layer = new ReLU(this, g);
                    }
                    else if (node.OpType == "Softmax")
                    {
                        layer = new Softmax(this, g);
                    }
                    else
                    {
                        response("Unity Error: SyftController.processMessage: Layer not yet implemented for deserialization:");
                        return;
                    }
                    model.AddLayer(layer);
                }
                response(model.Id.ToString());
                return;
            }
            else if (msgObj.functionCall == "to_proto")
            {
                // Serialize a model either as JSON text or to an ONNX protobuf file.
                ModelProto model = this.ToProto(msgObj.tensorIndexParams);
                string filename = msgObj.tensorIndexParams[2];
                string type = msgObj.tensorIndexParams[3];
                if (type == "json")
                {
                    response(model.ToString());
                }
                else
                {
                    using (var output = File.Create(filename))
                    {
                        model.WriteTo(output);
                    }
                    response(new FileInfo(filename).FullName);
                }
                return;
            }
            response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
            return;
        }

        case "Grid":
            if (msgObj.functionCall == "learn")
            {
                var inputId = int.Parse(msgObj.tensorIndexParams[0]);
                var targetId = int.Parse(msgObj.tensorIndexParams[1]);
                response(this.grid.Run(inputId, targetId, msgObj.configurations, owner));
                return;
            }
            if (msgObj.functionCall == "getResults")
            {
                this.grid.GetResults(msgObj.experimentId, response);
                return;
            }
            // like getResults but doesn't pause to wait for results
            // this function will return right away telling you if
            // it knows whether or not it is done
            if (msgObj.functionCall == "checkStatus")
            {
                this.grid.CheckStatus(msgObj.experimentId, response);
                return;
            }
            break;

        default:
            break;
        }
    }
    catch (Exception e)
    {
        Debug.LogFormat("<color=red>{0}</color>", e.ToString());
        response("Unity Error: " + e.ToString());
        return;
    }
    // If not executing createTensor or tensor function, return default error.
    response("Unity Error: SyftController.processMessage: Command not found:" + msgObj.objectType + ":" + msgObj.functionCall);
    return;
}