public string ProcessMessage(Command msgObj, SyftController ctrl)
{
    // Route an incoming command to the handler matching its function name.
    // Anything not recognised here falls through to the layer/loss handler.
    switch (msgObj.functionCall)
    {
    case "forward":
        return ProcessForwardMessage(msgObj, ctrl);

    case "params":
        return ProcessParamsMessage(msgObj, ctrl);

    case "param_count":
        return "" + getParameterCount();

    case "activation":
        return "" + activation;

    case "model_type":
        return model_type;

    case "zero_grad":
        ProcessZeroGradMessage();
        return "";
    }

    return ProcessMessageAsLayerOrLoss(msgObj, ctrl);
}
protected override string ProcessForwardMessage(Command msgObj, SyftController ctrl)
{
    // Look up the input tensor by the id given in the first message param,
    // run it through this layer, and report the resulting tensor's id.
    var inputId = int.Parse(msgObj.tensorIndexParams[0]);
    var output = Forward(ctrl.floatTensorFactory.Get(inputId));
    return output.Id + "";
}
public Linear(SyftController _controller, int input, int output)
{
    // Fully-connected layer: weight matrix of shape (input, output), plus a
    // bias vector that is created but not yet trained (see TODO below).
    init("linear");
    this.controller = _controller;
    _input = input;
    _output = output;

    int[] wShape = { input, output };
    var wData = controller.RandomWeights(input * output);
    _weights = controller.floatTensorFactory.Create(_shape: wShape, _data: wData, _autograd: true, _keepgrads: true);

    // TODO: add bias when broadcast is available
    int[] bShape = { output };
    _bias = controller.floatTensorFactory.Create(_shape: bShape, _autograd: true);

    // Only the weights participate in training for now.
    parameters.Add(_weights.Id);
    //parameters.Add(_bias.Id);

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
protected override string ProcessMessageAsLayerOrLoss(Command msgObj, SyftController ctrl)
{
    // Handles the training-related commands (prepare_to_fit / fit / evaluate);
    // everything else is forwarded to the layer-object message handler.
    var args = msgObj.tensorIndexParams;
    switch (msgObj.functionCall)
    {
    case "prepare_to_fit":
    {
        var input = ctrl.floatTensorFactory.Get(int.Parse(args[0]));
        var target = ctrl.floatTensorFactory.Get(int.Parse(args[1]));
        var criterion = ctrl.getLoss(int.Parse(args[2]));
        var optim = ctrl.getOptimizer(int.Parse(args[3]));
        var batchSize = int.Parse(args[4]);
        return PrepareToFit(input, target, criterion, optim, batchSize).ToString();
    }
    case "fit":
    {
        // args: start batch id, end batch id, iteration count.
        return Fit(int.Parse(args[0]), int.Parse(args[1]), int.Parse(args[2]));
    }
    case "evaluate":
    {
        var testInput = ctrl.floatTensorFactory.Get(int.Parse(args[0]));
        var testTarget = ctrl.floatTensorFactory.Get(int.Parse(args[1]));
        var criterion = ctrl.getLoss(int.Parse(args[2]));
        var batchSize = int.Parse(args[3]);
        return Evaluate(testInput, testTarget, criterion, batchSize);
    }
    }
    return ProcessMessageAsLayerObject(msgObj, ctrl);
}
public string ProcessMessage(Command msgObj, SyftController ctrl)
{
    switch (msgObj.functionCall)
    {
    case "forward":
    {
        // Forward the referenced tensor through this model, return output id.
        var input = ctrl.getTensor(int.Parse(msgObj.tensorIndexParams[0]));
        return Forward(input).Id + "";
    }
    case "params":
    {
        // Comma-separated (trailing comma included) parameter tensor ids of
        // all child models followed by this model's own parameters.
        var csv = "";
        foreach (var modelId in models)
        {
            foreach (var paramId in controller.getModel(modelId).getParameters())
            {
                csv += paramId + ",";
            }
        }
        foreach (var paramId in parameters)
        {
            csv += paramId + ",";
        }
        return csv;
    }
    }
    return ProcessMessageLocal(msgObj, ctrl);
}
protected override string ProcessMessageLocal(Command msgObj, SyftController ctrl)
{
    switch (msgObj.functionCall)
    {
    case "add":
    {
        // TODO: Handle adding layers better
        var layer = (Layer)ctrl.getModel(int.Parse(msgObj.tensorIndexParams[0]));
        Debug.LogFormat("<color=magenta>Layer Added to Sequential:</color> {0}", layer.Id);
        AddLayer(layer);
        return layer.Id + "";
    }
    case "models":
    {
        // Comma-separated (trailing comma included) list of child layers.
        var joined = "";
        foreach (var layer in layers)
        {
            joined += layer.ToString() + ",";
        }
        return joined;
    }
    default:
        return "Model.processMessage not Implemented:" + msgObj.functionCall;
    }
}
// Serialises this layer to an ONNX GraphProto containing a single Softmax node.
// NOTE(review): the null guard looks inverted — when `activation` is null,
// Forward() is skipped yet `activation.ToString()` and
// `ctrl.floatTensorFactory.Get(activation)` below still dereference it.
// Presumably the intent was to run Forward when no activation exists yet;
// confirm before changing.
public override GraphProto GetProto(int inputTensorId, SyftController ctrl)
{
    FloatTensor input_tensor = ctrl.floatTensorFactory.Get(inputTensorId);
    // Runs a forward pass so `activation` refers to this layer's output tensor.
    if (activation != null)
    {
        this.Forward(input_tensor);
    }
    // ONNX Softmax node: input is the given tensor id, output is the
    // activation tensor id, with the softmax axis carried as an attribute.
    NodeProto node = new NodeProto
    {
        Input = { inputTensorId.ToString() },
        Output = { activation.ToString() },
        OpType = "Softmax",
        Attribute =
        {
            new AttributeProto
            {
                Name = "axis",
                Type = AttributeProto.Types.AttributeType.Int,
                I = this.dim
            }
        }
    };
    ValueInfoProto input_info = input_tensor.GetValueInfoProto();
    // Graph name is a random GUID; no initializers because Softmax is stateless.
    GraphProto g = new GraphProto
    {
        Name = Guid.NewGuid().ToString("N"),
        Node = { node },
        Initializer = { },
        Input = { input_info },
        Output = { ctrl.floatTensorFactory.Get(activation).GetValueInfoProto() },
    };
    return(g);
}
private void Start()
{
    // Bring up the ZeroMQ publisher with our message handler, then create
    // the controller that services those messages (backed by the shader).
    _netMqPublisher = new NetMqPublisher(HandleMessage);
    _netMqPublisher.Start();

    controller = new SyftController(shader);
}
// Overloading the constructor to load from an ONNX proto
public Linear(SyftController _controller, GraphProto graph)
{
    init(this.name);
    this.controller = _controller;

    // Weight matrix comes from the first ONNX initializer; honour the Gemm
    // "transB" attribute by transposing when it is set to 1.
    _weights = ONNXTools.BuildFloatTensor(graph.Initializer[0], this.controller, autograd: true, keepgrads: true);
    var transB = ONNXTools.FindAttribute(graph.Node[0], "transB");
    if (transB != null && transB.I == 1)
    {
        _weights = _weights.Transpose();
    }
    parameters.Add(_weights.Id);

    // Layer dimensions are inferred from the (possibly transposed) weights.
    _input = _weights.Shape[0];
    _output = _weights.Shape[1];

    // Bias vector from the second initializer; ONNX Gemm always carries one.
    _bias = ONNXTools.BuildFloatTensor(graph.Initializer[1], this.controller, autograd: true, keepgrads: true);
    _biased = true;
    parameters.Add(_bias.Id);

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
// Constructs a Linear layer whose weight and bias tensors are supplied by the
// caller instead of being freshly initialised.
// NOTE(review): initialize() presumably creates and registers its own
// weight/bias tensors; overwriting _weights/_bias afterwards may leave the
// `parameters` list pointing at the discarded tensors — confirm initialize()'s
// behaviour before relying on getParameters() from instances built this way.
public Linear(SyftController _controller, int input, int output, FloatTensor weights, FloatTensor bias, string initializer = "Xavier")
{
    initialize(_controller, input, output, initializer);
    _weights = weights;
    _bias = bias;
}
private void Start()
{
    // Create the controller first, then hand it to the publisher so incoming
    // network messages can be serviced against it straight away.
    controller = new SyftController(shader);

    _netMqPublisher = new NetMqPublisher(controller, this);
    _netMqPublisher.Start();
}
public Adam(SyftController ctrl_, List <int> parameters_, float lr_, float beta1_, float beta2_, float epsilon_, float decay_)
{
    // Record hyper-parameters and the ids of the tensors this optimizer updates.
    this.ctrl = ctrl_;
    this.parameters = parameters_;
    this.lr = lr_;
    this.beta1 = beta1_;
    this.beta2 = beta2_;
    this.epsilon = epsilon_;
    this.decay = decay_;
    this.t = 0;

    this.velocities = new List <int>();
    this.squares = new List <int>();

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    ctrl.addOptimizer(this);

    // Per-parameter optimizer state: first- and second-moment accumulators,
    // each initialised as a zero tensor shaped like the parameter itself.
    foreach (var paramId in parameters)
    {
        var param = ctrl.floatTensorFactory.Get(paramId);
        velocities.Add(param.createZerosTensorLike().Id);
        squares.Add(param.createZerosTensorLike().Id);
    }
}
public Linear(SyftController _controller, int input, int output, string initializer = "Xavier", bool biased = false, float[] weights = null, float[] bias = null)
{
    init(name);
    this.controller = _controller;
    _input = input;
    _output = output;
    // Supplying explicit bias data implies a biased layer even if the flag is false.
    _biased = biased || bias != null;

    int[] wShape = { input, output };
    if (weights == null)
    {
        // "Xavier" scales the random initialisation by the fan-in;
        // any other initializer name falls back to plain random weights.
        weights = initializer == "Xavier"
            ? controller.RandomWeights(input * output, input)
            : controller.RandomWeights(input * output);
    }
    _weights = controller.floatTensorFactory.Create(_shape: wShape, _data: weights, _autograd: true, _keepgrads: true);
    parameters.Add(_weights.Id);

    if (_biased)
    {
        // Bias is stored as a (1, output) row so it lines up with the output.
        int[] bShape = { 1, output };
        _bias = controller.floatTensorFactory.Create(_data: bias, _shape: bShape, _autograd: true);
        parameters.Add(_bias.Id);
    }

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public Sigmoid(SyftController controller)
{
    // Parameterless activation layer; just register it with the controller
    // under a freshly incremented model id.
    init("sigmoid");

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public CrossEntropyLoss(SyftController controller)
{
    // Stateless loss object; register it with the controller under a
    // freshly incremented model id.
    init("crossentropyloss");

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public ReLU(SyftController controller, GraphProto graph)
{
    // ONNX-loading overload. ReLU carries no attributes or initializers, so
    // the graph argument is not read — the layer just registers itself.
    init(this.name);

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
protected virtual string ProcessParamsMessage(Command msgObj, SyftController ctrl)
{
    // Comma-separated (trailing comma included) list of parameter tensor ids.
    var csv = "";
    foreach (var paramId in parameters)
    {
        csv += paramId + ",";
    }
    return csv;
}
public Softmax(SyftController controller, GraphProto graph)
{
    // ONNX-loading overload: the softmax axis comes from the node's first
    // attribute (ONNX stores it as a 64-bit int, hence the cast).
    init(this.name);
    this.dim = (int)graph.Node[0].Attribute[0].I;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public Sequential(SyftController _controller)
{
    // Empty sequential container; layers are added later via messages.
    init("sequential");
    this.controller = _controller;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public Softmax(SyftController controller, int dim)
{
    // Softmax over the given axis; register with the controller.
    init(this.name);
    this.dim = dim;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public Dropout(SyftController _controller, GraphProto graph)
{
    // ONNX-loading overload: the drop rate comes from the node's first
    // (float-valued) attribute.
    init(this.name);
    this.controller = _controller;
    this.rate = graph.Node[0].Attribute[0].F;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public Dropout(SyftController _controller, float _rate)
{
    // Dropout layer with an explicit drop rate; register with the controller.
    init(this.name);
    this.controller = _controller;
    this.rate = _rate;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
public Policy(SyftController _controller, Layer _model)
{
    // Wraps an existing layer as a policy; register with the controller.
    init("policy");
    controller = _controller;
    model = _model;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.addModel(this);
}
//private List<float> mean
public Agent(SyftController _controller, Layer.Layer _model, Optimizer _optimizer)
{
    // An agent couples a model with the optimizer that trains it.
    controller = _controller;
    model = _model;
    optimizer = _optimizer;

    #pragma warning disable 420
    id = System.Threading.Interlocked.Increment(ref nCreated);
    controller.AddAgent(this);

    // Episode history: one FloatTensor[] entry per recorded step.
    history = new List <FloatTensor[]>();
}
public Linear(SyftController controller, int input, int output)
{
    _input = input;
    _output = output;

    // Randomly initialised (input x output) weight matrix with autograd on.
    int[] wShape = { input, output };
    var wData = controller.RandomWeights(input * output);
    _weights = new FloatTensor(controller, _shape: wShape, _data: wData, _autograd: true);

    // Bias vector of length `output`; no initial data supplied.
    int[] bShape = { output };
    _bias = new FloatTensor(controller, bShape, _autograd: true);
}
private void Start()
{
    // Bring up the network publisher and the controller, then pull an
    // experiment definition and its first job down from IPFS.
    _netMqPublisher = new NetMqPublisher(HandleMessage);
    _netMqPublisher.Start();
    controller = new SyftController(shader);

    var experiment = Ipfs.Get <IpfsExperiment>("QmVPQnsuks1cCbTMFGqpmHa4M45uUuKRomiqNvJEQAtcRS");
    var job = Ipfs.Get <IpfsJob>(experiment.jobs[0]);
    var g = new OpenMined.Network.Controllers.Grid(controller);
    //g.TrainModel(this, experiment.input, experiment.target, job, 1);
}
protected override string ProcessForwardMessage(Command msgObj, SyftController ctrl)
{
    // Forward the referenced tensor through the model and return the output
    // tensor's id. The input must participate in autograd so that a later
    // backward pass can reach the model's parameters.
    var input = ctrl.floatTensorFactory.Get(int.Parse(msgObj.tensorIndexParams[0]));

    // Guard clause with a specific exception type instead of bare Exception
    // (InvalidOperationException derives from Exception, so existing
    // catch(Exception) callers are unaffected).
    if (!input.Autograd)
    {
        throw new InvalidOperationException("Input to Model object must have autograd == true but autograd == false!!!");
    }

    var result = this.Forward(input);
    return result.Id + "";
}
public string ProcessMessage(Command msgObj, SyftController ctrl)
{
    // Optimizer command dispatch; unknown commands are an error.
    switch (msgObj.functionCall)
    {
    case "zero_grad":
        ZeroGrad();
        return "";

    case "step":
        // args: batch size, iteration counter.
        Step(int.Parse(msgObj.tensorIndexParams[0]), int.Parse(msgObj.tensorIndexParams[1]));
        return "";

    default:
        throw new InvalidOperationException("Could not find function for command:" + msgObj.functionCall);
    }
}
public override string ProcessMessageLocal(Command msgObj, SyftController ctrl)
{
    switch (msgObj.functionCall)
    {
    case "add":
    {
        // Append the referenced model to this container and echo its id.
        var child = ctrl.getModel(int.Parse(msgObj.tensorIndexParams[0]));
        Debug.LogFormat("<color=magenta>Model Added to Sequential:</color> {0}", child.Id);
        AddModel(child);
        return child.Id + "";
    }
    }
    return "Model.processMessage not Implemented:" + msgObj.functionCall;
}
protected override string ProcessMessageAsLayerObject(Command msgObj, SyftController ctrl)
{
    switch (msgObj.functionCall)
    {
    case "sample":
    {
        // Sample an action tensor from the policy given the input state tensor.
        var state = ctrl.floatTensorFactory.Get(int.Parse(msgObj.tensorIndexParams[0]));
        return Sample(state).Id + "";
    }
    default:
        return "Policy.processMessage not Implemented:" + msgObj.functionCall;
    }
}