Exemplo n.º 1
0
        // Dispatches an incoming command to the matching handler; anything not
        // handled here falls through to the layer/loss message processor.
        public string ProcessMessage(Command msgObj, SyftController ctrl)
        {
            switch (msgObj.functionCall)
            {
            case "forward":
                return ProcessForwardMessage(msgObj, ctrl);

            case "params":
                return ProcessParamsMessage(msgObj, ctrl);

            case "param_count":
                return $"{getParameterCount()}";

            case "activation":
                return $"{activation}";

            case "model_type":
                return model_type;

            case "zero_grad":
                ProcessZeroGradMessage();
                return string.Empty;
            }

            return ProcessMessageAsLayerOrLoss(msgObj, ctrl);
        }
Exemplo n.º 2
0
        // Resolves the input tensor by id, runs the forward pass and returns
        // the id of the resulting tensor as a string.
        protected override string ProcessForwardMessage(Command msgObj, SyftController ctrl)
        {
            var inputId = int.Parse(msgObj.tensorIndexParams[0]);
            var output  = this.Forward(ctrl.floatTensorFactory.Get(inputId));

            return $"{output.Id}";
        }
Exemplo n.º 3
0
        // Builds a fully-connected layer with an input x output weight matrix
        // (randomly initialized, autograd-tracked) and registers the model
        // with the controller. The bias tensor is allocated but not yet used
        // as a parameter (see TODO below).
        public Linear(SyftController _controller, int input, int output)
        {
            init("linear");

            this.controller = _controller;

            _input  = input;
            _output = output;

            // Random weight matrix of shape [input, output].
            int[] weightShape = { input, output };
            var   weights     = controller.RandomWeights(input * output);

            _weights = controller.floatTensorFactory.Create(_shape: weightShape, _data: weights, _autograd: true, _keepgrads: true);

            // TODO: add bias when broadcast is available
            int[] biasShape = { output };
            _bias = controller.floatTensorFactory.Create(_shape: biasShape, _autograd: true);

            parameters.Add(_weights.Id);
            //parameters.Add(_bias.Id);

            // CS0420: Interlocked on a volatile field is intentional here.
                        #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 4
0
        // Handles training-related commands (prepare_to_fit / fit / evaluate);
        // anything else is delegated to the layer-object handler.
        protected override string ProcessMessageAsLayerOrLoss(Command msgObj, SyftController ctrl)
        {
            var args = msgObj.tensorIndexParams;

            switch (msgObj.functionCall)
            {
            case "prepare_to_fit":
            {
                // args: input id, target id, loss id, optimizer id, batch size
                var inputTensor  = ctrl.floatTensorFactory.Get(int.Parse(args[0]));
                var targetTensor = ctrl.floatTensorFactory.Get(int.Parse(args[1]));
                var lossFn       = ctrl.getLoss(int.Parse(args[2]));
                var opt          = ctrl.getOptimizer(int.Parse(args[3]));
                var batchSize    = int.Parse(args[4]);

                return PrepareToFit(inputTensor, targetTensor, lossFn, opt, batchSize).ToString();
            }

            case "fit":
                // args: first batch id, last batch id, iteration count
                return Fit(int.Parse(args[0]), int.Parse(args[1]), int.Parse(args[2]));

            case "evaluate":
            {
                // args: test input id, test target id, loss id, batch size
                var testInput  = ctrl.floatTensorFactory.Get(int.Parse(args[0]));
                var testTarget = ctrl.floatTensorFactory.Get(int.Parse(args[1]));
                var lossFn     = ctrl.getLoss(int.Parse(args[2]));
                var batchSize  = int.Parse(args[3]);

                return Evaluate(testInput, testTarget, lossFn, batchSize);
            }
            }

            return ProcessMessageAsLayerObject(msgObj, ctrl);
        }
Exemplo n.º 5
0
        // Dispatches "forward" and "params" commands; everything else falls
        // through to ProcessMessageLocal. "params" returns the parameter ids
        // of all child models followed by this model's own, each terminated
        // by a comma.
        public string ProcessMessage(Command msgObj, SyftController ctrl)
        {
            switch (msgObj.functionCall)
            {
            case "forward":
            {
                var input  = ctrl.getTensor(int.Parse(msgObj.tensorIndexParams[0]));
                var result = this.Forward(input);
                return(result.Id + "");
            }

            case "params":
            {
                // StringBuilder avoids the accidental O(n^2) cost of repeated
                // string concatenation in the loops below; output is identical.
                var out_str = new System.Text.StringBuilder();

                for (int i = 0; i < models.Count; i++)
                {
                    List <int> model_params = controller.getModel(models[i]).getParameters();
                    for (int j = 0; j < model_params.Count; j++)
                    {
                        out_str.Append(model_params[j]).Append(',');
                    }
                }

                for (int i = 0; i < parameters.Count; i++)
                {
                    out_str.Append(parameters[i]).Append(',');
                }
                return(out_str.ToString());
            }
            }

            return(ProcessMessageLocal(msgObj, ctrl));
        }
Exemplo n.º 6
0
        // Handles Sequential-specific commands: "add" appends a layer by id,
        // "models" lists the contained layers as a comma-terminated string.
        protected override string ProcessMessageLocal(Command msgObj, SyftController ctrl)
        {
            switch (msgObj.functionCall)
            {
            case "add":
            {
                // TODO: Handle adding layers better
                var layer = (Layer)ctrl.getModel(int.Parse(msgObj.tensorIndexParams[0]));
                Debug.LogFormat("<color=magenta>Layer Added to Sequential:</color> {0}", layer.Id);
                this.AddLayer(layer);
                return layer.Id + "";
            }

            case "models":
            {
                var listing = "";
                foreach (var entry in this.layers)
                {
                    listing += entry.ToString() + ",";
                }
                return listing;
            }

            default:
                return "Model.processMessage not Implemented:" + msgObj.functionCall;
            }
        }
Exemplo n.º 7
0
        // Builds the ONNX GraphProto for this Softmax layer: one Softmax node
        // wired from the input tensor id to the activation tensor id.
        public override GraphProto GetProto(int inputTensorId, SyftController ctrl)
        {
            FloatTensor input_tensor = ctrl.floatTensorFactory.Get(inputTensorId);

            // NOTE(review): Forward only runs when `activation` is already
            // non-null, yet `activation` is dereferenced unconditionally below.
            // If `activation` can start out null this guard looks inverted —
            // confirm against the base class before changing.
            if (activation != null)
            {
                this.Forward(input_tensor);
            }

            // Softmax node with the layer's axis stored as an int attribute.
            NodeProto node = new NodeProto
            {
                Input     = { inputTensorId.ToString() },
                Output    = { activation.ToString() },
                OpType    = "Softmax",
                Attribute = { new AttributeProto {
                                  Name = "axis",
                                  Type = AttributeProto.Types.AttributeType.Int,
                                  I    = this.dim
                              } }
            };

            ValueInfoProto input_info = input_tensor.GetValueInfoProto();

            // Graph with a random hex name, no initializers, the input tensor
            // as input and the activation tensor as output.
            GraphProto g = new GraphProto
            {
                Name        = Guid.NewGuid().ToString("N"),
                Node        = { node },
                Initializer = {  },
                Input       = { input_info },
                Output      = { ctrl.floatTensorFactory.Get(activation).GetValueInfoProto() },
            };

            return(g);
        }
Exemplo n.º 8
0
        // Unity lifecycle hook: starts the NetMQ listener (routed through
        // HandleMessage) and creates the SyftController backed by the
        // configured compute shader.
        private void Start()
        {
            _netMqPublisher = new NetMqPublisher(HandleMessage);
            _netMqPublisher.Start();

            controller = new SyftController(shader);
        }
Exemplo n.º 9
0
        // Overloading the constructor to load from an ONNX proto
        public Linear(SyftController _controller, GraphProto graph)
        {
            init(this.name);

            this.controller = _controller;

            _weights = ONNXTools.BuildFloatTensor(graph.Initializer[0], this.controller, autograd: true, keepgrads: true);
            AttributeProto transB = ONNXTools.FindAttribute(graph.Node[0], "transB");

            if (transB != null && transB.I == 1)
            {
                _weights = _weights.Transpose();
            }
            parameters.Add(_weights.Id);
            _input  = _weights.Shape[0];
            _output = _weights.Shape[1];


            _bias   = ONNXTools.BuildFloatTensor(graph.Initializer[1], this.controller, autograd: true, keepgrads: true);
            _biased = true;
            parameters.Add(_bias.Id);

                        #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 10
0
        // Builds a linear layer from pre-constructed weight and bias tensors.
        // NOTE(review): any tensors allocated inside initialize(...) are
        // replaced by these assignments — confirm they are not leaked or
        // double-registered as parameters.
        public Linear(SyftController _controller, int input, int output, FloatTensor weights, FloatTensor bias, string initializer = "Xavier")
        {
            initialize(_controller, input, output, initializer);

            _weights = weights;
            _bias    = bias;
        }
Exemplo n.º 11
0
        // Unity lifecycle hook: creates the SyftController first, then starts
        // the NetMQ listener that routes messages to it.
        private void Start()
        {
            controller = new SyftController(shader);

            _netMqPublisher = new NetMqPublisher(controller, this);
            _netMqPublisher.Start();
        }
Exemplo n.º 12
0
        // Adam optimizer over the given parameter tensor ids.
        // lr_: learning rate; beta1_/beta2_: decay rates for the first and
        // second moment estimates; epsilon_: numerical stabilizer;
        // decay_: decay factor (exact use lives in Step, not visible here).
        public Adam(SyftController ctrl_, List <int> parameters_, float lr_, float beta1_, float beta2_, float epsilon_, float decay_)
        {
            this.ctrl       = ctrl_;
            this.parameters = parameters_;
            this.lr         = lr_;
            this.beta1      = beta1_;
            this.beta2      = beta2_;
            this.epsilon    = epsilon_;
            this.decay      = decay_;
            this.t          = 0;                  // update timestep counter
            this.velocities = new List <int>();   // first-moment tensor ids
            this.squares    = new List <int>();   // second-moment tensor ids

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            ctrl.addOptimizer(this);

            // Allocate a pair of zero tensors (velocity + square accumulator)
            // shaped like each parameter tensor.
            foreach (int param_index in parameters)
            {
                var param = ctrl.floatTensorFactory.Get(param_index);

                var velInit = param.createZerosTensorLike();
                velocities.Add(velInit.Id);

                var sInit = param.createZerosTensorLike();
                squares.Add(sInit.Id);
            }
        }
Exemplo n.º 13
0
        // Builds a fully-connected layer. Weights default to Xavier
        // initialization (fan-in = input) unless an explicit weight array or a
        // different initializer is supplied; a bias row is created when
        // `biased` is set or a bias array is given. Registers the model with
        // the controller. (Fix: removed two stray empty statements `;` that
        // followed the if-blocks.)
        public Linear(SyftController _controller, int input, int output, string initializer = "Xavier",
                      bool biased = false, float[] weights = null, float[] bias = null)
        {
            init(name);

            this.controller = _controller;

            _input  = input;
            _output = output;

            // A supplied bias array implies a biased layer even if the flag
            // was not set.
            _biased = biased || bias != null;

            int[] weightShape = { input, output };
            if (weights == null)
            {
                weights = initializer == "Xavier" ? controller.RandomWeights(input * output, input) : controller.RandomWeights(input * output);
            }
            _weights = controller.floatTensorFactory.Create(_shape: weightShape, _data: weights, _autograd: true, _keepgrads: true);

            parameters.Add(_weights.Id);

            if (_biased)
            {
                int[] biasShape = { 1, output };
                _bias = controller.floatTensorFactory.Create(_data: bias, _shape: biasShape, _autograd: true);
                parameters.Add(_bias.Id);
            }

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 14
0
        // Stateless sigmoid activation layer; only registers itself with the
        // controller under a fresh id.
        public Sigmoid(SyftController controller)
        {
            init("sigmoid");

// CS0420: Interlocked on a volatile field is intentional here.
#pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 15
0
        // Cross-entropy loss object; only registers itself with the controller
        // under a fresh id.
        public CrossEntropyLoss(SyftController controller)
        {
            init("crossentropyloss");

            // CS0420: Interlocked on a volatile field is intentional here.
                        #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 16
0
        // ONNX-loading constructor. ReLU carries no state, so the graph
        // argument is unused here — presumably kept to match the loader's
        // constructor signature (TODO confirm).
        public ReLU(SyftController controller, GraphProto graph)
        {
            init(this.name);

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 17
0
        // Serializes this model's parameter ids as a comma-terminated list
        // (note: msgObj/ctrl are part of the handler signature but unused).
        protected virtual string ProcessParamsMessage(Command msgObj, SyftController ctrl)
        {
            var listing = "";

            foreach (var paramId in parameters)
            {
                listing += paramId.ToString() + ",";
            }
            return listing;
        }
Exemplo n.º 18
0
        // ONNX-loading constructor: reads the softmax axis from the first
        // attribute of the graph's first node, then registers the model.
        public Softmax(SyftController controller, GraphProto graph)
        {
            init(this.name);

            this.dim = (int)graph.Node[0].Attribute[0].I;

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 19
0
        // Empty sequential container; stores the controller and registers
        // itself under a fresh id.
        public Sequential(SyftController _controller)
        {
            init("sequential");

            this.controller = _controller;

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 20
0
        // Softmax layer over the given dimension; registers itself with the
        // controller under a fresh id.
        public Softmax(SyftController controller, int dim)
        {
            init(this.name);

            this.dim = dim;

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 21
0
        // ONNX-loading constructor: reads the dropout rate from the first
        // attribute of the graph's first node, then registers the model.
        public Dropout(SyftController _controller, GraphProto graph)
        {
            init(this.name);

            this.controller = _controller;
            this.rate       = graph.Node[0].Attribute[0].F;

            // CS0420: Interlocked on a volatile field is intentional here.
      #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 22
0
        // Dropout layer with the given drop rate; registers itself with the
        // controller under a fresh id.
        public Dropout(SyftController _controller, float _rate)
        {
            init(this.name);

            this.controller = _controller;
            this.rate       = _rate;

            // CS0420: Interlocked on a volatile field is intentional here.
                        #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 23
0
        // Policy wrapper around an underlying layer/model; registers itself
        // with the controller under a fresh id.
        public Policy(SyftController _controller, Layer _model)
        {
            init("policy");
            controller = _controller;

            model = _model;

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.addModel(this);
        }
Exemplo n.º 24
0
        //private List<float> mean

        // RL agent wiring a model to an optimizer; registers with the
        // controller (AddAgent, not addModel) and starts an empty history of
        // FloatTensor pairs.
        public Agent(SyftController _controller, Layer.Layer _model, Optimizer _optimizer)
        {
            controller = _controller;
            model      = _model;
            optimizer  = _optimizer;

            // CS0420: Interlocked on a volatile field is intentional here.
            #pragma warning disable 420
            id = System.Threading.Interlocked.Increment(ref nCreated);
            controller.AddAgent(this);

            history = new List <FloatTensor[]>();
        }
Exemplo n.º 25
0
        // Legacy linear-layer constructor using the FloatTensor constructor
        // directly instead of the tensor factory.
        // NOTE(review): unlike the factory-based overloads, this neither calls
        // init(...) nor registers the layer/parameter ids with the controller
        // — confirm callers handle that themselves.
        public Linear(SyftController controller, int input, int output)
        {
            _input  = input;
            _output = output;

            int[] weightShape = { input, output };
            var   weights     = controller.RandomWeights(input * output);

            _weights = new FloatTensor(controller, _shape: weightShape, _data: weights, _autograd: true);

            int[] biasShape = { output };
            _bias = new FloatTensor(controller, biasShape, _autograd: true);
        }
Exemplo n.º 26
0
        // Unity lifecycle hook: wires up NetMQ messaging, creates the
        // controller, then fetches a hard-coded demo experiment and its first
        // job from IPFS. The training call itself is commented out.
        private void Start()
        {
            _netMqPublisher = new NetMqPublisher(HandleMessage);
            _netMqPublisher.Start();

            controller = new SyftController(shader);

            // Demo experiment pinned by IPFS content hash.
            var experiment = Ipfs.Get <IpfsExperiment>("QmVPQnsuks1cCbTMFGqpmHa4M45uUuKRomiqNvJEQAtcRS");
            var job        = Ipfs.Get <IpfsJob>(experiment.jobs[0]);

            var g = new OpenMined.Network.Controllers.Grid(controller);
            //g.TrainModel(this, experiment.input, experiment.target, job, 1);
        }
Exemplo n.º 27
0
        // Runs the forward pass on the referenced input tensor, which must be
        // autograd-enabled; returns the resulting tensor's id as a string.
        protected override string ProcessForwardMessage(Command msgObj, SyftController ctrl)
        {
            var input = ctrl.floatTensorFactory.Get(int.Parse(msgObj.tensorIndexParams[0]));

            // Guard clause: reject non-autograd inputs up front.
            if (!input.Autograd)
            {
                throw new Exception("Input to Model object must have autograd == true but autograd == false!!!");
            }

            return $"{this.Forward(input).Id}";
        }
Exemplo n.º 28
0
        // Routes optimizer commands; unknown commands raise
        // InvalidOperationException.
        public string ProcessMessage(Command msgObj, SyftController ctrl)
        {
            switch (msgObj.functionCall)
            {
            case "zero_grad":
                ZeroGrad();
                return string.Empty;

            case "step":
            {
                var arg0 = int.Parse(msgObj.tensorIndexParams[0]);
                var arg1 = int.Parse(msgObj.tensorIndexParams[1]);
                Step(arg0, arg1);
                return string.Empty;
            }
            }
            throw new InvalidOperationException("Could not find function for command:" + msgObj.functionCall);
        }
Exemplo n.º 29
0
 // Only "add" is handled locally (appends a model by id); all other calls
 // are reported back as not implemented.
 public override string ProcessMessageLocal(Command msgObj, SyftController ctrl)
 {
     if (msgObj.functionCall == "add")
     {
         var added = ctrl.getModel(int.Parse(msgObj.tensorIndexParams[0]));
         Debug.LogFormat("<color=magenta>Model Added to Sequential:</color> {0}", added.Id);
         this.AddModel(added);
         return added.Id + "";
     }

     return "Model.processMessage not Implemented:" + msgObj.functionCall;
 }
Exemplo n.º 30
0
        // "sample" draws from the policy given the referenced input tensor and
        // returns the sampled tensor's id; other calls are unimplemented.
        protected override string ProcessMessageAsLayerObject(Command msgObj, SyftController ctrl)
        {
            if (msgObj.functionCall == "sample")
            {
                var sampleInput = ctrl.floatTensorFactory.Get(int.Parse(msgObj.tensorIndexParams[0]));
                var sampled     = this.Sample(sampleInput);
                return sampled.Id + "";
            }

            return "Policy.processMessage not Implemented:" + msgObj.functionCall;
        }