Example no. 1
0
        /// <summary>
        /// Trains a Sequential model described by <paramref name="job"/> on data pulled
        /// from IPFS, then writes the trained model configuration back to IPFS.
        /// </summary>
        /// <param name="job">Job descriptor holding the IPFS hashes of the input and
        /// target tensors, the model layout, and the training configuration
        /// (criterion name, learning rate, iteration count).</param>
        /// <returns>The IPFS hash of the written result job.</returns>
        public string TrainModel(IpfsJob job)
        {
            var tmpInput  = Ipfs.Get <JToken>(job.input);
            var tmpTarget = Ipfs.Get <JToken>(job.target);

            var seq = CreateSequential(job.Model);

            // Rebuild the input/target tensors from their serialized { data, shape } form.
            var inputData   = tmpInput.SelectToken("data").ToObject <float[]>();
            var inputShape  = tmpInput.SelectToken("shape").ToObject <int[]>();
            var inputTensor = controller.floatTensorFactory.Create(_data: inputData, _shape: inputShape, _autograd: true);

            var targetData   = tmpTarget.SelectToken("data").ToObject <float[]>();
            var targetShape  = tmpTarget.SelectToken("shape").ToObject <int[]>();
            var targetTensor = controller.floatTensorFactory.Create(_data: targetData, _shape: targetShape, _autograd: true);

            // NOTE: the original code allocated a hard-coded 4x1 gradient tensor here,
            // but it was never passed to Backward() — dead code, removed.

            // Map the configured criterion name onto a Loss implementation.
            // Unknown names deliberately fall back to MSE.
            Loss loss;

            switch (job.config.criterion)
            {
            case "mseloss":
                loss = new MSELoss(this.controller);
                break;

            case "categorical_crossentropy":
                loss = new CategoricalCrossEntropyLoss(this.controller);
                break;

            case "cross_entropy_loss":
                loss = new CrossEntropyLoss(this.controller, 1);     // TODO -- real value
                break;

            case "nll_loss":
                loss = new NLLLoss(this.controller);
                break;

            default:
                loss = new MSELoss(this.controller);
                break;
            }

            var optimizer = new SGD(this.controller, seq.getParameters(), job.config.lr, 0, 0);

            // Plain full-batch gradient-descent loop.
            for (var i = 0; i < job.config.iters; ++i)
            {
                var pred = seq.Forward(inputTensor);
                var l    = loss.Forward(pred, targetTensor);
                l.Backward();

                // TODO -- better batch size; 100 is a placeholder.
                optimizer.Step(100, i);
            }

            // Persist the trained model config under a new IPFS job and return its hash.
            var resultJob = new Ipfs();
            var response  = resultJob.Write(new IpfsJob(job.input, job.target, seq.GetConfig(), job.config));

            return(response.Hash);
        }
Example no. 2
0
        /// <summary>
        /// Runs one forward/backward/optimizer pass over mini-batch <paramref name="batch_i"/>
        /// of the origin tensors.
        /// </summary>
        /// <param name="batch_i">Zero-based index of the batch to fit.</param>
        /// <returns>The scalar loss for the batch, or 0 when the batch index is out of range.</returns>
        public float FitBatch(int batch_i)
        {
            // Bug fix: the original test used '<', which skipped the final batch that
            // exactly fills the origin tensor. Fill(start, length) only requires
            // start + length <= Size, i.e. (batch_i + 1) * offset <= Size.
            if (((batch_i + 1) * _input_batch_offset) > _input_tensor_origin.Size)
            {
                return(0);
            }

            // Slice the current batch out of the origin tensors into the reusable buffers.
            last_input_buffer.Fill(_input_tensor_origin, starting_offset: batch_i * _input_batch_offset,
                                   length_to_fill: _input_batch_offset);
            last_target_buffer.Fill(_target_tensor_origin, starting_offset: batch_i * _target_batch_offset,
                                    length_to_fill: _target_batch_offset);

            var pred = Forward(last_input_buffer);
            var loss = _criterion.Forward(pred, last_target_buffer);

            // Lazily (re)build the all-ones seed gradient whenever the loss shape changes;
            // it is cached across calls to avoid re-allocating every batch.
            if (cached_ones_grad_for_backprop == null || cached_ones_grad_for_backprop.Size != loss.Size)
            {
                cached_ones_grad_for_backprop          = loss.createOnesTensorLike();
                cached_ones_grad_for_backprop.Autograd = false;
            }

            loss.Backward(cached_ones_grad_for_backprop);

            // Batch size = leading dimension of the input buffer.
            _optimizer.Step(this.last_input_buffer.Shape[0]);

            return(loss.Data[0]);
        }
Example no. 3
0
    /// <summary>
    /// Unity per-frame hook: redraws the curve mesh every frame and, while the
    /// space bar is held, advances the curve by one SGD-with-momentum step.
    /// </summary>
    void Update()
    {
        CurveObject.DrawMesh();

        // GetKey (not GetKeyDown) means one optimization step runs on every
        // frame for as long as the key is held.
        if (Input.GetKey(KeyCode.Space))
        {
            // SGD.Step returns the updated curve together with the new momentum term.
            (NewCurve, NewMomentum) = SGD.Step(CurrentCurve, Momentum);
            CurrentCurve            = NewCurve;
            Momentum = NewMomentum;

            CurveObject.UpdateMesh(CurrentCurve);
            Debug.Log("Updated Knot");

            // Removed: a 'tangents' local computed via CurrentCurve.GetTangents()
            // whose result was never used (dead code).
        }
    }