/// <summary>
/// Validates and persists a new transformer entity in the database.
/// </summary>
/// <param name="newTransformer">Model describing the transformer to add; its Category must be set and refer to an existing category.</param>
/// <returns>
/// A result whose IsSuccess flag is true only when at least one database object
/// was changed; on validation or persistence failure the Message explains why.
/// </returns>
public async Task <ResultModel <TransformerModel> > AddAsync(TransformerModel newTransformer)
        {
            // The entity requires a category (CategoryId is assigned below).
            if (newTransformer.Category is null)
            {
                return new ResultModel<TransformerModel>
                {
                    Message = "Category need to be set."
                };
            }

            // The referenced category must already exist in the database.
            if (!await _dbContext.Categories.AnyAsync(c => c.Id == newTransformer.Category.Id))
            {
                return new ResultModel<TransformerModel>
                {
                    Message = $"Invalid category id: {newTransformer.Category.Id}"
                };
            }

            // Reject duplicate transformer ids.
            if (await ExistsAsync(newTransformer.Id))
            {
                return new ResultModel<TransformerModel>
                {
                    // BUG FIX: the message previously reported Category.Id instead of
                    // the conflicting transformer id that ExistsAsync matched on.
                    Message = $"Transformer entity with the id {newTransformer.Id} already exists."
                };
            }

            var transformer = new Transformer
            {
                Guid       = newTransformer.Id,
                Name       = newTransformer.Name,
                Alliance   = newTransformer.Alliance,
                CategoryId = newTransformer.Category.Id
            };

            _dbContext.Transformers.Add(transformer);

            try
            {
                var changedObjects = await _dbContext.SaveChangesAsync();

                return new ResultModel<TransformerModel>
                {
                    IsSuccess = changedObjects > 0,
                    Message = $"{changedObjects} objects changed.",
                    Value = transformer.ToModel()
                };
            }
            catch (Exception e)
            {
                // Surface the persistence failure as a result instead of propagating.
                return new ResultModel<TransformerModel>
                {
                    Message = $"Failed to add transformer entity in the database. {e.Message}"
                };
            }
        }
Ejemplo n.º 2
0
        /// <summary>
        /// Initializes form data from the first discrete measurement of the
        /// transformer's ratio tap changer, falling back to defaults when absent.
        /// </summary>
        /// <param name="transformer">Source model; may be null, as may its RatioTapChanger.</param>
        public TransformerFormData(TransformerModel transformer)
        {
            var tapChanger = transformer?.RatioTapChanger;

            // BUG FIX: tapChanger was dereferenced unconditionally even though it was
            // obtained via a null-conditional access, so a null transformer or a null
            // RatioTapChanger caused a NullReferenceException here.
            var measurement = tapChanger?.Measurements?.FirstOrDefault(m => m.MeasurementType == FTN.Common.MeasurementType.Discrete);

            Index        = measurement?.Index;
            RegisterType = measurement?.RegisterType;

            // Sensible bounds/defaults when no discrete measurement exists.
            MaxValue = (int)(measurement?.MaxValue ?? int.MaxValue);
            MinValue = (int)(measurement?.MinValue ?? 0);
            value    = (int)(measurement?.Value ?? 0);
        }
Ejemplo n.º 3
0
        /// <summary>
        /// Runs one training epoch of the transformer language model over <paramref name="train_data"/>.
        /// </summary>
        /// <param name="epoch">1-based epoch number; used only in progress logging.</param>
        /// <param name="train_data">Batched token tensor; dimension 0 is consumed in bptt-sized slices.</param>
        /// <param name="model">Model to train; switched to training mode on entry.</param>
        /// <param name="criterion">Loss applied to the output flattened to (-1, ntokens) vs. targets.</param>
        /// <param name="bptt">Back-propagation-through-time window (sequence length per batch).</param>
        /// <param name="ntokens">Vocabulary size used to reshape the model output for the loss.</param>
        /// <param name="optimizer">Optimizer stepped once per batch.</param>
        private static void train(int epoch, Tensor train_data, TransformerModel model, Loss criterion, int bptt, int ntokens, torch.optim.Optimizer optimizer)
        {
            model.train();

            // All tensors created inside this scope are released each iteration via
            // DisposeEverythingBut at the bottom of the loop.
            using (var d = torch.NewDisposeScope()) {
                var total_loss = 0.0f;

                var batch        = 0;
                var log_interval = 200;   // batches between progress reports

                var src_mask = model.GenerateSquareSubsequentMask(bptt);

                var tdlen = train_data.shape[0];


                for (int i = 0; i < tdlen - 1; batch++, i += bptt)
                {
                    var(data, targets) = GetBatch(train_data, i, bptt);
                    optimizer.zero_grad();

                    // The final slice may be shorter than bptt; rebuild the mask to match.
                    if (data.shape[0] != bptt)
                    {
                        src_mask = model.GenerateSquareSubsequentMask(data.shape[0]);
                    }

                    using (var output = model.forward(data, src_mask)) {
                        var loss = criterion(output.view(-1, ntokens), targets);
                        loss.backward();
                        // Clip gradient norm to 0.5 before stepping.
                        torch.nn.utils.clip_grad_norm_(model.parameters().ToArray(), 0.5);
                        optimizer.step();

                        total_loss += loss.to(torch.CPU).item <float>();
                    }

                    if (batch % log_interval == 0 && batch > 0)
                    {
                        // total_loss is the sum over the last log_interval batches.
                        var cur_loss = total_loss / log_interval;
                        Console.WriteLine($"epoch: {epoch} | batch: {batch} / {tdlen / bptt} | loss: {cur_loss:0.00}");
                        total_loss = 0;
                    }

                    // Keep only the mask alive across iterations; free everything else now.
                    d.DisposeEverythingBut(src_mask);
                }
            }
        }
Ejemplo n.º 4
0
        /// <summary>
        /// Runs one training epoch of the transformer language model over <paramref name="train_data"/>.
        /// </summary>
        /// <param name="epoch">1-based epoch number; used only in progress logging.</param>
        /// <param name="train_data">Batched token tensor; dimension 0 is consumed in bptt-sized slices.</param>
        /// <param name="model">Model to train; switched to training mode on entry.</param>
        /// <param name="criterion">Loss applied to the output flattened to (-1, ntokens) vs. targets.</param>
        /// <param name="bptt">Back-propagation-through-time window (sequence length per batch).</param>
        /// <param name="ntokens">Vocabulary size used to reshape the model output for the loss.</param>
        /// <param name="optimizer">Optimizer stepped once per batch.</param>
        private static void train(int epoch, TorchTensor train_data, TransformerModel model, Loss criterion, int bptt, int ntokens, Optimizer optimizer)
        {
            model.Train();

            var total_loss = 0.0f;

            var src_mask = model.GenerateSquareSubsequentMask(bptt);

            var batch        = 0;
            var log_interval = 200;   // batches between progress reports

            var tdlen = train_data.shape[0];

            for (int i = 0; i < tdlen - 1; batch++, i += bptt)
            {
                var(data, targets) = GetBatch(train_data, i, bptt);
                optimizer.zero_grad();

                // The final slice may be shorter than bptt; rebuild the mask to match.
                if (data.shape[0] != bptt)
                {
                    src_mask.Dispose();
                    src_mask = model.GenerateSquareSubsequentMask(data.shape[0]);
                }

                // BUG FIX: data, targets, output, loss and the CPU copy of the loss were
                // never disposed; native tensor memory piled up until a per-batch
                // GC.Collect() (an anti-pattern in production code) reclaimed it.
                // Deterministic disposal makes that GC.Collect() unnecessary.
                using (data)
                using (targets)
                using (var output = model.forward(data, src_mask))
                using (var loss = criterion(output.view(-1, ntokens), targets))
                {
                    loss.backward();
                    model.parameters().clip_grad_norm(0.5);
                    optimizer.step();

                    using (var cpuLoss = loss.to(Device.CPU))
                    {
                        total_loss += cpuLoss.DataItem <float>();
                    }
                }

                if (batch % log_interval == 0 && batch > 0)
                {
                    // total_loss is the sum over the last log_interval batches.
                    var cur_loss = total_loss / log_interval;
                    Console.WriteLine($"epoch: {epoch} | batch: {batch} / {tdlen/bptt} | loss: {cur_loss:0.00}");
                    total_loss = 0;
                }
            }

            // Release the mask kept alive across iterations.
            src_mask.Dispose();
        }
        /// <summary>
        /// Applies the changed fields of <paramref name="updatedTransformer"/> to the
        /// stored entity identified by its id and saves the changes.
        /// </summary>
        /// <param name="updatedTransformer">Model carrying the new field values; a null Category leaves the stored category unchanged.</param>
        /// <returns>
        /// A result whose IsSuccess flag is true only when at least one database object
        /// was changed; on lookup or persistence failure the Message explains why.
        /// </returns>
        public async Task <ResultModel <TransformerModel> > UpdateAsync(TransformerModel updatedTransformer)
        {
            // BUG FIX: SingleAsync throws InvalidOperationException when no row matches,
            // which made the null check below unreachable dead code. SingleOrDefaultAsync
            // returns null instead, so the intended "does not exist" result is produced.
            var transformer = await GetQuery().SingleOrDefaultAsync(x => x.Guid == updatedTransformer.Id);

            if (transformer is null)
            {
                return new ResultModel<TransformerModel>
                {
                    Message = $"Transformer entity with the id {updatedTransformer.Id} does not exists."
                };
            }

            if (transformer.Name != updatedTransformer.Name)
            {
                transformer.Name = updatedTransformer.Name;
            }
            if (transformer.Alliance != updatedTransformer.Alliance)
            {
                transformer.Alliance = updatedTransformer.Alliance;
            }
            // BUG FIX: the null-conditional was on the wrong receiver
            // (updatedTransformer?.Category.Id) — updatedTransformer is already known
            // non-null here, so a null Category threw a NullReferenceException. Skip
            // the category update when no category is supplied instead.
            if (updatedTransformer.Category != null && transformer.CategoryId != updatedTransformer.Category.Id)
            {
                transformer.CategoryId = updatedTransformer.Category.Id;
            }

            try
            {
                var changedObjects = await _dbContext.SaveChangesAsync();

                return new ResultModel<TransformerModel>
                {
                    IsSuccess = changedObjects > 0,
                    Message = $"{changedObjects} objects changed.",
                    Value = transformer.ToModel()
                };
            }
            catch (Exception e)
            {
                // Surface the persistence failure as a result instead of propagating.
                return new ResultModel<TransformerModel>
                {
                    Message = $"Failed to update transformer entity in the database. {e.Message}"
                };
            }
        }
Ejemplo n.º 6
0
        /// <summary>
        /// Delegates creation of a transformer to the domain service and reports
        /// the outcome to the caller.
        /// </summary>
        /// <param name="transformerModel">Payload describing the transformer to create.</param>
        /// <returns>
        /// A successful <see cref="APIRequestResult{T}"/> when creation completes;
        /// otherwise a failed result carrying a generic error message.
        /// </returns>
        public async Task <APIRequestResult <string> > CreateTransformer([FromBody] TransformerModel transformerModel)
        {
            try
            {
                await _transformerDomainService.CreateTransformer(transformerModel);
            }
            catch (Exception exception)
            {
                // Log full details server-side; expose only a generic message to the client.
                _logger.LogError(exception, exception.Message);

                return new APIRequestResult <string>
                {
                    Success = false,
                    ErrorMessage = "Server error occured."
                };
            }

            return new APIRequestResult <string>
            {
                Success = true
            };
        }
Ejemplo n.º 7
0
        /// <summary>
        /// Gets a transformer by its type info key.
        /// </summary>
        /// <param name="type">TypeInfo key.</param>
        /// <returns>
        /// The cached <see cref="ParameterTransformerCacheEntry"/> for <paramref name="type"/>,
        /// creating and caching one on first request; null when no transformer model is registered.
        /// </returns>
        public ParameterTransformerCacheEntry GetTransformer(TypeInfo type)
        {
            // Fast path: an entry for this type is already cached.
            if (this.cache.TryGetValue(type, out ParameterTransformerCacheEntry cached))
            {
                return cached;
            }

            TransformerModel model = this.GetTransformerModel(type);

            // No registered model for this type — nothing to create or cache.
            if (model == null)
            {
                return null;
            }

            var entry = new ParameterTransformerCacheEntry(
                model.Source,
                model.Destination,
                type,
                this.parameterFactory.Create,
                model.Transformer);

            this.cache.Add(type, entry);

            return entry;
        }
Ejemplo n.º 8
0
        /// <summary>
        /// Trains and evaluates a sequence-to-sequence transformer language model on WikiText2.
        /// </summary>
        static void Main(string[] args)

        {
            Torch.SetSeed(1);   // fixed seed for reproducible runs

            var cwd = Environment.CurrentDirectory;

            // Prefer the GPU when CUDA is available.
            var device = Torch.IsCudaAvailable() ? Device.CUDA : Device.CPU;

            Console.WriteLine($"Running SequenceToSequence on {device.Type.ToString()}");

            // Build the vocabulary by tokenizing the training split.
            var vocab_iter = TorchText.Datasets.WikiText2("train", _dataLocation);
            var tokenizer  = TorchText.Data.Utils.get_tokenizer("basic_english");

            var counter = new TorchText.Vocab.Counter <string>();

            foreach (var item in vocab_iter)
            {
                counter.update(tokenizer(item));
            }

            var vocab = new TorchText.Vocab.Vocab(counter);

            var(train_iter, valid_iter, test_iter) = TorchText.Datasets.WikiText2(_dataLocation);

            // Tokenize and batch each split, then move the tensors to the target device.
            var train_data = Batchify(ProcessInput(train_iter, tokenizer, vocab), batch_size).to(device);
            var valid_data = Batchify(ProcessInput(valid_iter, tokenizer, vocab), eval_batch_size).to(device);
            var test_data  = Batchify(ProcessInput(test_iter, tokenizer, vocab), eval_batch_size).to(device);

            var bptt = 32;   // back-propagation-through-time window (sequence length)

            // NOTE(review): this first batch does not appear to be used below — confirm
            // whether it is a leftover warm-up call.
            var(data, targets) = GetBatch(train_data, 0, bptt);

            var ntokens = vocab.Count;

            var model     = new TransformerModel(ntokens, emsize, nhead, nhid, nlayers, dropout).to(device);
            var loss      = cross_entropy_loss();
            var lr        = 2.50;
            var optimizer = NN.Optimizer.SGD(model.parameters(), lr);
            // Step-decay schedule: gamma 0.95 applied every scheduler.step() (once per epoch).
            var scheduler = NN.Optimizer.StepLR(optimizer, 1, 0.95, last_epoch: 15);

            var totalTime = new Stopwatch();

            totalTime.Start();

            foreach (var epoch in Enumerable.Range(1, epochs))
            {
                var sw = new Stopwatch();
                sw.Start();

                train(epoch, train_data, model, loss, bptt, ntokens, optimizer);

                // Validate after every epoch and report the per-epoch timing.
                var val_loss = evaluate(valid_data, model, loss, lr, bptt, ntokens, optimizer);
                sw.Stop();

                Console.WriteLine($"\nEnd of epoch: {epoch} | lr: {scheduler.LearningRate:0.00} | time: {sw.Elapsed.TotalSeconds:0.0}s | loss: {val_loss:0.00}\n");
                scheduler.step();
            }

            // Final evaluation on the held-out test split.
            var tst_loss = evaluate(test_data, model, loss, lr, bptt, ntokens, optimizer);

            totalTime.Stop();

            Console.WriteLine($"\nEnd of training | time: {totalTime.Elapsed.TotalSeconds:0.0}s | loss: {tst_loss:0.00}\n");
        }
Ejemplo n.º 9
0
 /// <summary>
 /// Adds a new <see cref="TransformerModel"/> to the inner cache, keyed by its
 /// <c>Destination</c> type.
 /// </summary>
 /// <param name="transformerModel">Transformer registration to cache.</param>
 public void AddTransformer(TransformerModel transformerModel) =>
     this.transformers.Add(transformerModel.Destination, transformerModel);