/// <summary>Retrieves every loss record from the database.</summary>
/// <returns>A list of all <see cref="Loss"/> rows; empty when the table has none.</returns>
public List<Loss> GetAllLosses()
{
    List<Loss> lossList = new List<Loss>();
    using (SqlConnection conn = new SqlConnection(connectionString))
    {
        conn.Open();
        // Dispose the command and reader deterministically (the original leaked both).
        using (SqlCommand cmd = new SqlCommand(sqlSelectAllLosses, conn))
        using (SqlDataReader reader = cmd.ExecuteReader())
        {
            while (reader.Read())
            {
                Loss currentLoss = new Loss();
                currentLoss.lossId = Convert.ToInt32(reader["loss_id"]);
                currentLoss.inventoryId = Convert.ToInt32(reader["inventory_id"]);
                currentLoss.dateAdded = Convert.ToDateTime(reader["date_added"]);
                currentLoss.cropName = Convert.ToString(reader["crop_name"]);
                currentLoss.dateLost = Convert.ToDateTime(reader["date_lost"]);
                currentLoss.amountLost = Convert.ToDecimal(reader["amount_lost"]);
                currentLoss.lossDescription = Convert.ToString(reader["loss_description"]);
                lossList.Add(currentLoss);
            }
        }
    }
    return lossList;
}
// Evaluates `model` over eval_data in bptt-sized windows and returns the
// length-weighted average loss per source element.
// NOTE(review): `optimizer` is unused in this body — confirm it can be dropped.
// NOTE(review): model.eval() alone does not disable autograd; no no_grad/inference
// guard is visible here — confirm gradients are not accumulated during evaluation.
private static double evaluate(Tensor eval_data, TransformerModel model, Loss criterion, int bptt, int ntokens, torch.optim.Optimizer optimizer)
{
    model.eval();
    using (var d = torch.NewDisposeScope())
    {
        var src_mask = model.GenerateSquareSubsequentMask(bptt);
        var total_loss = 0.0f;
        var batch = 0;
        for (int i = 0; i < eval_data.shape[0] - 1; batch++, i += bptt)
        {
            var (data, targets) = GetBatch(eval_data, i, bptt);
            // The final window may be shorter than bptt; rebuild the mask to match it.
            if (data.shape[0] != bptt)
            {
                src_mask = model.GenerateSquareSubsequentMask(data.shape[0]);
            }
            using (var output = model.forward(data, src_mask))
            {
                var loss = criterion(output.view(-1, ntokens), targets);
                // Weight this window's loss by its length so short tails count less.
                total_loss += data.shape[0] * loss.to(torch.CPU).item<float>();
            }
            data.Dispose();
            targets.Dispose();
            // Release every intermediate tensor except the reusable mask.
            d.DisposeEverythingBut(src_mask);
        }
        return (total_loss / eval_data.shape[0]);
    }
}
/// <summary>Inserts one loss row using a parameterized command.</summary>
/// <returns>A human-readable status message describing success or the failure reason.</returns>
public string InsertLoss(Loss loss)
{
    SqlCommand cmd = ConnectSql("Insert into Loss Values(@LoseID , @CategoryCode , @LossDesc , @LossColor , @LossDate ," +
                                " @Remarks, @StatusCode , @Date, @LossCityCode, @LossLat, @LossLng)");
    try
    {
        cmd.Parameters.AddWithValue("@LoseID", loss.LoseID);
        cmd.Parameters.AddWithValue("@CategoryCode", loss.CategoryCode);
        cmd.Parameters.AddWithValue("@LossDesc", loss.LossDesc);
        cmd.Parameters.AddWithValue("@LossColor", loss.LossColor);
        cmd.Parameters.AddWithValue("@LossDate", loss.LossDate);
        cmd.Parameters.AddWithValue("@Remarks", loss.Remarks);
        cmd.Parameters.AddWithValue("@StatusCode", loss.StatusCode);
        cmd.Parameters.AddWithValue("@Date", loss.Date);
        cmd.Parameters.AddWithValue("@LossCityCode", loss.LossCityCode);
        cmd.Parameters.AddWithValue("@LossLat", loss.LossLat);
        cmd.Parameters.AddWithValue("@LossLng", loss.LossLng);
        cmd.ExecuteNonQuery();
        // Typos fixed in the status messages ("Seccessfuly" / "Occre").
        return "Inserting Loss Successfully";
    }
    catch (Exception e)
    {
        return "Exception occurred while inserting loss:" + e.Message + "\t" + e.GetType();
    }
    finally
    {
        connection.DisConnectSql();
    }
}
/// <summary>Inserts a new loss row via a parameterized command.</summary>
/// <returns>true when the insert succeeded; false on any error (exception is written to the console).</returns>
public bool RecordNewLoss(Loss newLoss)
{
    try
    {
        using (SqlConnection conn = new SqlConnection(connectionString))
        {
            conn.Open();
            // Dispose the command deterministically (the original leaked it).
            using (SqlCommand cmd = new SqlCommand(sqlRecordNewLoss, conn))
            {
                cmd.Parameters.AddWithValue("@inventoryId", newLoss.inventoryId);
                cmd.Parameters.AddWithValue("@dateLost", newLoss.dateLost);
                cmd.Parameters.AddWithValue("@amountLost", newLoss.amountLost);
                cmd.Parameters.AddWithValue("@lossDescription", newLoss.lossDescription);
                cmd.ExecuteNonQuery();
            }
        }
        return true;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        return false;
    }
}
/// <summary>
/// Performs one momentum-blended gradient-descent update on every control
/// point of the curve and returns the updated curve with the new momentum.
/// </summary>
public (Curve, List<Vector3>) Step(Curve curve, List<Vector3> momentum)
{
    this.curve = curve;
    List<Vector3> positions = this.curve.GetPositions();
    List<Vector3> gradient = new Loss(this.curve).Gradient();
    int pointCount = curve.GetLength();

    var nextMomentum = new List<Vector3>();
    var nextPositions = new List<Vector3>();
    for (int idx = 0; idx < pointCount; idx++)
    {
        // Blend the previous momentum with the learning-rate-scaled gradient.
        Vector3 delta = -this.alpha * momentum[idx] + (1 - this.alpha) * this.lr * gradient[idx];
        nextMomentum.Add(delta);
        nextPositions.Add(positions[idx] - delta);
    }
    return (new Curve(nextPositions), nextMomentum);
}
/// <summary>
/// Runs the network on the examples selected by <paramref name="indices"/> and
/// prints per-example predictions plus overall accuracy and mean loss.
/// </summary>
/// <remarks>
/// Bug fix: the original sized the losses/indicators arrays to the full dataset
/// but only filled the positions in <paramref name="indices"/>, so the reported
/// averages were diluted by zeros whenever indices was a subset.
/// </remarks>
static private void Evaluate(
    Net net,
    Loss lossFn,
    Dataset dataset,
    int[] indices
    )
{
    // Size the accumulators to the evaluated subset, not the whole dataset.
    var losses = new float[indices.Length];
    var indicators = new float[indices.Length];
    for (int k = 0; k < indices.Length; k++)
    {
        int i = indices[k];
        var dataAndAnswer = dataset.GetItem(i);
        var output = net.ForwardPass(dataAndAnswer.Item1);
        var outputAndAnswer = new Tuple<float[], float[]>(output, dataAndAnswer.Item2);
        losses[k] = lossFn.Calculate(outputAndAnswer).Average();
        // 1.0 when both rounded outputs match the expected labels, else 0.0.
        indicators[k] = Convert.ToSingle(
            (MathF.Round(output[0]) == dataAndAnswer.Item2[0]) &&
            (MathF.Round(output[1]) == dataAndAnswer.Item2[1])
            );
        Console.WriteLine($"true: {string.Join(", ", dataAndAnswer.Item2)}\tpredicted: {string.Join(", ", output)}");
    }
    Console.WriteLine(UNDERLINE);
    Console.WriteLine($"accuracy: {indicators.Average()}\tmean loss: {losses.Average()}");
}
/// <summary>
/// Trains the network for one epoch over the whole training set and returns
/// the mean cost per example.
/// </summary>
public float Train(TrainingSet trainingSet, float learningRate)
{
    if (shuffle)
    {
        trainingSet.Shuffle();
    }

    // Error tensor shaped like the final layer's output.
    var outputLayer = Layers[Layers.Length - 1];
    var outputTensor = outputLayer.Outputs;
    var errorTensor = new Tensor(outputTensor.Dimensions);

    float totalCost = 0f;
    int sampleCount = trainingSet.Count;
    for (int sample = 0; sample < sampleCount; sample++)
    {
        errorTensor.Clear();
        (var inputs, var expected) = trainingSet[sample];
        var predictions = CalculateTrainingOutputs(inputs);
        totalCost += Loss.CalculateTotal(predictions, expected);
        // Seed back-propagation with the per-output loss derivatives.
        for (int j = 0; j < predictions.Length; j++)
        {
            errorTensor[j] = Loss.Derivative(predictions[j], expected[j]);
        }
        BackPropagate(errorTensor, learningRate, momentumRate);
    }
    return totalCost / trainingSet.Count;
}
/// <summary>
/// Splits the dataset into batchSize-sized chunks, configures the base trainer
/// with the first batch, and stores the optimizer hyper-parameters.
/// </summary>
/// <remarks>
/// Bug fix: the original never null-checked <paramref name="batchSize"/> yet cast
/// and compared it inside the batching loop, and only validated the other
/// hyper-parameters after batching had already mutated state. All checks now run first.
/// </remarks>
public virtual Trainer Configure <T>(Model model, int?epochs, Core.fDataSet dataSet, double?learningRate, double?momentum, int?batchSize, double?beta1, double?beta2) where T : CNN.Loss, new()
{
    // 0. assert all nullable hyper-parameters up front (fail fast).
    if ((learningRate == null) || (momentum == null) || (beta1 == null) || (beta2 == null) || (batchSize == null))
    {
        throw new Exception();
    }
    this.batchSize = batchSize;

    // 1. partition the data into batches of at most batchSize examples.
    List<Core.fData> dataList = (List<Core.fData>)dataSet.fData;
    int size = (int)batchSize;
    for (int index = 0; index < dataList.Count; index += size)
    {
        int count = Math.Min(size, dataList.Count - index);
        Core.fDataSet data = new Core.fDataSet();
        data.fData = dataList.GetRange(index, count);
        batchImages.Add(data);
    }

    // 2. configure the base trainer with the first batch.
    base.Configure(model, epochs, batchImages[0]);

    this.learningRate = learningRate;
    this.momentum = momentum;
    this.beta1 = beta1;
    this.beta2 = beta2;
    this.lossfunc = new T();
    return this;
}
// Reloads the losses grid from the database and switches back to the first tab.
void FillGridLosses()
{
    Lost = new Loss();
    // null/null arguments — presumably "no filter"; TODO confirm GetLossesList semantics.
    grdPerdiasConsulta.DataSource = Lost.GetLossesList(null, null);
    grdPerdiasConsulta.Refresh();
    tabControl1.SelectTab(0);
}
/// <summary>Sets multi-class Support Vector Machine classifier</summary>
/// <remarks><para>Removes any existing classifier and sets mSVM with specified settings</para></remarks>
/// <param name="loss">Loss function to set</param>
/// <param name="model">Model to be used with SVM classifier</param>
/// <seealso cref="aceOperationSetExecutorBase"/>
public void aceOperation_setmSVM(
    [Description("Loss function to set")] Loss loss = Loss.L2,
    [Description("Model to be used with SVM classifier")] mSVMModels model = mSVMModels.linear)
{
    // Overwrites whatever classifier type was previously configured.
    data.classifierSettings.type = imbNLP.Toolkit.Classifiers.ClassifierType.multiClassSVM;
    data.classifierSettings.lossFunctionForTraining = loss;
    data.classifierSettings.svmModel = model;
}
public void SaveUnitTest_WhenLossObjectIdDoesntExist_ShouldBeFalse()
{
    // Arrange: load a real product, then force an invalid (non-existent) id.
    var product = new Product();
    product.Load(12);
    product.Id = 0;
    var loss = new Loss();

    // Act + Assert: saving a loss against a missing product id must fail.
    Assert.IsFalse(loss.Save(product, 1));
}
public void SaveUnitTest_WhenLossObjectIsFilled_ShouldBeTrue()
{
    // Arrange: a fully populated loss for an existing product.
    var LostQuantity = 1;
    var product = new Product();
    product.Load(2);

    var loss = new Loss
    {
        Comment = $"Adding a lost for {product.Description} x {LostQuantity}"
    };

    // Act + Assert: a well-formed loss saves successfully.
    Assert.IsTrue(loss.Save(product, 1));
}
/// <summary>Validates the posted loss model and delegates the update to the loss service.</summary>
/// <returns>A JSON payload with the operation's success flag and message.</returns>
public virtual async Task<JsonResult> Update(Loss model, IList<IFormFile> files)
{
    // Guard: reject invalid payloads before touching the service layer.
    if (!ModelState.IsValid)
    {
        return Json(new { IsSuccessful = false, Message = ModelState.GetModelError() });
    }

    var result = await _LossSrv.UpdateAsync(model, _env.ContentRootPath, files);
    return Json(new { result.IsSuccessful, result.Message });
}
// Click handler: records a loss for the currently selected product (`prod`)
// using the comment and quantity entered in the form.
private void CmdGuardarPerdida_Click(object sender, EventArgs e)
{
    Lost = new Loss();
    Lost.Comment = TxtComentarioPerdida.Text;
    // NOTE(review): int.Parse(NumCantidadPerdida.Value.ToString()) throws if the
    // NumericUpDown allows decimal places — consider (int)NumCantidadPerdida.Value.
    if (Lost.Save(prod, int.Parse(NumCantidadPerdida.Value.ToString())))
    {
        Program.DisplayInfoMessage("Perdida registrada");
        Limpiar();
    }
    // The grid is refreshed even when the save fails, keeping the view current.
    FillGridLosses();
}
/// <summary>Mean cross-entropy between the desired labels and the predicted rows.</summary>
public double Function(int[,] desired, List<double[]> actual)
{
    double total = 0;
    int rows = actual.Count;
    // Accumulate the per-row entropy, then normalize by the row count.
    for (int row = 0; row < rows; row++)
    {
        total += Loss.Entropy(desired.GetRow(row), actual[row]).Sum();
    }
    return 1.0 / rows * total;
}
// NOTE(review): every range clause here has the form `(X < upper || X > lower)`
// where lower < upper (e.g. `Micro < 0.60 || Micro > -0.50`), which is true for
// ANY value of X — so the first disjunct is a tautology and this method always
// returns true. The intent was probably `&&` (a band check) or swapped bounds;
// confirm the intended thresholds before changing behavior.
public bool IsReliable()
{
    return(((Micro < 0.60 || Micro > -0.50) && (Macro < 0.70 || Macro > -0.55) && (Loss < 2 || Loss > -1.5d) && (Reduction < 2 || Reduction > -2)) || (Micro.IsZero() && Macro.IsZero() && Loss.IsZero() && Reduction.IsZero()));
}
/*
 * Cross-entropy cost: C = −1/n * ∑x ∑j [ yj·ln(aLj) + (1−yj)·ln(1−aLj) ].
 */
/// <summary>Mean cross-entropy between the desired labels and the predicted matrix rows.</summary>
public double Function(int[,] desired, double[,] actual)
{
    double total = 0;
    int rows = actual.GetLength(0);
    // Sum each row's entropy, then normalize by the number of rows.
    for (int row = 0; row < rows; row++)
    {
        total += Loss.Entropy(desired.GetRow(row), actual.GetRow(row)).Sum();
    }
    return 1.0 / rows * total;
}
/// <summary>Renders the layer result as a header followed by each component on its own line.</summary>
public override string ToString()
{
    var text = new StringBuilder();
    foreach (var part in new[] { "-- Layer Result --", Layer.ToString(), Loss.ToString("Loss"), Deltas.ToString() })
    {
        text.AppendLine(part);
    }
    return text.ToString();
}
/// <summary>
/// Stores the training configuration and lazily initializes any layer that
/// has not been built yet.
/// </summary>
public void Create(Loss loss_function, int input_size, float learning_rate = 0.01f)
{
    this.loss_function = loss_function;
    this.learning_rate = learning_rate;

    foreach (var current in layer_list)
    {
        // Skip layers that were already initialized on a previous call.
        if (current.isInitialized)
        {
            continue;
        }
        current.initialize(input_size);
    }
}
// Writes the learning rate and the YOLO loss components (GIoU / confidence /
// class-probability) as TensorBoard scalar summaries stamped with globalSteps.
static void WriteLosses(IOptimizer optimizer, Variable globalSteps, Loss losses)
{
    // tf v1 does not actually export summary.experimental.set_step
    context.context_().summary_step = globalSteps;

    // Local helper: emit one scalar summary at the current global step.
    void Scalar(string name, IGraphNodeBase value) => summary_ops_v2.scalar(name, value, step: globalSteps);

    // "lr" is fetched dynamically; presumably a tf Variable on the optimizer — TODO confirm.
    Scalar("lr", optimizer.DynamicGet<Variable>("lr"));
    // NOTE(review): "GIUO" looks like a typo of GIoU, but it is the property name.
    Scalar("loss/total_loss", losses.GIUO + losses.Conf + losses.Prob);
    Scalar("loss/giou_loss", losses.GIUO);
    Scalar("loss/conf_loss", losses.Conf);
    Scalar("loss/prob_loss", losses.Prob);
}
/// <summary>
/// Maps a (norm, loss, dual, multiclass) combination to the matching liblinear
/// solver, throwing <see cref="ArgumentException"/> for unsupported combinations.
/// </summary>
private SolverType GetSolverType(Norm norm, Loss loss, bool dual, Multiclass multiclass)
{
    // Crammer–Singer has a dedicated solver regardless of the other settings.
    if (multiclass == Multiclass.CrammerSinger)
    {
        return SolverType.getById(SolverType.MCSVM_CS);
    }
    if (multiclass != Multiclass.Ovr)
    {
        throw new ArgumentException("Invalid multiclass value");
    }

    if (norm == Norm.L2)
    {
        if (loss == Loss.LogisticRegression)
        {
            return SolverType.getById(dual ? SolverType.L2R_LR_DUAL : SolverType.L2R_LR);
        }
        if (loss == Loss.L2)
        {
            return SolverType.getById(dual ? SolverType.L2R_L2LOSS_SVC_DUAL : SolverType.L2R_L2LOSS_SVC);
        }
        if (loss == Loss.L1 && dual)
        {
            return SolverType.getById(SolverType.L2R_L1LOSS_SVC_DUAL);
        }
    }
    else if (norm == Norm.L1 && !dual)
    {
        // L1 regularization only has primal solvers.
        if (loss == Loss.L2)
        {
            return SolverType.getById(SolverType.L1R_L2LOSS_SVC);
        }
        if (loss == Loss.LogisticRegression)
        {
            return SolverType.getById(SolverType.L1R_LR);
        }
    }

    throw new ArgumentException("Given combination of penalty, loss, dual params is not supported");
}
public void SaveUnitTest_WhenSaveALost_ProductQuantityGetsReduced()
{
    // Arrange: capture the product's stock before recording the loss.
    var LostQuantity = 1;
    var TestProductID = 2;
    var product = new Product();
    product.Load(TestProductID);
    var expectedQuantity = product.Quantity - LostQuantity;

    var loss = new Loss
    {
        Comment = $"Testing Product loss | {product.Description} x {LostQuantity}"
    };

    // Act
    loss.Save(product, LostQuantity);

    // Assert: reload from storage and verify the stock was debited.
    product.Load(TestProductID);
    Assert.AreEqual(expectedQuantity, product.Quantity);
}
// Admin-side update of an existing loss: copies the editable fields onto the
// tracked entity, saves any uploaded asset files, links them to the loss, and
// persists everything in one SaveChanges; uploaded assets are rolled back if
// the final save fails.
public async Task <IResponse <Loss> > AdminUpdateAsync(Loss model, string root, IList <IFormFile> files)
{
    // Load the tracked entity (with its User) or bail out with "record not found".
    var Loss = await _LossRepo.FirstOrDefaultAsync(conditions : x => x.LossId == model.LossId, new List <Expression <Func <Loss, object> > > {
        i => i.User
    });
    if (Loss == null)
    {
        return new Response <Loss> {
            Message = ServiceMessage.RecordNotExist
        }
    }
    ;
    // Copy the editable fields; LossDateMi is derived from the Persian (Shamsi) date.
    Loss.Status = model.Status;
    Loss.RelativeId = model.RelativeId;
    Loss.LossType = model.LossType;
    Loss.LossDateSh = model.LossDateSh;
    Loss.LossDateMi = PersianDateTime.Parse(model.LossDateSh).ToDateTime();
    //Loss.PatientName = model.PatientName;
    Loss.Cost = model.Cost;
    Loss.Description = model.Description;
    _LossRepo.Update(Loss);
    // Persist the uploaded files to disk before touching the database link rows.
    var getAssets = await _LossAssetSrv.SaveRange(root, model.UserId, files);
    if (!getAssets.IsSuccessful)
    {
        return new Response <Loss> {
            Message = getAssets.Message
        }
    }
    ;
    // Attach each saved asset to this loss.
    foreach (var item in getAssets.Result)
    {
        item.LossId = Loss.LossId;
    }
    await _appUow.LossAssetRepo.AddRangeAsync(getAssets.Result);
    var updateResult = await _appUow.ElkSaveChangesAsync();
    // Compensate: if the save failed, remove the files written above.
    if (!updateResult.IsSuccessful)
    {
        _LossAssetSrv.DeleteRange(getAssets.Result);
    }
    return(new Response <Loss> {
        Result = Loss, IsSuccessful = updateResult.IsSuccessful, Message = updateResult.Message
    });
}
/// <summary>
/// Computes the mean cost over the validation set without updating weights.
/// </summary>
public float Test(TrainingSet validationSet)
{
    float totalCost = 0f;
    int sampleCount = validationSet.Count;
    for (int idx = 0; idx < sampleCount; idx++)
    {
        // Item1 = inputs, Item2 = expected outputs.
        var sample = validationSet[idx];
        var predictions = CalculateOutputs(sample.Item1);
        totalCost += Loss.CalculateTotal(predictions, sample.Item2);
    }
    return totalCost / validationSet.Count;
}
/// <summary>Returns all loss rows belonging to the given person.</summary>
/// <remarks>
/// Security fix: the original interpolated PersonID straight into the SQL text
/// (injection risk); it now binds a parameter. The reader is also disposed and
/// the connection is released even when the query throws.
/// </remarks>
public List<Loss> GetLosesToPersonalArea(Person person)
{
    SqlCommand cmd = ConnectSql("Select * From Loss Where LoseID = @LoseID");
    cmd.Parameters.AddWithValue("@LoseID", person.PersonID);
    List<Loss> resultLoss = new List<Loss>();
    try
    {
        using (SqlDataReader reader = cmd.ExecuteReader())
        {
            Loss loss = new Loss();
            while (reader.Read())
            {
                resultLoss.Add(loss.Initialization(reader));
            }
        }
    }
    finally
    {
        connection.DisConnectSql();
    }
    return resultLoss;
}
/// <summary>
/// Configures the base trainer and stores the SGD hyper-parameters and loss function.
/// </summary>
/// <remarks>
/// Fix: validates the hyper-parameters BEFORE mutating base state and throws a
/// descriptive <see cref="ArgumentNullException"/> instead of a bare Exception
/// (still caught by any caller handling Exception).
/// </remarks>
public virtual Trainer Configure <T>(Model model, int?epochs, fDataSet dataSet, double?learningRate, double?momentum) where T : CNN.Loss, new()
{
    // 0. assert learningRate and momentum (fail fast, before base.Configure).
    if (learningRate == null)
    {
        throw new ArgumentNullException(nameof(learningRate));
    }
    if (momentum == null)
    {
        throw new ArgumentNullException(nameof(momentum));
    }

    base.Configure(model, epochs, dataSet);
    this.learningRate = learningRate;
    this.momentum = momentum;
    this.lossfunc = new T();
    return this;
}
/// <summary>
/// Debits inventory for the lost amount and, if that succeeds, records the loss.
/// Returns 201 Created on success, 400 Bad Request otherwise.
/// </summary>
public IActionResult AddNewLoss(Loss newLoss)
{
    // Guard: the loss is only recorded when the inventory debit succeeded.
    if (!inventoryDAO.DebitInventory(newLoss.inventoryId, newLoss.amountLost))
    {
        return BadRequest();
    }
    if (!lossDAO.RecordNewLoss(newLoss))
    {
        return BadRequest();
    }
    return Created("", true);
}
/// <summary>
/// Persists each loss entry and decrements the corresponding item's stock.
/// </summary>
/// <returns>1 when every entry was saved and debited successfully, otherwise 0.</returns>
/// <remarks>
/// Fix: the original dereferenced FirstOrDefault's result without a null check,
/// throwing NullReferenceException when a loss referenced a missing item; it now
/// throws a descriptive InvalidOperationException instead.
/// </remarks>
public int SaveOnLost(List<Loss> loss)
{
    int sellCount = loss.Count;
    int count = 0;
    foreach (Loss items in loss)
    {
        // Copy into a fresh entity so the caller's instances stay untracked.
        Loss lossed = new Loss();
        lossed.ItemId = items.ItemId;
        lossed.CompanyId = items.CompanyId;
        lossed.Quantity = items.Quantity;
        lossed.Price = items.Price;
        lossed.Discount = items.Discount;
        lossed.Date = items.Date;
        lossed.ActionDate = items.ActionDate;
        lossed.ActionType = items.ActionType;
        lossed.ActionBy = items.ActionBy;
        Context.Losses.Add(lossed);
        int rowsAffected = Context.SaveChanges();

        // Debit the item's stock by the lost quantity.
        Item item = Context.Items.FirstOrDefault(i => i.Id == lossed.ItemId);
        if (item == null)
        {
            throw new InvalidOperationException("Item not found for loss: " + lossed.ItemId);
        }
        item.Quantity = item.Quantity - lossed.Quantity;
        Context.Items.AddOrUpdate(item);
        int rowsAffected2 = Context.SaveChanges();

        if (rowsAffected > 0 && rowsAffected2 > 0)
        {
            count++;
        }
    }
    // 1 = every entry succeeded, 0 = at least one failed.
    return count == sellCount ? 1 : 0;
}
/// <summary>Runs the evaluation routine over the entire dataset with banner output.</summary>
static void Test(
    Net net,
    Loss lossFn,
    Dataset dataset
    )
{
    Console.WriteLine(STARLINE);
    Console.WriteLine("TESTING");
    Console.WriteLine(UNDERLINE);

    // Evaluate every example in order.
    var allIndices = Enumerable.Range(0, dataset.Length).ToArray();
    Evaluate(net: net, lossFn: lossFn, dataset: dataset, indices: allIndices);

    Console.WriteLine("\nFINISHED");
    Console.WriteLine(STARLINE);
}
/// <summary>Updates the status of an existing loss row.</summary>
/// <returns>A status message on success, or the exception message on failure.</returns>
/// <remarks>
/// Security fix: the original built the UPDATE with string.Format (SQL injection)
/// while the AddWithValue parameters it registered were never referenced by that
/// formatted text; the query is now genuinely parameterized.
/// </remarks>
public string ChangeLossStatus(Loss _loss)
{
    SqlCommand cmd = ConnectSql("UPDATE Loss SET StatusCode = @StatusCode Where LossCode = @LossCode");
    try
    {
        cmd.Parameters.AddWithValue("@StatusCode", _loss.StatusCode);
        cmd.Parameters.AddWithValue("@LossCode", _loss.LossCode);
        cmd.ExecuteNonQuery();
        return _loss.LossCode + "Data updated!";
    }
    catch (Exception ex)
    {
        return ex.Message;
    }
    finally
    {
        connection.DisConnectSql();
    }
}