Example #1
0
 public override int GetHashCode()
 {
     // Combine the Transition hash with the Losses hash (397 is a common
     // prime multiplier for hash mixing); null Losses contributes 0.
     unchecked
     {
         var transitionHash = Transition.GetHashCode() * 397;
         return transitionHash ^ (Losses?.GetHashCode() ?? 0);
     }
 }
Example #2
0
        public void Model_Use_Case()
        {
            // Shapes for 28x28x1 image data with 10 output classes.
            var featureShape = new int[] { 28, 28, 1 };
            var classCount   = 10;
            var labelShape   = new int[] { classCount };

            (var observations, var targets) = CreateArtificialData(featureShape, labelShape, observationCount: 100);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Deterministic weight initialization; zero-initialized bias.
            var rng = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(rng.Next());
            var biasInit = Initializers.Zero();

            // Architecture: Dense(512) -> ReLU -> Dense(classCount) -> Softmax.
            var network = Layers.Input(featureShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(classCount, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Input and target variables.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // Loss and metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // Trainer with momentum SGD.
            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, lossFunc, metricFunc, new LearnerVector { learner });

            var model = new Model(trainer, network, dataType, device);

            // Map minibatch data by name.
            var nameToData = new Dictionary<string, MemoryMinibatchData>
            {
                { "observations", observations },
                { "targets", targets }
            };

            // Map variables by the same names.
            var nameToVariable = new Dictionary<string, Variable>
            {
                { "observations", inputVariable },
                { "targets", targetVariable },
            };

            var trainSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            model.Fit(trainSource, batchSize: 8, epochs: 2);

            (var loss, var metric) = model.Evaluate(trainSource);

            Trace.WriteLine($"Final evaluation - Loss: {loss}, Metric: {metric}");
        }
        static Model CreateModel(Function inputVariable, Variable targetVariable, int targetCount,
                                 DataType dataType, DeviceDescriptor device)
        {
            // Deterministic weight initialization; zero-initialized bias.
            var rng = new Random(232);
            Func<CNTKDictionary> weightInit = () => Initializers.GlorotNormal(rng.Next());
            var biasInit = Initializers.Zero();

            // Regression architecture: two 32-unit ReLU hidden layers, linear output.
            var network = inputVariable
                          .Dense(32, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(32, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(targetCount, weightInit(), biasInit, device, dataType);

            // MSE as the loss, MAE reported as the metric.
            var lossFunc   = Losses.MeanSquaredError(network.Output, targetVariable);
            var metricFunc = Losses.MeanAbsoluteError(network.Output, targetVariable);

            // Trainer using the Adam learner.
            var learner = CntkCatalyst.Learners.Adam(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, lossFunc, metricFunc, new LearnerVector { learner });

            var model = new Model(trainer, network, dataType, device);

            Trace.WriteLine(model.Summary());
            return model;
        }
Example #4
0
        /// <summary>
        /// Updates the Losses entity with the given id using the posted values.
        /// </summary>
        /// <returns>400 for a null body, 404 when the id does not exist, 200 with the new values on success.</returns>
        public IActionResult Put(int Id, [FromBody] Losses newLosses)
        {
            // Guard: model binding produces null for an empty/invalid body.
            if (newLosses == null)
            {
                return BadRequest();
            }

            // Guard: Find returns null for a missing id; the original code would
            // then throw a NullReferenceException inside Entry(...).
            Losses oldLosses = _context.Losses.Find(Id);
            if (oldLosses == null)
            {
                return NotFound();
            }

            _context.Entry(oldLosses).CurrentValues.SetValues(newLosses);
            _context.SaveChanges();
            return Ok(newLosses);
        }
Example #5
0
        /// <summary>
        /// Deletes the Losses entity with the given id.
        /// </summary>
        /// <returns>404 when the id does not exist, 204 on success.</returns>
        public IActionResult Delete(int Id)
        {
            // Guard: Find returns null for a missing id; Remove(null) would throw
            // an ArgumentNullException in the original code.
            Losses oldLosses = _context.Losses.Find(Id);
            if (oldLosses == null)
            {
                return NotFound();
            }

            _context.Losses.Remove(oldLosses);
            _context.SaveChanges();
            return NoContent();
        }
Example #6
0
 public static LossesDto FroLosses(this LossesDto dto, Losses a)
 {
     // Copy the entity's values onto the DTO and return it for chaining.
     // NOTE(review): the name looks like a typo for "FromLosses"; kept for
     // compatibility with existing callers.
     dto.Id       = a.Id;
     dto.Quantity = a.Quantity;
     dto.Cause    = a.Cause;
     dto.Data     = a.Data;
     dto.Book     = a.Book.ToBookDto();
     return dto;
 }
        public override string ToString()
        {
            // Wins and Losses each on their own line, then the blind-hand counts
            // appended with no separators (matches the original output exactly).
            var builder = new StringBuilder()
                          .AppendLine(Wins.ToString())
                          .AppendLine(Losses.ToString())
                          .Append(BigBlindHands)
                          .Append(SmallBlindHands);

            return builder.ToString();
        }
Example #8
0
        public void CanAdd()
        {
            // Adding one training/validation pair should populate both series
            // and expose the pair as the latest values.
            var sut = new Losses();

            sut.AddLossPair(1.0, 2.0);

            Assert.AreEqual(1, sut.TrainingLosses.Length);
            Assert.AreEqual(1, sut.ValidationLosses.Length);
            Assert.AreEqual(1.0, sut.LatestTrainingLoss);
            Assert.AreEqual(2.0, sut.LatestValidationLoss);
        }
Example #9
0
 /// <summary>
 /// Verifies the Losses default constructor does not throw.
 /// </summary>
 public void CanMake()
 {
     try
     {
         // The instance is intentionally discarded — only construction is
         // under test (fixes the unused-local warning in the original).
         _ = new Losses();
     }
     catch (Exception e)
     {
         Assert.Fail("Losses constructor threw an exception: " + e.Message);
     }
 }
Example #10
0
        public async Task AddLoss(CreateViewModel lossModel)
        {
            // Map the view model onto a new Losses entity and persist it.
            var entity = new Losses
            {
                Cause = lossModel.Cause,
                State = lossModel.State
            };

            _context.Losses.Add(entity);
            await _context.SaveChangesAsync();
        }
Example #11
0
        public static Losses ToLosses(this LossesDto dto)
        {
            // Map a DTO back to a Losses entity.
            // NOTE(review): assumes dto.Book is non-null, since ToBook() is
            // invoked on it — confirm with callers.
            return new Losses()
            {
                Id       = dto.Id,
                Book     = dto.Book.ToBook(),
                Cause    = dto.Cause,
                Data     = dto.Data,
                Quantity = dto.Quantity
            };
        }
Example #12
0
        public void MeanSquareError()
        {
            // MSE of slightly different vectors; expected value by hand:
            // mean of (0, 0.09, 0.01, 0.16, 0.64) = 0.18.
            var targets        = new float[] { 1.0f, 2.3f, 3.1f, 4.4f, 5.8f };
            var targetsVar     = CNTKLib.InputVariable(new int[] { targets.Length }, m_dataType);

            var predictions    = new float[] { 1.0f, 2.0f, 3.0f, 4.0f, 5.0f };
            var predictionsVar = CNTKLib.InputVariable(new int[] { predictions.Length }, m_dataType);

            var sut    = Losses.MeanSquaredError(predictionsVar, targetsVar);
            var actual = Evaluate(sut, targetsVar, targets,
                                  predictionsVar, predictions);

            Assert.AreEqual(0.18f, actual, 0.00001);
        }
Example #13
0
        public void MeanSquareError_Zero_Error()
        {
            // Identical (all-zero) targets and predictions must give exactly
            // zero loss. Arrays are zero-initialized by default in C#.
            var targets        = new float[6];
            var targetsVar     = CNTKLib.InputVariable(new int[] { targets.Length }, m_dataType);

            var predictions    = new float[6];
            var predictionsVar = CNTKLib.InputVariable(new int[] { predictions.Length }, m_dataType);

            var sut    = Losses.MeanSquaredError(predictionsVar, targetsVar);
            var actual = Evaluate(sut, targetsVar, targets,
                                  predictionsVar, predictions);

            Assert.AreEqual(0.0f, actual);
        }
Example #14
0
 public void AddLoss()
 {
     // Show the edit dialog seeded with the current losses; on OK, append the
     // new loss (Losses is replaced with a new list, not mutated in place)
     // and select the new entry in the list box.
     using (var dlg = new EditFragmentLossDlg(Losses.ToArray()))
     {
         if (dlg.ShowDialog(this) != DialogResult.OK)
         {
             return;
         }

         var updated = new List<FragmentLoss>(Losses) { dlg.Loss };
         Losses = updated;
         listNeutralLosses.SelectedItem = dlg.Loss;
     }
 }
Example #15
0
        /// <summary>
        /// Records one optimization step: appends the raw loss plus exponentially
        /// smoothed mid-term and long-term averages, stores the weight vector in
        /// history, and tracks squared weight-distance changes over the configured
        /// lookback windows.
        /// </summary>
        /// <param name="weightVector">Current weights; assumed to have at least Weights elements — TODO confirm.</param>
        /// <param name="loss">Loss value for the current iteration.</param>
        public void Track(double[] weightVector, double loss)
        {
            Losses.AddLast(loss);
            if (Iteration == 0)
            {
                // First iteration: both smoothed series start at the raw loss.
                LossesMidTerm.AddLast(loss);
                LosseslongTerm.AddLast(loss);
            }
            else
            {
                // Exponential moving averages whose effective window grows with the
                // iteration count up to 10 (mid-term) and 100 (long-term) steps.
                LossesMidTerm.AddLast(LossesMidTerm.Last.Value * (1.0 - (1.0 / Math.Min(10, Iteration + 1))) + loss * ((1.0 / Math.Min(10, Iteration + 1))));
                LosseslongTerm.AddLast(LosseslongTerm.Last.Value * (1.0 - (1.0 / Math.Min(100, Iteration + 1))) + loss * ((1.0 / Math.Min(100, Iteration + 1))));
            }
            //add weights to memory
            for (int i = 0; i < Weights; i++)
            {
                weightHistories[i].Add(weightVector[i]);
            }

            //compute changes
            for (int i = 0; i < TrackDistances.Length; i++)
            {
                var distance          = 0.0;
                var iterationDistance = TrackDistances[i];
                // Only compute once enough history exists for this lookback distance.
                if (weightHistories[0].Count > iterationDistance)
                {
                    // Squared Euclidean distance between the current weights and the
                    // weights stored at absolute history index iterationDistance.
                    // NOTE(review): this indexes a fixed history position, not
                    // "iterationDistance steps back" — confirm that is intended.
                    for (int bm = 0; bm < Weights; bm++)
                    {
                        distance += (weightHistories[bm][iterationDistance] - weightVector[bm]) * (weightHistories[bm][iterationDistance] - weightVector[bm]);
                    }

                    Distances[i].Add(distance);
                    var iteration = Iteration + 1 - iterationDistance;
                    if (iteration == 0)
                    {
                        // First distance sample for this window: seed both averages.
                        AverageChanges[i].AddLast(distance);
                        AverageChangesLong[i].AddLast(distance);
                    }
                    else
                    {
                        // Same EMA scheme as the loss series (windows 10 and 100).
                        AverageChanges[i].AddLast(AverageChanges[i].Last.Value * (1.0 - (1.0 / Math.Min(10, iteration + 1))) + distance * ((1.0 / Math.Min(10, iteration + 1))));
                        AverageChangesLong[i].AddLast(AverageChangesLong[i].Last.Value * (1.0 - (1.0 / Math.Min(100, iteration + 1))) + distance * ((1.0 / Math.Min(100, iteration + 1))));
                    }
                }
            }

            Iteration++;
        }
Example #16
0
        /// <summary>
        /// Recomputes the win/loss chart collections for all games matching
        /// <paramref name="filter"/>, split by whether the coin was received
        /// (i.e. the player went second).
        /// </summary>
        private void CalculateWinsAndLosses(HearthStatsDbContext context, Expression <Func <GameResult, bool> > filter)
        {
            var   games    = context.Games;
            float total    = games.Where(filter).Count();
            // "C" = with coin (went second), "NC" = no coin (went first).
            float winsC    = games.Where(filter).Count(x => x.Victory && !x.GoFirst);
            float lossesC  = games.Where(filter).Count(x => !x.Victory && !x.GoFirst);
            float winsNC   = games.Where(filter).Count(x => x.Victory && x.GoFirst);
            float lossesNC = games.Where(filter).Count(x => !x.Victory && x.GoFirst);
            var   wins     = winsC + winsNC;
            var   losses   = lossesC + lossesNC;

            WinsAndLosses.Clear();
            Wins.Clear();
            Losses.Clear();
            WithCoin.Clear();
            WithoutCoin.Clear();
            if (total <= 0)
            {
                // No games: fill every chart with zeroed placeholder entries.
                WinsAndLosses.Add(new StatModel("Wins", 0));
                WinsAndLosses.Add(new StatModel("Losses", 0));
                Wins.Add(new StatModel("Coin", 0));
                Wins.Add(new StatModel("No coin", 0));
                Losses.Add(new StatModel("Coin", 0));
                Losses.Add(new StatModel("No coin", 0));
                // Fixed copy-paste bug: all four placeholders below were labeled
                // "Losses"; the non-empty branch adds a "Wins" and a "Losses"
                // entry to each of these collections.
                WithCoin.Add(new StatModel("Wins", 0));
                WithCoin.Add(new StatModel("Losses", 0));
                WithoutCoin.Add(new StatModel("Wins", 0));
                WithoutCoin.Add(new StatModel("Losses", 0));

                return;
            }

            // NOTE(review): when wins, losses, or a coin subgroup totals 0, the
            // float divisions below yield NaN — confirm the chart tolerates this.
            WinsAndLosses.Add(new StatModel(string.Format("Wins: {0}", wins), wins / total * 100));
            WinsAndLosses.Add(new StatModel(string.Format("Losses: {0}", losses), losses / total * 100));

            Wins.Add(new StatModel(string.Format("Coin: {0}", winsC), winsC / wins * 100));
            Wins.Add(new StatModel(string.Format("No coin: {0}", winsNC), winsNC / wins * 100));

            Losses.Add(new StatModel(string.Format("Coin: {0}", lossesC), lossesC / losses * 100));
            Losses.Add(new StatModel(string.Format("No coin: {0}", lossesNC), lossesNC / losses * 100));

            WithCoin.Add(new StatModel(string.Format("Wins: {0}", winsC), winsC / (winsC + lossesC) * 100));
            WithCoin.Add(new StatModel(string.Format("Losses: {0}", lossesC), lossesC / (winsC + lossesC) * 100));

            WithoutCoin.Add(new StatModel(string.Format("Wins: {0}", winsNC), winsNC / (winsNC + lossesNC) * 100));
            WithoutCoin.Add(new StatModel(string.Format("Losses: {0}", lossesNC), lossesNC / (winsNC + lossesNC) * 100));
        }
Example #17
0
        /// <summary>
        /// Trains the network with per-instance SGD for <c>Epoch</c> epochs,
        /// decaying the learning rate by 0.8 after each epoch and reporting
        /// progress to <paramref name="textBlock"/> via the UI dispatcher.
        /// </summary>
        /// <remarks>
        /// Changed from <c>async void</c> to <c>async Task</c>: exceptions thrown
        /// in an async void method are unobservable and can crash the process.
        /// Existing call sites still compile (the returned task may be ignored).
        /// </remarks>
        internal async Task Train(float[,,] trainData, int[] trainLabel, CoreDispatcher coreDispatcher, TextBlock textBlock)
        {
            float[] delta = null;
            //int numOfInstance = 10000;
            int numOfInstance = trainData.GetLength(0);

            for (int e = 0; e < Epoch; e++)
            {
                for (int instanceIndex = 0; instanceIndex < numOfInstance; instanceIndex++)
                {
                    // Load one training instance into the input layer.
                    SetInput(trainData, instanceIndex, Layers.First());

                    //Forward
                    for (int i = 1; i < Layers.Length; i++)
                    {
                        Layers[i].Forward();
                    }

                    // Accumulate the average loss of this instance into the epoch total.
                    var loss = LossFunction(Layers.Last().Outputs, trainLabel[instanceIndex]);
                    Losses[e] += loss.Average();

                    delta = LossFunctionDerivative(Layers.Last().Outputs, trainLabel[instanceIndex]);

                    //Backward
                    Layers.Last().OutputLayerBackward(delta);
                    for (int i = Layers.Length - 2; i > 0; i--)
                    {
                        Layers[i].Backward();
                    }

                    // Update the UI every 1000 instances with the running average loss.
                    if (instanceIndex % 1000 == 0)
                    {
                        await coreDispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                            textBlock.Text = $"Epoch: {e}, Loss: {Losses[e] / (instanceIndex + 1)}";
                        });
                    }
                }
                LearnRate *= 0.8f;
            }
            await coreDispatcher.RunAsync(CoreDispatcherPriority.Normal, () => {
                textBlock.Text = $"TrainDone Loss: {Losses.Last() / numOfInstance}";
            });
        }
Example #18
0
        /// <summary>
        /// Records one optimization step: appends the raw loss plus exponentially
        /// smoothed mid-term (~10 step) and long-term (~100 step) averages, stores
        /// the weight vector in history, and tracks squared weight-distance changes
        /// over the configured lookback windows.
        /// (The large commented-out debug-dump block at the end was dead code and
        /// has been removed; the executable logic is unchanged.)
        /// </summary>
        /// <param name="weightVector">Current weights; assumed to have at least Weights elements — TODO confirm.</param>
        /// <param name="loss">Loss value for the current iteration.</param>
        public void Track(double[] weightVector, double loss)
        {
            Losses.AddLast(loss);
            if (Iteration == 0)
            {
                // First iteration: both smoothed series start at the raw loss.
                LossesMidTerm.AddLast(loss);
                LosseslongTerm.AddLast(loss);
            }
            else
            {
                // Exponential moving averages whose effective window grows with the
                // iteration count up to 10 (mid-term) and 100 (long-term) steps.
                LossesMidTerm.AddLast(LossesMidTerm.Last.Value * (1.0 - (1.0 / Math.Min(10, Iteration + 1))) + loss * ((1.0 / Math.Min(10, Iteration + 1))));
                LosseslongTerm.AddLast(LosseslongTerm.Last.Value * (1.0 - (1.0 / Math.Min(100, Iteration + 1))) + loss * ((1.0 / Math.Min(100, Iteration + 1))));
            }
            //add weights to memory
            for (int i = 0; i < Weights; i++)
            {
                weightHistories[i].Add(weightVector[i]);
            }

            //compute changes
            for (int i = 0; i < TrackDistances.Length; i++)
            {
                var distance          = 0.0;
                var iterationDistance = TrackDistances[i];
                // Only compute once enough history exists for this lookback distance.
                if (weightHistories[0].Count > iterationDistance)
                {
                    // Squared Euclidean distance between the current weights and the
                    // weights stored at absolute history index iterationDistance.
                    for (int bm = 0; bm < Weights; bm++)
                    {
                        distance += (weightHistories[bm][iterationDistance] - weightVector[bm]) * (weightHistories[bm][iterationDistance] - weightVector[bm]);
                    }

                    Distances[i].Add(distance);
                    var iteration = Iteration + 1 - iterationDistance;
                    if (iteration == 0)
                    {
                        // First distance sample for this window: seed both averages.
                        AverageChanges[i].AddLast(distance);
                        AverageChangesLong[i].AddLast(distance);
                    }
                    else
                    {
                        // Same EMA scheme as the loss series (windows 10 and 100).
                        AverageChanges[i].AddLast(AverageChanges[i].Last.Value * (1.0 - (1.0 / Math.Min(10, iteration + 1))) + distance * ((1.0 / Math.Min(10, iteration + 1))));
                        AverageChangesLong[i].AddLast(AverageChangesLong[i].Last.Value * (1.0 - (1.0 / Math.Min(100, iteration + 1))) + distance * ((1.0 / Math.Min(100, iteration + 1))));
                    }
                }
            }

            Iteration++;
        }
Example #19
0
 /// <summary>
 /// Creates a new Losses entity from the posted body.
 /// </summary>
 /// <returns>400 for a null body, 200 with the stored entity on success.</returns>
 public IActionResult Post([FromBody] Losses losses)
 {
     // Guard: model binding produces null for an empty/invalid body, and
     // _context.Add(null) would throw ArgumentNullException.
     if (losses == null)
     {
         return BadRequest();
     }

     _context.Add(losses);
     _context.SaveChanges();
     return Ok(losses);
 }
Example #20
0
        /// <summary>
        /// Builds the daily OEE (Overall Equipment Effectiveness) summary for
        /// machine 1 on the given date: availability, performance, quality and
        /// overall OEE percentages, the top-5 idle/breakdown/overall losses, and
        /// the machine mode timings.
        /// </summary>
        /// <param name="correctedDate">Date string parsed with Convert.ToDateTime (culture-dependent — TODO confirm expected format).</param>
        /// <returns>A populated GetOEE; mode timings are set even when no production rows exist for the day.</returns>
        public GetOEE OEE(string correctedDate)
        {
            GetOEE   objoee                 = new GetOEE();
            DateTime correctedDate1         = Convert.ToDateTime(correctedDate);
            double   TotalOperatingTime     = 0;
            double   TotalDownTime          = 0;
            double   TotalAcceptedQty       = 0;
            double   TotalRejectedQty       = 0;
            double   TotalPerformanceFactor = 0;
            // NOTE(review): machine id is hard-coded to 1 — confirm this endpoint
            // is intentionally single-machine.
            int      MachineID              = 1;
            double   DayOperatingTime       = 0;
            double   DayDownTime            = 0;
            double   DayAcceptedQty         = 0;
            double   DayRejectedQty         = 0;
            double   DayPerformanceFactor   = 0;

            // Ensure man/machine production rows exist for the day before querying.
            insertManMacProd(MachineID, correctedDate1);
            // NOTE(review): GetMainLossList is only referenced by commented-out
            // code below and is effectively unused.
            var GetMainLossList = db.tbllossescodes.Where(m => m.LossCodesLevel == 1 && m.IsDeleted == 0 && m.MessageType != "SETUP").OrderBy(m => m.LossCodeID).ToList();
            var prodplanchine   = db.tbl_ProdManMachine.Where(m => m.MachineID == MachineID && m.CorrectedDate == correctedDate1.Date).ToList();

            if (prodplanchine.Count > 0)
            {
                // NOTE(review): the Top* lists on objoee are rebuilt and overwritten
                // on every iteration of this loop — only the last row's results survive.
                foreach (var ProdRow in prodplanchine)
                {
                    TotalOperatingTime += (double)ProdRow.TotalOperatingTime;
                    TotalDownTime      += (double)ProdRow.TotalLoss + (double)ProdRow.TotalMinorLoss + (double)ProdRow.TotalSetup;
                    //TotalAcceptedQty += ProdRow.tblworkorderentry.Yield_Qty;
                    //TotalRejectedQty += ProdRow.tblworkorderentry.ScrapQty;
                    //TotalPerformanceFactor += ProdRow.PerfromaceFactor;
                    //int TotalQty = ProdRow.tblworkorderentry.Yield_Qty + ProdRow.tblworkorderentry.ScrapQty;
                    //if (TotalQty == 0)
                    //    TotalQty = 1;
                    List <IDLELosses>      IdleLosslist   = new List <IDLELosses>();
                    List <BreakdownLosses> brkdwnlosslist = new List <BreakdownLosses>();
                    List <Losses>          losslist       = new List <Losses>();
                    //foreach (var LossRow in GetMainLossList)
                    //{
                    // var getWoLossList1 = db.tblmodes.Where(m => m.CorrectedDate == correctedDate1.Date && m.LossCodeID == LossRow.LossCodeID && m.IsCompleted==1 ).OrderBy(m=>new { m.ModeID,m.StartTime}).ToList();

                    // Completed machine modes for the day, not yet pushed to PiWeb.
                    var getmodes = db.tblmodes.Where(m => m.CorrectedDate == correctedDate1.Date && m.IsCompleted == 1 && m.ModeTypeEnd == 1 && m.IsPiWeb == 0).OrderBy(m => new { m.ModeID, m.StartTime }).ToList();

                    var     TotalLossDuration = getmodes.Where(m => m.ModeType == "IDLE").Sum(m => m.DurationInSec).ToString();
                    decimal TotalLossDura     = Convert.ToDecimal(TotalLossDuration);

                    var getIdleLosses          = getmodes.Where(m => m.ModeType == "IDLE").ToList();
                    var TotalBreakdownDuration = getmodes.Where(m => m.ModeType == "MNT").Sum(m => m.DurationInSec).ToString();
                    var getbrklosses           = getmodes.Where(m => m.ModeType == "MNT").ToList();
                    // Build the per-mode idle-loss percentage list.
                    // NOTE(review): TotalLossDura can be 0 even when idle modes exist,
                    // which would make the decimal divisions below throw
                    // DivideByZeroException — confirm durations are always > 0.
                    if (getIdleLosses.Count > 0)
                    {
                        decimal LossDuration = 0;
                        int     count        = 0;
                        // NOTE(review): iterates all modes, not just the IDLE subset
                        // filtered above — confirm this is intended.
                        foreach (var row in getmodes)
                        {
                            IDLELosses objloss = new IDLELosses();

                            //var lossrow = ;

                            if (row.LossCodeID != null)
                            {
                                count           += 1;
                                objloss.ID       = count;
                                objloss.LossID   = row.LossCodeID;
                                objloss.LossName = row.tbllossescode.LossCodeDesc;
                                LossDuration     = Convert.ToDecimal(row.DurationInSec);
                                // NOTE(review): `percent` is computed but never used.
                                decimal percent = ((LossDuration) / (TotalLossDura));
                                objloss.LossPercent = (double)((LossDuration / TotalLossDura) * 100);



                                IdleLosslist.Add(objloss);
                            }
                        }
                    }
                    // Build the per-mode breakdown-loss percentage list.
                    if (getbrklosses.Count > 0)
                    {
                        int     count        = 0;
                        decimal LossDuration = 0;
                        foreach (var row in getbrklosses)
                        {
                            BreakdownLosses objloss = new BreakdownLosses();
                            if (row.LossCodeID != null)
                            {
                                count           += 1;
                                objloss.ID       = count;
                                objloss.LossID   = row.tbllossescode.LossCodeID;
                                objloss.LossName = row.tbllossescode.LossCodeDesc;
                                LossDuration     = Convert.ToDecimal(row.DurationInSec);
                                // NOTE(review): `percent` is computed but never used;
                                // LossPercent divides by the IDLE total, not the MNT
                                // total — confirm this is intended.
                                decimal percent = ((LossDuration) / (TotalLossDura));
                                objloss.LossPercent = (double)((LossDuration / Convert.ToInt32(TotalLossDuration)) * 100);
                                brkdwnlosslist.Add(objloss);
                            }
                        }
                    }

                    // Build the combined loss list across all completed modes.
                    if (getmodes.Count > 0)
                    {
                        int     count        = 0;
                        decimal LossDuration = 0;
                        foreach (var row in getmodes)
                        {
                            Losses objloss = new Losses();
                            if (row.LossCodeID != null)
                            {
                                count           += 1;
                                objloss.ID       = count;
                                objloss.LossID   = row.tbllossescode.LossCodeID;
                                objloss.LossName = row.tbllossescode.LossCodeDesc;
                                LossDuration     = Convert.ToDecimal(row.DurationInSec);
                                objloss.Duration = (double)((LossDuration / Convert.ToInt32(TotalLossDuration)) * 100);
                                losslist.Add(objloss);
                            }
                        }
                    }

                    //}
                    // De-duplicate each list by loss name, keeping the entry with the
                    // highest percentage/duration for each distinct name.
                    List <IDLELosses>      IdleLosslistdist   = new List <IDLELosses>();
                    List <BreakdownLosses> brkdwnlosslistdist = new List <BreakdownLosses>();
                    List <Losses>          losslistdist       = new List <Losses>();
                    var IdleLosslist1 = IdleLosslist.Select(m => new { m.LossName, m.LossID }).Distinct().ToList();
                    foreach (var row in IdleLosslist1)
                    {
                        var lossrow = IdleLosslist.Where(m => m.LossName == row.LossName).OrderByDescending(m => m.LossPercent).FirstOrDefault();
                        IdleLosslistdist.Add(lossrow);
                    }

                    var brkLosslist1 = brkdwnlosslist.Select(m => new { m.LossName, m.LossID }).Distinct().ToList();
                    foreach (var row in brkLosslist1)
                    {
                        var lossrow = brkdwnlosslist.Where(m => m.LossName == row.LossName).OrderByDescending(m => m.LossPercent).FirstOrDefault();
                        brkdwnlosslistdist.Add(lossrow);
                    }

                    var losslistdist1 = losslist.Select(m => new { m.LossName, m.LossID }).Distinct().ToList();
                    foreach (var row in losslistdist1)
                    {
                        var lossrow = losslist.Where(m => m.LossName == row.LossName).OrderByDescending(m => m.Duration).FirstOrDefault();
                        losslistdist.Add(lossrow);
                    }

                    #region Commented
                    //IdleLosslist = IdleLosslist.OrderByDescending(m => m.LossPercent).Take(5).ToList();

                    //brkdwnlosslist = brkdwnlosslist.OrderBy(m => m.LossName).Distinct().ToList();
                    //brkdwnlosslist = brkdwnlosslist.OrderByDescending(m => m.LossPercent).Take(5).ToList();

                    //losslist = losslist.OrderBy(m => m.LossName).Distinct().ToList();
                    //losslist = losslist.OrderByDescending(m => m.Duration).Take(5).ToList();
                    #endregion

                    // Take the top 5 of each de-duplicated list.
                    objoee.TopIDLELosses   = IdleLosslistdist.OrderByDescending(m => m.LossPercent).Take(5).ToList();
                    objoee.TopbrkdwnLosses = brkdwnlosslistdist.OrderByDescending(m => m.LossPercent).Take(5).ToList();
                    objoee.TopLosses       = losslistdist.OrderByDescending(m => m.Duration).Take(5).ToList();
                    DayOperatingTime      += (double)ProdRow.TotalOperatingTime;
                    DayDownTime           += (double)ProdRow.TotalLoss + (double)ProdRow.TotalMinorLoss;
                    //DayAcceptedQty += ProdRow.tblworkorderentry.Yield_Qty;
                    //DayRejectedQty += ProdRow.tblworkorderentry.ScrapQty;
                    DayPerformanceFactor += ProdRow.PerfromaceFactor;
                }
                // NOTE(review): DayAcceptedQty/DayRejectedQty are never accumulated
                // (the additions are commented out above), so TotQty falls back to 1
                // and DayQualityPercent — and therefore OEE — is always 0.
                int TotQty = (int)(DayAcceptedQty + DayRejectedQty);
                if (TotQty == 0)
                {
                    TotQty = 1;
                }
                // Avoid dividing by zero operating time below.
                double DayOpTime = DayOperatingTime;
                if (DayOperatingTime == 0)
                {
                    DayOpTime = 1;
                }
                // NOTE(review): these values are fractions (e.g. operating time / 24h),
                // not percentages, yet are truncated to int for the *Percent outputs
                // below — confirm the consumer expects 0/1-scale values.
                decimal DayAvailPercent       = (decimal)Math.Round(DayOperatingTime / (24 * 1), 2);
                decimal DayPerformancePercent = (decimal)Math.Round(DayPerformanceFactor / DayOpTime, 2);
                decimal DayQualityPercent     = (decimal)Math.Round((DayAcceptedQty / (TotQty)), 2);
                decimal DayOEEPercent         = (decimal)Math.Round((double)(DayAvailPercent) * (double)(DayPerformancePercent) * (double)(DayQualityPercent), 2);
                objoee.Machinetimes = getModeTimings(MachineID, correctedDate1);

                // Fall back to the recorded running time when no operating time was accumulated.
                if (DayOperatingTime == 0 && objoee.Machinetimes != null)
                {
                    DayOperatingTime = (Double)objoee.Machinetimes.RunningTime;
                }
                // Recompute after the fallback (the first computation above is then discarded).
                DayAvailPercent       = (decimal)Math.Round(DayOperatingTime / (24 * 1), 2);
                DayPerformancePercent = (decimal)Math.Round(DayPerformanceFactor / DayOpTime, 2);
                DayQualityPercent     = (decimal)Math.Round((DayAcceptedQty / (TotQty)), 2);
                DayOEEPercent         = (decimal)Math.Round((double)(DayAvailPercent) * (double)(DayPerformancePercent) * (double)(DayQualityPercent), 2);
                objoee.Availability   = Convert.ToInt32(DayAvailPercent);// AvailabilityPercentage
                objoee.Quality        = Convert.ToInt32(DayQualityPercent);
                objoee.Performance    = Convert.ToInt32(DayPerformancePercent);
                objoee.OEE            = Convert.ToInt32(DayOEEPercent);
            }
            objoee.Machinetimes = getModeTimings(MachineID, correctedDate1);
            return(objoee);
        }
        public void Run()
        {
            // Prepare data
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");
            var testFilePath          = Path.Combine(baseDataDirectoryPath, "Test-28x28_cntk_text.txt");

            // Define the input and output shape.
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Create the architecture.
            var input = Layers.Input(inputShape, dataType);
            // scale input between 0 and 1.
            var scaledInput = CNTKLib.ElementTimes(Constant.Scalar(0.00390625f, device), input);

            var network = scaledInput
                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()

                          .Dense(64, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Get input and target variables from network.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // setup loss and learner.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // setup trainer.
            var learner = Learners.RMSProp(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List <Learner> {
                learner
            });

            // Create the network.
            var model = new Model(trainer, network, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Setup minibatch sources.
            // Network will be trained using the training set,
            // and tested using the test set.

            // setup name to variable map.
            var nameToVariable = new Dictionary <string, Variable>
            {
                { "features", inputVariable },
                { "labels", targetVariable },
            };

            // The order of the training data is randomize.
            var train          = CreateMinibatchSource(trainFilePath, nameToVariable, randomize: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            // Notice randomization is switched off for test data.
            var test       = CreateMinibatchSource(testFilePath, nameToVariable, randomize: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Train the model using the training set.
            model.Fit(trainingSource, epochs: 5, batchSize: 64);

            // Evaluate the model using the test set.
            var(loss, metric) = model.Evaluate(testSource);

            // Write the test set loss and metric to debug output.
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");
        }
        public void Run()
        {
            // Locate the IMDB sparse text files on disk.
            var dataDirectory = @"E:\DataSets\Imdb";
            var trainDataPath = Path.Combine(dataDirectory, "imdb_sparse_train_50w.txt");
            var testDataPath  = Path.Combine(dataDirectory, "imdb_sparse_test_50w.txt");

            // Shapes: one-hot encoded vocabulary in, two sentiment classes out.
            // 129888 distinct words plus an offset for the sparse one-hot encoding.
            var inputShape      = new int[] { 129888 + 4 };
            var numberOfClasses = 2;
            var outputShape     = new int[] { numberOfClasses };

            // Model-wide data type and compute device.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Seeded weight/bias initialization keeps runs comparable.
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Make CNTK itself deterministic as well.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Architecture: embedding -> single LSTM layer -> dense softmax classifier.
            var network = Layers.Input(inputShape, dataType, isSparse: true)
                          .Embedding(32, weightInit(), dataType, device)
                          .LSTMStack(32, 1, weightInit(), false, device, dataType)
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Sequence data: classification is read from the last sequence element.
            network = CNTKLib.SequenceLast(network);

            // Input comes from the network graph; the target gets only a batch axis.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType,
                                                        dynamicAxes: new List <Axis>()
            {
                Axis.DefaultBatchAxis()
            },
                                                        isSparse: false);

            // Loss and evaluation metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // Trainer with an Adam learner over all network parameters.
            var learner = Learners.Adam(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List <Learner> {
                learner
            });

            // Wrap everything in the high-level model type and log its layout.
            var model = new Model(trainer, network, dataType, device);
            Trace.WriteLine(model.Summary());

            // Stream names used by the minibatch sources.
            var featuresName = "x";
            var targetsName  = "y";

            // Bind stream names to the corresponding variables.
            var nameToVariable = new Dictionary <string, Variable>
            {
                { featuresName, inputVariable },
                { targetsName, targetVariable },
            };

            // Training data is shuffled; test data is read in order.
            var train = CreateMinibatchSource(trainDataPath, featuresName, targetsName,
                                              numberOfClasses, inputShape, randomize: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            var test = CreateMinibatchSource(testDataPath, featuresName, targetsName,
                                             numberOfClasses, inputShape, randomize: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Fit, validating against the test source after each epoch.
            var history = model.Fit(trainingSource, epochs: 100, batchSize: 512,
                                    validationMinibatchSource: testSource);

            // Log the per-epoch loss/validation curves.
            TraceLossValidationHistory(history);

            // Final evaluation on the test set.
            var(loss, metric) = model.Evaluate(testSource);
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");

            // Log the first ten predictions; each sample holds a single element,
            // so the tensor data is used directly.
            var predictions = model.Predict(testSource)
                              .Take(10);
            Trace.WriteLine($"Predictions: [{string.Join(", ", predictions.Select(p => p.First()))}]");
        }
Exemple #23
0
        /// <summary>
        /// Removes the given loss entity from the context and persists the change.
        /// </summary>
        /// <param name="loss">The loss record to delete; must not be null.</param>
        /// <exception cref="ArgumentNullException">Thrown when <paramref name="loss"/> is null.</exception>
        public async Task DeleteLoss(Losses loss)
        {
            // Fail fast with the correct parameter name instead of letting the
            // context throw on a null entity deeper in the call.
            if (loss == null)
            {
                throw new ArgumentNullException(nameof(loss));
            }

            _context.Losses.Remove(loss);

            await _context.SaveChangesAsync();
        }
Exemple #24
0
        /// <summary>
        /// Trains the model on image data for <paramref name="epoches"/> epochs and returns
        /// the training history: "loss", the configured metric name and, when validation
        /// data is supplied, "val_loss" and "val_" + metric, one value per epoch.
        /// </summary>
        /// <param name="trainData">Training data; must be an ImageDataGenerator.</param>
        /// <param name="validationData">Optional validation data (ImageDataGenerator) or null.</param>
        /// <param name="epoches">Number of epochs to train.</param>
        /// <param name="batchSize">Minibatch size for both training and validation.</param>
        /// <param name="OnEpochStart">Callback invoked at the start of each epoch.</param>
        /// <param name="OnEpochEnd">Callback invoked at the end of each epoch.</param>
        /// <param name="onBatchStart">Callback invoked before each training minibatch.</param>
        /// <param name="OnBatchEnd">Callback invoked after each training minibatch.</param>
        /// <param name="shuffle">Accepted for interface compatibility; not used by this implementation.</param>
        /// <returns>History name to per-epoch values.</returns>
        public Dictionary <string, List <double> > Train(object trainData, object validationData, int epoches, int batchSize, On_Epoch_Start OnEpochStart, On_Epoch_End OnEpochEnd, On_Batch_Start onBatchStart, On_Batch_End OnBatchEnd, bool shuffle = false)
        {
            ImageDataGenerator train      = (ImageDataGenerator)trainData;
            ImageDataGenerator validation = validationData != null ? (ImageDataGenerator)validationData : null;
            Dictionary <string, List <double> > result = new Dictionary <string, List <double> >();
            var trainer      = Trainer.CreateTrainer(Model, lossFunc, metricFunc, learners);
            int currentEpoch = 1;
            Dictionary <string, double> metricsList = new Dictionary <string, double>();

            // Text-based generators need the variables up front to parse the files.
            if (train.GenType == ImageGenType.FromTextFile)
            {
                train.LoadTextData(featureVariable, labelVariable);
                if (validation != null)
                {
                    validation.LoadTextData(featureVariable, labelVariable);
                }
            }

            while (currentEpoch <= epoches)
            {
                metricsList.Clear();
                OnEpochStart(currentEpoch);
                int           miniBatchCount  = 1;
                List <double> miniBatchLosses = new List <double>();

                // NOTE(review): this loop runs while NextBatch returns false, whereas the
                // validation loop below runs while it returns true — confirm which
                // convention ImageDataGenerator.NextBatch actually uses.
                while (!train.NextBatch(batchSize))
                {
                    onBatchStart(currentEpoch, miniBatchCount);
                    trainer.TrainMinibatch(new Dictionary <Variable, Value> {
                        { featureVariable, train.CurrentBatchX }, { labelVariable, train.CurrentBatchY }
                    }, true, GlobalParameters.Device);
                    OnBatchEnd(currentEpoch, miniBatchCount, trainer.TotalNumberOfSamplesSeen(), trainer.PreviousMinibatchLossAverage(), new Dictionary <string, double>()
                    {
                        { metricName, trainer.PreviousMinibatchEvaluationAverage() }
                    });
                    miniBatchLosses.Add(trainer.PreviousMinibatchLossAverage());
                    miniBatchCount++;
                }

                if (!result.ContainsKey("loss"))
                {
                    result.Add("loss", new List <double>());
                }

                if (!result.ContainsKey(metricName))
                {
                    result.Add(metricName, new List <double>());
                }

                // Epoch loss is averaged over minibatches; the metric comes from the
                // last minibatch only, mirroring the trainer's accumulator.
                double lossValue   = miniBatchLosses.Average();
                double metricValue = trainer.PreviousMinibatchEvaluationAverage();
                result["loss"].Add(lossValue);
                result[metricName].Add(metricValue);
                metricsList.Add(metricName, metricValue);
                if (validation != null)
                {
                    if (!result.ContainsKey("val_loss"))
                    {
                        result.Add("val_loss", new List <double>());
                    }

                    if (!result.ContainsKey("val_" + metricName))
                    {
                        result.Add("val_" + metricName, new List <double>());
                    }

                    // The evaluation graph does not depend on the minibatch, so build it
                    // once per epoch instead of once per validation minibatch.
                    Variable actualVariable = CNTKLib.InputVariable(labelVariable.Shape, DataType.Float);
                    var      evalLossFunc   = Losses.Get(lossName, labelVariable, actualVariable);
                    var      evalMetricFunc = Metrics.Get(metricName, labelVariable, actualVariable);

                    List <double> totalEvalBatchLossList   = new List <double>();
                    List <double> totalEvalMetricValueList = new List <double>();
                    while (validation.NextBatch(batchSize))
                    {
                        Value actual       = EvaluateInternal(validation.CurrentBatchX);
                        Value expected     = validation.CurrentBatchY;
                        var   inputDataMap = new Dictionary <Variable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        var outputDataMap = new Dictionary <Variable, Value>()
                        {
                            { evalLossFunc.Output, null }
                        };

                        evalLossFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalLoss = outputDataMap[evalLossFunc.Output].GetDenseData <float>(evalLossFunc.Output).Select(x => x.First()).ToList();
                        totalEvalBatchLossList.Add(evalLoss.Average());

                        // Fresh maps for the metric pass; Evaluate fills the output slot.
                        inputDataMap = new Dictionary <Variable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        outputDataMap = new Dictionary <Variable, Value>()
                        {
                            { evalMetricFunc.Output, null }
                        };
                        evalMetricFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalMetric = outputDataMap[evalMetricFunc.Output].GetDenseData <float>(evalMetricFunc.Output).Select(x => x.First()).ToList();
                        totalEvalMetricValueList.Add(evalMetric.Average());
                    }

                    result["val_loss"].Add(totalEvalBatchLossList.Average());
                    metricsList.Add("val_loss", totalEvalBatchLossList.Average());
                    result["val_" + metricName].Add(totalEvalMetricValueList.Average());
                    metricsList.Add("val_" + metricName, totalEvalMetricValueList.Average());
                }

                OnEpochEnd(currentEpoch, trainer.TotalNumberOfSamplesSeen(), lossValue, metricsList);
                currentEpoch++;
            }

            return(result);
        }
Exemple #25
0
        /// <summary>
        /// Validates all dialog fields and, when valid, builds the resulting
        /// StaticMod, resolving conflicts with existing and Unimod modifications
        /// through user prompts, then closes the dialog with DialogResult.OK.
        /// Returns early (dialog stays open) on any validation failure.
        /// </summary>
        public void OkDialog()
        {
            var helper = new MessageBoxHelper(this);

            string name;

            if (!helper.ValidateNameTextBox(_editing ? (Control)textName : comboMod, out name))
            {
                return;
            }

            // Allow updating the original modification
            if (!_editing || !Equals(name, Modification.Name))
            {
                if (!ModNameAvailable(name))
                {
                    helper.ShowTextBoxError(_editing ? (Control)textName : comboMod,
                                            Resources.EditStaticModDlg_OkDialog_The_modification__0__already_exists, name);
                    return;
                }
            }

            string aas = comboAA.Text;

            if (string.IsNullOrEmpty(aas))
            {
                aas = null;
            }
            else
            {
                // Use the cleanest possible format.
                var sb = new StringBuilder();
                foreach (string aaPart in aas.Split(SEPARATOR_AA))
                {
                    string aa = aaPart.Trim();
                    if (aa.Length == 0)
                    {
                        continue;
                    }
                    if (sb.Length > 0)
                    {
                        sb.Append(", "); // Not L10N
                    }
                    sb.Append(aa);
                }
                // Fix: apply the normalized text. Previously the StringBuilder was
                // built but never used, leaving the raw combo box text in aas.
                aas = sb.ToString();
            }

            string      termString = comboTerm.SelectedItem.ToString();
            ModTerminus?term       = null;

            if (!string.IsNullOrEmpty(termString))
            {
                term = (ModTerminus)Enum.Parse(typeof(ModTerminus), termString);
            }

            if (cbVariableMod.Checked && aas == null && term == null)
            {
                MessageDlg.Show(this, Resources.EditStaticModDlg_OkDialog_Variable_modifications_must_specify_amino_acid_or_terminus);
                comboAA.Focus();
                return;
            }

            string     formula    = null;
            double?    monoMass   = null;
            double?    avgMass    = null;
            LabelAtoms labelAtoms = LabelAtoms.None;

            if (cbChemicalFormula.Checked)
            {
                formula = Formula;
            }
            else
            {
                labelAtoms = LabelAtoms;
            }

            // Get the losses to know whether any exist below
            IList <FragmentLoss> losses = null;

            if (listNeutralLosses.Items.Count > 0)
            {
                losses = Losses.ToArray();
            }

            if (!string.IsNullOrEmpty(formula))
            {
                try
                {
                    SequenceMassCalc.FormulaMass(BioMassCalc.MONOISOTOPIC, formula, SequenceMassCalc.MassPrecision);
                }
                catch (ArgumentException x)
                {
                    _formulaBox.ShowTextBoxErrorFormula(helper, x.Message);
                    return;
                }
            }
            else if (labelAtoms == LabelAtoms.None)
            {
                formula = null;

                // Allow formula and both masses to be empty, if losses are present
                if (NotZero(_formulaBox.MonoMass) || NotZero(_formulaBox.AverageMass) || losses == null)
                {
                    // TODO: Maximum and minimum masses should be formalized and applied everywhere
                    double mass;
                    if (!_formulaBox.ValidateMonoText(helper, -1500, 5000, out mass))
                    {
                        return;
                    }
                    monoMass = mass;
                    if (!_formulaBox.ValidateAverageText(helper, -1500, 5000, out mass))
                    {
                        return;
                    }
                    avgMass = mass;
                }
                // Loss-only modifications may not be variable
                else if (cbVariableMod.Checked)
                {
                    MessageDlg.Show(this, Resources.EditStaticModDlg_OkDialog_The_variable_checkbox_only_applies_to_precursor_modification_Product_ion_losses_are_inherently_variable);
                    cbVariableMod.Focus();
                    return;
                }
            }
            else if (aas == null && term.HasValue)
            {
                MessageDlg.Show(this, Resources.EditStaticModDlg_OkDialog_Labeled_atoms_on_terminal_modification_are_not_valid);
                return;
            }

            RelativeRT relativeRT = RelativeRT.Matching;

            if (comboRelativeRT.Visible && comboRelativeRT.SelectedItem != null)
            {
                relativeRT = RelativeRTExtension.GetEnum(comboRelativeRT.SelectedItem.ToString());
            }

            // Store state of the chemical formula checkbox for next use.
            if (cbChemicalFormula.Visible)
            {
                Settings.Default.ShowHeavyFormula = _formulaBox.FormulaVisible;
            }

            var newMod = new StaticMod(name,
                                       aas,
                                       term,
                                       cbVariableMod.Checked,
                                       formula,
                                       labelAtoms,
                                       relativeRT,
                                       monoMass,
                                       avgMass,
                                       losses);

            // Warn when an equivalent modification already exists under another name.
            foreach (StaticMod mod in _existing)
            {
                if (newMod.Equivalent(mod) && !(_editing && mod.Equals(_originalModification)))
                {
                    if (DialogResult.OK == MultiButtonMsgDlg.Show(
                            this,
                            TextUtil.LineSeparate(Resources.EditStaticModDlg_OkDialog_There_is_an_existing_modification_with_the_same_settings,
                                                  string.Format("'{0}'.", mod.Name), // Not L10N
                                                  string.Empty,
                                                  Resources.EditStaticModDlg_OkDialog_Continue),
                            MultiButtonMsgDlg.BUTTON_OK))
                    {
                        Modification = newMod;
                        DialogResult = DialogResult.OK;
                    }
                    return;
                }
            }

            var uniMod = UniMod.GetModification(name, IsStructural);

            // If the modification name is not found in Unimod, check if there exists a modification in Unimod that matches
            // the dialog modification, and prompt the user to to use the Unimod modification instead.
            if (uniMod == null)
            {
                var matchingMod = UniMod.FindMatchingStaticMod(newMod, IsStructural);
                if (matchingMod != null &&
                    (ModNameAvailable(matchingMod.Name) ||
                     (_editing && Equals(matchingMod.Name, Modification.Name))))
                {
                    var result = MultiButtonMsgDlg.Show(
                        this,
                        TextUtil.LineSeparate(Resources.EditStaticModDlg_OkDialog_There_is_a_Unimod_modification_with_the_same_settings,
                                              string.Empty,
                                              string.Format(Resources.EditStaticModDlg_OkDialog_Click__Unimod__to_use_the_name___0___, matchingMod.Name),
                                              string.Format(Resources.EditStaticModDlg_OkDialog_Click__Custom__to_use_the_name___0___, name)),
                        Resources.EditStaticModDlg_OkDialog_Unimod,
                        Resources.EditStaticModDlg_OkDialog_Custom,
                        true);
                    if (result == DialogResult.Yes)
                    {
                        newMod = matchingMod.MatchVariableAndLossInclusion(newMod);   // Unimod
                    }
                    if (result == DialogResult.Cancel)
                    {
                        return;
                    }
                }
            }
            else
            {
                // If the dialog modification matches the modification of the same name in Unimod,
                // use the UnimodId.
                if (newMod.Equivalent(uniMod))
                {
                    newMod = uniMod.MatchVariableAndLossInclusion(newMod);
                }
                else
                {
                    // Finally, if the modification name is found in Unimod, but the modification in Unimod does not
                    // match the dialog modification, prompt the user to use the Unimod modification definition instead.
                    if (DialogResult.OK != MultiButtonMsgDlg.Show(
                            this,
                            TextUtil.LineSeparate(string.Format(Resources.EditStaticModDlg_OkDialog_This_modification_does_not_match_the_Unimod_specifications_for___0___, name),
                                                  string.Empty,
                                                  Resources.EditStaticModDlg_OkDialog_Use_non_standard_settings_for_this_name),
                            MultiButtonMsgDlg.BUTTON_OK))
                    {
                        return;
                    }
                }
            }

            _modification = newMod;

            DialogResult = DialogResult.OK;
        }
Exemple #26
0
        /// <summary>
        /// Builds the Bootstrap card HTML shown in the pop-down menus and in the
        /// header of a searched profile: identity and trophies, win/loss record,
        /// favorite card, clan membership, donations and the current deck.
        /// </summary>
        /// <returns>The complete card markup as a single HTML string.</returns>
        public override string ToString()
        {
            string returnString = "<div class=\"card card-group\"> <div class=\"container-fluid d-inline-flex\"> <div class=\"col-2\"> <p><b>Name:</b>" + Name + "</p> <p><b>Tag:</b>" + Tag + "</p> <p><b>Level:</b>" + ExpLevel.ToString() + "</p>";

            // Star points are only awarded at the maximum level.
            if (ExpLevel == 13)
            {
                returnString += "<p><b>Star Points:</b>" + StarPoints.ToString() + "</p>";
            }

            returnString += "<p><b>Current Trophies:</b>" + Trophies.ToString() + "</p><p><b>Highest Trophies:</b>" + BestTrophies + "</p></div>";

            returnString += "<div class=\"col-2\"><p><b>All Time Wins:</b>" + Wins.ToString() + "</p><p><b>All Time Losses:</b>" + Losses.ToString() + "</p>";
            returnString += "<p><b>Current Favorite Card:</b>" + CurrentFavouriteCard.Name + "</p><img class=\"text-center\" src=\"" + CurrentFavouriteCard.Url + "\" width=\"64px\" />";
            returnString += "<p><b>Cards Discovered:</b>" + CardsDiscovered.ToString() + "/" + CardsInGame.ToString() + "</p></div>";

            returnString += "<div class=\"col-3\">";
            // A single null check covers both clan lines (previously duplicated).
            if (Clan != null)
            {
                returnString += "<p><b>Clan Name:</b>" + Clan.Name + "</p>";
                returnString += "<p><b>Clan Tag:</b>" + ClanTag + "</p>";
            }
            else
            {
                returnString += "<h2 class=\"text-center m-2\">Not In a Clan</h2>";
            }

            returnString += "<p><b>Recent Donations:</b>" + Donations.ToString() + "</p><p><b>Recent Donations Received:</b>" + DonationsReceived.ToString() + "</p>";
            returnString += "<p><b>Total Donations:</b>" + TotalDonations.ToString() + "</p><p><b>Total Donations Received:</b>" + ClanCardsCollected.ToString() + "</p></div>";


            returnString += "<div class=\"col-5 m-0\"><div class=\"text-center\"><p><b>Current Deck</b></p> " + Deck.ToString() + "<p>Profile Updated:" + UpdateTime + "</p></div></div></div></div>";

            return(returnString);
        }
Exemple #27
0
        //[TestMethod]
        /// <summary>
        /// Exercises the low-level Fitter training loop on artificial data:
        /// a two-layer dense classifier trained with momentum SGD, logging
        /// accumulated loss/metric once per full data sweep (epoch).
        /// </summary>
        public void Fitter_Loop()
        {
            // Problem definition: flat 28x28x1 inputs, 10 classes.
            var inputShape      = new int[] { 28, 28, 1 };
            var numberOfClasses = 10;
            var outputShape     = new int[] { numberOfClasses };

            (var observations, var targets) = CreateArtificialData(inputShape, outputShape, observationCount: 10000);

            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Seeded initializers so the run is repeatable.
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Simple two-layer dense classifier.
            var network = Layers.Input(inputShape, dataType)
                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Input comes from the network graph; target matches the output shape.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(network.Output.Shape, dataType);

            // Loss, metric and trainer.
            var loss   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metric = Metrics.Accuracy(network.Output, targetVariable);

            var learner = Learners.MomentumSGD(network.Parameters());
            var trainer = CNTKLib.CreateTrainer(network, loss, metric, new LearnerVector {
                learner
            });

            // Wire data to variables by stream name.
            var observationsName = "observations";
            var targetsName      = "targets";

            var nameToVariable = new Dictionary <string, Variable>
            {
                { observationsName, inputVariable },
                { targetsName, targetVariable },
            };

            var nameToData = new Dictionary <string, MemoryMinibatchData>
            {
                { observationsName, observations },
                { targetsName, targets }
            };

            var minibatchSource = new MemoryMinibatchSource(nameToVariable, nameToData, seed: 232, randomize: true);

            // Fitter drives the trainer one minibatch at a time.
            // (An unused inputMap local was removed from the original.)
            var fitter = new Fitter(trainer, device);

            var epochs    = 10;
            int batchSize = 32;

            // An epoch completes when the source signals the end of a data sweep.
            var epoch = 0;
            while (epoch < epochs)
            {
                var(minibatch, isSweepEnd) = minibatchSource.GetNextMinibatch(batchSize, device);
                fitter.FitNextStep(minibatch, batchSize);

                if (isSweepEnd)
                {
                    // Read the accumulated values before resetting for the next sweep.
                    var currentLoss   = fitter.CurrentLoss;
                    var currentMetric = fitter.CurrentMetric;
                    fitter.ResetLossAndMetricAccumulators();

                    epoch++;

                    Trace.WriteLine($"Epoch: {epoch:000} Loss = {currentLoss:F8}, Metric = {currentMetric:F8}");
                }
            }
        }
Exemple #28
0
        public void Run()
        {
            // Locate the cats-vs-dogs images and build the CNTK map files.
            var dataDirectory = @"E:\DataSets\CatsAndDogs";
            var mapFiles = PrepareMapFiles(dataDirectory);

            // 150x150 RGB images in, two classes out.
            var inputShape      = new int[] { 150, 150, 3 };
            var numberOfClasses = 2;
            var outputShape     = new int[] { numberOfClasses };

            // Model-wide data type and compute device.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Seeded weight/bias initialization keeps runs comparable.
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotNormal(random.Next());
            var biasInit = Initializers.Zero();

            // Make CNTK itself deterministic as well.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Four conv/pool stages followed by a dense softmax classifier.
            var network = Layers.Input(inputShape, dataType)
                          .Conv2D((3, 3), 32, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 64, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 128, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Conv2D((3, 3), 128, (1, 1), Padding.None, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .MaxPool2D((2, 2), (2, 2), Padding.None)

                          .Dense(512, weightInit(), biasInit, device, dataType)
                          .ReLU()
                          .Dense(numberOfClasses, weightInit(), biasInit, device, dataType)
                          .Softmax();

            // Input comes from the network graph; target is a plain input variable.
            var inputVariable  = network.Arguments[0];
            var targetVariable = Variable.InputVariable(outputShape, dataType);

            // Loss and evaluation metric.
            var lossFunc   = Losses.CategoricalCrossEntropy(network.Output, targetVariable);
            var metricFunc = Metrics.Accuracy(network.Output, targetVariable);

            // Trainer with an RMSProp learner over all network parameters.
            var learner = Learners.RMSProp(network.Parameters());
            var trainer = Trainer.CreateTrainer(network, lossFunc, metricFunc, new List <Learner> {
                learner
            });

            // Wrap everything in the high-level model type and log its layout.
            var model = new Model(trainer, network, dataType, device);
            Trace.WriteLine(model.Summary());

            // Bind stream names to variables for the minibatch sources.
            var featuresName = "features";
            var targetsName  = "targets";

            var nameToVariable = new Dictionary <string, Variable>
            {
                { featuresName, inputVariable },
                { targetsName, targetVariable },
            };

            // Augmentation only for training data; validation and test are read as-is.
            var train = CreateMinibatchSource(mapFiles.trainFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, augmentation: true);
            var trainingSource = new CntkMinibatchSource(train, nameToVariable);

            var valid = CreateMinibatchSource(mapFiles.validFilePath, featuresName, targetsName,
                                              numberOfClasses, inputShape, augmentation: false);
            var validationSource = new CntkMinibatchSource(valid, nameToVariable);

            var test = CreateMinibatchSource(mapFiles.testFilePath, featuresName, targetsName,
                                             numberOfClasses, inputShape, augmentation: false);
            var testSource = new CntkMinibatchSource(test, nameToVariable);

            // Fit with per-epoch validation against the validation source.
            model.Fit(trainMinibatchSource: trainingSource,
                      epochs: 100, batchSize: 32,
                      validationMinibatchSource: validationSource);

            // Final evaluation on the held-out test set.
            var(loss, metric) = model.Evaluate(testSource);
            Trace.WriteLine($"Test set - Loss: {loss}, Metric: {metric}");

            // Persist the trained network.
            model.Network.Save("cats_and_dogs_small_2.cntk");
        }
Exemple #29
0
        /// <summary>
        /// Trains the model on <paramref name="trainData"/> for the given number of epochs,
        /// optionally evaluating on <paramref name="validationData"/> after each epoch.
        /// </summary>
        /// <param name="trainData">Training data; must be an <see cref="XYFrame"/>.</param>
        /// <param name="validationData">Optional validation data; an <see cref="XYFrame"/> or null.</param>
        /// <param name="epoches">Number of epochs to run.</param>
        /// <param name="batchSize">Minibatch size used for both training and validation.</param>
        /// <param name="OnEpochStart">Callback invoked before each epoch.</param>
        /// <param name="OnEpochEnd">Callback invoked after each epoch with samples seen, loss and metrics.</param>
        /// <param name="onBatchStart">Callback invoked before each training minibatch.</param>
        /// <param name="OnBatchEnd">Callback invoked after each training minibatch.</param>
        /// <param name="shuffle">When true, shuffles the training frame at the start of every epoch.</param>
        /// <returns>
        /// Per-epoch history keyed by series name: "loss", the metric name, and — when
        /// validation data is supplied — "val_loss" and "val_" + metric name.
        /// </returns>
        public Dictionary <string, List <double> > Train(object trainData, object validationData, int epoches, int batchSize, On_Epoch_Start OnEpochStart, On_Epoch_End OnEpochEnd, On_Batch_Start onBatchStart, On_Batch_End OnBatchEnd, bool shuffle = false)
        {
            XYFrame train      = (XYFrame)trainData;
            XYFrame validation = validationData != null ? (XYFrame)validationData : null;
            Dictionary <string, List <double> > result = new Dictionary <string, List <double> >();
            var trainer      = Trainer.CreateTrainer(Model, lossFunc, metricFunc, learners);
            int currentEpoch = 1;
            Dictionary <string, double> metricsList = new Dictionary <string, double>();

            while (currentEpoch <= epoches)
            {
                if (shuffle)
                {
                    train.Shuffle();
                }

                metricsList = new Dictionary <string, double>();
                OnEpochStart(currentEpoch);
                int miniBatchCount = 1;
                while (train.NextBatch(miniBatchCount, batchSize))
                {
                    onBatchStart(currentEpoch, miniBatchCount);
                    Value features = DataFrameUtil.GetValueBatch(train.CurrentBatch.XFrame);
                    Value labels   = DataFrameUtil.GetValueBatch(train.CurrentBatch.YFrame);

                    trainer.TrainMinibatch(new Dictionary <Variable, Value>()
                    {
                        { featureVariable, features }, { labelVariable, labels }
                    }, GlobalParameters.Device);
                    OnBatchEnd(currentEpoch, miniBatchCount, trainer.TotalNumberOfSamplesSeen(), trainer.PreviousMinibatchLossAverage(), new Dictionary <string, double>()
                    {
                        { metricName, trainer.PreviousMinibatchEvaluationAverage() }
                    });
                    miniBatchCount++;
                }

                if (!result.ContainsKey("loss"))
                {
                    result.Add("loss", new List <double>());
                }

                if (!result.ContainsKey(metricName))
                {
                    result.Add(metricName, new List <double>());
                }

                // NOTE(review): the recorded epoch loss/metric come from the LAST trained
                // minibatch only (not an average over the epoch) — confirm this is intended.
                double lossValue   = trainer.PreviousMinibatchLossAverage();
                double metricValue = trainer.PreviousMinibatchEvaluationAverage();
                result["loss"].Add(lossValue);
                result[metricName].Add(metricValue);
                metricsList.Add(metricName, metricValue);
                if (validation != null)
                {
                    if (!result.ContainsKey("val_loss"))
                    {
                        result.Add("val_loss", new List <double>());
                    }

                    if (!result.ContainsKey("val_" + metricName))
                    {
                        result.Add("val_" + metricName, new List <double>());
                    }

                    // The evaluation graph depends only on the label shape and the configured
                    // loss/metric names, so build it once per epoch. (Previously these CNTK
                    // functions were reconstructed inside the batch loop on every iteration,
                    // creating unmanaged objects per validation minibatch for no benefit.)
                    Variable actualVariable = CNTKLib.InputVariable(labelVariable.Shape, DataType.Float);
                    var      evalLossFunc   = Losses.Get(lossName, labelVariable, actualVariable);
                    var      evalMetricFunc = Metrics.Get(metricName, labelVariable, actualVariable);

                    int           evalMiniBatchCount       = 1;
                    List <double> totalEvalBatchLossList   = new List <double>();
                    List <double> totalEvalMetricValueList = new List <double>();
                    while (validation.NextBatch(evalMiniBatchCount, batchSize))
                    {
                        Value actual   = EvaluateInternal(validation.CurrentBatch.XFrame);
                        Value expected = DataFrameUtil.GetValueBatch(validation.CurrentBatch.YFrame);

                        // Batch loss: evaluate and average the per-sample loss values.
                        var inputDataMap = new Dictionary <Variable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        var outputDataMap = new Dictionary <Variable, Value>()
                        {
                            { evalLossFunc.Output, null }
                        };
                        evalLossFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalLoss = outputDataMap[evalLossFunc.Output].GetDenseData <float>(evalLossFunc.Output).Select(x => x.First()).ToList();
                        totalEvalBatchLossList.Add(evalLoss.Average());

                        // Batch metric: second Evaluate call with fresh data maps,
                        // matching the original per-call map construction.
                        inputDataMap = new Dictionary <Variable, Value>()
                        {
                            { labelVariable, expected }, { actualVariable, actual }
                        };
                        outputDataMap = new Dictionary <Variable, Value>()
                        {
                            { evalMetricFunc.Output, null }
                        };
                        evalMetricFunc.Evaluate(inputDataMap, outputDataMap, GlobalParameters.Device);
                        var evalMetric = outputDataMap[evalMetricFunc.Output].GetDenseData <float>(evalMetricFunc.Output).Select(x => x.First()).ToList();
                        totalEvalMetricValueList.Add(evalMetric.Average());

                        evalMiniBatchCount++;
                    }

                    result["val_loss"].Add(totalEvalBatchLossList.Average());
                    metricsList.Add("val_loss", totalEvalBatchLossList.Average());
                    result["val_" + metricName].Add(totalEvalMetricValueList.Average());
                    metricsList.Add("val_" + metricName, totalEvalMetricValueList.Average());
                }

                OnEpochEnd(currentEpoch, trainer.TotalNumberOfSamplesSeen(), lossValue, metricsList);
                currentEpoch++;
            }

            return(result);
        }
        /// <summary>
        /// Trains a variational autoencoder (VAE) on the MNIST CNTK text-format data set,
        /// then uses the decoder part of the network to generate a 15x15 grid of images
        /// sampled from the learned latent space and displays them in a plot window.
        /// </summary>
        public void Run()
        {
            // Prepare data.
            var baseDataDirectoryPath = @"E:\DataSets\Mnist";
            var trainFilePath         = Path.Combine(baseDataDirectoryPath, "Train-28x28_cntk_text.txt");

            // Define data type and device for the model.
            var dataType = DataType.Float;
            var device   = DeviceDescriptor.UseDefaultDevice();

            // Setup initializers.
            var random = new Random(232);
            Func <CNTKDictionary> weightInit = () => Initializers.GlorotUniform(random.Next());
            var biasInit = Initializers.Zero();

            // Ensure reproducible results with CNTK.
            CNTKLib.SetFixedRandomSeed((uint)random.Next());
            CNTKLib.ForceDeterministicAlgorithms();

            // Setup input dimensions and variables. The scale factor 0.00390625 (= 1/256)
            // maps raw pixel values into [0;1). Local renamed from PascalCase 'InputVariable'
            // to camelCase to follow C# conventions and avoid visual collision with the
            // Variable.InputVariable factory method.
            var inputShape          = NDShape.CreateNDShape(new int[] { 28, 28, 1 });
            var inputVariable       = Variable.InputVariable(inputShape, dataType);
            var scaledInputVariable = CNTKLib.ElementTimes(Constant.Scalar(0.00390625f, device), inputVariable);

            const int latentSpaceSize = 2;

            // Setup the encoder, this encodes the input into a mean and variance parameter.
            var(mean, logVariance) = Encoder(scaledInputVariable, latentSpaceSize, weightInit, biasInit, device, dataType);

            // Add latent space sampling. This draws a latent point using a small random epsilon.
            // NOTE(review): conventional VAE reparameterization scales epsilon by the standard
            // deviation exp(0.5 * logVariance); this code uses exp(logVariance) — confirm intended.
            var epsilon            = CNTKLib.NormalRandom(new int[] { latentSpaceSize }, dataType);
            var latentSpaceSampler = CNTKLib.Plus(mean, CNTKLib.ElementTimes(CNTKLib.Exp(logVariance), epsilon));

            // Add decoder, this decodes from latent space back to an image.
            var encoderDecoderNetwork = Decoder(latentSpaceSampler, weightInit, biasInit, device, dataType);

            // Create minibatch source for providing the input images.
            var nameToVariable = new Dictionary <string, Variable> {
                { "features", encoderDecoderNetwork.Arguments[0] }
            };
            var minibatchSource = CreateMinibatchSource(trainFilePath, nameToVariable, randomize: true);

            // Reconstruction loss. Forces the decoded samples to match the initial inputs.
            var reconstructionLoss = Losses.BinaryCrossEntropy(encoderDecoderNetwork.Output, scaledInputVariable);

            // Regularization loss. Helps in learning well-formed latent spaces and reducing overfitting to the training data.
            var regularizationLoss = RegularizationLoss(mean, logVariance, dataType);

            // Overall loss function. Sum of reconstruction- and regularization loss.
            var loss = CNTKLib.Plus(reconstructionLoss, regularizationLoss);

            // Setup trainer. The loss is used both as training objective and as the
            // reported evaluation metric.
            var learner = Learners.Adam(encoderDecoderNetwork.Parameters(), learningRate: 0.001, momentum: 0.9);
            var trainer = Trainer.CreateTrainer(encoderDecoderNetwork, loss, loss, new List <Learner> {
                learner
            });
            // Create model.
            var model = new Model(trainer, encoderDecoderNetwork, dataType, device);

            // Write model summary.
            Trace.WriteLine(model.Summary());

            // Train the model.
            model.Fit(minibatchSource, batchSize: 16, epochs: 10);

            //// Use the decoder to sample 15x15 images across the latent space.

            // Image generation only requires use of the decoder part of the network.
            // Clone and replace the latentSpaceSampler from training with a new decoder input variable.
            var decoderInputVariable = Variable.InputVariable(latentSpaceSampler.Output.Shape, dataType);
            var replacements         = new Dictionary <Variable, Variable>()
            {
                { latentSpaceSampler, decoderInputVariable }
            };
            var decoder = encoderDecoderNetwork.Clone(ParameterCloningMethod.Freeze, replacements);

            // Sample 15x15 samples from the latent space.
            var minibatch = SampleMinibatchForGrid(device, decoderInputVariable, gridSize: 15);

            // Transform from points in latent space to images using the decoder.
            var predictor  = new Predictor(decoder, device);
            var images     = predictor.PredictNextStep(minibatch);
            var imagesData = images.SelectMany(t => t).ToArray();

            // Show examples in a 28x28x1 grayscale plot window.
            var app    = new Application();
            var window = new PlotWindowBitMap("Generated Images", imagesData, 28, 28, 1, true);

            window.Show();
            app.Run(window);
        }