public ActionResult CreateSamples()
        {
            // Populate the sample data set via a fresh consolidator, then
            // render the default view for this action.
            var sampleBuilder = new Consolidator();
            sampleBuilder.CreateSampleData();

            return View();
        }
Example No. 2
0
        public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> predicate, int n, IRandom rand = null)
        {
            // Ensure the nearest-neighbor computation has run before any cursor is handed out.
            ComputeNearestNeighbors();
            _host.AssertValue(_input, "_input");
            var schema = _input.Schema;

            if (!predicate(_input.Schema.ColumnCount))
            {
                // The new column is not required. We do not need to compute it,
                // but we must keep the same schema, so wrap each inner cursor.
                return _input.GetRowCursorSet(out consolidator, predicate, n, rand)
                       .Select(c => new SameCursor(c, Schema))
                       .ToArray();
            }

            // The new column is requested: locate the feature column it is built from.
            int featureIndex;
            if (!schema.TryGetColumnIndex(_args.column, out featureIndex))
            {
                throw _host.Except("Unable to find column '{0}'.", _args.column);
            }

            var wrapped = _input.GetRowCursorSet(out consolidator, predicate, n, rand)
                          .Select(c => new NearestNeighborsCursor(c, this, i => PredicatePropagation(i, featureIndex, predicate), featureIndex)).ToArray();
            // Replace the inner consolidator with this transform's own (matches original behavior).
            consolidator = new Consolidator();
            return wrapped;
        }
            /// <summary>
            /// Creates the set of cursors used to read the text source, possibly
            /// in parallel, along with the consolidator that merges their output.
            /// Returns a single cursor (and a null consolidator) when only one
            /// thread is warranted.
            /// </summary>
            public static IRowCursor[] CreateSet(out IRowCursorConsolidator consolidator,
                                                 TextLoader parent, IMultiStreamSource files, bool[] active, int n)
            {
                // Note that files is allowed to be empty.
                Contracts.AssertValue(parent);
                Contracts.AssertValue(files);
                Contracts.Assert(active == null || active.Length == parent._bindings.Infos.Length);

                int srcNeeded;
                int cthd;

                // Determine which source columns are needed and how many cursor threads to use.
                SetupCursor(parent, active, n, out srcNeeded, out cthd);
                Contracts.Assert(cthd > 0);

                var reader = new LineReader(files, BatchSize, 100, parent.HasHeader, parent._maxRows, cthd);
                var stats  = new ParseStats(parent._host, cthd);

                // Single-threaded case: one cursor, no consolidation required.
                if (cthd <= 1)
                {
                    consolidator = null;
                    return(new IRowCursor[1] {
                        new Cursor(parent, stats, active, reader, srcNeeded, 1)
                    });
                }

                consolidator = new Consolidator(cthd);
                var cursors = new IRowCursor[cthd];

                try
                {
                    for (int i = 0; i < cursors.Length; i++)
                    {
                        cursors[i] = new Cursor(parent, stats, active, reader, srcNeeded, 1);
                    }
                    // Success: transfer ownership to the caller. Nulling the local
                    // prevents the finally block from disposing the cursors.
                    var result = cursors;
                    cursors = null;
                    return(result);
                }
                finally
                {
                    // Failure path: dispose every cursor that was created. For
                    // slots left null, release reader/stats directly — presumably
                    // each constructed Cursor takes one reference on each, so the
                    // unfilled slots' references must be dropped here (TODO:
                    // confirm against Cursor's constructor).
                    if (cursors != null)
                    {
                        foreach (var curs in cursors)
                        {
                            if (curs != null)
                            {
                                curs.Dispose();
                            }
                            else
                            {
                                reader.Release();
                                stats.Release();
                            }
                        }
                    }
                }
            }
        public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> predicate, int n, IRandom rand = null)
        {
            // Statistics must be available before any scaled value is served.
            ComputeStatistics();
            _host.AssertValue(_input, "_input");

            // Wrap every inner cursor in a scaler cursor that propagates the
            // column predicate down to the source.
            var inner = _input.GetRowCursorSet(out consolidator, i => PredicatePropagation(i, predicate), n, rand);
            var wrapped = new IRowCursor[inner.Length];
            for (int i = 0; i < inner.Length; ++i)
            {
                wrapped[i] = new ScalerCursor(inner[i], this, j => PredicatePropagation(j, predicate));
            }

            // Replace the inner consolidator with this transform's own (matches original behavior).
            consolidator = new Consolidator();
            return wrapped;
        }
        public ActionResult WorkSchedule()
        {
            // Hard-coded demo query: one user over one fixed week.
            const int userId = 1004;
            var startTime = DateTime.Parse("2017-06-26 00:00:00");
            var endTime   = DateTime.Parse("2017-07-02 23:59:59");

            var consolidator = new Consolidator();
            List <Pass> schedule = consolidator.GetWorkSchedule(userId, startTime, endTime);
            return View(schedule);
        }
        // Routed-command handler that runs the reconciliation logic over the
        // current view model. NOTE(review): "Excuted" is a typo for "Executed",
        // but the name is likely bound from XAML — do not rename without
        // updating the binding.
        private void Excuted_Consolidate(object sender, ExecutedRoutedEventArgs e)
        {
            //MessageBox.Show("开始执行对账逻辑");
            // (Disabled popup above announced "starting reconciliation logic".)
            // First pass: simple comparison, matching single entries one-to-one.
            Consolidator.Consolidate(ViewModel);
#if DEBUG
            // Local kept only so the matched-items ListBox is easy to inspect
            // in the debugger; unused otherwise.
            ListBox lbx = lbxMatchedCollection;


            return;
#endif
        }
Example No. 7
0
 protected Processor()
 {
     // Wire up the processing pipeline stages. Each stage receives this
     // processor so it can reach shared state; Store is created first since
     // it holds that shared data.
     this.Store = new Store();
     this.Extractor = new Extractor(this);
     this.Preparer = new Preparer(this);
     this.JsDuck = new JsDuck(this);
     this.Reader = new Reader(this);
     this.Consolidator = new Consolidator(this);
     this.SpecialsGenerator = new SpecialsGenerator(this);
     this.TypesChecker = new TypesChecker(this);
     this.ResultsGenerator = new ResultsGenerator(this);

     // Diagnostic/bookkeeping containers start out empty.
     this.Exceptions = new List<Exception>();
     this.JsDuckErrors = new List<string>();
     this.ProcessingInfo = new ProcessingInfo();
 }
Example No. 8
0
        /// <summary>
        /// Final pipeline stage: generates the result TypeScript type
        /// definition files, reports completion through the processing-info
        /// handler, then drops references to heavyweight components so they
        /// can be garbage collected.
        /// </summary>
        protected void processGenerateResults()
        {
            // The consolidator's work is done before this stage; release it first.
            this.Consolidator = null;
            this.ProcessingInfo.StageIndex = 17;
            this.ProcessingInfo.StageName  = "Generating result TypeScript type definitions files.";
            // Item total used for progress reporting: all extracted classes
            // minus callbacks, plus the unknown types.
            this.allClassesCount           = (double)(
                (this.Store.ExtAllClasses.Count - this.Store.ExtCallbackClasses.Count)
                + this.Store.UnknownTypes.Count
                );
            this.ResultsGenerator.GenerateResults(
                this.progressHandlerCycleProcessing
                );

            // Signal completion to any observer.
            this.ProcessingInfo.InfoText = "Finished.";
            this.ProcessingInfo.Progress = 100.0;
            this.ProcessingInfoHandler.Invoke(this.ProcessingInfo);

            // Release the remaining large components.
            this.Reader           = null;
            this.ResultsGenerator = null;
            this.Store            = null;
        }
        /// <summary>
        /// Returns a cursor set over this view. When n >= 2, only the first
        /// cursor carries data; the remaining slots are filled with an empty
        /// cursor so the caller still sees n cursors but no real split occurs.
        /// </summary>
        public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> needCol, int n, IRandom rand = null)
        {
            var cur = GetRowCursor(needCol, rand);

            consolidator = new Consolidator();
            if (n >= 2)
            {
                var setColumns = new HashSet <int>(_columns);
                var res        = new IRowCursor[n];
                // The empty cursor must report the same active columns as the
                // real one so downstream consumers see a consistent schema.
                var empty      = new EmptyCursor(this,
                                                 col => setColumns.Contains(col) || needCol(col) || (_otherValues != null && _otherValues.IsColumnActive(col)));
                for (int i = 0; i < n; ++i)
                {
                    res[i] = i == 0 ? cur : empty;
                }
                return(res);
            }

            // Single-cursor request: hand back the lone cursor.
            // BUGFIX: the original read "return new IRowCursor[] { cur }" with the
            // statement's semicolon misplaced after the closing brace ("};"),
            // which does not compile.
            return new IRowCursor[] { cur };
        }
        /// <summary>
        /// Returns a cursor set over this view. When n >= 2, only the first
        /// cursor carries data; the remaining slots are filled with an empty
        /// cursor, which prevents the cursor from being split further later.
        /// </summary>
        public IRowCursor[] GetRowCursorSet(out IRowCursorConsolidator consolidator, Func <int, bool> needCol, int n, IRandom rand = null)
        {
            var cur = GetRowCursor(needCol, rand);

            consolidator = new Consolidator();
            if (n >= 2)
            {
                // This trick avoids the cursor being split into multiple later.
                var res   = new IRowCursor[n];
                // The empty cursor must report the same active columns as the
                // real one so downstream consumers see a consistent schema.
                var empty = new EmptyCursor(this,
                                            col => col == _column || needCol(col) ||
                                            (_otherValues != null && _otherValues.IsColumnActive(col)));
                for (int i = 0; i < n; ++i)
                {
                    res[i] = i == 0 ? cur : empty;
                }
                return(res);
            }

            // Single-cursor request: hand back the lone cursor.
            // BUGFIX: the original read "return new IRowCursor[] { cur }" with the
            // statement's semicolon misplaced after the closing brace ("};"),
            // which does not compile.
            return new IRowCursor[] { cur };
        }
Example No. 11
0
 /// <summary>
 /// Updates the consolidator with the specified bar.
 /// </summary>
 /// <param name="data">The latest data observation.</param>
 public virtual void Update(BaseData data)
 {
     // Pure delegation; assumes Consolidator is non-null by the time data
     // arrives — TODO confirm it is assigned before the first update.
     Consolidator.Update(data);
 }
Example No. 12
0
        /// <summary>
        /// Writes the clusters to a CSV file at <c>Path</c>: one line per
        /// cluster, or one line per matched mass tag when the cluster map has
        /// entries. Per-dataset abundance columns are appended in ascending
        /// dataset-id order.
        /// </summary>
        /// <param name="clusters">Clusters to export, one (or more) rows each.</param>
        /// <param name="clusterMap">Cluster id to mass-tag matches; when non-empty, tag columns are added.</param>
        /// <param name="datasets">Datasets whose ids define the abundance columns.</param>
        /// <param name="tags">Mass tags keyed by "conformerId-massTagId".</param>
        protected override void Write(List <UMCClusterLight> clusters,
                                      Dictionary <int, List <ClusterToMassTagMap> > clusterMap,
                                      List <DatasetInformation> datasets,
                                      Dictionary <string, MassTagLight> tags)
        {
            using (TextWriter writer = File.CreateText(Path))
            {
                // Build the header.
                var mainHeader = "Cluster ID, Total Members, Dataset Members,  Tightness, Ambiguity";

                // Make blank columns for clusters that dont have enough dta.
                var blankColumns = ",,";

                // Map the dataset ID's to a list of numbers sorted from lowest to highest.
                var datasetIds = new List <int>();
                foreach (var info in datasets)
                {
                    datasetIds.Add(info.DatasetId);
                }
                datasetIds.Sort();

                // Tag columns are only emitted when at least one cluster is mapped.
                if (clusterMap.Count > 0)
                {
                    mainHeader += ", MassTag ID, Conformation ID, Peptide Sequence, STAC, STAC-UP";
                }

                // Two abundance columns (max, sum) per dataset, in sorted id order.
                var header = mainHeader;
                for (var i = 0; i < datasetIds.Count; i++)
                {
                    header += string.Format(", AbundanceMax-{0}, AbundanceSum-{0}", datasetIds[i]);
                }
                writer.WriteLine(header);

                // Parse each cluster - cluster per line.
                foreach (var cluster in clusters)
                {
                    // Collapse each cluster's features to one representative per dataset.
                    var features = Consolidator.ConsolidateUMCs(cluster.UmcList);

                    // Build the output sets: the per-dataset abundance cells,
                    // blank when the cluster has no feature in that dataset.
                    var umcBuilder = new StringBuilder();
                    foreach (var id in datasetIds)
                    {
                        var containsUMC = features.ContainsKey(id);
                        if (containsUMC)
                        {
                            var umc = features[id];
                            umcBuilder.Append(string.Format(",{0},{1}", umc.Abundance, umc.AbundanceSum));
                        }
                        else
                        {
                            umcBuilder.Append(blankColumns);
                        }
                    }

                    // Fixed leading columns shared by every row for this cluster.
                    var builder = new StringBuilder();
                    builder.Append(string.Format("{0},{1},{2},{3},{4}", cluster.Id, cluster.UmcList.Count,
                                                 features.Keys.Count, cluster.Tightness, cluster.AmbiguityScore));


                    if (clusterMap.Count > 0)
                    {
                        if (clusterMap.ContainsKey(cluster.Id))
                        {
                            // One output line per matched mass tag.
                            foreach (var map in clusterMap[cluster.Id])
                            {
                                var clusterString = builder.ToString();
                                // NOTE(review): throws KeyNotFoundException if the
                                // map references a tag missing from 'tags' — verify
                                // the caller guarantees the key exists.
                                var key           = map.ConformerId + "-" + map.MassTagId;
                                var tag           = tags[key];
                                clusterString += string.Format(",{0},{1},{2},{3},{4}", tag.Id,
                                                               tag.ConformationId,
                                                               tag.PeptideSequence,
                                                               map.StacScore,
                                                               map.StacUP);
                                writer.WriteLine(clusterString + umcBuilder);
                            }
                        }
                        else
                        {
                            // Mapped output requested but this cluster has no
                            // matches: emit empty tag columns.
                            writer.WriteLine(builder.Append(",,,,," + umcBuilder));
                        }
                    }
                    else
                    {
                        writer.WriteLine(builder.Append(umcBuilder));
                    }
                }
            }
        }