Example #1
        public void Rebuild(Job j)
        {
            Stopwatch sw = Stopwatch.StartNew();

            this.Clear();
            HashSet <string> seen       = new HashSet <string>();
            uint             duplicates = 0;

            // Rows that ParseRow rejects (returns false for) are counted as duplicates.
            foreach (CSVRow r in j.Rows)
            {
                if (!ParseRow(r, seen))
                {
                    duplicates++;
                }
            }
            if (duplicates != 0)
            {
                Global.Say(duplicates.ToString() + " duplicates in #" + j.MetaData.Id);
            }
            Save();

            Global.Say("Rebuilt summary for #" + j.MetaData.Id + " in " + (sw.Elapsed.TotalSeconds) + " sec.");
        }
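ParseRow itself is not shown in this example. A minimal sketch of how it could report duplicates through the seen set is given below; the method body and the use of r.Filename as the deduplication key are assumptions, not part of the original class.

        // Minimal sketch only: ParseRow is not shown above, and using r.Filename
        // as the deduplication key is an assumption.
        private bool ParseRow(CSVRow r, HashSet<string> seen)
        {
            // HashSet<string>.Add returns false when the key is already present,
            // which Rebuild counts as a duplicate row.
            if (!seen.Add(r.Filename))
            {
                return false;
            }

            // ... fold the row into the summary here ...
            return true;
        }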
Example #2
        void EnsureConnected()
        {
            if (_connection.State == System.Data.ConnectionState.Open)
            {
                return;
            }

retry:
            try
            {
                _connection.Open();
            }
            catch (Exception ex)
            {
                Global.Say("Retrying after SQL connection failure: " + ex.Message);
                Thread.Sleep(1000);
                if (--retryCount == 0)
                {
                    // Rethrow without resetting the original stack trace.
                    throw;
                }
                else
                {
                    goto retry;
                }
            }
        }
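The goto-based retry above can also be written as a bounded loop. The sketch below is a hypothetical variant, not part of the original class; it assumes the same _connection and retryCount fields and uses an exception filter so that the final failure propagates with its original stack trace.

        // Hypothetical loop-based variant of EnsureConnected; assumes the same
        // _connection and retryCount fields as the original method.
        void EnsureConnectedViaLoop()
        {
            if (_connection.State == System.Data.ConnectionState.Open)
            {
                return;
            }

            for (var attemptsLeft = retryCount; ; attemptsLeft--)
            {
                try
                {
                    _connection.Open();
                    return;
                }
                catch (Exception ex) when (attemptsLeft > 1)
                {
                    // Log and wait a second before the next attempt; the last
                    // failure does not match the filter and escapes to the caller.
                    Global.Say("Retrying after SQL connection failure: " + ex.Message);
                    Thread.Sleep(1000);
                }
            }
        }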
Example #3
        protected void Rebuild()
        {
            Stopwatch sw = Stopwatch.StartNew();

            _records.Clear();
            _summary = new Dictionary <string, CategoryRecord>();

            Jobs jobs = new Jobs(_dataDir, true);

            foreach (Job j in jobs)
            {
                Update(j);
                j.Dispose();
                // Explicitly run a garbage collection after disposing each job.
                System.GC.Collect();
            }

            Save();
            SaveSummary();

            Global.Say("Rebuilt record cache in " + sw.Elapsed.TotalSeconds + " sec.");
        }
Example #4
        protected void Rebuild()
        {
            Stopwatch sw = Stopwatch.StartNew();

            SetupData();

            if (_j.MetaData.Reference != 0)
            {
                _reference = new Job(_dataDir, _j.MetaData.Reference);
            }

            Dictionary <string, double> referenceTimes = null;

            if (_reference != null)
            {
                referenceTimes = new Dictionary <string, double>();
                foreach (CSVRow r in _reference.Rows)
                {
                    if (r.ResultCode == ResultCode.OK)
                    {
                        // If a filename appears more than once, the last runtime wins.
                        referenceTimes[r.Filename] = r.Runtime;
                    }
                }
            }

            // Classify each row of this job, comparing its runtime against the
            // reference run where one is available.
            foreach (CSVRow r in _j.Rows)
            {
                uint rc = r.ResultCode;

                if (ResultCode.IsBug(rc))
                {
                    string fn  = r.Filename;
                    string cat = fn.Substring(0, fn.IndexOf('\\'));
                    _bugsByCategory[cat].Add(fn);
                    _bugsByCategory[""].Add(fn);
                }
                else if (ResultCode.IsError(rc))
                {
                    string fn  = r.Filename;
                    string cat = fn.Substring(0, fn.IndexOf('\\'));
                    _errorsByCategory[cat].Add(fn);
                    _errorsByCategory[""].Add(fn);
                }
                else if (ResultCode.IsOK(rc))
                {
                    uint have   = r.SAT + r.UNSAT;
                    uint target = r.TargetSAT + r.TargetUNSAT;
                    if (have < target)
                    {
                        string fn  = r.Filename;
                        string cat = fn.Substring(0, fn.IndexOf('\\'));
                        _underperformersByCategory[cat].Add(fn);
                        _underperformersByCategory[""].Add(fn);
                    }

                    if (referenceTimes != null && referenceTimes.ContainsKey(r.Filename))
                    {
                        double new_time = r.Runtime;
                        double old_time = referenceTimes[r.Filename];

                        // Flag results at least 10x slower than the reference run,
                        // ignoring sub-second runtimes.
                        if (new_time > 1 && old_time > 1 &&
                            new_time >= 10.0 * old_time)
                        {
                            string fn  = r.Filename;
                            string cat = fn.Substring(0, fn.IndexOf('\\'));
                            string msg = fn + " [" + (new_time - old_time) + " sec. slower]";
                            _dippersByCategory[cat].Add(msg);
                            _dippersByCategory[""].Add(msg);
                        }
                    }
                }
                else if (ResultCode.IsTime(rc))
                {
                    if (referenceTimes != null && referenceTimes.ContainsKey(r.Filename))
                    {
                        double old_time = referenceTimes[r.Filename];
                        double new_time = _j.MetaData.Timeout;

                        if ((new_time - old_time) > 10)
                        {
                            string fn  = r.Filename;
                            string cat = fn.Substring(0, fn.IndexOf('\\'));
                            string msg = fn + " [more than " + (new_time - old_time) + " sec. slower]";
                            _dippersByCategory[cat].Add(msg);
                            _dippersByCategory[""].Add(msg);
                        }
                    }
                }
                else if (ResultCode.IsMemory(rc))
                {
                    if (referenceTimes != null && referenceTimes.ContainsKey(r.Filename))
                    {
                        double old_time = referenceTimes[r.Filename];

                        string fn  = r.Filename;
                        string cat = fn.Substring(0, fn.IndexOf('\\'));
                        string msg = fn + " [went from " + old_time + " sec. to memory-out]";
                        _dippersByCategory[cat].Add(msg);
                        _dippersByCategory[""].Add(msg);
                    }
                }
            }

            Save();
            Global.Say("Rebuilding cache for #" + _j.MetaData.Id + " in " + sw.Elapsed.TotalSeconds + " sec.");
        }
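The category key is the portion of the filename before the first backslash, and that computation is repeated for every classification above. A hypothetical helper such as the following (not part of the original class) could centralize it and also tolerate filenames without a separator, which the inline Substring(0, IndexOf('\\')) calls would reject with an exception.

        // Hypothetical helper, not part of the original class: extracts the
        // category prefix used as a key into the *ByCategory dictionaries.
        private static string CategoryOf(string filename)
        {
            int sep = filename.IndexOf('\\');
            // Fall back to the catch-all "" category when there is no backslash;
            // the inline version above would throw in that case.
            return sep < 0 ? "" : filename.Substring(0, sep);
        }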
Example #5
        public void Download(SQLInterface sql)
        {
            // Global.Say("Downloading #" + _metaData.Id);

            Dictionary <string, Object> r = sql.Read("SELECT " +
                                                     "ID,SubmissionTime,SharedDir,Binary,Parameters,Timeout,Memout," +
                                                     "Cluster,ClusterJobId,Nodegroup,Locality,Longparams FROM Experiments " +
                                                     "WHERE ID=" + _metaData.Id.ToString() + ";");

            if (SQLInterface.getuint(ref r, "ID") != _metaData.Id)
            {
                throw new Exception("Job ID mismatch");
            }

            _metaData.SubmissionTime = Convert.ToDateTime(r["SubmissionTime"], Global.culture);
            _metaData.BaseDirectory  = r["SharedDir"].ToString();
            _metaData.BinaryId       = Convert.ToUInt32(r["Binary"]);
            if (r["Parameters"].Equals(DBNull.Value))
            {
                _metaData.Parameters = r["Longparams"].ToString();
            }
            else
            {
                _metaData.Parameters = r["Parameters"].ToString();
            }
            _metaData.Timeout      = Convert.ToUInt32(r["Timeout"]);
            _metaData.Memoryout    = Convert.ToUInt32(r["Memout"]);
            _metaData.Cluster      = r["Cluster"].ToString();
            _metaData.ClusterJobId = SQLInterface.getuint(ref r, "ClusterJobId");
            _metaData.Nodegroup    = r["Nodegroup"].ToString();
            _metaData.Locality     = r["Locality"].ToString();
            _metaData.isFinished   = false;
            _metaData.Save();

            r.Clear();

            _data = new CSVData(_dataFilename);

            bool have_new_data = false;

            // Keep fetching result batches until GetBatch reports no more rows.
            while (GetBatch(sql) > 0)
            {
                have_new_data = true;
            }

            if (have_new_data)
            {
                _summary.Rebuild(this);
            }

            string ids = _metaData.Id.ToString();

            bool clusterDone = false;

            // If the experiment ran on a cluster, ask the scheduler whether its job has reached a terminal state.
            if (_metaData.Cluster != "" && _metaData.ClusterJobId != 0)
            {
                try
                {
                    r = sql.Read("SELECT COUNT(1) FROM JobQueue WHERE ExperimentID=" + ids);
                    if ((int)r.First().Value != 0)
                    {
                        // Work for this experiment is still queued, so it cannot be finished yet.
                        clusterDone = false;
                    }
                    else
                    {
                        Scheduler scheduler = new Scheduler();
                        scheduler.Connect(_metaData.Cluster);
                        ISchedulerJob job   = scheduler.OpenJob((int)_metaData.ClusterJobId);
                        JobState      state = job.State;
                        if (state == JobState.Finished ||
                            state == JobState.Finishing ||
                            state == JobState.Failed ||
                            state == JobState.Canceled ||
                            state == JobState.Canceling)
                        {
                            clusterDone = true;
                        }
                    }
                }
                catch (SchedulerException ex)
                {
                    if (ex.Code == ErrorCode.Operation_InvalidJobId)
                    {
                        clusterDone = true;
                    }
                    else
                    {
                        Global.Say("Job considered not finished because the scheduler threw: " + ex.Message);
                    }
                }
                catch
                {
                    clusterDone = false;
                }
            }

            if (clusterDone)
            {
                // Delete the experiment only if the job on the cluster is done and there are no more jobs.
                Dictionary <string, object> q =
                    sql.Read("(SELECT (SELECT COUNT(1) FROM Data WHERE ExperimentID=" + ids + ") + " +
                             "(SELECT COUNT(1) FROM JobQueue WHERE ExperimentID=" + ids + "))");

                if (q.Count > 0 && (int)q.First().Value == 0)
                {
                    // Cluster is done & database is done.
                    sql.Query("DELETE FROM Experiments WHERE ID=" + ids);
                    _metaData.isFinished = true;
                    _metaData.Save();
                }
            }
        }
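The terminal-state test in the scheduler branch can be factored into a small predicate. The sketch below is hypothetical and not part of the original class, but it covers exactly the JobState values the method checks.

        // Hypothetical helper covering the terminal JobState values tested above.
        private static bool IsTerminal(JobState state)
        {
            return state == JobState.Finished ||
                   state == JobState.Finishing ||
                   state == JobState.Failed ||
                   state == JobState.Canceled ||
                   state == JobState.Canceling;
        }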