/// <summary>
/// Clone-based constructor: wraps an existing <see cref="RawData"/> clone and
/// registers the result columns used to record heuristic evaluations.
/// </summary>
internal HeuristicData(string heuristicName, string heuristicValue, RawData clone, Features.Mode featureMode)
    : base(clone)
{
    _featureMode = featureMode;
    HeuristicValue = heuristicValue;
    HeuristicName = heuristicName;

    // Result columns: realised makespan, best makespan found so far, and one
    // column named after the heuristic itself.
    Data.Columns.Add("Makespan", typeof(int));
    Data.Columns.Add("BestFoundMakespan", typeof(int));
    Data.Columns.Add(heuristicName, typeof(string));
}
/// <summary>
/// Builds a retrace set and immediately loads its contents via <see cref="Read"/>.
/// For non-local feature modes the file path is redirected to the mode-specific
/// variant by substituting the mode name into the base-resolved path.
/// </summary>
// NOTE(review): Read() executes BEFORE FeatureMode is assigned — confirm Read
// does not depend on FeatureMode, otherwise this ordering is a latent bug.
public RetraceSet(string distribution, string dimension, Trajectory track, int iter, bool extended,
    int numFeat, int model, string stepwiseBias, Features.Mode featureMode, DirectoryInfo data)
    : base(distribution, dimension, track, iter, extended, numFeat, model, stepwiseBias, data)
{
    Read();
    FeatureMode = featureMode;
    // Swap the "Local" token in the base-built path for the actual mode name
    // (assumes the base class always embeds "Local" in the file name — verify).
    if (FeatureMode != Features.Mode.Local)
        FileInfo = new FileInfo(FileInfo.FullName.Replace(Features.Mode.Local.ToString(), FeatureMode.ToString()));
}
/// <summary>
/// File-based constructor: resolves the underlying data set through the base
/// class and registers the result columns used to record heuristic evaluations.
/// </summary>
internal HeuristicData(string distribution, string dimension, DataSet set, bool extended,
    string heuristicName, string heuristicValue, DirectoryInfo data, Features.Mode featureMode)
    : base(distribution, dimension, set, extended, data)
{
    _featureMode = featureMode;
    HeuristicValue = heuristicValue;
    HeuristicName = heuristicName;

    // Result columns: realised makespan, best makespan found so far, and one
    // column named after the heuristic itself.
    Data.Columns.Add("Makespan", typeof(int));
    Data.Columns.Add("BestFoundMakespan", typeof(int));
    Data.Columns.Add(heuristicName, typeof(string));
}
/// <summary>
/// Builds a preference ("diff") training set: redirects the file to the
/// Training directory (tagged with the ranking scheme's character code),
/// selects the ranking function, fills the set either by retracing (local
/// feature mode) or by reading the previously saved training file, and
/// pre-creates the per-(instance, step) preference lists.
/// </summary>
public PreferenceSet(string distribution, string dimension, Trajectory track, int iter, bool extended,
    int numFeat, int model, string stepwiseBias, Ranking rank, Features.Mode featMode, DirectoryInfo data)
    : base(distribution, dimension, track, iter, extended, numFeat, model, stepwiseBias, featMode, data)
{
    // Remember the file the base class resolved: it is the input when the
    // preferences are read back instead of retraced.
    FileInfo trainingFileInfo = new FileInfo(FileInfo.FullName);

    this.FileInfo = new FileInfo(string.Format(@"{0}\Training\{1}.diff.{2}.csv", data.FullName,
        FileInfo.Name.Substring(0, FileInfo.Name.Length - FileInfo.Extension.Length), (char) rank));

    Data.Columns.Add("Rank", typeof(int));

    // Fix: switch directly on the parameter; the former local copy
    // ("var ranking = rank;") was redundant.
    switch (rank)
    {
        case Ranking.All:
            _rankingFunction = AllRankings;
            break;
        case Ranking.Basic:
            _rankingFunction = BasicRanking;
            break;
        case Ranking.FullPareto:
            _rankingFunction = FullParetoRanking;
            break;
        case Ranking.PartialPareto:
            _rankingFunction = PartialParetoRanking;
            break;
        // NOTE(review): any other Ranking value leaves _rankingFunction null —
        // confirm callers never pass one, or throw ArgumentOutOfRangeException here.
    }

    // Local features can be recomputed by retracing; other modes read the
    // previously saved training file instead.
    if (FeatureMode == Features.Mode.Local)
        ApplyAll(Retrace, null, null);
    else
        Read(trainingFileInfo);

    // Pre-create preference lists for already-processed instances (PIDs are 1-based).
    _diffData = new List<Preference>[NumInstances, NumDimension];
    for (int pid = 1; pid <= AlreadySavedPID; pid++)
        for (int step = 0; step < NumDimension; step++)
            _diffData[pid - 1, step] = new List<Preference>();
}
// commits dispatch!
/// <summary>
/// Commits the dispatch of <paramref name="job"/> to the schedule — updates the
/// chosen machine's slots, the job's timeline, the dispatch sequence and the
/// makespan — and returns the feature vector describing the dispatch, or null
/// when <paramref name="mode"/> is <see cref="Features.Mode.None"/>.
/// </summary>
public Features Dispatch1(int job, Features.Mode mode, LinearModel model)
{
    Dispatch dispatch;
    int slot = FindDispatch(job, out dispatch);
    // Processing time of this job on the machine chosen by FindDispatch.
    int time = _prob.Procs[job, dispatch.Mac];
    int arrivalTime, slotReduced;
    Features phi = new Features();
    // Equivalence features must be computed BEFORE the schedule is mutated below.
    switch (mode)
    {
        case Features.Mode.Equiv:
            phi.GetEquivPhi(job, this);
            break;
    }
    // Mutate the schedule: machine slot bookkeeping, job timeline, sequence.
    _macs[dispatch.Mac].Update(dispatch.StartTime, time, slot, out slotReduced);
    _jobs[job].Update(dispatch.StartTime, time, dispatch.Mac, out arrivalTime);
    Sequence.Add(dispatch);
    // A job that has visited every machine is fully scheduled.
    if (_jobs[job].MacCount == _prob.NumMachines)
        ReadyJobs.Remove(job);
    Makespan = _macs.Max(x => x.Makespan);
    if (mode == Features.Mode.None)
        return null;
    // Local features are computed for every non-None mode (including Equiv),
    // using the POST-dispatch state.
    phi.GetLocalPhi(_jobs[job], _macs[dispatch.Mac], _prob.Procs[job, dispatch.Mac],
        _jobs.Sum(p => p.WorkRemaining), _macs.Sum(p => p.TotSlack), Makespan, Sequence.Count,
        dispatch.StartTime, arrivalTime, slotReduced, _totProcTime);
    if (mode == Features.Mode.Global)
        phi.GetGlobalPhi(this, model);
    return phi;
}
/// <summary>
/// Returns a new feature vector whose local and global components are the
/// element-wise differences (this − other). The equivalence slots instead
/// record whether the two vectors AGREE at each SDR position, and RND is
/// cleared on the result.
/// </summary>
public Features Difference(Features other)
{
    var result = new Features();

    int idx = 0;
    while (idx < LocalCount)
    {
        result.PhiLocal[idx] = PhiLocal[idx] - other.PhiLocal[idx];
        idx++;
    }

    idx = 0;
    while (idx < GlobalCount)
    {
        result.PhiGlobal[idx] = PhiGlobal[idx] - other.PhiGlobal[idx];
        idx++;
    }

    for (var sdr = 0; sdr < SDRData.SDRCount; sdr++)
        result.Equiv[sdr] = Equiv[sdr] == other.Equiv[sdr];

    result.RND = null;
    return result;
}
/// <summary>
/// Pairs a dispatch decision with the feature vector observed when it was taken.
/// </summary>
public Preference(Schedule.Dispatch dispatch, Features features)
{
    Feature = features;
    Dispatch = dispatch;
}
/// <summary>
/// Evaluates the linear priority of a feature vector: the weighted sum of its
/// local features, plus the weighted global features when the model operates
/// in Global feature mode. Time-dependent models pick the weight column that
/// matches the current dispatch step.
/// </summary>
public double PriorityIndex(Features phi)
{
    // Column 0 for time-independent models; otherwise the 1-based step counter
    // from the explanatory variables, converted to a 0-based column index.
    var step = TimeIndependent ? 0 : phi.XiExplanatory[(int) Features.Explanatory.step] - 1;

    double priority = 0;
    for (var localIdx = 0; localIdx < Features.LocalCount; localIdx++)
        priority += phi.PhiLocal[localIdx] * LocalWeights[localIdx][step];

    if (FeatureMode != Features.Mode.Global)
        return priority;

    for (var globalIdx = 0; globalIdx < Features.GlobalCount; globalIdx++)
        priority += phi.PhiGlobal[globalIdx] * GlobalWeights[globalIdx][step];

    return priority;
}
/// <summary>
/// Creates a linear model with one weight column per time step (a single step
/// means the model is time-independent). Global weight arrays are allocated
/// only when the model operates in Global feature mode.
/// </summary>
protected LinearModel(FileInfo file, Features.Mode featureMode, int timeDependentSteps, int numFeatures,
    int modelID, string distribution, string dimension, Model type)
    : this(file, featureMode, numFeatures, modelID, timeDependentSteps == 1, distribution, dimension, type)
{
    for (int localIdx = 0; localIdx < Features.LocalCount; localIdx++)
        LocalWeights[localIdx] = new double[timeDependentSteps];

    if (featureMode != Features.Mode.Global)
        return;

    for (int globalIdx = 0; globalIdx < Features.GlobalCount; globalIdx++)
        GlobalWeights[globalIdx] = new double[timeDependentSteps];
}
/// <summary>
/// Base constructor: records the model's identity and configuration. The model
/// name is derived from the feature count and model ID ("F{n}.M{id}").
/// </summary>
protected LinearModel(FileInfo file, Features.Mode featureMode, int numFeatures, int modelID,
    bool timeIndependent, string distribution, string dimension, Model type)
{
    Type = type;
    Dimension = dimension;
    Distribution = distribution;
    TimeIndependent = timeIndependent;
    FeatureMode = featureMode;
    FileInfo = file;
    _modelID = modelID;
    _numFeatures = numFeatures;
    Name = String.Format("F{0}.M{1}", _numFeatures, _modelID);
}
/// <summary>
/// Loads preference-model (PREF) weights. For imitation-learning trajectories
/// the weights are taken from the iteration file resolved by
/// GetImitationLearningFile; for all other trajectories the weights directory
/// is scanned for a logged CSV matching the configuration, and the first entry
/// matching this feature count and model ID is adopted.
/// </summary>
/// <exception cref="Exception">No file matches the pattern, or no logged entry
/// matches the requested feature count / model ID.</exception>
public LinearModel(string distribution, string dimension, TrainingSet.Trajectory track, bool extended,
    PreferenceSet.Ranking rank, bool timedependent, DirectoryInfo dataDir, int numFeatures, int modelID,
    string stepwiseBias, int iter = -1, Features.Mode featMode = Features.Mode.Local)
    : this(null, featMode, numFeatures, modelID, !timedependent, distribution, dimension, Model.PREF)
{
    switch (track)
    {
        case TrainingSet.Trajectory.ILFIXSUP:
        case TrainingSet.Trajectory.ILUNSUP:
        case TrainingSet.Trajectory.ILSUP:
            // Imitation learning: weights come from a specific iteration's model.
            LinearModel model;
            Iteration = GetImitationLearningFile(out model, distribution, dimension, track, extended,
                numFeatures, modelID, dataDir.FullName, stepwiseBias, timedependent, iter);
            FileInfo = model.FileInfo;
            LocalWeights = model.LocalWeights;
            return;
        default:
            // NOTE(review): the pattern mixes escaped "\\." with bare "." — the
            // bare dots match ANY character. Confirm this looseness is intended.
            string pat = String.Format("\\b(exhaust|full)\\.{0}.{1}.{2}.{3}{4}.{5}.{6}weights.{7}.csv",
                distribution, dimension, (char) rank, track, extended ? "EXT" : "", stepwiseBias,
                FeatureMode == Features.Mode.Global ? "(Global|SDR)" : "",
                timedependent ? "timedependent" : "timeindependent");
            DirectoryInfo dir = new DirectoryInfo(String.Format(@"{0}\PREF\weights", dataDir.FullName));
            Regex reg = new Regex(pat);
            var files = dir.GetFiles("*.csv").Where(path => reg.IsMatch(path.ToString())).ToList();
            if (files.Count <= 0)
                throw new Exception(String.Format("Cannot find any weights belonging to {0}!", pat));
            // Scan every matching file for an entry with this feature count and
            // model ID; the first hit wins.
            foreach (var file in files)
            {
                LinearModel[] logWeights = ReadLoggedLinearWeights(file, distribution, dimension, Model.PREF);
                FileInfo = file;
                foreach (
                    var w in logWeights.Where(w => w._numFeatures == _numFeatures && w._modelID == _modelID))
                {
                    LocalWeights = w.LocalWeights;
                    GlobalWeights = w.GlobalWeights;
                    return;
                }
            }
            throw new Exception(String.Format("Cannot find weights {0} to user requirements from {1}!",
                Name, files[0].Name));
    }
}