/// <summary>
/// Recomputes the feature vector of every preference recorded for the given
/// problem instance and dispatch step, using a lookahead on a clone of the
/// supplied schedule. Returns the number of preferences updated.
/// </summary>
internal int UpdateFeatures(int pid, int step, Schedule jssp)
{
    // Dispatch1 requires a model argument; an empty placeholder suffices here.
    var placeholder = new LinearModel();
    var prefs = Preferences[pid - 1, step];
    foreach (var pref in prefs)
    {
        // Clone so the lookahead dispatch never mutates the caller's schedule.
        Schedule simulated = jssp.Clone();
        pref.Feature = simulated.Dispatch1(pref.Dispatch.Job, FeatureMode, placeholder);
    }
    return prefs.Count;
}
/// <summary>
/// Builds a CDR data set for the given raw data and linear model; the backing
/// CSV path is derived from the model's weight-file location and name, then
/// read immediately (without the optional flag).
/// </summary>
public CDRData(RawData data, LinearModel model)
    : base("CDR", model.Name, data, model.FeatureMode)
{
    Model = model;
    var weightsFile = Model.FileInfo;
    // Model file name with its extension stripped off.
    string baseName = weightsFile.Name.Substring(0, weightsFile.Name.Length - weightsFile.Extension.Length);
    string path = string.Format(@"{0}\..\CDR\{1}\{2}.{3}.{4}.csv",
        weightsFile.Directory, baseName, Distribution, Dimension, Set);
    FileInfo = new FileInfo(path);
    Read(false);
}
/// <summary>
/// Dispatches the remaining steps of this schedule using the composite
/// dispatching rule encoded by <paramref name="model"/>: at each step every
/// ready job is evaluated on a cloned schedule, the job with the highest
/// priority index is dispatched (ties broken uniformly at random).
/// </summary>
/// <param name="model">Linear model supplying feature mode and priorities.</param>
/// <returns>
/// The best SDR-rollout makespan observed during global-feature evaluation,
/// or this schedule's final Makespan when no rollout was recorded.
/// </returns>
public int ApplyCDR(LinearModel model)
{
    int bestFoundMakespan = int.MaxValue;
    for (int step = Sequence.Count; step < _prob.Dimension; step++)
    {
        var priority = new List<double>(ReadyJobs.Count);
        foreach (int j in ReadyJobs)
        {
            // Evaluate the candidate on a clone so this schedule stays untouched.
            var phi = Clone().Dispatch1(j, model.FeatureMode, model);
            priority.Add(model.PriorityIndex(phi));
            if (model.FeatureMode != Features.Mode.Global) continue;
            var sdrs = new List<Features.Global>
            {
                Features.Global.SPT,
                Features.Global.LPT,
                Features.Global.LWR,
                Features.Global.MWR,
                Features.Global.RNDmin
            };
            var rollout = (from sdr in sdrs
                           where Math.Abs(phi.PhiGlobal[(int) sdr]) > 0
                           select (int) phi.PhiGlobal[(int) sdr]).ToList();
            // Guard: with all SDR weights inactive the list is empty and the
            // original unguarded Min() threw InvalidOperationException.
            if (rollout.Count > 0)
            {
                var bestRollout = rollout.Min();
                if (bestRollout < bestFoundMakespan) bestFoundMakespan = bestRollout;
            }
        }
        // Hoist Max() out of the filter — it was recomputed for every element
        // (accidental O(n^2)). priority[i] aligns with ReadyJobs[i] because the
        // loop above iterates ReadyJobs in order.
        double maxPriority = priority.Max();
        var highestPriority = ReadyJobs
            .Where((job, i) => Math.Abs(priority[i] - maxPriority) < 1e-10)
            .ToList();
        // break ties randomly
        var job = highestPriority[_random.Next(0, highestPriority.Count)];
        Dispatch1(job);
    }
    return bestFoundMakespan == int.MaxValue ? Makespan : bestFoundMakespan;
}
// commits dispatch!
/// <summary>
/// Dispatches the given job on this schedule (mutating machine/job state,
/// the dispatch Sequence, ReadyJobs and Makespan) and, depending on
/// <paramref name="mode"/>, returns the feature vector of the dispatch.
/// </summary>
/// <param name="job">Index of the job to dispatch.</param>
/// <param name="mode">Which feature set to compute; None returns null.</param>
/// <param name="model">Model forwarded to global-feature computation; only
/// used when mode is Global.</param>
/// <returns>The computed Features, or null when mode is None.</returns>
public Features Dispatch1(int job, Features.Mode mode, LinearModel model)
{
    Dispatch dispatch;
    int slot = FindDispatch(job, out dispatch);
    int time = _prob.Procs[job, dispatch.Mac];
    int arrivalTime, slotReduced;
    Features phi = new Features();
    switch (mode)
    {
        case Features.Mode.Equiv:
            // NOTE: equivalence features are computed BEFORE the dispatch is
            // applied below, i.e. on the pre-dispatch schedule state.
            phi.GetEquivPhi(job, this);
            break;
    }
    // Commit the dispatch: update the machine's slot structure and the job's
    // own bookkeeping (order matters — local features below read the result).
    _macs[dispatch.Mac].Update(dispatch.StartTime, time, slot, out slotReduced);
    _jobs[job].Update(dispatch.StartTime, time, dispatch.Mac, out arrivalTime);
    Sequence.Add(dispatch);
    // Job finished all its machines -> no longer a dispatch candidate.
    if (_jobs[job].MacCount == _prob.NumMachines) ReadyJobs.Remove(job);
    Makespan = _macs.Max(x => x.Makespan);
    if (mode == Features.Mode.None) return null;
    // Local features are computed on the post-dispatch state.
    phi.GetLocalPhi(_jobs[job], _macs[dispatch.Mac], _prob.Procs[job, dispatch.Mac],
        _jobs.Sum(p => p.WorkRemaining), _macs.Sum(p => p.TotSlack), Makespan,
        Sequence.Count, dispatch.StartTime, arrivalTime, slotReduced, _totProcTime);
    if (mode == Features.Mode.Global) phi.GetGlobalPhi(this, model);
    return phi;
}
/// <summary>
/// Fills PhiGlobal with the lookahead makespan of every SDR whose model
/// weight is non-negligible, plus min/max/mean/std statistics over repeated
/// random-rule (RND) rollouts when any RND-statistic weight is active.
/// </summary>
public void GetGlobalPhi(Schedule current, LinearModel model)
{
    for (int idx = 0; idx < SDRData.SDRCount; idx++)
    {
        var sdr = (SDRData.SDR) idx;
        // Skip rules whose weight is numerically zero — their feature is unused.
        bool active = Math.Abs(model.GlobalWeights[(int) sdr][0]) > LinearModel.WEIGHT_TOLERANCE;
        if (!active) continue;
        Schedule sim = current.Clone();
        sim.ApplySDR(sdr);
        PhiGlobal[(int) (Global) sdr] = sim.Makespan;
    }
    // RND rollouts are expensive; only run them when at least one of the four
    // RND statistics carries a non-negligible weight.
    bool anyRndActive =
        Math.Abs(model.GlobalWeights[(int) Global.RNDmin][0]) >= LinearModel.WEIGHT_TOLERANCE
        || Math.Abs(model.GlobalWeights[(int) Global.RNDmax][0]) >= LinearModel.WEIGHT_TOLERANCE
        || Math.Abs(model.GlobalWeights[(int) Global.RNDstd][0]) >= LinearModel.WEIGHT_TOLERANCE
        || Math.Abs(model.GlobalWeights[(int) Global.RNDmean][0]) >= LinearModel.WEIGHT_TOLERANCE;
    if (!anyRndActive) return;
    for (int r = 0; r < RND.Length; r++)
    {
        Schedule sim = current.Clone();
        sim.ApplySDR(SDRData.SDR.RND);
        RND[r] = sim.Makespan;
    }
    PhiGlobal[(int) Global.RNDmin] = RND.Min();
    PhiGlobal[(int) Global.RNDmax] = RND.Max();
    PhiGlobal[(int) Global.RNDmean] = RND.Average();
    PhiGlobal[(int) Global.RNDstd] = StandardDev(RND, PhiGlobal[(int) Global.RNDmean]);
}
/// <summary>
/// Loads the imitation-learning model for the current Track, derives the
/// mixing coefficient beta for the given iteration, and returns the track
/// label used in output file names.
/// </summary>
private string GetImitationModel(out LinearModel model, out double beta, ref int currentIter, bool extended,
    int numFeatures, int modelID, string stepwiseBias)
{
    model = new LinearModel(Distribution, Dimension, Track, extended, PreferenceSet.Ranking.PartialPareto, false,
        new DirectoryInfo(String.Format(@"{0}\..", FileInfo.DirectoryName)), numFeatures, modelID, stepwiseBias);

    // A negative iteration means "use the latest recorded iteration".
    if (currentIter < 0)
        currentIter = model.Iteration + 1;

    switch (Track)
    {
        case Trajectory.ILFIXSUP:
            beta = 0.5; // fixed supervision mix
            break;
        case Trajectory.ILSUP:
            beta = Math.Pow(0.5, currentIter); // supervision decays per iteration
            break;
        case Trajectory.ILUNSUP:
            beta = 0; // fully unsupervised
            break;
        default:
            throw new Exception(String.Format("{0} is not supported as imitation learning!", Track));
    }

    // e.g. "IL3SUP" — the "IL" prefix of Track is replaced by "IL<iter>".
    var trackName = String.Format("IL{0}{1}", currentIter, Track.ToString().Substring(2));
    if (stepwiseBias != "equal")
        trackName += "_" + stepwiseBias;

    // Reduced feature sets are distinguished by appending the model name.
    if (numFeatures < Features.LocalCount)
        return String.Format("{0}_{1}", trackName, model.Name.Replace(".", ""));
    return trackName;
}
/// <summary>
/// Builds a training set for the given problem distribution/dimension and
/// trajectory: selects the model and the job-choosing delegate for the
/// trajectory, derives the output CSV path, and prepares the data table and
/// preference storage.
/// </summary>
internal TrainingSet(string distribution, string dimension, Trajectory track, int iter, bool extended,
    int numFeatures, int modelID, string stepwiseBias, DirectoryInfo data)
    : base(distribution, dimension, DataSet.train, extended, data)
{
    Track = track;
    string strTrack = track.ToString();
    NumInstances = ResetNumInstances(extended);
    NumTraining = ResetNumInstances(false);
    switch (Track)
    {
        case Trajectory.ILFIXSUP:
        case Trajectory.ILSUP:
        case Trajectory.ILUNSUP:
            // Imitation learning: model, beta and track label come from the
            // logged model; iter may be rewritten to the latest iteration.
            strTrack = GetImitationModel(out Model, out _beta, ref iter, extended, numFeatures, modelID, stepwiseBias);
            if (Track == Trajectory.ILUNSUP) _trajectory = ChooseWeightedJob;
            else _trajectory = UseImitationLearning;
            if (extended)
            {
                // Window the instances to the slice belonging to this iteration.
                AlreadySavedPID = Math.Max(AlreadySavedPID, NumTraining*iter);
                NumInstances = Math.Min(Data.Rows.Count, NumTraining*(iter + 1));
            }
            break;
        case Trajectory.CMAESMINCMAX:
            GetCMAESModel(out Model, CMAESData.ObjectiveFunction.MinimumMakespan);
            _trajectory = ChooseWeightedJob;
            break;
        case Trajectory.CMAESMINRHO:
            GetCMAESModel(out Model, CMAESData.ObjectiveFunction.MinimumRho);
            _trajectory = ChooseWeightedJob;
            break;
        case Trajectory.OPT:
            // Optimal trajectories need no model.
            Model = null;
            _trajectory = ChooseOptJob;
            break;
        case Trajectory.LOCOPT:
            Model = null;
            _trajectory = ChooseLocalOptJob;
            break;
        default: // SDR
            // Remaining Trajectory values map directly onto SDRData.SDR rules.
            Model = new LinearModel((SDRData.SDR) Track, distribution, Dimension);
            _trajectory = ChooseSDRJob;
            break;
    }
    if (extended) strTrack += "EXT";
    FileInfo = new FileInfo(string.Format(
        @"{0}\Training\trdat.{1}.{2}.{3}.{4}.csv",
        data.FullName, Distribution, Dimension, strTrack, FeatureMode));
    // Columns recorded per dispatch decision during training-data collection.
    Data.Columns.Add("Step", typeof (int));
    Data.Columns.Add("Dispatch", typeof (Schedule.Dispatch));
    Data.Columns.Add("Followed", typeof (bool));
    Data.Columns.Add("ResultingOptMakespan", typeof (int));
    Data.Columns.Add("Features", typeof (Features));
    SetAlreadySavedPID();
    Preferences = new List<Preference>[NumInstances, NumDimension];
}
/// <summary>
/// Loads the CMA-ES linear model for the given objective function from the
/// directory one level above this set's file location.
/// </summary>
private void GetCMAESModel(out LinearModel model, CMAESData.ObjectiveFunction objFun)
{
    var root = new DirectoryInfo(String.Format(@"{0}\..", FileInfo.DirectoryName));
    model = new LinearModel(Distribution, Dimension, objFun, false, root);
}
/// <summary>
/// Locates the logged weight file for the requested imitation-learning
/// iteration (or the latest one when iter is negative), loads it into
/// <paramref name="model"/>, and returns the highest iteration found.
/// </summary>
/// <exception cref="Exception">Thrown when fewer matching weight files exist
/// than the requested iteration requires.</exception>
private int GetImitationLearningFile(out LinearModel model, string distribution, string dimension,
    TrainingSet.Trajectory track, bool extended, int numFeatures, int modelID, string directoryName,
    string stepwiseBias, bool timedependent = false, int iter = -1)
{
    DirectoryInfo dir = new DirectoryInfo(String.Format(@"{0}\PREF\weights", directoryName));
    // NOTE(review): the dots surrounding {0},{1},... are NOT escaped, so they
    // match any character, not just a literal '.' — looks tolerant rather than
    // strict; confirm this looseness is intentional.
    // Group 2 captures "OPT" or "IL<iter>..."; group 3 captures the iteration digits.
    string pat = String.Format("\\b(exhaust|full)\\.{0}.{1}.{2}.(OPT|IL([0-9]+){3}{4}{5}).{6}.weights.{7}",
        distribution, dimension, (char) PreferenceSet.Ranking.PartialPareto, track.ToString().Substring(2),
        numFeatures < Features.LocalCount ? String.Format("_F{0}M{1}", numFeatures, modelID) : "",
        extended ? "EXT" : "", stepwiseBias, timedependent ? "timedependent" : "timeindependent");
    Regex reg = new Regex(pat);
    var files = dir.GetFiles("*.csv").Where(path => reg.IsMatch(path.ToString())).ToList();
    // Need strictly more files than the requested iteration index.
    if (files.Count <= (iter >= 0 ? iter : 0))
        throw new Exception(String.Format("Cannot find any weights belonging to {0}. Start with optimal!", track));
    // Map each file to its iteration number; "OPT" counts as iteration 0.
    int[] iters = new int[files.Count];
    for (int i = 0; i < iters.Length; i++)
    {
        Match m = reg.Match(files[i].Name);
        if (m.Groups[2].Value == "OPT") iters[i] = 0;
        else iters[i] = Convert.ToInt32(m.Groups[3].Value);
    }
    if (iter < 0) iter = iters.Max(); // then take the latest version
    // First file whose iteration matches; assumes iterations are unique per file.
    FileInfo weightFile = files[Array.FindIndex(iters, x => x == iter)];
    model = new LinearModel(weightFile, numFeatures, modelID, Distribution, Dimension);
    return iters.Max();
}
/// <summary>
/// Parses a logged weight CSV into one LinearModel per "Weight" header group.
/// Whether the file is time-independent (one weight column) or time-dependent
/// (one column per step) is inferred from the file name, as is the feature
/// mode (Local vs Global).
/// </summary>
/// <returns>
/// All parsed models when their count matches the expected number (1, or the
/// sum of feature-subset combinations for "exhaust" files); otherwise null.
/// </returns>
private static LinearModel[] ReadLoggedLinearWeights(FileInfo file, string distribution, string dimension, Model model)
{
    if (!file.Exists)
        throw new Exception(String.Format("File {0} doesn't exist! Cannot read weights.", file.Name));
    bool timeIndependent = Regex.IsMatch(file.Name, "timeindependent");
    Features.Mode featureMode = Regex.IsMatch(file.Name, Features.Mode.Local.ToString())
        ? Features.Mode.Local
        : Features.Mode.Global;
    List<string> header;
    List<string[]> content = CSV.Read(file, out header);
    // Weight,NrFeat,Model,Feature,NA,values
    const int WEIGHT = 0;
    const int NRFEAT = 1;
    const int MODEL = 2;
    const int FEATURE = 3;
    const int VALUE = 5;
    // Precompute the "phi.<Name>" labels used to match the Feature column.
    var strGlobalFeature = new string[Features.GlobalCount];
    for (var i = 0; i < Features.GlobalCount; i++)
        strGlobalFeature[i] = String.Format("phi.{0}", (Features.Global) i);
    var strLocalFeature = new string[Features.LocalCount];
    for (var i = 0; i < Features.LocalCount; i++)
        strLocalFeature[i] = String.Format("phi.{0}", (Features.Local)i);
    var models = new List<LinearModel>();
    LinearModel linearWeights = null;
    var uniqueTimeSteps = !timeIndependent ? RawData.DimString2Num(dimension) : 1;
    // Protocol: featFound counts rows consumed for the current model; when it
    // reaches nrFeat (or at the very first row, featFound == -1) a new model
    // group starts and the completed one is flushed into `models`.
    int nrFeat = -1, featFound = -1;
    foreach (var line in content.Where(line => line[WEIGHT].Equals("Weight")))
    {
        // NOTE(review): non-short-circuit `|` — both operands are side-effect
        // free, so this behaves like `||` here.
        if (featFound == nrFeat | featFound == -1)
        {
            if (linearWeights != null) models.Add(linearWeights);
            nrFeat = Convert.ToInt32(line[NRFEAT]);
            var idModel = Convert.ToInt32(line[MODEL]);
            linearWeights = new LinearModel(file, featureMode, uniqueTimeSteps, nrFeat, idModel, distribution, dimension, model);
            featFound = 0;
        }
        var phi = line[FEATURE];
        if (timeIndependent) // robust model
        {
            var value = Convert.ToDouble(line[VALUE], CultureInfo.InvariantCulture);
            // phi[4] is the first character after the "phi." prefix: local
            // features start lowercase, global features start uppercase.
            if (!Char.IsUpper(phi[4]))
            {
                for (var i = 0; i < Features.LocalCount; i++)
                {
                    if (String.Compare(phi, strLocalFeature[i], StringComparison.InvariantCultureIgnoreCase) != 0) continue;
                    if (linearWeights != null) linearWeights.LocalWeights[i][0] = value;
                    featFound++;
                    break;
                }
            }
            else
            {
                for (var i = 0; i < Features.GlobalCount; i++)
                {
                    if (String.Compare(phi, strGlobalFeature[i], StringComparison.InvariantCultureIgnoreCase) != 0) continue;
                    if (linearWeights != null) linearWeights.GlobalWeights[i][0] = value;
                    featFound++;
                    break;
                }
            }
        }
        else
        {
            // Time-dependent model: one weight column per step.
            // NOTE(review): the loop bound `uniqueTimeSteps - 1` skips the last
            // step — possibly because the final dispatch is forced, but it
            // reads like an off-by-one; confirm against the file layout.
            if (!Char.IsUpper(phi[4]))
            {
                for (var i = 0; i < Features.LocalCount; i++)
                {
                    if (String.Compare(phi, strLocalFeature[i], StringComparison.InvariantCultureIgnoreCase) != 0) continue;
                    for (var step = 0; step < uniqueTimeSteps - 1; step++)
                    {
                        var value = Convert.ToDouble(line[VALUE + step], CultureInfo.InvariantCulture);
                        if (linearWeights != null) linearWeights.LocalWeights[i][step] = value;
                    }
                    featFound++;
                    break;
                }
            }
            else
            {
                for (var i = 0; i < Features.GlobalCount; i++)
                {
                    if (String.Compare(phi, strGlobalFeature[i], StringComparison.InvariantCultureIgnoreCase) != 0) continue;
                    for (var step = 0; step < uniqueTimeSteps - 1; step++)
                    {
                        var value = Convert.ToDouble(line[VALUE + step], CultureInfo.InvariantCulture);
                        if (linearWeights != null) linearWeights.GlobalWeights[i][step] = value;
                    }
                    featFound++;
                    break;
                }
            }
        }
    }
    // Flush the final, fully populated model group.
    if (linearWeights != null && featFound == nrFeat) models.Add(linearWeights);
    int d = Features.LocalCount;
    // "exhaust" files are expected to contain all 1-, 2-, 3- and d-feature subsets.
    int minNum = Regex.IsMatch(file.Name, "exhaust")
        ? NChooseK(d, 1) + NChooseK(d, 2) + NChooseK(d, 3) + NChooseK(d, d)
        : 1;
    return models.Count == minNum ? models.ToArray() : null;
}
/// <summary>
/// Collects every linear model logged across all lmax-variant weight files
/// matching the given configuration.
/// </summary>
/// <returns>
/// The concatenated models from every parsable file, or null when no file
/// matches the pattern at all. May be an empty array when files matched but
/// none could be parsed.
/// </returns>
public static LinearModel[] GetAllVaryLmaxModels(string distribution, string dimension,
    TrainingSet.Trajectory track, int iter, bool extended, PreferenceSet.Ranking rank,
    bool timedependent, DirectoryInfo dataDir, string stepwiseBias)
{
    string pat = String.Format("full.{0}.{1}.{2}.{3}.{4}.weights.{5}_lmax[0-9]+.csv",
        distribution, dimension, (char) rank, Trajectory2String(track, iter, extended),
        stepwiseBias, timedependent ? "timedependent" : "timeindependent");
    DirectoryInfo dir = new DirectoryInfo(String.Format(@"{0}\PREF\weights", dataDir.FullName));
    Regex reg = new Regex(pat);
    var files = dir.GetFiles("*.csv").Where(path => reg.IsMatch(path.ToString())).ToList();
    if (files.Count < 1) return null;
    // Accumulate into a List instead of repeated Array.Resize + Array.Copy
    // (which reallocated and recopied the whole array on every file).
    var models = new List<LinearModel>();
    foreach (var model in files
        .Select(file => ReadLoggedLinearWeights(file, distribution, dimension, Model.PREF))
        .Where(model => model != null))
    {
        models.AddRange(model);
    }
    return models.ToArray();
}
/// <summary>
/// Writes the model's local weights to this instance's FileInfo path as CSV:
/// a header row with one "Step.N" column per time step, then one row per
/// local feature with its per-step weights.
/// </summary>
/// <param name="linearModel">Model whose LocalWeights are serialized.</param>
private void WriteFileInfo(LinearModel linearModel)
{
    // Stack both streams in using blocks so they are disposed even when
    // writing throws (the original FileStream leaked on a StreamWriter-ctor
    // failure and relied on explicit Close() calls).
    using (var fs = new FileStream(FileInfo.FullName, FileMode.Create, FileAccess.Write))
    using (var st = new StreamWriter(fs))
    {
        string header = "Type,NrFeat,Model,Feature,mean";
        int numSteps = linearModel.LocalWeights[0].Length;
        for (int step = 1; step <= numSteps; step++)
            header += String.Format(CultureInfo.InvariantCulture, ",Step.{0}", step);
        st.WriteLine(header);
        for (int iFeat = 0; iFeat < Features.LocalCount; iFeat++)
        {
            Features.Local feat = (Features.Local) iFeat;
            string info = String.Format("Weight,{0},1,phi.{1},NA", NUM_FEATURES, feat);
            for (int step = 0; step < numSteps; step++)
                info += String.Format(CultureInfo.InvariantCulture, ",{0:R9}", linearModel.LocalWeights[iFeat][step]);
            st.WriteLine(info);
        }
    }
}