public void AddJobs(NewJobs newJ, string expectedPreviousScheduleId)
{
  var newJobs = new List<JobPlan>();
  foreach (var j in newJ.Jobs)
  {
    j.Archived = true;
    j.JobCopiedToSystem = true;
    if (!_jobDB.DoesJobExist(j.UniqueStr))
    {
      for (int proc = 1; proc <= j.NumProcesses; proc++)
      {
        for (int path = 1; path <= j.GetNumPaths(proc); path++)
        {
          foreach (var stop in j.GetMachiningStop(proc, path))
          {
            //The station group name on the job and the LocationName from the
            //generated log entries must match.  Rather than store and try to look up
            //the station name when creating log entries, since we only support a single
            //machine group, just set the group name to MC here during storage and
            //always create log entries with MC.
            stop.StationGroup = "MC";
          }
        }
      }
      newJobs.Add(j);
    }
  }
  _jobDB.AddJobs(newJ, expectedPreviousScheduleId);
  OrderXML.WriteOrderXML(System.IO.Path.Combine(_xmlPath, "sail.xml"), newJobs, _onlyOrders);
}
private void AddJobsToDB(NewJobs newJ)
{
  foreach (var j in newJ.Jobs)
  {
    j.Archived = true;
    j.JobCopiedToSystem = false;
  }
  jobDB.AddJobs(newJ, null);

  //update the station group name: scan the stops until one with a non-empty
  //group is found, using goto to break out of all three nested loops at once
  foreach (var j in newJ.Jobs)
  {
    for (int proc = 1; proc <= j.NumProcesses; proc++)
    {
      for (int path = 1; path <= j.GetNumPaths(proc); path++)
      {
        foreach (var stop in j.GetMachiningStop(proc, path))
        {
          if (!string.IsNullOrEmpty(stop.StationGroup))
          {
            _machineGroupName = stop.StationGroup;
            goto foundGroup;
          }
        }
      }
    }
  }
foundGroup: ;
}
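// An equivalent lookup of the first non-empty station group, sketched without
// the goto above.  Illustrative only: FindMachineGroup is a hypothetical helper
// name, and the sketch assumes the same JobPlan traversal API used above
// (NumProcesses, GetNumPaths, GetMachiningStop) plus System.Linq.
private static string FindMachineGroup(NewJobs newJ)
{
  return newJ.Jobs
    .SelectMany(j =>
      Enumerable.Range(1, j.NumProcesses).SelectMany(proc =>
        Enumerable.Range(1, j.GetNumPaths(proc)).SelectMany(path =>
          j.GetMachiningStop(proc, path))))
    .Select(stop => stop.StationGroup)
    .FirstOrDefault(g => !string.IsNullOrEmpty(g));
}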
private void AddJobsToDB(NewJobs newJ)
{
  foreach (var j in newJ.Jobs)
  {
    j.Archived = true;
    j.JobCopiedToSystem = false;
    if (!jobDB.DoesJobExist(j.UniqueStr))
    {
      for (int proc = 1; proc <= j.NumProcesses; proc++)
      {
        for (int path = 1; path <= j.GetNumPaths(proc); path++)
        {
          foreach (var stop in j.GetMachiningStop(proc, path))
          {
            //The station group name on the job and the LocationName from the
            //generated log entries must match.  Rather than store and try to look up
            //the station name when creating log entries, since we only support a single
            //machine group, just set the group name to MC here during storage and
            //always create log entries with MC.
            stop.StationGroup = "MC";
          }
        }
      }
    }
  }
  jobDB.AddJobs(newJ, null);
}
public void AddJobs(NewJobs newJ, string expectedPreviousScheduleId)
{
  if (!OpenDatabaseKitDB.MazakTransactionLock.WaitOne(TimeSpan.FromMinutes(2), true))
  {
    throw new Exception("Unable to obtain mazak database lock");
  }
  try
  {
    _writeJobs.AddJobs(newJ, expectedPreviousScheduleId);
  }
  finally
  {
    OpenDatabaseKitDB.MazakTransactionLock.ReleaseMutex();
  }
}
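// Every write against the Mazak database follows the same acquire/try/finally
// pattern shown above.  A minimal sketch of factoring that into a reusable
// guard; WithMazakLock is a hypothetical helper, not part of the original code.
private void WithMazakLock(Action action)
{
  if (!OpenDatabaseKitDB.MazakTransactionLock.WaitOne(TimeSpan.FromMinutes(2), true))
  {
    throw new Exception("Unable to obtain mazak database lock");
  }
  try
  {
    action();
  }
  finally
  {
    OpenDatabaseKitDB.MazakTransactionLock.ReleaseMutex();
  }
}

// Hypothetical usage:
// WithMazakLock(() => _writeJobs.AddJobs(newJ, expectedPreviousScheduleId));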
public void AddJobs(NewJobs newJ, string expectedPreviousScheduleId)
{
  // check previous schedule id
  if (!string.IsNullOrEmpty(newJ.ScheduleId))
  {
    var recentDbSchedule = jobDB.LoadMostRecentSchedule();
    if (!string.IsNullOrEmpty(expectedPreviousScheduleId)
        && expectedPreviousScheduleId != recentDbSchedule.LatestScheduleId)
    {
      throw new BlackMaple.MachineFramework.BadRequestException(
        "Expected previous schedule ID does not match current schedule ID.  Another user may have already created a schedule.");
    }
  }

  //check for an old schedule that has not yet been copied
  var oldJobs = jobDB.LoadJobsNotCopiedToSystem(
    DateTime.UtcNow.AddDays(-1), DateTime.UtcNow.AddHours(1), includeDecremented: false);
  if (oldJobs.Jobs.Count > 0)
  {
    //there are jobs to copy
    Log.Information("Resuming copy of job schedules into mazak {uniqs}",
      oldJobs.Jobs.Select(j => j.UniqueStr).ToList());
    AddSchedules(oldJobs.Jobs);
  }

  // add programs here first so that they exist in the database when looking up
  // the most recent revision for use in parts
  jobDB.AddPrograms(newJ.Programs, DateTime.UtcNow);

  //add fixtures, pallets, parts.  If this fails, just throw an exception;
  //they will be deleted during the next download.
  AddFixturesPalletsParts(newJ);

  //Now that the parts have been added and we are confident that there are no
  //problems with the jobs, add them to the database.  Once this occurs, the
  //timer will pick them up and eventually copy them to the system.
  AddJobsToDB(newJ);

  System.Threading.Thread.Sleep(TimeSpan.FromSeconds(5));

  AddSchedules(newJ.Jobs);

  hold.SignalNewSchedules();
}
public void StatusSnapshot(string scenario)
{
  /*
   * Symlinks not supported on Windows
   * var newJobs = JsonConvert.DeserializeObject<NewJobs>(
   *   File.ReadAllText(
   *     Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".jobs.json")),
   *   jsonSettings
   * );
   */
  NewJobs newJobs = null;
  if (scenario.Contains("basic"))
  {
    newJobs = JsonConvert.DeserializeObject<NewJobs>(
      File.ReadAllText(
        Path.Combine("..", "..", "..", "sample-newjobs", "fixtures-queues.json")),
      jsonSettings
    );
  }
  else if (scenario.Contains("multiface"))
  {
    newJobs = JsonConvert.DeserializeObject<NewJobs>(
      File.ReadAllText(
        Path.Combine("..", "..", "..", "sample-newjobs", "multi-face.json")),
      jsonSettings
    );
  }
  else if (scenario.Contains("pathgroups"))
  {
    newJobs = JsonConvert.DeserializeObject<NewJobs>(
      File.ReadAllText(
        Path.Combine("..", "..", "..", "sample-newjobs", "path-groups.json")),
      jsonSettings
    );
  }
  _jobDB.AddJobs(newJobs, null);

  var allData = JsonConvert.DeserializeObject<MazakAllData>(
    File.ReadAllText(
      Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".data.json")),
    jsonSettings
  );

  var logDb = _emptyLog;
  bool close = false;
  var existingLogPath =
    Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".log.db");
  if (File.Exists(existingLogPath))
  {
    logDb = new JobLogDB(new FMSSettings());
    logDb.Open(existingLogPath);
    close = true;
  }

  if (scenario == "basic-no-material")
  {
    queueSyncFault.CurrentQueueMismatch.Returns(true);
  }

  CurrentStatus status;
  try
  {
    status = BuildCurrentStatus.Build(_jobDB, logDb, _settings, queueSyncFault,
      MazakDbType.MazakSmooth, allData,
      new DateTime(2018, 7, 19, 20, 42, 3, DateTimeKind.Utc));
  }
  finally
  {
    if (close) logDb.Close();
  }

  var expectedStatus = JsonConvert.DeserializeObject<CurrentStatus>(
    File.ReadAllText(
      Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".status.json")),
    jsonSettings
  );

  status.Should().BeEquivalentTo(expectedStatus,
    options => options.Excluding(c => c.TimeOfCurrentStatusUTC)
  );
}
public void AddJobs(NewJobs jobs, string expectedPreviousScheduleId)
{
  JobDB.AddJobs(jobs, expectedPreviousScheduleId);
}
private void AddFixturesPalletsParts(NewJobs newJ)
{
  var mazakData = readDatabase.LoadAllData();

  //first allocate a UID to use for this download
  int UID = 0;
  while (UID < int.MaxValue)
  {
    //check schedule rows for UID
    foreach (var schRow in mazakData.Schedules)
    {
      if (MazakPart.ParseUID(schRow.PartName) == UID)
        goto found;
    }

    //check fixture rows for UID
    foreach (var fixRow in mazakData.Fixtures)
    {
      if (MazakPart.ParseUID(fixRow.FixtureName) == UID)
        goto found;
    }

    break;
found:
    UID += 1;
  }
  if (UID == int.MaxValue)
  {
    throw new Exception("Unable to find unused UID");
  }
  Log.Debug("Creating new schedule with UID {uid}", UID);

  var (transSet, savedParts) = BuildMazakSchedules.RemoveCompletedAndDecrementSchedules(
    mazakData, UseStartingOffsetForDueDate
  );
  if (transSet.Schedules.Any())
  {
    writeDb.Save(transSet, "Update schedules");
  }
  Log.Debug("Saved Parts: {parts}", savedParts);

  var jobErrs = new List<string>();
  var mazakJobs = ConvertJobsToMazakParts.JobsToMazak(
    newJ.Jobs,
    UID,
    mazakData,
    savedParts,
    writeDb.MazakType,
    CheckPalletsUsedOnce,
    fmsSettings,
    LookupProgram,
    jobErrs);
  if (jobErrs.Any())
  {
    throw new BlackMaple.MachineFramework.BadRequestException(
      string.Join(Environment.NewLine, jobErrs)
    );
  }

  //delete everything
  transSet = mazakJobs.DeleteOldPartPalletRows();
  if (transSet.Parts.Any() || transSet.Pallets.Any())
  {
    try
    {
      writeDb.Save(transSet, "Delete Parts Pallets");
    }
    catch (ErrorModifyingParts e)
    {
      foreach (var partName in e.PartNames)
      {
        if (readDatabase.CheckPartExists(partName))
        {
          throw new Exception("Mazak returned an error when attempting to delete part " + partName);
        }
      }
    }
  }

  //fixtures must be deleted after the schedules, parts, and pallets have
  //already been deleted; this transaction also adds the new fixtures
  transSet = mazakJobs.CreateDeleteFixtureAndProgramDatabaseRows(jobDB.LoadProgramContent, ProgramDirectory);
  writeDb.Save(transSet, "Fixtures");

  //now save the pallets and parts
  transSet = mazakJobs.CreatePartPalletDatabaseRows();
  writeDb.Save(transSet, "Add Parts");
}
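// The UID search above rescans the schedule and fixture rows once per candidate
// UID.  A sketch of the same allocation done in a single pass, assuming only
// the MazakPart.ParseUID API used above; AllocateUnusedUid is a hypothetical
// helper, not part of the original code.
private static int AllocateUnusedUid(MazakAllData mazakData)
{
  // collect every UID already present in the schedule and fixture rows
  var used = new HashSet<int>(
    mazakData.Schedules.Select(s => MazakPart.ParseUID(s.PartName)).Concat(
      mazakData.Fixtures.Select(f => MazakPart.ParseUID(f.FixtureName))));

  // take the first free non-negative integer
  for (int uid = 0; uid < int.MaxValue; uid++)
  {
    if (!used.Contains(uid)) return uid;
  }
  throw new Exception("Unable to find unused UID");
}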
static async Task<int> Main(string[] args)
{
  try
  {
    var result = Parser.Default.ParseArguments<Options>(args);
    if (result.Tag == ParserResultType.NotParsed)
    {
      return 1;
    }
    var options = ((Parsed<Options>)result).Value;

    if (!options.StartUTC.HasValue)
    {
      options.StartUTC = (new DateTime(2016, 11, 5, 7, 0, 0, DateTimeKind.Local)).ToUniversalTime();
    }
    if (!options.EndUTC.HasValue)
    {
      options.EndUTC = (new DateTime(2016, 11, 6, 7, 0, 0, DateTimeKind.Local)).ToUniversalTime();
    }

    //load inputs
    var jsonSettings = new JsonSerializerSettings();
    jsonSettings.Converters.Add(new BlackMaple.FMSInsight.API.TimespanConverter());
    jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
    jsonSettings.DateTimeZoneHandling = DateTimeZoneHandling.Utc;

    var flex = JsonConvert.DeserializeObject<FlexPlan>(
      System.IO.File.ReadAllText(options.FlexJsonFile), jsonSettings);

    IEnumerable<StationDowntime> downtime;
    if (!string.IsNullOrEmpty(options.DowntimeJsonFile))
    {
      downtime = JsonConvert.DeserializeObject<List<StationDowntime>>(
        System.IO.File.ReadAllText(options.DowntimeJsonFile), jsonSettings);
    }
    else if (!string.IsNullOrEmpty(options.DowntimeJson))
    {
      downtime = JsonConvert.DeserializeObject<List<StationDowntime>>(
        options.DowntimeJson, jsonSettings);
    }
    else
    {
      downtime = new StationDowntime[] { };
    }

    UnscheduledStatus bookings;
    if (!string.IsNullOrEmpty(options.BookingsJsonFile))
    {
      bookings = JsonConvert.DeserializeObject<UnscheduledStatus>(
        File.ReadAllText(options.BookingsJsonFile), jsonSettings);
    }
    else if (!string.IsNullOrEmpty(options.BookingsCsvFile))
    {
      using (var f = File.OpenRead(options.BookingsCsvFile))
      using (var csv = new CsvHelper.CsvReader(new StreamReader(f)))
      {
        var bookingMap = new Dictionary<string, Booking>();
        foreach (var row in csv.GetRecords<dynamic>())
        {
          var bookingId = row.Id;
          Booking work;
          if (bookingMap.ContainsKey(bookingId))
          {
            work = bookingMap[bookingId];
          }
          else
          {
            work = new Booking
            {
              BookingId = bookingId,
              Priority = int.Parse(row.Priority),
              DueDate = DateTime.Parse(row.DueDate),
              Parts = new List<BookingDemand>(),
              ScheduleId = null
            };
            bookingMap.Add(bookingId, work);
          }
          work.Parts.Add(new BookingDemand
          {
            BookingId = bookingId,
            Part = row.Part,
            Quantity = int.Parse(row.Quantity),
            CastingId = null,
          });
        }
        bookings = new UnscheduledStatus()
        {
          UnscheduledBookings = bookingMap.Values,
          ScheduledParts = Enumerable.Empty<ScheduledPartWithoutBooking>(),
        };
      }
    }
    else
    {
      using (var reader = new StreamReader(Console.OpenStandardInput(), Console.InputEncoding))
      {
        var s = JsonSerializer.Create(jsonSettings);
        bookings = s.Deserialize<UnscheduledStatus>(new JsonTextReader(reader));
      }
    }

    if (string.IsNullOrEmpty(options.ScheduleId))
    {
      options.ScheduleId = CreateScheduleId.Create();
    }

    //run allocation
    var loader = new AssemblyLoader(Path.GetFullPath(options.Plugin));
    var allocate = loader.LoadPlugin();
    if (allocate == null)
    {
      return 1;
    }
    var results = allocate.Allocate(
      bookings,
      default(BlackMaple.FMSInsight.API.PlannedSchedule),
      default(BlackMaple.FMSInsight.API.CurrentStatus),
      flex,
      options.StartUTC.Value,
      options.EndUTC.Value,
      options.FillMethod,
      options.ScheduleId,
      downtime);

    if (string.IsNullOrEmpty(options.DownloadServer))
    {
      //print results
      System.Console.WriteLine(
        JsonConvert.SerializeObject(results, Formatting.Indented, jsonSettings));
    }
    else
    {
      // download
      var newJobs = new NewJobs();
      newJobs.ScheduleId = options.ScheduleId;
      newJobs.Jobs = new ObservableCollection<JobPlan>(results.Jobs);
      newJobs.StationUse = new ObservableCollection<SimulatedStationUtilization>(results.SimStations);
      newJobs.ExtraParts = results.NewExtraParts.ToDictionary(x => x.Part, x => x.Quantity);
      newJobs.ArchiveCompletedJobs = true;
      newJobs.QueueSizes = new Dictionary<string, QueueSize>(results.QueueSizes);

      if (bookings.Programs != null)
      {
        var programsInJobs = new HashSet<string>(
          results.Jobs
            .SelectMany(j => j.ProcsAndPaths)
            .SelectMany(p => p.Paths)
            .SelectMany(p => p.Stops)
            .Select(p => p.Program)
        );
        newJobs.Programs = bookings.Programs
          .Where(p => programsInJobs.Contains(p.ProgramName))
          .Select(prog => new BlackMaple.FMSInsight.API.ProgramEntry()
          {
            ProgramName = prog.ProgramName,
            Revision = prog.Revision ?? 0,
            Comment = prog.Comment,
            ProgramContent = prog.ProgramContent
          }).ToList();
      }

      var builder = new UriBuilder(options.DownloadServer);
      if (builder.Scheme == "") builder.Scheme = "http";
      if (builder.Port == 80) builder.Port = 5000;
      var client = new JobsClient(builder.Uri.ToString(), new System.Net.Http.HttpClient());
      await client.AddAsync(newJobs, null);
    }

    return 0;
  }
  catch (Exception ex)
  {
    System.Console.Error.WriteLine("Error during allocate. " + Environment.NewLine + ex.ToString());
    return 1;
  }
}
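// The bookings CSV branch above reads one row per part demand and merges rows
// sharing an Id into a single Booking.  A hypothetical input matching the
// columns the reader accesses (Id, Priority, DueDate, Part, Quantity):
//
//   Id,Priority,DueDate,Part,Quantity
//   book1,10,2016-11-05,partA,3
//   book1,10,2016-11-05,partB,1
//   book2,20,2016-11-06,partA,5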
public void Add([FromBody] NewJobs newJobs, [FromQuery] string expectedPreviousScheduleId)
{
  _control.AddJobs(newJobs, expectedPreviousScheduleId);
}
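// A client reaches this endpoint through the generated JobsClient, as in the
// download branch of Main above.  A hedged sketch; the server URL is
// illustrative only, and the route prefix comes from controller attributes
// that are not shown here.
// var client = new JobsClient("http://localhost:5000", new System.Net.Http.HttpClient());
// await client.AddAsync(newJobs, expectedPreviousScheduleId);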