public BuildCurrentStatusSpec()
{
  // Fresh in-memory SQLite event-log database for each spec run.
  var logConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
  logConn.Open();
  _emptyLog = new JobLogDB(new FMSSettings(), logConn);
  _emptyLog.CreateTables(firstSerialOnEmpty: null);

  // Fresh in-memory SQLite job database.
  var jobConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
  jobConn.Open();
  _jobDB = new JobDB(jobConn);
  _jobDB.CreateTables();

  // Register every queue name referenced by the snapshot scenarios.
  _settings = new FMSSettings();
  foreach (var queueName in new[] { "castings", "queueAAA", "queueBBB", "queueCCC" })
  {
    _settings.Queues[queueName] = new QueueSize();
  }

  // JSON settings used to read the snapshot files and compare statuses.
  jsonSettings = new JsonSerializerSettings
  {
    DateTimeZoneHandling = DateTimeZoneHandling.Utc,
    Formatting = Formatting.Indented,
    ConstructorHandling = ConstructorHandling.AllowNonPublicDefaultConstructor
  };
  jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter());
  jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());

  // Default to no queue mismatch; individual tests override this as needed.
  queueSyncFault = Substitute.For<IQueueSyncFault>();
  queueSyncFault.CurrentQueueMismatch.Returns(false);
}
public EventDBUpgradeSpec()
{
  // Copy the checked-in version-17 database to a scratch temp file so the
  // schema upgrade runs against a disposable copy, then open it.
  _tempFile = System.IO.Path.GetTempFileName();
  System.IO.File.Copy("log.v17.db", _tempFile, overwrite: true);
  _log = new JobLogDB(new FMSSettings());
  _log.Open(_tempFile);
}
// Initializes the Cincron backend: resolves the cell-controller message file,
// opens the event-log database, and starts the message watcher.
public CincronBackend(IConfigurationSection config, FMSSettings cfg)
{
  try
  {
    string msgFile;
#if DEBUG
    // Debug builds read the sample messages checked into the test tree,
    // resolved relative to the executing assembly (Windows-style path).
    var path = System.Reflection.Assembly.GetExecutingAssembly().Location;
    msgFile = System.IO.Path.Combine( System.IO.Path.GetDirectoryName(path), "..\\..\\..\\test\\Cincron\\parker-example-messages");
#else
    msgFile = config.GetValue <string>("Message File");
#endif
    Log.Information("Starting cincron backend with message file {file}", msgFile);
    // A missing file is logged but not fatal; the watcher still starts.
    if (!System.IO.File.Exists(msgFile))
    {
      Log.Error("Message file {file} does not exist", msgFile);
    }
    _log = new JobLogDB(cfg);
    _log.Open( System.IO.Path.Combine(cfg.DataDirectory, "log.db"), startingSerial: cfg.StartingSerial );
    _msgWatcher = new MessageWatcher(msgFile, _log, cfg);
    _msgWatcher.Start();
  }
  catch (Exception ex)
  {
    // Swallow after logging so a bad configuration does not take down the host;
    // fields may remain null in that case (Dispose tolerates this).
    Log.Error(ex, "Unhandled exception when initializing cincron backend");
  }
}
public MazakQueues(JobLogDB log, JobDB jDB, IWriteData trans)
{
  // Stash collaborators; no Mazak communication happens until a sync runs.
  _log = log;
  _jobDB = jDB;
  _transDB = trans;
  // Start out assuming queues and schedules agree.
  CurrentQueueMismatch = false;
}
public InspectionTest()
{
  // Each test gets its own in-memory event-log database with a fresh schema.
  var connection = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
  connection.Open();
  _insp = new JobLogDB(new FMSSettings(), connection);
  _insp.CreateTables(firstSerialOnEmpty: null);
}
public MessageWatcher(string msgFile, JobLogDB log, FMSSettings s)
{
  // Record collaborators; polling does not begin until Start() is called.
  _log = log;
  _settings = s;
  _msgFile = msgFile;
  _lock = new object();
  // Check the message file once per minute.
  var oneMinuteMs = TimeSpan.FromMinutes(1).TotalMilliseconds;
  _timer = new System.Timers.Timer(oneMinuteMs);
  _timer.Elapsed += CheckMessages;
}
// Initializes the Makino backend: reads configuration (with fallbacks),
// opens the log/job/status databases, and wires the Makino DB, log timer,
// and job download components together.
public MakinoBackend(IConfiguration config, FMSSettings st)
{
  try
  {
    var cfg = config.GetSection("Makino");

    // Directory where order files are dropped for the Makino ADE interface.
    string adePath = cfg.GetValue <string>("ADE Path");
    if (string.IsNullOrEmpty(adePath)) { adePath = @"c:\Makino\ADE"; }

    // SQL Server connection; autodetected when not configured.
    string dbConnStr = cfg.GetValue <string>("SQL Server Connection String");
    if (string.IsNullOrEmpty(dbConnStr)) { dbConnStr = DetectSqlConnectionStr(); }

    bool downloadOnlyOrders = cfg.GetValue <bool>("Download Only Orders");

    Log.Information( "Starting makino backend. Connection Str: {connStr}, ADE Path: {path}, DownloadOnlyOrders: {downOnlyOrders}", dbConnStr, adePath, downloadOnlyOrders);

    // Open the Insight-side databases in the configured data directory.
    _dataDirectory = st.DataDirectory;
    _log = new JobLogDB(st);
    _log.Open( System.IO.Path.Combine(_dataDirectory, "log.db"), System.IO.Path.Combine(_dataDirectory, "inspections.db"), startingSerial: st.StartingSerial );
    _jobDB = new BlackMaple.MachineFramework.JobDB();
    _jobDB.Open(System.IO.Path.Combine(_dataDirectory, "jobs.db"));
    _status = new StatusDB(System.IO.Path.Combine(_dataDirectory, "makino.db"));

#if DEBUG
    // Debug builds use a local SQL instance and ignore the configured string.
    _makinoDB = new MakinoDB(MakinoDB.DBTypeEnum.SqlLocal, "", _status, _log);
#else
    _makinoDB = new MakinoDB(MakinoDB.DBTypeEnum.SqlConnStr, dbConnStr, _status, _log);
#endif

    _logTimer = new LogTimer(_log, _jobDB, _makinoDB, _status, st);
    _jobs = new Jobs(_makinoDB, _jobDB, adePath, downloadOnlyOrders);
    _logTimer.LogsProcessed += OnLogsProcessed;
  }
  catch (Exception ex)
  {
    // Swallow after logging so the host process stays up even if the backend
    // failed to initialize; fields may remain null in that case.
    Log.Error(ex, "Error when initializing makino backend");
  }
}
// Builds the mock server backend used for UI development: creates log/job
// databases (on disk or in memory) and loads sample events, jobs, and status,
// time-shifted so the sample data appears recent.
public MockServerBackend()
{
  // Debug toggle: set to a directory to persist the databases on disk;
  // leave null to use throwaway in-memory SQLite databases.
  string path = null; // dataDir
  string dbFile(string f) => System.IO.Path.Combine(path, f + ".db");
  if (path != null)
  {
    // On-disk mode: delete any stale files so each run starts clean.
    if (System.IO.File.Exists(dbFile("log"))) { System.IO.File.Delete(dbFile("log")); }
    LogDB = new JobLogDB(new FMSSettings());
    LogDB.Open(dbFile("log"), dbFile("insp"));
    if (System.IO.File.Exists(dbFile("job"))) { System.IO.File.Delete(dbFile("job")); }
    JobDB = new JobDB();
    JobDB.Open(dbFile("job"));
  }
  else
  {
    // In-memory mode: one connection per database, schema created fresh.
    var conn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
    conn.Open();
    LogDB = new JobLogDB(new FMSSettings(), conn);
    LogDB.CreateTables(firstSerialOnEmpty: null);
    conn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
    conn.Open();
    JobDB = new JobDB(conn);
    JobDB.CreateTables();
  }

  // JSON settings matching the format of the checked-in sample files.
  _jsonSettings = new JsonSerializerSettings();
  _jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
  _jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter());
  _jsonSettings.ContractResolver = new Newtonsoft.Json.Serialization.DefaultContractResolver();
  _jsonSettings.ConstructorHandling = Newtonsoft.Json.ConstructorHandling.AllowNonPublicDefaultConstructor;

  var sampleDataPath = System.IO.Path.Combine( System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "../../../sample-data/" );

  // sample data starts at Jan 1, 2018. Need to offset to current month
  var jan1_18 = new DateTime(2018, 1, 1, 0, 0, 0, DateTimeKind.Utc);
  var offset = DateTime.UtcNow.AddDays(-28).Subtract(jan1_18);

  LoadEvents(sampleDataPath, offset);
  LoadJobs(sampleDataPath, offset);
  LoadStatus(sampleDataPath, offset);
}
public void Dispose()
{
  // Stop the watcher first so nothing writes to the log while it closes,
  // then drop both references.  Safe to call when initialization failed.
  _msgWatcher?.Halt();
  _log?.Close();
  _msgWatcher = null;
  _log = null;
}
// Wires up the Makino log poller: processes any backlog immediately, then
// polls once per minute.
public LogTimer( JobLogDB log, JobDB jobDB, MakinoDB makinoDB, StatusDB status, FMSSettings settings)
{
  _lock = new object();
  _log = log;
  _jobDB = jobDB;
  Settings = settings;
  _makinoDB = makinoDB;
  _status = status;
  // Run one poll synchronously during construction to catch up on any
  // entries that accumulated while the backend was stopped.
  TimerSignaled(null, null);
  // Then poll once per minute in the background.
  _timer = new System.Timers.Timer(TimeSpan.FromMinutes(1).TotalMilliseconds);
  _timer.Elapsed += TimerSignaled;
  _timer.Start();
}
// Assigns a serial number to the given material, unless one was already
// recorded for this process.  The serial is derived from the material id,
// truncated/zero-padded to the configured serial length.
public static void CreateSerial(long matID, string jobUniqe, string partName, int process, string face, JobLogDB _log, FMSSettings Settings)
{
  // A PartMark entry at location 1 containing this process means the serial
  // was already recorded; nothing to do.
  var alreadyMarked = _log.GetLogForMaterial(matID)
    .Where(e => e.LogType == LogType.PartMark && e.LocationNum == 1)
    .SelectMany(e => e.Material)
    .Any(m => m.Process == process);
  if (alreadyMarked)
  {
    return;
  }

  var serial = Settings.ConvertMaterialIDToSerial(matID);
  //length 10 gets us to 1.5e18 which is not quite 2^64
  //still large enough so we will practically never roll around
  serial = serial.Substring(0, Math.Min(Settings.SerialLength, serial.Length));
  serial = serial.PadLeft(Settings.SerialLength, '0');
  Log.Debug("Recording serial for matid: {matid} {serial}", matID, serial);

  var logMat = new JobLogDB.EventLogMaterial()
  {
    MaterialID = matID,
    Process = process,
    Face = face
  };
  _log.RecordSerialForMaterialID(logMat, serial);
}
// Assembles the complete CurrentStatus snapshot from the Mazak cell data plus
// the Insight job and event-log databases: jobs (from schedules), pallets and
// the material on them, material waiting in queues, and not-yet-copied jobs.
public static CurrentStatus Build(JobDB jobDB, JobLogDB log, FMSSettings fmsSettings, IMachineGroupName machineGroupName, IQueueSyncFault queueSyncFault, MazakDbType dbType, MazakAllData mazakData, DateTime utcNow)
{
  //Load process and path numbers
  Dictionary <string, int> uniqueToMaxPath;
  Dictionary <string, int> uniqueToMaxProcess;
  CalculateMaxProcAndPath(mazakData, out uniqueToMaxPath, out uniqueToMaxProcess);

  var currentLoads = new List <LoadAction>(mazakData.LoadActions);
  var curStatus = new CurrentStatus();
  foreach (var k in fmsSettings.Queues) { curStatus.QueueSizes[k.Key] = k.Value; }

  // Surface Mazak alarms and any queue-sync mismatch as status alarms.
  if (mazakData.Alarms != null)
  {
    foreach (var alarm in mazakData.Alarms)
    {
      if (!string.IsNullOrEmpty(alarm.AlarmMessage)) { curStatus.Alarms.Add(alarm.AlarmMessage); }
    }
  }
  if (queueSyncFault.CurrentQueueMismatch)
  {
    curStatus.Alarms.Add("Queue contents and Mazak schedule quantity mismatch.");
  }

  // Walk the Mazak schedules, building one InProcessJob per unique; multiple
  // schedules can map onto the same job (one per path).
  var jobsBySchID = new Dictionary <long, InProcessJob>();
  var pathBySchID = new Dictionary <long, MazakPart.IProcToPath>();
  foreach (var schRow in mazakData.Schedules)
  {
    // Skip parts not created by Insight.
    if (!MazakPart.IsSailPart(schRow.PartName)) { continue; }
    MazakPartRow partRow = null;
    foreach (var p in mazakData.Parts)
    {
      if (p.PartName == schRow.PartName) { partRow = p; break; }
    }
    if (partRow == null) { continue; }

    //Parse data from the database
    // Part names are "<part>:<suffix>"; strip everything after the colon.
    var partName = partRow.PartName;
    int loc = partName.IndexOf(':');
    if (loc >= 0) { partName = partName.Substring(0, loc); }
    // The job unique and proc-to-path mapping are encoded in the part comment.
    string jobUnique = "";
    MazakPart.IProcToPath procToPath = null;
    bool manual = false;
    if (!string.IsNullOrEmpty(partRow.Comment))
    {
      MazakPart.ParseComment(partRow.Comment, out jobUnique, out procToPath, out manual);
    }
    if (!uniqueToMaxProcess.ContainsKey(jobUnique)) { continue; }
    int numProc = uniqueToMaxProcess[jobUnique];
    int maxProc1Path = uniqueToMaxPath[jobUnique];

    InProcessJob job;
    //Create or lookup the job
    if (curStatus.Jobs.ContainsKey(jobUnique))
    {
      job = curStatus.Jobs[jobUnique];
    }
    else
    {
      // Every process gets the same path count as process 1.
      var jobPaths = new int[numProc];
      for (int i = 0; i < numProc; i++) { jobPaths[i] = maxProc1Path; }
      job = new InProcessJob(jobUnique, numProc, jobPaths);
      job.PartName = partName;
      job.JobCopiedToSystem = true;
      curStatus.Jobs.Add(jobUnique, job);
    }
    jobsBySchID.Add(schRow.Id, job);
    pathBySchID.Add(schRow.Id, procToPath);

    //Job Basics
    job.SetPlannedCyclesOnFirstProcess(procToPath.PathForProc(proc: 1), schRow.PlanQuantity);
    AddCompletedToJob(schRow, job, procToPath);
    job.Priority = schRow.Priority;
    if (((HoldPattern.HoldMode)schRow.HoldMode) == HoldPattern.HoldMode.FullHold)
    {
      job.HoldEntireJob.UserHold = true;
    }
    else
    {
      job.HoldEntireJob.UserHold = false;
    }
    AddRoutingToJob(mazakData, partRow, job, machineGroupName, procToPath, dbType);
  }

  // Merge in job details from the job database, once per unique.
  var loadedJobs = new HashSet <string>();
  foreach (var j in jobsBySchID.Values)
  {
    if (loadedJobs.Contains(j.UniqueStr)) { continue; }
    loadedJobs.Add(j.UniqueStr);
    AddDataFromJobDB(jobDB, j);
  }

  //Now add pallets
  foreach (var palRow in mazakData.Pallets)
  {
    if (palRow.PalletNumber > 0 && !curStatus.Pallets.ContainsKey(palRow.PalletNumber.ToString()))
    {
      var palName = palRow.PalletNumber.ToString();
      var palLoc = FindPalletLocation(machineGroupName, mazakData, dbType, palRow.PalletNumber);

      //Create the pallet
      PalletStatus status = new PalletStatus()
      {
        Pallet = palName,
        CurrentPalletLocation = palLoc,
        FixtureOnPallet = palRow.Fixture,
        NumFaces = 1,
        OnHold = false
      };
      curStatus.Pallets.Add(status.Pallet, status);

      var oldCycles = log.CurrentPalletLog(palName);

      //Add the material currently on the pallet
      foreach (var palSub in mazakData.PalletSubStatuses)
      {
        if (palSub.PalletNumber != palRow.PalletNumber) { continue; }
        if (palSub.FixQuantity <= 0) { continue; }
        if (!jobsBySchID.ContainsKey(palSub.ScheduleID)) { continue; }

        // Each process number occupies its own face on the pallet.
        status.NumFaces = Math.Max(status.NumFaces, palSub.PartProcessNumber);

        var job = jobsBySchID[palSub.ScheduleID];
        var procToPath = pathBySchID[palSub.ScheduleID];

        // Match material ids recorded by earlier cycles on this pallet, in order;
        // pieces without a recorded id get MaterialID = -1.
        var matIDs = new Queue <long>(FindMatIDsFromOldCycles(oldCycles, job.UniqueStr, palSub.PartProcessNumber));

        for (int i = 1; i <= palSub.FixQuantity; i++)
        {
          int face = palSub.PartProcessNumber;
          long matID = -1;
          if (matIDs.Count > 0) { matID = matIDs.Dequeue(); }
          var matDetails = log.GetMaterialDetails(matID);
          var inProcMat = new InProcessMaterial()
          {
            MaterialID = matID,
            JobUnique = job.UniqueStr,
            PartName = job.PartName,
            Process = palSub.PartProcessNumber,
            Path = procToPath.PathForProc(palSub.PartProcessNumber),
            Serial = matDetails?.Serial,
            WorkorderId = matDetails?.Workorder,
            SignaledInspections = log.LookupInspectionDecisions(matID)
              .Where(x => x.Inspect)
              .Select(x => x.InspType)
              .Distinct()
              .ToList(),
            // A completed machine cycle for this material marks route stop 0 done.
            LastCompletedMachiningRouteStopIndex = oldCycles.Any(
              c => c.LogType == LogType.MachineCycle && !c.StartOfCycle && c.Material.Any(m => m.MaterialID == matID && m.Process == palSub.PartProcessNumber)
            ) ? (int?)0 : null,
            Location = new InProcessMaterialLocation()
            {
              Type = InProcessMaterialLocation.LocType.OnPallet,
              Pallet = status.Pallet,
              Face = face
            },
            Action = new InProcessMaterialAction() { Type = InProcessMaterialAction.ActionType.Waiting }
          };
          curStatus.Material.Add(inProcMat);

          //check for unloading or transfer
          var loadNext = CheckLoadOfNextProcess(currentLoads, job.UniqueStr, palSub.PartProcessNumber, palLoc);
          var unload = CheckUnload(currentLoads, job.UniqueStr, palSub.PartProcessNumber, palLoc);

          if (loadNext != null)
          {
            // Being transferred onto the next process's face of this pallet.
            var start = FindLoadStartFromOldCycles(oldCycles, matID);
            inProcMat.Action = new InProcessMaterialAction()
            {
              Type = InProcessMaterialAction.ActionType.Loading,
              LoadOntoFace = palSub.PartProcessNumber + 1,
              LoadOntoPallet = status.Pallet,
              ProcessAfterLoad = palSub.PartProcessNumber + 1,
              PathAfterLoad = procToPath.PathForProc(palSub.PartProcessNumber + 1),
              ElapsedLoadUnloadTime = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null
            };
          }
          else if (unload != null)
          {
            // Being unloaded: final process completes, earlier processes stay in-process.
            var start = FindLoadStartFromOldCycles(oldCycles, matID);
            inProcMat.Action = new InProcessMaterialAction()
            {
              Type = palSub.PartProcessNumber == job.NumProcesses ? InProcessMaterialAction.ActionType.UnloadToCompletedMaterial : InProcessMaterialAction.ActionType.UnloadToInProcess,
              UnloadIntoQueue = job.GetOutputQueue( process: palSub.PartProcessNumber, path: procToPath.PathForProc(palSub.PartProcessNumber)),
              ElapsedLoadUnloadTime = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null
            };
          }
          else
          {
            // detect if machining
            var start = FindMachineStartFromOldCycles(oldCycles, matID);
            if (start != null)
            {
              var machStop = job.GetMachiningStop(inProcMat.Process, inProcMat.Path).FirstOrDefault();
              var elapsedTime = utcNow.Subtract(start.EndTimeUTC);
              inProcMat.Action = new InProcessMaterialAction()
              {
                Type = InProcessMaterialAction.ActionType.Machining,
                ElapsedMachiningTime = elapsedTime,
                ExpectedRemainingMachiningTime = machStop != null?machStop.ExpectedCycleTime.Subtract(elapsedTime) : TimeSpan.Zero
              };
            }
          }
        }
      }

      // Pallet sitting at load/unload: synthesize loads/unloads not reflected
      // in PalletSubStatuses from the pending load actions.
      if (palLoc.Location == PalletLocationEnum.LoadUnload)
      {
        var start = FindLoadStartFromOldCycles(oldCycles);
        var elapsedLoad = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null;
        AddLoads(log, currentLoads, status.Pallet, palLoc, elapsedLoad, curStatus);
        AddUnloads(log, currentLoads, status, elapsedLoad, oldCycles, curStatus);
      }
    }
  }

  //now queued
  var seenMatIds = new HashSet <long>(curStatus.Material.Select(m => m.MaterialID));
  foreach (var mat in log.GetMaterialInAllQueues())
  {
    // material could be in the process of being loaded
    if (seenMatIds.Contains(mat.MaterialID)) { continue; }
    // NOTE(review): matLogs is unused — the log is re-queried in the loop below.
    var matLogs = log.GetLogForMaterial(mat.MaterialID);
    // The material's current process is the largest process seen in its log.
    int lastProc = 0;
    foreach (var entry in log.GetLogForMaterial(mat.MaterialID))
    {
      foreach (var entryMat in entry.Material)
      {
        if (entryMat.MaterialID == mat.MaterialID)
        {
          lastProc = Math.Max(lastProc, entryMat.Process);
        }
      }
    }
    var matDetails = log.GetMaterialDetails(mat.MaterialID);
    curStatus.Material.Add(new InProcessMaterial()
    {
      MaterialID = mat.MaterialID,
      JobUnique = mat.Unique,
      PartName = mat.PartName,
      Process = lastProc,
      Path = 1,
      Serial = matDetails?.Serial,
      WorkorderId = matDetails?.Workorder,
      SignaledInspections = log.LookupInspectionDecisions(mat.MaterialID)
        .Where(x => x.Inspect)
        .Select(x => x.InspType)
        .Distinct()
        .ToList(),
      Location = new InProcessMaterialLocation()
      {
        Type = InProcessMaterialLocation.LocType.InQueue,
        CurrentQueue = mat.Queue,
        QueuePosition = mat.Position,
      },
      Action = new InProcessMaterialAction() { Type = InProcessMaterialAction.ActionType.Waiting }
    });
  }

  // Include recently-created jobs not yet copied to the cell controller.
  var notCopied = jobDB.LoadJobsNotCopiedToSystem(DateTime.UtcNow.AddHours(-WriteJobs.JobLookbackHours), DateTime.UtcNow);
  foreach (var j in notCopied.Jobs)
  {
    if (curStatus.Jobs.ContainsKey(j.UniqueStr))
    {
      //The copy to the cell succeeded but the DB has not yet been updated.
      //The thread which copies jobs will soon notice and update the database
      //so we can ignore it for now.
    }
    else
    {
      curStatus.Jobs.Add(j.UniqueStr, new InProcessJob(j));
    }
  }

  // Attach any quantity decrements recorded for each job.
  foreach (var j in curStatus.Jobs)
  {
    foreach (var d in jobDB.LoadDecrementsForJob(j.Value.UniqueStr))
    {
      j.Value.Decrements.Add(d);
    }
  }
  return(curStatus);
}
// Snapshot test: builds CurrentStatus from a checked-in MazakAllData snapshot
// and compares it against the expected status JSON for the scenario.
public void StatusSnapshot(string scenario)
{
  /* * Symlinks not supported on Windows * var newJobs = JsonConvert.DeserializeObject<NewJobs>( * File.ReadAllText( * Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".jobs.json")), * jsonSettings * ); */
  // Pick the sample job set matching the scenario name.
  NewJobs newJobs = null;
  if (scenario.Contains("basic"))
  {
    newJobs = JsonConvert.DeserializeObject <NewJobs>( File.ReadAllText( Path.Combine("..", "..", "..", "sample-newjobs", "fixtures-queues.json")), jsonSettings );
  }
  else if (scenario.Contains("multiface"))
  {
    newJobs = JsonConvert.DeserializeObject <NewJobs>( File.ReadAllText( Path.Combine("..", "..", "..", "sample-newjobs", "multi-face.json")), jsonSettings );
  }
  else if (scenario.Contains("pathgroups"))
  {
    newJobs = JsonConvert.DeserializeObject <NewJobs>( File.ReadAllText( Path.Combine("..", "..", "..", "sample-newjobs", "path-groups.json")), jsonSettings );
  }
  _jobDB.AddJobs(newJobs, null);

  var allData = JsonConvert.DeserializeObject <MazakAllData>( File.ReadAllText( Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".data.json")), jsonSettings );

  // Scenarios with a checked-in log database use it instead of the empty log.
  var logDb = _emptyLog;
  bool close = false;
  var existingLogPath = Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".log.db");
  if (File.Exists(existingLogPath))
  {
    logDb = new JobLogDB(new FMSSettings());
    logDb.Open(existingLogPath);
    close = true;
  }

  if (scenario == "basic-no-material")
  {
    queueSyncFault.CurrentQueueMismatch.Returns(true);
  }

  CurrentStatus status;
  try
  {
    // NOTE(review): the Build signature elsewhere in this file also takes an
    // IMachineGroupName parameter which is not passed here — confirm this
    // matches the overload actually compiled against.
    status = BuildCurrentStatus.Build(_jobDB, logDb, _settings, queueSyncFault, MazakDbType.MazakSmooth, allData, new DateTime(2018, 7, 19, 20, 42, 3, DateTimeKind.Utc));
  }
  finally
  {
    if (close) { logDb.Close(); }
  }

  // Compare against the expected snapshot, ignoring the status timestamp.
  var expectedStatus = JsonConvert.DeserializeObject <CurrentStatus>( File.ReadAllText( Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".status.json")), jsonSettings );
  status.Should().BeEquivalentTo(expectedStatus, options => options .Excluding(c => c.TimeOfCurrentStatusUTC) );
}
// Sets up the WriteJobs spec: in-memory log/job databases, a mocked Mazak
// read interface pre-loaded with schedules/parts/fixtures/pallets/programs,
// and the WriteJobs instance under test.
public WriteJobsSpec()
{
  // In-memory SQLite event-log database.
  var logConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
  logConn.Open();
  _logDB = new JobLogDB(new FMSSettings(), logConn);
  _logDB.CreateTables(firstSerialOnEmpty: null);

  // In-memory SQLite job database.
  var jobConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
  jobConn.Open();
  _jobDB = new JobDB(jobConn);
  _jobDB.CreateTables();

  _writeMock = new WriteMock();

  _readMock = Substitute.For <IReadDataAccess>();
  _readMock.MazakType.Returns(MazakDbType.MazakSmooth);
  _readMock.LoadAllData().Returns(new MazakAllData()
  {
    Schedules = new[]
    {
      // a completed schedule, should be deleted
      new MazakScheduleRow() { Id = 1, PartName = "part1:1:1", Comment = MazakPart.CreateComment("uniq1", new [] { 1 }, false), PlanQuantity = 15, CompleteQuantity = 15, Priority = 50, Processes = { new MazakScheduleProcessRow() { MazakScheduleRowId = 1, FixedMachineFlag = 1, ProcessNumber = 1 } } },
      // a non-completed schedule, should be decremented
      // NOTE(review): this row has Id = 2 but its process row uses
      // MazakScheduleRowId = 1 — looks like a copy/paste slip; confirm whether
      // WriteJobs depends on the process row id.
      new MazakScheduleRow() { Id = 2, PartName = "part2:1:1", Comment = MazakPart.CreateComment("uniq2", new [] { 1 }, false), PlanQuantity = 15, CompleteQuantity = 10, Priority = 50, Processes = { new MazakScheduleProcessRow() { MazakScheduleRowId = 1, FixedMachineFlag = 1, ProcessNumber = 1, ProcessMaterialQuantity = 3, ProcessExecuteQuantity = 2 } } },
    },
    Parts = new[]
    {
      // should be deleted, since corresponding schedule is deleted
      new MazakPartRow() { PartName = "part1:1:1", Comment = MazakPart.CreateComment("uniq1", new[] { 1 }, false), Processes = { new MazakPartProcessRow() { PartName = "part1:1:1", ProcessNumber = 1, FixQuantity = 5, Fixture = "fixtoremove" } } },
      //should be kept, since schedule is kept
      new MazakPartRow() { PartName = "part2:1:1", Comment = MazakPart.CreateComment("uniq2", new[] { 1 }, false), Processes = { new MazakPartProcessRow() { PartName = "part2:1:1", ProcessNumber = 1, FixQuantity = 2, Fixture = "fixtokeep" } } },
    },
    Fixtures = new[]
    {
      new MazakFixtureRow() { FixtureName = "fixtoremove", Comment = "Insight" },
      new MazakFixtureRow() { FixtureName = "fixtokeep", Comment = "Insight" }
    },
    Pallets = new[]
    {
      new MazakPalletRow() { PalletNumber = 5, Fixture = "fixtoremove" },
      new MazakPalletRow() { PalletNumber = 6, Fixture = "fixtokeep" }
    },
    PalletSubStatuses = Enumerable.Empty <MazakPalletSubStatusRow>(),
    PalletPositions = Enumerable.Empty <MazakPalletPositionRow>(),
    LoadActions = Enumerable.Empty <LoadAction>(),
    // Plain numbered programs plus two Insight-managed revisioned programs.
    MainPrograms = Enumerable.Concat( (new[] { "1001", "1002", "1003", "1004", "1005" }).Select(p => new MazakProgramRow() { MainProgram = p, Comment = "" }), new[] { new MazakProgramRow() { MainProgram = System.IO.Path.Combine("theprogdir", "prog-bbb-1_rev2.EIA"), Comment = "Insight:2:prog-bbb-1" }, new MazakProgramRow() { MainProgram = System.IO.Path.Combine("theprogdir", "prog-bbb-1_rev3.EIA"), Comment = "Insight:3:prog-bbb-1" } } )
  });

  // After a write, reads reflect whatever parts/pallets the test added.
  _readMock.LoadSchedulesPartsPallets().Returns(x => new MazakSchedulesPartsPallets()
  {
    Schedules = Enumerable.Empty <MazakScheduleRow>(),
    Parts = _writeMock.AddParts.Parts,
    Pallets = _writeMock.AddParts.Pallets,
    PalletSubStatuses = Enumerable.Empty <MazakPalletSubStatusRow>(),
    PalletPositions = Enumerable.Empty <MazakPalletPositionRow>(),
    LoadActions = Enumerable.Empty <LoadAction>(),
    MainPrograms = (new[] { "1001", "1002", "1003", "1004", "1005" }).Select(p => new MazakProgramRow() { MainProgram = p, Comment = "" }),
  });

  // Queues referenced by the sample jobs.
  _settings = new FMSSettings();
  _settings.Queues["castings"] = new QueueSize();
  _settings.Queues["queueAAA"] = new QueueSize();
  _settings.Queues["queueBBB"] = new QueueSize();
  _settings.Queues["queueCCC"] = new QueueSize();

  _writeJobs = new WriteJobs( _writeMock, _readMock, Substitute.For <IHoldManagement>(), _jobDB, _logDB, _settings, check: false, useStarting: true, progDir: "theprogdir");

  // JSON settings matching the checked-in snapshot files.
  jsonSettings = new JsonSerializerSettings();
  jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter());
  jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
  jsonSettings.DateTimeZoneHandling = DateTimeZoneHandling.Utc;
  jsonSettings.Formatting = Formatting.Indented;
}
// Creates InProcessMaterial entries (with a Loading action) for parts being
// loaded at this pallet's load station, matching queued material to each
// load action when the job declares an input queue.
private static void AddLoads(JobLogDB log, IEnumerable <LoadAction> currentLoads, string pallet, PalletLocation palLoc, TimeSpan?elapsedLoadTime, CurrentStatus curStatus)
{
  var queuedMats = new Dictionary <string, List <BlackMaple.MachineFramework.JobLogDB.QueuedMaterial> >();
  //process remaining loads/unloads (already processed ones have been removed from currentLoads)
  foreach (var operation in currentLoads)
  {
    // Only load events at this pallet's load station.
    if (!operation.LoadEvent || operation.LoadStation != palLoc.Num) { continue; }
    for (int i = 0; i < operation.Qty; i++)
    {
      // Find the input queue for this job's process/path, if any.
      List <BlackMaple.MachineFramework.JobLogDB.QueuedMaterial> queuedMat = null;
      if (curStatus.Jobs.ContainsKey(operation.Unique))
      {
        var queue = curStatus.Jobs[operation.Unique].GetInputQueue(process: operation.Process, path: operation.Path);
        if (!string.IsNullOrEmpty(queue))
        {
          //only lookup each queue once
          if (queuedMats.ContainsKey(queue))
          {
            queuedMat = queuedMats[queue];
          }
          else
          {
            queuedMat = log.GetMaterialInQueue(queue).ToList();
            queuedMats.Add(queue, queuedMat);
          }
        }
      }
      // Defaults when no queued material matches: anonymous free material.
      long matId = -1;
      string serial = null;
      string workId = null;
      var loc = new InProcessMaterialLocation() { Type = InProcessMaterialLocation.LocType.Free };
      if (queuedMat != null)
      {
        // Take the first queued piece for this job unique, then remove it from
        // the cached list so the next iteration picks the next piece.
        var mat = queuedMat .Where(m => m.Unique == operation.Unique) .Select(m => (JobLogDB.QueuedMaterial?)m) .DefaultIfEmpty(null) .First();
        if (mat.HasValue)
        {
          matId = mat.Value.MaterialID;
          loc = new InProcessMaterialLocation() { Type = InProcessMaterialLocation.LocType.InQueue, CurrentQueue = mat.Value.Queue, QueuePosition = mat.Value.Position, };
          queuedMat.RemoveAll(m => m.MaterialID == mat.Value.MaterialID);
          var matDetails = log.GetMaterialDetails(matId);
          serial = matDetails?.Serial;
          workId = matDetails?.Workorder;
        }
      }
      var inProcMat = new InProcessMaterial()
      {
        MaterialID = matId,
        JobUnique = operation.Unique,
        PartName = operation.Part,
        Process = operation.Process,
        Path = operation.Path,
        Serial = serial,
        WorkorderId = workId,
        Location = loc,
        Action = new InProcessMaterialAction()
        {
          Type = InProcessMaterialAction.ActionType.Loading,
          LoadOntoPallet = pallet,
          LoadOntoFace = operation.Process,
          ProcessAfterLoad = operation.Process,
          PathAfterLoad = operation.Path,
          ElapsedLoadUnloadTime = elapsedLoadTime
        }
      };
      curStatus.Material.Add(inProcMat);
    }
  }
}
// Creates InProcessMaterial entries for parts being unloaded at this pallet's
// load station.
// For some reason, sometimes parts to unload don't show up in PalletSubStatus table.
// So create them here if that happens
private static void AddUnloads(JobLogDB log, IEnumerable <LoadAction> currentActions, PalletStatus pallet, TimeSpan?elapsedLoadTime, List <BlackMaple.MachineWatchInterface.LogEntry> oldCycles, CurrentStatus status)
{
  foreach (var unload in currentActions)
  {
    // Only unload events at this pallet's load station.
    if (unload.LoadEvent) { continue; }
    if (unload.LoadStation != pallet.CurrentPalletLocation.Num) { continue; }

    // Material ids recorded by earlier cycles on this pallet, matched in order;
    // pieces without a recorded id get MaterialID = -1.
    var matIDs = new Queue <long>(FindMatIDsFromOldCycles(oldCycles, unload.Unique, unload.Process));
    status.Jobs.TryGetValue(unload.Unique, out InProcessJob job);

    for (int i = 0; i < unload.Qty; i += 1)
    {
      long matID = -1;
      if (matIDs.Count > 0) { matID = matIDs.Dequeue(); }
      var matDetails = log.GetMaterialDetails(matID);
      var inProcMat = new InProcessMaterial()
      {
        MaterialID = matID,
        JobUnique = unload.Unique,
        PartName = unload.Part,
        Process = unload.Process,
        Path = unload.Path,
        Serial = matDetails?.Serial,
        WorkorderId = matDetails?.Workorder,
        SignaledInspections = log.LookupInspectionDecisions(matID)
          .Where(x => x.Inspect)
          .Select(x => x.InspType)
          .ToList(),
        Location = new InProcessMaterialLocation()
        {
          Type = InProcessMaterialLocation.LocType.OnPallet,
          Pallet = pallet.Pallet,
          Face = unload.Process
        },
        Action = new InProcessMaterialAction()
        {
          Type = InProcessMaterialAction.ActionType.UnloadToCompletedMaterial,
          ElapsedLoadUnloadTime = elapsedLoadTime
        }
      };
      if (job != null)
      {
        // Bug fix: only intermediate processes switch to UnloadToInProcess; the
        // final process keeps the default UnloadToCompletedMaterial.  The
        // previous condition was inverted (== NumProcesses), which disagreed
        // with the PalletSubStatus branch in Build
        // (Process == NumProcesses ? UnloadToCompletedMaterial : UnloadToInProcess).
        if (unload.Process != job.NumProcesses)
        {
          inProcMat.Action.Type = InProcessMaterialAction.ActionType.UnloadToInProcess;
        }
        var queue = job.GetOutputQueue(process: unload.Process, path: unload.Path);
        if (!string.IsNullOrEmpty(queue))
        {
          inProcMat.Action.UnloadIntoQueue = queue;
        }
      }
      status.Material.Add(inProcMat);
    }
  }
}
// Initializes the Mazak backend: detects the cell-controller flavor, resolves
// database/log-file locations with legacy fallbacks, opens the Insight
// databases, and wires up queue sync, hold, job writing, decrement, log
// loading, and routing.
public MazakBackend(IConfiguration configuration, FMSSettings st)
{
  var cfg = configuration.GetSection("Mazak");
  string localDbPath = cfg.GetValue <string>("Database Path");
  MazakType = DetectMazakType(cfg, localDbPath);

  // database settings
  string sqlConnectString = cfg.GetValue <string>("SQL ConnectionString");
  string dbConnStr;
  if (MazakType == MazakDbType.MazakSmooth)
  {
    if (!string.IsNullOrEmpty(sqlConnectString))
    {
      dbConnStr = sqlConnectString;
    }
    else if (!string.IsNullOrEmpty(localDbPath))
    {
      // old installers put sql server computer name in localDbPath
      dbConnStr = "Server=" + localDbPath + "\\pmcsqlserver;" + "User ID=mazakpmc;Password=Fms-978";
    }
    else
    {
      // Last resort: local PMC SQL Server instance with the stock account.
      var b = new System.Data.SqlClient.SqlConnectionStringBuilder();
      b.UserID = "mazakpmc";
      b.Password = "******";
      b.DataSource = "(local)";
      dbConnStr = b.ConnectionString;
    }
  }
  else
  {
    // VersionE/Web use the Open Database Kit file path directly.
    dbConnStr = localDbPath;
    if (string.IsNullOrEmpty(dbConnStr)) { dbConnStr = "c:\\Mazak\\NFMS\\DB"; }
  }

  // log csv
  string logPath = cfg.GetValue <string>("Log CSV Path");
  if (logPath == null || logPath == "") { logPath = "c:\\Mazak\\FMS\\Log"; }
  if (MazakType != MazakDbType.MazakVersionE && !System.IO.Directory.Exists(logPath))
  {
    Log.Error("Log CSV Directory {path} does not exist. Set the directory in the config.ini file.", logPath);
  }
  else if (MazakType != MazakDbType.MazakVersionE)
  {
    Log.Information("Loading log CSV files from {logcsv}", logPath);
  }

  // general config
  // Two legacy names for the same flag; the newer one wins, default true.
  string useStarting = cfg.GetValue <string>("Use Starting Offset For Due Date");
  string useStarting2 = cfg.GetValue <string>("Use Starting Offset");
  if (string.IsNullOrEmpty(useStarting))
  {
    if (string.IsNullOrEmpty(useStarting2))
    {
      UseStartingOffsetForDueDate = true;
    }
    else
    {
      UseStartingOffsetForDueDate = Convert.ToBoolean(useStarting2);
    }
  }
  else
  {
    UseStartingOffsetForDueDate = Convert.ToBoolean(useStarting);
  }
  //Perhaps this should be a new setting, but if you don't check for pallets used once
  //then you don't care if all faces on a pallet are full so might as well use priority
  //which causes pallet positions to go empty.
  CheckPalletsUsedOnce = !UseStartingOffsetForDueDate;

  ProgramDirectory = cfg.GetValue <string>("Program Directory");
  if (string.IsNullOrEmpty(ProgramDirectory)) { ProgramDirectory = "C:\\NCProgs"; }

  // serial settings
  string serialPerMaterial = cfg.GetValue <string>("Assign Serial Per Material");
  if (!string.IsNullOrEmpty(serialPerMaterial))
  {
    bool result;
    if (bool.TryParse(serialPerMaterial, out result))
    {
      if (!result)
      {
        st.SerialType = SerialType.AssignOneSerialPerCycle;
      }
    }
  }
  Log.Debug( "Configured UseStartingOffsetForDueDate = {useStarting}", UseStartingOffsetForDueDate);

  // Open the Insight-side databases.
  jobLog = new BlackMaple.MachineFramework.JobLogDB(st);
  jobLog.Open( System.IO.Path.Combine(st.DataDirectory, "log.db"), System.IO.Path.Combine(st.DataDirectory, "insp.db"), startingSerial: st.StartingSerial );
  jobDB = new BlackMaple.MachineFramework.JobDB();
  // Older installs used jobinspection.db; keep using it if it exists.
  var jobInspName = System.IO.Path.Combine(st.DataDirectory, "jobinspection.db");
  if (System.IO.File.Exists(jobInspName))
  {
    jobDB.Open(jobInspName);
  }
  else
  {
    jobDB.Open(System.IO.Path.Combine(st.DataDirectory, "mazakjobs.db"));
  }

  _writeDB = new OpenDatabaseKitTransactionDB(dbConnStr, MazakType);

  // Load-operation source depends on the cell-controller flavor.
  if (MazakType == MazakDbType.MazakVersionE)
  {
    loadOper = new LoadOperationsFromFile(cfg, enableWatcher: true);
  }
  else if (MazakType == MazakDbType.MazakWeb)
  {
    loadOper = new LoadOperationsFromFile(cfg, enableWatcher: false); // web instead watches the log csv files
  }
  else
  {
    loadOper = null; // smooth db doesn't use the load operations file
  }
  var openReadDb = new OpenDatabaseKitReadDB(dbConnStr, MazakType, loadOper);
  if (MazakType == MazakDbType.MazakSmooth)
  {
    _readDB = new SmoothReadOnlyDB(dbConnStr, openReadDb);
  }
  else
  {
    _readDB = openReadDb;
  }

  // Wire the processing components together.
  queues = new MazakQueues(jobLog, jobDB, _writeDB);
  var sendToExternal = new SendMaterialToExternalQueue();
  hold = new HoldPattern(_writeDB, _readDB, jobDB, true);
  var writeJobs = new WriteJobs(_writeDB, _readDB, hold, jobDB, jobLog, st, CheckPalletsUsedOnce, UseStartingOffsetForDueDate, ProgramDirectory);
  var decr = new DecrementPlanQty(jobDB, _writeDB, _readDB);

  if (MazakType == MazakDbType.MazakWeb || MazakType == MazakDbType.MazakSmooth)
  {
    logDataLoader = new LogDataWeb(logPath, jobLog, jobDB, writeJobs, sendToExternal, _readDB, queues, st);
  }
  else
  {
#if USE_OLEDB
    logDataLoader = new LogDataVerE(jobLog, jobDB, sendToExternal, writeJobs, _readDB, queues, st);
#else
    throw new Exception("Mazak Web and VerE are not supported on .NET core");
#endif
  }
  routing = new RoutingInfo(_writeDB, writeJobs, _readDB, logDataLoader, jobDB, jobLog, writeJobs, queues, decr, CheckPalletsUsedOnce, st);

  logDataLoader.NewEntries += OnNewLogEntries;
  if (loadOper != null) { loadOper.LoadActions += OnLoadActions; }
}