private void LoadJobs(string sampleDataPath, TimeSpan offset) { var newJobsJson = System.IO.File.ReadAllText( System.IO.Path.Combine(sampleDataPath, "newjobs.json")); var allNewJobs = (List <BlackMaple.MachineWatchInterface.NewJobs>)JsonConvert.DeserializeObject( newJobsJson, typeof(List <BlackMaple.MachineWatchInterface.NewJobs>), _jsonSettings ); foreach (var newJobs in allNewJobs) { foreach (var j in newJobs.Jobs) { OffsetJob(j, offset); } foreach (var su in newJobs.StationUse) { su.StartUTC = su.StartUTC.Add(offset); su.EndUTC = su.EndUTC.Add(offset); } foreach (var w in newJobs.CurrentUnfilledWorkorders) { w.DueDate = w.DueDate.Add(offset); } JobDB.AddJobs(newJobs, null); } }
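OffsetJob is called above but is not included in this section; the following is only a rough sketch of what such a helper could look like, shifting a job's absolute timestamps by the sample-data offset. It uses only JobPlan members that appear elsewhere in this section (see AddDataFromJobDB later); the real implementation may shift additional fields.

private void OffsetJob(BlackMaple.MachineWatchInterface.JobPlan j, TimeSpan offset)
{
  // Sketch only: shift the overall route window by the offset.
  j.RouteStartingTimeUTC = j.RouteStartingTimeUTC.Add(offset);
  j.RouteEndingTimeUTC = j.RouteEndingTimeUTC.Add(offset);

  // Shift the simulated starting time of every process/path.
  for (int proc = 1; proc <= j.NumProcesses; proc++)
  {
    for (int path = 1; path <= j.GetNumPaths(proc); path++)
    {
      j.SetSimulatedStartingTimeUTC(proc, path,
        j.GetSimulatedStartingTimeUTC(proc, path).Add(offset));
    }
  }
  // Hold patterns and other absolute times would likely also need to move,
  // but are omitted from this sketch.
}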
public MazakQueues(JobLogDB log, JobDB jDB, IWriteData trans) { _jobDB = jDB; _log = log; _transDB = trans; CurrentQueueMismatch = false; }
public BuildCurrentStatusSpec() { var logConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:"); logConn.Open(); _emptyLog = new JobLogDB(new FMSSettings(), logConn); _emptyLog.CreateTables(firstSerialOnEmpty: null); var jobConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:"); jobConn.Open(); _jobDB = new JobDB(jobConn); _jobDB.CreateTables(); _settings = new FMSSettings(); _settings.Queues["castings"] = new QueueSize(); _settings.Queues["queueAAA"] = new QueueSize(); _settings.Queues["queueBBB"] = new QueueSize(); _settings.Queues["queueCCC"] = new QueueSize(); jsonSettings = new JsonSerializerSettings(); jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter()); jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter()); jsonSettings.DateTimeZoneHandling = DateTimeZoneHandling.Utc; jsonSettings.Formatting = Formatting.Indented; jsonSettings.ConstructorHandling = ConstructorHandling.AllowNonPublicDefaultConstructor; queueSyncFault = Substitute.For <IQueueSyncFault>(); queueSyncFault.CurrentQueueMismatch.Returns(false); }
public JobDBUpgradeSpec()
{
  _jobs = new JobDB();
  //_jobs.Open("job.v16.db");
  _tempFile = System.IO.Path.GetTempFileName();
  System.IO.File.Copy("job.v16.db", _tempFile, overwrite: true);
  _jobs.Open(_tempFile);
}
public static int Save(JobLM lm) { JobDB db = JobDB.ToDB(lm); db.Save(); lm.AutoID = db.AutoID; return(lm.AutoID); }
public MakinoBackend(IConfiguration config, FMSSettings st)
{
  try
  {
    var cfg = config.GetSection("Makino");

    string adePath = cfg.GetValue<string>("ADE Path");
    if (string.IsNullOrEmpty(adePath))
    {
      adePath = @"c:\Makino\ADE";
    }

    string dbConnStr = cfg.GetValue<string>("SQL Server Connection String");
    if (string.IsNullOrEmpty(dbConnStr))
    {
      dbConnStr = DetectSqlConnectionStr();
    }

    bool downloadOnlyOrders = cfg.GetValue<bool>("Download Only Orders");

    Log.Information(
      "Starting makino backend. Connection Str: {connStr}, ADE Path: {path}, DownloadOnlyOrders: {downOnlyOrders}",
      dbConnStr, adePath, downloadOnlyOrders);

    _dataDirectory = st.DataDirectory;

    _log = new JobLogDB(st);
    _log.Open(
      System.IO.Path.Combine(_dataDirectory, "log.db"),
      System.IO.Path.Combine(_dataDirectory, "inspections.db"),
      startingSerial: st.StartingSerial
    );
    _jobDB = new BlackMaple.MachineFramework.JobDB();
    _jobDB.Open(System.IO.Path.Combine(_dataDirectory, "jobs.db"));
    _status = new StatusDB(System.IO.Path.Combine(_dataDirectory, "makino.db"));

#if DEBUG
    _makinoDB = new MakinoDB(MakinoDB.DBTypeEnum.SqlLocal, "", _status, _log);
#else
    _makinoDB = new MakinoDB(MakinoDB.DBTypeEnum.SqlConnStr, dbConnStr, _status, _log);
#endif

    _logTimer = new LogTimer(_log, _jobDB, _makinoDB, _status, st);
    _jobs = new Jobs(_makinoDB, _jobDB, adePath, downloadOnlyOrders);

    _logTimer.LogsProcessed += OnLogsProcessed;
  }
  catch (Exception ex)
  {
    Log.Error(ex, "Error when initializing makino backend");
  }
}
public MockServerBackend()
{
  string path = null; // dataDir
  string dbFile(string f) => System.IO.Path.Combine(path, f + ".db");

  if (path != null)
  {
    if (System.IO.File.Exists(dbFile("log")))
    {
      System.IO.File.Delete(dbFile("log"));
    }
    LogDB = new JobLogDB(new FMSSettings());
    LogDB.Open(dbFile("log"), dbFile("insp"));

    if (System.IO.File.Exists(dbFile("job")))
    {
      System.IO.File.Delete(dbFile("job"));
    }
    JobDB = new JobDB();
    JobDB.Open(dbFile("job"));
  }
  else
  {
    var conn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
    conn.Open();
    LogDB = new JobLogDB(new FMSSettings(), conn);
    LogDB.CreateTables(firstSerialOnEmpty: null);

    conn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
    conn.Open();
    JobDB = new JobDB(conn);
    JobDB.CreateTables();
  }

  _jsonSettings = new JsonSerializerSettings();
  _jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
  _jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter());
  _jsonSettings.ContractResolver = new Newtonsoft.Json.Serialization.DefaultContractResolver();
  _jsonSettings.ConstructorHandling = Newtonsoft.Json.ConstructorHandling.AllowNonPublicDefaultConstructor;

  var sampleDataPath = System.IO.Path.Combine(
    System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location),
    "../../../sample-data/"
  );

  // sample data starts at Jan 1, 2018. Need to offset to current month
  var jan1_18 = new DateTime(2018, 1, 1, 0, 0, 0, DateTimeKind.Utc);
  var offset = DateTime.UtcNow.AddDays(-28).Subtract(jan1_18);

  LoadEvents(sampleDataPath, offset);
  LoadJobs(sampleDataPath, offset);
  LoadStatus(sampleDataPath, offset);
}
public ActionResult UpdateJob(FormCollection formCollection) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "client") { return(RedirectToAction("Profile", "FactoryLayout")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } // Get Job Job jobToEdit = new Job(); jobToEdit = new JobDB().SelectwithId(formCollection["jobID"]); jobToEdit.jobTitle = formCollection["jobTitle"]; jobToEdit.jobBudget = Int32.Parse(formCollection["jobBudget"]); jobToEdit.jobType = formCollection["jobType"]; jobToEdit.jobDescription = formCollection["jobDescription"]; // Update Data new JobDB().Update(jobToEdit); // View Data List <Job> list = new List <Job>(); list = new JobDB().SelectAll(); list = list.FindAll(u => u.jobAdminAcceptance == "Accepted" && u.jobStatus == "Waitting"); // Clients List <User> clients = new List <User>(); for (int i = 0; i < list.Count(); i++) { clients.Add(new UserDB().SelectwithId(list[i].clientID.ToString())); } ViewData["Jobs"] = list; ViewData["Job"] = null; ViewData["Clients"] = clients; return(RedirectToAction("PostsPage")); }
public ActionResult receivedproposals()
{
  User user = new User();
  user = Session["User"] as IAProject_FreelancerSystem.Models.User;

  // Users
  if (Session["User"] != null)
  {
    if (user.role == "admin")
    {
      return(RedirectToAction("Profile", "Dashboard"));
    }
    else if (user.role == "freelancer")
    {
      return(RedirectToAction("Index", "Wall"));
    }
  }
  else
  {
    return(RedirectToAction("Index", "Wall"));
  }

  // Get all of this user's jobs
  List<Job> jobList = new JobDB().SelectAll();
  jobList = jobList.FindAll(J => J.clientID == user.userID);

  // Get all proposals submitted for those jobs
  List<Proposal> AllproposalList = new ProposalsDB().SelectAll();
  List<Proposal> proposalList = new List<Proposal>();
  for (int i = 0; i < jobList.Count(); i++)
  {
    List<Proposal> temp = AllproposalList.FindAll(p => p.jobID == jobList[i].jobID);
    proposalList.AddRange(temp);
  }

  // Get every user that proposed
  List<User> userList = new List<User>();
  for (int i = 0; i < proposalList.Count(); i++)
  {
    userList.Add(new UserDB().SelectwithId(proposalList[i].freelancerID.ToString()));
  }

  // ** We need to send the jobs, the proposals for each job, and the user behind each proposal **
  // Return to View Profile
  ViewData["jobList"] = jobList;
  ViewData["proposalList"] = proposalList;
  ViewData["userList"] = userList;
  return(View("ReceivedProposals"));
}
private static void AddDataFromJobDB(JobDB jobDB, JobPlan jobFromMazak) { var jobFromDb = jobDB.LoadJob(jobFromMazak.UniqueStr); if (jobFromDb == null) { return; } jobFromMazak.RouteStartingTimeUTC = jobFromDb.RouteStartingTimeUTC; jobFromMazak.RouteEndingTimeUTC = jobFromDb.RouteEndingTimeUTC; jobFromMazak.ScheduleId = jobFromDb.ScheduleId; jobFromMazak.AddInspections(jobFromDb.GetInspections()); jobFromMazak.HoldEntireJob = jobFromDb.HoldEntireJob; foreach (var b in jobFromDb.ScheduledBookingIds) { jobFromMazak.ScheduledBookingIds.Add(b); } for (int proc = 1; proc <= jobFromMazak.NumProcesses; proc++) { for (int path = 1; path <= jobFromMazak.GetNumPaths(proc); path++) { if (proc > jobFromDb.NumProcesses || path > jobFromDb.GetNumPaths(proc)) { continue; } jobFromMazak.SetSimulatedStartingTimeUTC(proc, path, jobFromDb.GetSimulatedStartingTimeUTC(proc, path)); jobFromMazak.SetSimulatedAverageFlowTime(proc, path, jobFromDb.GetSimulatedAverageFlowTime(proc, path)); jobFromMazak.SetSimulatedProduction(proc, path, jobFromDb.GetSimulatedProduction(proc, path)); jobFromMazak.SetExpectedLoadTime(proc, path, jobFromDb.GetExpectedLoadTime(proc, path)); jobFromMazak.SetExpectedUnloadTime(proc, path, jobFromDb.GetExpectedUnloadTime(proc, path)); jobFromMazak.SetInputQueue(proc, path, jobFromDb.GetInputQueue(proc, path)); jobFromMazak.SetOutputQueue(proc, path, jobFromDb.GetOutputQueue(proc, path)); var mazakStops = jobFromMazak.GetMachiningStop(proc, path).ToList(); var dbStops = jobFromDb.GetMachiningStop(proc, path).ToList(); for (int i = 0; i < Math.Min(mazakStops.Count, dbStops.Count); i++) { mazakStops[i].StationGroup = dbStops[i].StationGroup; mazakStops[i].ExpectedCycleTime = dbStops[i].ExpectedCycleTime; } } } }
public LogTimer( JobLogDB log, JobDB jobDB, MakinoDB makinoDB, StatusDB status, FMSSettings settings) { _lock = new object(); _log = log; _jobDB = jobDB; Settings = settings; _makinoDB = makinoDB; _status = status; TimerSignaled(null, null); _timer = new System.Timers.Timer(TimeSpan.FromMinutes(1).TotalMilliseconds); _timer.Elapsed += TimerSignaled; _timer.Start(); }
// POST PAGE public ActionResult PostsPage(string jobID) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "client") { return(RedirectToAction("Profile", "FactoryLayout")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } Job job = new Job(); if (jobID != null) { job = new JobDB().SelectwithId(jobID); } else { job = null; } // Jobs List <Job> list = new List <Job>(); list = new JobDB().SelectAll(); list = list.FindAll(u => u.jobAdminAcceptance == "Accepted" && u.jobStatus == "Waitting"); // Clients List <User> clients = new List <User>(); for (int i = 0; i < list.Count(); i++) { clients.Add(new UserDB().SelectwithId(list[i].clientID.ToString())); } ViewData["Jobs"] = list; ViewData["Clients"] = clients; ViewData["Job"] = job; return(View()); }
public DecrementSpec() { var jobConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:"); jobConn.Open(); _jobDB = new JobDB(jobConn); _jobDB.CreateTables(); _write = new WriteMock(); _read = Substitute.For <IReadDataAccess>(); _read.MazakType.Returns(MazakDbType.MazakSmooth); _decr = new DecrementPlanQty(_jobDB, _write, _read); }
public ActionResult UpdateJobType(FormCollection formCollection) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "client") { return(RedirectToAction("Profile", "FactoryLayout")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } // Get the Job Job job = new Job(); job = new JobDB().SelectwithId(formCollection["jobID"]); // Update Role job.jobAdminAcceptance = formCollection["type"]; // Set the Job new JobDB().Update(job); // Jobs List <Job> list = new List <Job>(); list = new JobDB().SelectAll(); list = list.FindAll(u => u.jobAdminAcceptance == "Waitting"); // Clients List <User> clients = new List <User>(); for (int i = 0; i < list.Count(); i++) { clients.Add(new UserDB().SelectwithId(list[i].clientID.ToString())); } ViewData["Jobs"] = list; ViewData["Clients"] = clients; return(RedirectToAction("PostsRequests")); }
public ActionResult myposts(string jobID) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "admin") { return(RedirectToAction("Profile", "Dashboard")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } List <Job> list_posts = new List <Job>(); list_posts = new JobDB().SelectAll(); list_posts = list_posts.FindAll(J => J.clientID == user.userID); // Return to View Profile ViewData["jobs"] = list_posts; Job job = new Job(); if (jobID != null) { job = new JobDB().SelectwithId(jobID); } else { job = null; } ViewData["Job"] = job; return(View("MyPosts")); }
public ActionResult Search(FormCollection formCollection) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "admin") { return(RedirectToAction("Profile", "Dashboard")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } var dataToSearch = formCollection["dataToSearch"]; List <Job> list_posts = new List <Job>(); list_posts = new JobDB().SelectAll(); list_posts = list_posts.FindAll(J => J.clientID == user.userID); if (dataToSearch != "") { list_posts = list_posts.FindAll(J => J.jobTitle == dataToSearch); } // Return to View Profile ViewData["jobs"] = list_posts; ViewData["Job"] = null; return(View("MyPosts")); }
public ActionResult createnewpost(Job post) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "admin") { return(RedirectToAction("Profile", "Dashboard")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } post.clientID = user.userID; post.jobStatus = "Waitting"; post.jobAdminAcceptance = "Waitting"; int clientID = post.clientID; string jobTitle = post.jobTitle; //= formCollection["jobtitle"]; int jobBudget = post.jobBudget; //= Int32.Parse(formCollection["jobbudget"]); // string creationDate = post.creationDate; // = formCollection["creationdate"]; string jobDescription = post.jobDescription; //= formCollection["jobdescription"]; new JobDB().Insert(post); List <Job> list_posts = new List <Job>(); list_posts = new JobDB().SelectAll(); list_posts = list_posts.FindAll(J => J.clientID == post.clientID); // Return to View Profile ViewData["jobs"] = list_posts; return(View("MyPosts")); }
public ActionResult EditJob(FormCollection formCollection) { User user = new User(); user = Session["User"] as IAProject_FreelancerSystem.Models.User; // Users if (Session["User"] != null) { if (user.role == "admin") { return(RedirectToAction("Profile", "Dashboard")); } else if (user.role == "freelancer") { return(RedirectToAction("Index", "Wall")); } } else { return(RedirectToAction("Index", "Wall")); } // Get Job Job jobToEdit = new Job(); jobToEdit = new JobDB().SelectwithId(formCollection["jobID"]); jobToEdit.jobTitle = formCollection["jobTitle"]; jobToEdit.jobBudget = Int32.Parse(formCollection["jobBudget"]); jobToEdit.jobType = formCollection["jobType"]; jobToEdit.jobDescription = formCollection["jobDescription"]; // Update Data new JobDB().Update(jobToEdit); return(RedirectToAction("MyPosts")); }
public static CurrentStatus Build(JobDB jobDB, JobLogDB log, FMSSettings fmsSettings, IMachineGroupName machineGroupName, IQueueSyncFault queueSyncFault, MazakDbType dbType, MazakAllData mazakData, DateTime utcNow) { //Load process and path numbers Dictionary <string, int> uniqueToMaxPath; Dictionary <string, int> uniqueToMaxProcess; CalculateMaxProcAndPath(mazakData, out uniqueToMaxPath, out uniqueToMaxProcess); var currentLoads = new List <LoadAction>(mazakData.LoadActions); var curStatus = new CurrentStatus(); foreach (var k in fmsSettings.Queues) { curStatus.QueueSizes[k.Key] = k.Value; } if (mazakData.Alarms != null) { foreach (var alarm in mazakData.Alarms) { if (!string.IsNullOrEmpty(alarm.AlarmMessage)) { curStatus.Alarms.Add(alarm.AlarmMessage); } } } if (queueSyncFault.CurrentQueueMismatch) { curStatus.Alarms.Add("Queue contents and Mazak schedule quantity mismatch."); } var jobsBySchID = new Dictionary <long, InProcessJob>(); var pathBySchID = new Dictionary <long, MazakPart.IProcToPath>(); foreach (var schRow in mazakData.Schedules) { if (!MazakPart.IsSailPart(schRow.PartName)) { continue; } MazakPartRow partRow = null; foreach (var p in mazakData.Parts) { if (p.PartName == schRow.PartName) { partRow = p; break; } } if (partRow == null) { continue; } //Parse data from the database var partName = partRow.PartName; int loc = partName.IndexOf(':'); if (loc >= 0) { partName = partName.Substring(0, loc); } string jobUnique = ""; MazakPart.IProcToPath procToPath = null; bool manual = false; if (!string.IsNullOrEmpty(partRow.Comment)) { MazakPart.ParseComment(partRow.Comment, out jobUnique, out procToPath, out manual); } if (!uniqueToMaxProcess.ContainsKey(jobUnique)) { continue; } int numProc = uniqueToMaxProcess[jobUnique]; int maxProc1Path = uniqueToMaxPath[jobUnique]; InProcessJob job; //Create or lookup the job if (curStatus.Jobs.ContainsKey(jobUnique)) { job = curStatus.Jobs[jobUnique]; } else { var jobPaths = new int[numProc]; for (int i = 0; i < numProc; i++) { jobPaths[i] = maxProc1Path; } job = new InProcessJob(jobUnique, numProc, jobPaths); job.PartName = partName; job.JobCopiedToSystem = true; curStatus.Jobs.Add(jobUnique, job); } jobsBySchID.Add(schRow.Id, job); pathBySchID.Add(schRow.Id, procToPath); //Job Basics job.SetPlannedCyclesOnFirstProcess(procToPath.PathForProc(proc: 1), schRow.PlanQuantity); AddCompletedToJob(schRow, job, procToPath); job.Priority = schRow.Priority; if (((HoldPattern.HoldMode)schRow.HoldMode) == HoldPattern.HoldMode.FullHold) { job.HoldEntireJob.UserHold = true; } else { job.HoldEntireJob.UserHold = false; } AddRoutingToJob(mazakData, partRow, job, machineGroupName, procToPath, dbType); } var loadedJobs = new HashSet <string>(); foreach (var j in jobsBySchID.Values) { if (loadedJobs.Contains(j.UniqueStr)) { continue; } loadedJobs.Add(j.UniqueStr); AddDataFromJobDB(jobDB, j); } //Now add pallets foreach (var palRow in mazakData.Pallets) { if (palRow.PalletNumber > 0 && !curStatus.Pallets.ContainsKey(palRow.PalletNumber.ToString())) { var palName = palRow.PalletNumber.ToString(); var palLoc = FindPalletLocation(machineGroupName, mazakData, dbType, palRow.PalletNumber); //Create the pallet PalletStatus status = new PalletStatus() { Pallet = palName, CurrentPalletLocation = palLoc, FixtureOnPallet = palRow.Fixture, NumFaces = 1, OnHold = false }; curStatus.Pallets.Add(status.Pallet, status); var oldCycles = log.CurrentPalletLog(palName); //Add the material currently on the pallet foreach (var palSub in mazakData.PalletSubStatuses) { if 
(palSub.PalletNumber != palRow.PalletNumber) { continue; } if (palSub.FixQuantity <= 0) { continue; } if (!jobsBySchID.ContainsKey(palSub.ScheduleID)) { continue; } status.NumFaces = Math.Max(status.NumFaces, palSub.PartProcessNumber); var job = jobsBySchID[palSub.ScheduleID]; var procToPath = pathBySchID[palSub.ScheduleID]; var matIDs = new Queue <long>(FindMatIDsFromOldCycles(oldCycles, job.UniqueStr, palSub.PartProcessNumber)); for (int i = 1; i <= palSub.FixQuantity; i++) { int face = palSub.PartProcessNumber; long matID = -1; if (matIDs.Count > 0) { matID = matIDs.Dequeue(); } var matDetails = log.GetMaterialDetails(matID); var inProcMat = new InProcessMaterial() { MaterialID = matID, JobUnique = job.UniqueStr, PartName = job.PartName, Process = palSub.PartProcessNumber, Path = procToPath.PathForProc(palSub.PartProcessNumber), Serial = matDetails?.Serial, WorkorderId = matDetails?.Workorder, SignaledInspections = log.LookupInspectionDecisions(matID) .Where(x => x.Inspect) .Select(x => x.InspType) .Distinct() .ToList(), LastCompletedMachiningRouteStopIndex = oldCycles.Any( c => c.LogType == LogType.MachineCycle && !c.StartOfCycle && c.Material.Any(m => m.MaterialID == matID && m.Process == palSub.PartProcessNumber) ) ? (int?)0 : null, Location = new InProcessMaterialLocation() { Type = InProcessMaterialLocation.LocType.OnPallet, Pallet = status.Pallet, Face = face }, Action = new InProcessMaterialAction() { Type = InProcessMaterialAction.ActionType.Waiting } }; curStatus.Material.Add(inProcMat); //check for unloading or transfer var loadNext = CheckLoadOfNextProcess(currentLoads, job.UniqueStr, palSub.PartProcessNumber, palLoc); var unload = CheckUnload(currentLoads, job.UniqueStr, palSub.PartProcessNumber, palLoc); if (loadNext != null) { var start = FindLoadStartFromOldCycles(oldCycles, matID); inProcMat.Action = new InProcessMaterialAction() { Type = InProcessMaterialAction.ActionType.Loading, LoadOntoFace = palSub.PartProcessNumber + 1, LoadOntoPallet = status.Pallet, ProcessAfterLoad = palSub.PartProcessNumber + 1, PathAfterLoad = procToPath.PathForProc(palSub.PartProcessNumber + 1), ElapsedLoadUnloadTime = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null }; } else if (unload != null) { var start = FindLoadStartFromOldCycles(oldCycles, matID); inProcMat.Action = new InProcessMaterialAction() { Type = palSub.PartProcessNumber == job.NumProcesses ? InProcessMaterialAction.ActionType.UnloadToCompletedMaterial : InProcessMaterialAction.ActionType.UnloadToInProcess, UnloadIntoQueue = job.GetOutputQueue( process: palSub.PartProcessNumber, path: procToPath.PathForProc(palSub.PartProcessNumber)), ElapsedLoadUnloadTime = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null }; } else { // detect if machining var start = FindMachineStartFromOldCycles(oldCycles, matID); if (start != null) { var machStop = job.GetMachiningStop(inProcMat.Process, inProcMat.Path).FirstOrDefault(); var elapsedTime = utcNow.Subtract(start.EndTimeUTC); inProcMat.Action = new InProcessMaterialAction() { Type = InProcessMaterialAction.ActionType.Machining, ElapsedMachiningTime = elapsedTime, ExpectedRemainingMachiningTime = machStop != null?machStop.ExpectedCycleTime.Subtract(elapsedTime) : TimeSpan.Zero }; } } } } if (palLoc.Location == PalletLocationEnum.LoadUnload) { var start = FindLoadStartFromOldCycles(oldCycles); var elapsedLoad = start != null ? 
(TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null; AddLoads(log, currentLoads, status.Pallet, palLoc, elapsedLoad, curStatus); AddUnloads(log, currentLoads, status, elapsedLoad, oldCycles, curStatus); } } } //now queued var seenMatIds = new HashSet <long>(curStatus.Material.Select(m => m.MaterialID)); foreach (var mat in log.GetMaterialInAllQueues()) { // material could be in the process of being loaded if (seenMatIds.Contains(mat.MaterialID)) { continue; } var matLogs = log.GetLogForMaterial(mat.MaterialID); int lastProc = 0; foreach (var entry in log.GetLogForMaterial(mat.MaterialID)) { foreach (var entryMat in entry.Material) { if (entryMat.MaterialID == mat.MaterialID) { lastProc = Math.Max(lastProc, entryMat.Process); } } } var matDetails = log.GetMaterialDetails(mat.MaterialID); curStatus.Material.Add(new InProcessMaterial() { MaterialID = mat.MaterialID, JobUnique = mat.Unique, PartName = mat.PartName, Process = lastProc, Path = 1, Serial = matDetails?.Serial, WorkorderId = matDetails?.Workorder, SignaledInspections = log.LookupInspectionDecisions(mat.MaterialID) .Where(x => x.Inspect) .Select(x => x.InspType) .Distinct() .ToList(), Location = new InProcessMaterialLocation() { Type = InProcessMaterialLocation.LocType.InQueue, CurrentQueue = mat.Queue, QueuePosition = mat.Position, }, Action = new InProcessMaterialAction() { Type = InProcessMaterialAction.ActionType.Waiting } }); } var notCopied = jobDB.LoadJobsNotCopiedToSystem(DateTime.UtcNow.AddHours(-WriteJobs.JobLookbackHours), DateTime.UtcNow); foreach (var j in notCopied.Jobs) { if (curStatus.Jobs.ContainsKey(j.UniqueStr)) { //The copy to the cell succeeded but the DB has not yet been updated. //The thread which copies jobs will soon notice and update the database //so we can ignore it for now. } else { curStatus.Jobs.Add(j.UniqueStr, new InProcessJob(j)); } } foreach (var j in curStatus.Jobs) { foreach (var d in jobDB.LoadDecrementsForJob(j.Value.UniqueStr)) { j.Value.Decrements.Add(d); } } return(curStatus); }
public static JobLM Get(int autoID) { return(JobDB.ToLM(Database.Table <JobDB>().First(i => i.AutoID == autoID))); }
public WriteJobsSpec() { var logConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:"); logConn.Open(); _logDB = new JobLogDB(new FMSSettings(), logConn); _logDB.CreateTables(firstSerialOnEmpty: null); var jobConn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:"); jobConn.Open(); _jobDB = new JobDB(jobConn); _jobDB.CreateTables(); _writeMock = new WriteMock(); _readMock = Substitute.For <IReadDataAccess>(); _readMock.MazakType.Returns(MazakDbType.MazakSmooth); _readMock.LoadAllData().Returns(new MazakAllData() { Schedules = new[] { // a completed schedule, should be deleted new MazakScheduleRow() { Id = 1, PartName = "part1:1:1", Comment = MazakPart.CreateComment("uniq1", new [] { 1 }, false), PlanQuantity = 15, CompleteQuantity = 15, Priority = 50, Processes = { new MazakScheduleProcessRow() { MazakScheduleRowId = 1, FixedMachineFlag = 1, ProcessNumber = 1 } } }, // a non-completed schedule, should be decremented new MazakScheduleRow() { Id = 2, PartName = "part2:1:1", Comment = MazakPart.CreateComment("uniq2", new [] { 1 }, false), PlanQuantity = 15, CompleteQuantity = 10, Priority = 50, Processes = { new MazakScheduleProcessRow() { MazakScheduleRowId = 1, FixedMachineFlag = 1, ProcessNumber = 1, ProcessMaterialQuantity = 3, ProcessExecuteQuantity = 2 } } }, }, Parts = new[] { // should be deleted, since corresponding schedule is deleted new MazakPartRow() { PartName = "part1:1:1", Comment = MazakPart.CreateComment("uniq1", new[] { 1 }, false), Processes = { new MazakPartProcessRow() { PartName = "part1:1:1", ProcessNumber = 1, FixQuantity = 5, Fixture = "fixtoremove" } } }, //should be kept, since schedule is kept new MazakPartRow() { PartName = "part2:1:1", Comment = MazakPart.CreateComment("uniq2", new[] { 1 }, false), Processes = { new MazakPartProcessRow() { PartName = "part2:1:1", ProcessNumber = 1, FixQuantity = 2, Fixture = "fixtokeep" } } }, }, Fixtures = new[] { new MazakFixtureRow() { FixtureName = "fixtoremove", Comment = "Insight" }, new MazakFixtureRow() { FixtureName = "fixtokeep", Comment = "Insight" } }, Pallets = new[] { new MazakPalletRow() { PalletNumber = 5, Fixture = "fixtoremove" }, new MazakPalletRow() { PalletNumber = 6, Fixture = "fixtokeep" } }, PalletSubStatuses = Enumerable.Empty <MazakPalletSubStatusRow>(), PalletPositions = Enumerable.Empty <MazakPalletPositionRow>(), LoadActions = Enumerable.Empty <LoadAction>(), MainPrograms = Enumerable.Concat( (new[] { "1001", "1002", "1003", "1004", "1005" }).Select(p => new MazakProgramRow() { MainProgram = p, Comment = "" }), new[] { new MazakProgramRow() { MainProgram = System.IO.Path.Combine("theprogdir", "prog-bbb-1_rev2.EIA"), Comment = "Insight:2:prog-bbb-1" }, new MazakProgramRow() { MainProgram = System.IO.Path.Combine("theprogdir", "prog-bbb-1_rev3.EIA"), Comment = "Insight:3:prog-bbb-1" } } ) }); _readMock.LoadSchedulesPartsPallets().Returns(x => new MazakSchedulesPartsPallets() { Schedules = Enumerable.Empty <MazakScheduleRow>(), Parts = _writeMock.AddParts.Parts, Pallets = _writeMock.AddParts.Pallets, PalletSubStatuses = Enumerable.Empty <MazakPalletSubStatusRow>(), PalletPositions = Enumerable.Empty <MazakPalletPositionRow>(), LoadActions = Enumerable.Empty <LoadAction>(), MainPrograms = (new[] { "1001", "1002", "1003", "1004", "1005" }).Select(p => new MazakProgramRow() { MainProgram = p, Comment = "" }), }); _settings = new FMSSettings(); _settings.Queues["castings"] = new QueueSize(); _settings.Queues["queueAAA"] = new QueueSize(); _settings.Queues["queueBBB"] = 
new QueueSize(); _settings.Queues["queueCCC"] = new QueueSize(); _writeJobs = new WriteJobs( _writeMock, _readMock, Substitute.For <IHoldManagement>(), _jobDB, _logDB, _settings, check: false, useStarting: true, progDir: "theprogdir"); jsonSettings = new JsonSerializerSettings(); jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter()); jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter()); jsonSettings.DateTimeZoneHandling = DateTimeZoneHandling.Utc; jsonSettings.Formatting = Formatting.Indented; }
public DecrementPlanQty(JobDB jdb, IWriteData w, IReadDataAccess r) { _jobDB = jdb; _write = w; _read = r; }
public void Dispose() { JobDB.Close(); LogDB.Close(); }
public void AddJobs(NewJobs jobs, string expectedPreviousScheduleId) { JobDB.AddJobs(jobs, expectedPreviousScheduleId); }
public MazakBackend(IConfiguration configuration, FMSSettings st) { var cfg = configuration.GetSection("Mazak"); string localDbPath = cfg.GetValue <string>("Database Path"); MazakType = DetectMazakType(cfg, localDbPath); // database settings string sqlConnectString = cfg.GetValue <string>("SQL ConnectionString"); string dbConnStr; if (MazakType == MazakDbType.MazakSmooth) { if (!string.IsNullOrEmpty(sqlConnectString)) { dbConnStr = sqlConnectString; } else if (!string.IsNullOrEmpty(localDbPath)) { // old installers put sql server computer name in localDbPath dbConnStr = "Server=" + localDbPath + "\\pmcsqlserver;" + "User ID=mazakpmc;Password=Fms-978"; } else { var b = new System.Data.SqlClient.SqlConnectionStringBuilder(); b.UserID = "mazakpmc"; b.Password = "******"; b.DataSource = "(local)"; dbConnStr = b.ConnectionString; } } else { dbConnStr = localDbPath; if (string.IsNullOrEmpty(dbConnStr)) { dbConnStr = "c:\\Mazak\\NFMS\\DB"; } } // log csv string logPath = cfg.GetValue <string>("Log CSV Path"); if (logPath == null || logPath == "") { logPath = "c:\\Mazak\\FMS\\Log"; } if (MazakType != MazakDbType.MazakVersionE && !System.IO.Directory.Exists(logPath)) { Log.Error("Log CSV Directory {path} does not exist. Set the directory in the config.ini file.", logPath); } else if (MazakType != MazakDbType.MazakVersionE) { Log.Information("Loading log CSV files from {logcsv}", logPath); } // general config string useStarting = cfg.GetValue <string>("Use Starting Offset For Due Date"); string useStarting2 = cfg.GetValue <string>("Use Starting Offset"); if (string.IsNullOrEmpty(useStarting)) { if (string.IsNullOrEmpty(useStarting2)) { UseStartingOffsetForDueDate = true; } else { UseStartingOffsetForDueDate = Convert.ToBoolean(useStarting2); } } else { UseStartingOffsetForDueDate = Convert.ToBoolean(useStarting); } //Perhaps this should be a new setting, but if you don't check for pallets used once //then you don't care if all faces on a pallet are full so might as well use priority //which causes pallet positions to go empty. 
CheckPalletsUsedOnce = !UseStartingOffsetForDueDate; ProgramDirectory = cfg.GetValue <string>("Program Directory"); if (string.IsNullOrEmpty(ProgramDirectory)) { ProgramDirectory = "C:\\NCProgs"; } // serial settings string serialPerMaterial = cfg.GetValue <string>("Assign Serial Per Material"); if (!string.IsNullOrEmpty(serialPerMaterial)) { bool result; if (bool.TryParse(serialPerMaterial, out result)) { if (!result) { st.SerialType = SerialType.AssignOneSerialPerCycle; } } } Log.Debug( "Configured UseStartingOffsetForDueDate = {useStarting}", UseStartingOffsetForDueDate); jobLog = new BlackMaple.MachineFramework.JobLogDB(st); jobLog.Open( System.IO.Path.Combine(st.DataDirectory, "log.db"), System.IO.Path.Combine(st.DataDirectory, "insp.db"), startingSerial: st.StartingSerial ); jobDB = new BlackMaple.MachineFramework.JobDB(); var jobInspName = System.IO.Path.Combine(st.DataDirectory, "jobinspection.db"); if (System.IO.File.Exists(jobInspName)) { jobDB.Open(jobInspName); } else { jobDB.Open(System.IO.Path.Combine(st.DataDirectory, "mazakjobs.db")); } _writeDB = new OpenDatabaseKitTransactionDB(dbConnStr, MazakType); if (MazakType == MazakDbType.MazakVersionE) { loadOper = new LoadOperationsFromFile(cfg, enableWatcher: true); } else if (MazakType == MazakDbType.MazakWeb) { loadOper = new LoadOperationsFromFile(cfg, enableWatcher: false); // web instead watches the log csv files } else { loadOper = null; // smooth db doesn't use the load operations file } var openReadDb = new OpenDatabaseKitReadDB(dbConnStr, MazakType, loadOper); if (MazakType == MazakDbType.MazakSmooth) { _readDB = new SmoothReadOnlyDB(dbConnStr, openReadDb); } else { _readDB = openReadDb; } queues = new MazakQueues(jobLog, jobDB, _writeDB); var sendToExternal = new SendMaterialToExternalQueue(); hold = new HoldPattern(_writeDB, _readDB, jobDB, true); var writeJobs = new WriteJobs(_writeDB, _readDB, hold, jobDB, jobLog, st, CheckPalletsUsedOnce, UseStartingOffsetForDueDate, ProgramDirectory); var decr = new DecrementPlanQty(jobDB, _writeDB, _readDB); if (MazakType == MazakDbType.MazakWeb || MazakType == MazakDbType.MazakSmooth) { logDataLoader = new LogDataWeb(logPath, jobLog, jobDB, writeJobs, sendToExternal, _readDB, queues, st); } else { #if USE_OLEDB logDataLoader = new LogDataVerE(jobLog, jobDB, sendToExternal, writeJobs, _readDB, queues, st); #else throw new Exception("Mazak Web and VerE are not supported on .NET core"); #endif } routing = new RoutingInfo(_writeDB, writeJobs, _readDB, logDataLoader, jobDB, jobLog, writeJobs, queues, decr, CheckPalletsUsedOnce, st); logDataLoader.NewEntries += OnNewLogEntries; if (loadOper != null) { loadOper.LoadActions += OnLoadActions; } }
public static JobLM[] Search(int userAutoID, int pageIndex, int pageSize, string jobName, string planName, string begindate, string enddate, string isFinish)
{
  var query = Database.Table<JobDB>();
  if (!string.IsNullOrEmpty(jobName))
  {
    query = query.Where(m => m.Name.Contains(jobName));
  }
  if (!string.IsNullOrEmpty(planName))
  {
    query = query.Where(m => m.PlanTitle.Contains(planName));
  }
  if (!string.IsNullOrEmpty(begindate))
  {
    query = query.Where(m => m.CreateTime >= Convert.ToDateTime(begindate));
  }
  if (!string.IsNullOrEmpty(enddate))
  {
    query = query.Where(m => m.CreateTime <= Convert.ToDateTime(enddate).AddDays(1));
  }
  // "不限" means "no restriction"; any other non-empty value filters on completion,
  // with "完成" ("completed") selecting finished jobs.
  if (!string.IsNullOrEmpty(isFinish) && isFinish != "不限")
  {
    if (isFinish == "完成")
    {
      query = query.Where(m => m.IsComplete == true);
    }
    else
    {
      query = query.Where(m => m.IsComplete == false);
    }
  }
  if (userAutoID == 0)
  {
    return(query.Skip(pageIndex * pageSize).Take(pageSize).OrderByDescending(i => i.UpdateTime).ToArray().Select(i => JobDB.ToLM(i)).ToArray());
  }
  return(query.Where(i => i.UserAutoID == userAutoID && i.DeleteFlag == 0).Skip(pageIndex * pageSize).Take(pageSize).OrderByDescending(i => i.UpdateTime).ToArray().Select(i => JobDB.ToLM(i)).ToArray());
}
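A hypothetical caller for the search above; the enclosing static class name (JobService) and the argument values are illustrative only and do not appear in this section. Passing "不限" for isFinish leaves the completion filter off, matching the check in the method.

// First page (20 rows) of jobs belonging to user 5, filtered by job name only.
JobLM[] page = JobService.Search(
  userAutoID: 5,
  pageIndex: 0,
  pageSize: 20,
  jobName: "test",
  planName: "",
  begindate: "",
  enddate: "",
  isFinish: "不限");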