// Wraps a Mazak schedule row for hold processing.  If the schedule belongs to a
// SeedTactics-created part, the hold patterns are copied from the matching job
// in the job database (when one exists).
public MazakSchedule(HoldPattern parent, MazakScheduleRow s)
{
  _parent = parent;
  _schRow = s;

  // Schedules for parts we did not create carry no parsable comment; leave holds unset.
  if (!MazakPart.IsSailPart(_schRow.PartName)) return;

  MazakPart.ParseComment(_schRow.Comment, out string jobUnique, out var procToPath, out var isManual);

  var matchingJob = parent.jobDB.LoadJob(jobUnique);
  if (matchingJob == null) return;

  HoldEntireJob = matchingJob.HoldEntireJob;
  HoldMachining = matchingJob.HoldMachining(process: 1, path: procToPath.PathForProc(proc: 1));
}
/// <summary>
/// Scans the downloaded Mazak part rows and, for each SeedTactics job unique found in
/// a part comment, records the largest process-1 path number and the process count.
/// </summary>
/// <param name="mazakData">Snapshot of schedules/parts/pallets downloaded from the cell controller.</param>
/// <param name="uniqueToMaxProc1Path">Out: job unique -> maximum path number seen for process 1.</param>
/// <param name="uniqueToMaxProcess">Out: job unique -> number of processes (from the first part row seen).</param>
private static void CalculateMaxProcAndPath(
  MazakSchedulesPartsPallets mazakData,
  out Dictionary<string, int> uniqueToMaxProc1Path,
  out Dictionary<string, int> uniqueToMaxProcess)
{
  uniqueToMaxProc1Path = new Dictionary<string, int>();
  uniqueToMaxProcess = new Dictionary<string, int>();

  foreach (var partRow in mazakData.Parts)
  {
    // Only parts created by SeedTactics carry a comment encoding the job unique and paths.
    if (!MazakPart.IsSailPart(partRow.PartName) || string.IsNullOrEmpty(partRow.Comment)) continue;

    int numProc = partRow.Processes.Count;
    MazakPart.ParseComment(partRow.Comment, out string jobUnique, out var paths, out bool manual);
    int proc1Path = paths.PathForProc(proc: 1);

    // Track the largest process-1 path (TryGetValue avoids the ContainsKey+indexer double lookup).
    if (uniqueToMaxProc1Path.TryGetValue(jobUnique, out int existingPath))
    {
      uniqueToMaxProc1Path[jobUnique] = Math.Max(existingPath, proc1Path);
    }
    else
    {
      uniqueToMaxProc1Path.Add(jobUnique, proc1Path);
    }

    // All paths of a unique should agree on the process count; warn when they do not.
    if (uniqueToMaxProcess.TryGetValue(jobUnique, out int existingNumProc))
    {
      if (numProc != existingNumProc)
      {
        Log.Warning("Paths for {uniq} have a different number of processes", jobUnique);
      }
    }
    else
    {
      uniqueToMaxProcess.Add(jobUnique, numProc);
    }
  }
}
/// <summary>
/// Builds the FMS Insight <c>CurrentStatus</c> snapshot (jobs, pallets, in-process
/// material, queues, and alarms) from the data downloaded from the Mazak cell
/// controller combined with the local job and log databases.
/// </summary>
/// <param name="utcNow">The current UTC time; used for elapsed-time calculations and the
/// not-yet-copied job lookback so the method is deterministic under test.</param>
public static CurrentStatus Build(JobDB jobDB, JobLogDB log, FMSSettings fmsSettings, IMachineGroupName machineGroupName, IQueueSyncFault queueSyncFault, MazakDbType dbType, MazakAllData mazakData, DateTime utcNow)
{
  //Load process and path numbers
  Dictionary<string, int> uniqueToMaxPath;
  Dictionary<string, int> uniqueToMaxProcess;
  CalculateMaxProcAndPath(mazakData, out uniqueToMaxPath, out uniqueToMaxProcess);

  var currentLoads = new List<LoadAction>(mazakData.LoadActions);

  var curStatus = new CurrentStatus();
  foreach (var k in fmsSettings.Queues)
  {
    curStatus.QueueSizes[k.Key] = k.Value;
  }
  if (mazakData.Alarms != null)
  {
    foreach (var alarm in mazakData.Alarms)
    {
      if (!string.IsNullOrEmpty(alarm.AlarmMessage))
      {
        curStatus.Alarms.Add(alarm.AlarmMessage);
      }
    }
  }
  if (queueSyncFault.CurrentQueueMismatch)
  {
    curStatus.Alarms.Add("Queue contents and Mazak schedule quantity mismatch.");
  }

  var jobsBySchID = new Dictionary<long, InProcessJob>();
  var pathBySchID = new Dictionary<long, MazakPart.IProcToPath>();

  foreach (var schRow in mazakData.Schedules)
  {
    if (!MazakPart.IsSailPart(schRow.PartName)) continue;

    // Find the part row matching this schedule.
    MazakPartRow partRow = null;
    foreach (var p in mazakData.Parts)
    {
      if (p.PartName == schRow.PartName)
      {
        partRow = p;
        break;
      }
    }
    if (partRow == null) continue;

    //Parse data from the database
    var partName = partRow.PartName;
    int loc = partName.IndexOf(':');
    if (loc >= 0) partName = partName.Substring(0, loc);
    string jobUnique = "";
    MazakPart.IProcToPath procToPath = null;
    bool manual = false;
    if (!string.IsNullOrEmpty(partRow.Comment))
    {
      MazakPart.ParseComment(partRow.Comment, out jobUnique, out procToPath, out manual);
    }

    // Skip schedules whose unique was not seen in CalculateMaxProcAndPath
    // (TryGetValue avoids the ContainsKey + indexer double lookup).
    if (!uniqueToMaxProcess.TryGetValue(jobUnique, out int numProc)) continue;
    int maxProc1Path = uniqueToMaxPath[jobUnique];

    //Create or lookup the job
    if (!curStatus.Jobs.TryGetValue(jobUnique, out InProcessJob job))
    {
      // Every process is assigned the same path count, the max process-1 path.
      var jobPaths = new int[numProc];
      for (int i = 0; i < numProc; i++)
      {
        jobPaths[i] = maxProc1Path;
      }
      job = new InProcessJob(jobUnique, numProc, jobPaths);
      job.PartName = partName;
      job.JobCopiedToSystem = true;
      curStatus.Jobs.Add(jobUnique, job);
    }
    jobsBySchID.Add(schRow.Id, job);
    pathBySchID.Add(schRow.Id, procToPath);

    //Job Basics
    job.SetPlannedCyclesOnFirstProcess(procToPath.PathForProc(proc: 1), schRow.PlanQuantity);
    AddCompletedToJob(schRow, job, procToPath);
    job.Priority = schRow.Priority;
    job.HoldEntireJob.UserHold = ((HoldPattern.HoldMode)schRow.HoldMode) == HoldPattern.HoldMode.FullHold;

    AddRoutingToJob(mazakData, partRow, job, machineGroupName, procToPath, dbType);
  }

  // Load job-database data once per unique (multiple schedules can share a unique).
  var loadedJobs = new HashSet<string>();
  foreach (var j in jobsBySchID.Values)
  {
    if (loadedJobs.Contains(j.UniqueStr)) continue;
    loadedJobs.Add(j.UniqueStr);
    AddDataFromJobDB(jobDB, j);
  }

  //Now add pallets
  foreach (var palRow in mazakData.Pallets)
  {
    if (palRow.PalletNumber > 0 && !curStatus.Pallets.ContainsKey(palRow.PalletNumber.ToString()))
    {
      var palName = palRow.PalletNumber.ToString();
      var palLoc = FindPalletLocation(machineGroupName, mazakData, dbType, palRow.PalletNumber);

      //Create the pallet
      PalletStatus status = new PalletStatus()
      {
        Pallet = palName,
        CurrentPalletLocation = palLoc,
        FixtureOnPallet = palRow.Fixture,
        NumFaces = 1,
        OnHold = false
      };
      curStatus.Pallets.Add(status.Pallet, status);

      var oldCycles = log.CurrentPalletLog(palName);

      //Add the material currently on the pallet
      foreach (var palSub in mazakData.PalletSubStatuses)
      {
        if (palSub.PalletNumber != palRow.PalletNumber) continue;
        if (palSub.FixQuantity <= 0) continue;
        if (!jobsBySchID.ContainsKey(palSub.ScheduleID)) continue;

        status.NumFaces = Math.Max(status.NumFaces, palSub.PartProcessNumber);

        var job = jobsBySchID[palSub.ScheduleID];
        var procToPath = pathBySchID[palSub.ScheduleID];

        // Match previously-logged material ids to the pieces on this pallet face.
        var matIDs = new Queue<long>(FindMatIDsFromOldCycles(oldCycles, job.UniqueStr, palSub.PartProcessNumber));

        for (int i = 1; i <= palSub.FixQuantity; i++)
        {
          int face = palSub.PartProcessNumber;
          long matID = -1;
          if (matIDs.Count > 0) matID = matIDs.Dequeue();

          var matDetails = log.GetMaterialDetails(matID);
          var inProcMat = new InProcessMaterial()
          {
            MaterialID = matID,
            JobUnique = job.UniqueStr,
            PartName = job.PartName,
            Process = palSub.PartProcessNumber,
            Path = procToPath.PathForProc(palSub.PartProcessNumber),
            Serial = matDetails?.Serial,
            WorkorderId = matDetails?.Workorder,
            SignaledInspections =
              log.LookupInspectionDecisions(matID)
                .Where(x => x.Inspect)
                .Select(x => x.InspType)
                .Distinct()
                .ToList(),
            // A completed machine cycle for this material/process marks stop 0 as done.
            LastCompletedMachiningRouteStopIndex =
              oldCycles.Any(
                c => c.LogType == LogType.MachineCycle
                  && !c.StartOfCycle
                  && c.Material.Any(m => m.MaterialID == matID && m.Process == palSub.PartProcessNumber)
              )
              ? (int?)0
              : null,
            Location = new InProcessMaterialLocation()
            {
              Type = InProcessMaterialLocation.LocType.OnPallet,
              Pallet = status.Pallet,
              Face = face
            },
            Action = new InProcessMaterialAction()
            {
              Type = InProcessMaterialAction.ActionType.Waiting
            }
          };
          curStatus.Material.Add(inProcMat);

          //check for unloading or transfer
          var loadNext = CheckLoadOfNextProcess(currentLoads, job.UniqueStr, palSub.PartProcessNumber, palLoc);
          var unload = CheckUnload(currentLoads, job.UniqueStr, palSub.PartProcessNumber, palLoc);

          if (loadNext != null)
          {
            var start = FindLoadStartFromOldCycles(oldCycles, matID);
            inProcMat.Action = new InProcessMaterialAction()
            {
              Type = InProcessMaterialAction.ActionType.Loading,
              LoadOntoFace = palSub.PartProcessNumber + 1,
              LoadOntoPallet = status.Pallet,
              ProcessAfterLoad = palSub.PartProcessNumber + 1,
              PathAfterLoad = procToPath.PathForProc(palSub.PartProcessNumber + 1),
              ElapsedLoadUnloadTime = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null
            };
          }
          else if (unload != null)
          {
            var start = FindLoadStartFromOldCycles(oldCycles, matID);
            inProcMat.Action = new InProcessMaterialAction()
            {
              // Last process unloads to completed material; earlier processes stay in-process.
              Type =
                palSub.PartProcessNumber == job.NumProcesses
                  ? InProcessMaterialAction.ActionType.UnloadToCompletedMaterial
                  : InProcessMaterialAction.ActionType.UnloadToInProcess,
              UnloadIntoQueue = job.GetOutputQueue(
                process: palSub.PartProcessNumber,
                path: procToPath.PathForProc(palSub.PartProcessNumber)),
              ElapsedLoadUnloadTime = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null
            };
          }
          else
          {
            // detect if machining
            var start = FindMachineStartFromOldCycles(oldCycles, matID);
            if (start != null)
            {
              var machStop = job.GetMachiningStop(inProcMat.Process, inProcMat.Path).FirstOrDefault();
              var elapsedTime = utcNow.Subtract(start.EndTimeUTC);
              inProcMat.Action = new InProcessMaterialAction()
              {
                Type = InProcessMaterialAction.ActionType.Machining,
                ElapsedMachiningTime = elapsedTime,
                ExpectedRemainingMachiningTime =
                  machStop != null ? machStop.ExpectedCycleTime.Subtract(elapsedTime) : TimeSpan.Zero
              };
            }
          }
        }
      }

      if (palLoc.Location == PalletLocationEnum.LoadUnload)
      {
        var start = FindLoadStartFromOldCycles(oldCycles);
        var elapsedLoad = start != null ? (TimeSpan?)utcNow.Subtract(start.EndTimeUTC) : null;
        AddLoads(log, currentLoads, status.Pallet, palLoc, elapsedLoad, curStatus);
        AddUnloads(log, currentLoads, status, elapsedLoad, oldCycles, curStatus);
      }
    }
  }

  //now queued
  var seenMatIds = new HashSet<long>(curStatus.Material.Select(m => m.MaterialID));
  foreach (var mat in log.GetMaterialInAllQueues())
  {
    // material could be in the process of being loaded
    if (seenMatIds.Contains(mat.MaterialID)) continue;

    // FIX: reuse the single GetMaterialDetails/GetLogForMaterial query instead of
    // loading the material log twice (the original assigned matLogs then queried again).
    var matLogs = log.GetLogForMaterial(mat.MaterialID);
    int lastProc = 0;
    foreach (var entry in matLogs)
    {
      foreach (var entryMat in entry.Material)
      {
        if (entryMat.MaterialID == mat.MaterialID)
        {
          lastProc = Math.Max(lastProc, entryMat.Process);
        }
      }
    }
    var matDetails = log.GetMaterialDetails(mat.MaterialID);
    curStatus.Material.Add(new InProcessMaterial()
    {
      MaterialID = mat.MaterialID,
      JobUnique = mat.Unique,
      PartName = mat.PartName,
      Process = lastProc,
      Path = 1,
      Serial = matDetails?.Serial,
      WorkorderId = matDetails?.Workorder,
      SignaledInspections =
        log.LookupInspectionDecisions(mat.MaterialID)
          .Where(x => x.Inspect)
          .Select(x => x.InspType)
          .Distinct()
          .ToList(),
      Location = new InProcessMaterialLocation()
      {
        Type = InProcessMaterialLocation.LocType.InQueue,
        CurrentQueue = mat.Queue,
        QueuePosition = mat.Position,
      },
      Action = new InProcessMaterialAction()
      {
        Type = InProcessMaterialAction.ActionType.Waiting
      }
    });
  }

  // FIX: use the injected utcNow rather than DateTime.UtcNow so the whole snapshot
  // is computed against a single consistent clock (and is testable).
  var notCopied = jobDB.LoadJobsNotCopiedToSystem(utcNow.AddHours(-WriteJobs.JobLookbackHours), utcNow);
  foreach (var j in notCopied.Jobs)
  {
    if (curStatus.Jobs.ContainsKey(j.UniqueStr))
    {
      //The copy to the cell succeeded but the DB has not yet been updated.
      //The thread which copies jobs will soon notice and update the database
      //so we can ignore it for now.
    }
    else
    {
      curStatus.Jobs.Add(j.UniqueStr, new InProcessJob(j));
    }
  }

  foreach (var j in curStatus.Jobs)
  {
    foreach (var d in jobDB.LoadDecrementsForJob(j.Value.UniqueStr))
    {
      j.Value.Decrements.Add(d);
    }
  }

  return curStatus;
}
/// <summary>
/// Builds the Mazak write transaction that creates a schedule row for every job path
/// with a positive planned quantity that is not already scheduled in the cell controller.
/// Throws <c>BadRequestException</c> if a job path has no corresponding downloaded part.
/// </summary>
public static MazakWriteData AddSchedules(
  MazakSchedulesPartsPallets mazakData,
  IEnumerable<JobPlan> jobs,
  bool UseStartingOffsetForDueDate)
{
  var writeData = new MazakWriteData();

  // Record the schedule ids and part names already present in the cell controller.
  var existingIds = new HashSet<int>();
  var existingParts = new HashSet<string>();
  foreach (var row in mazakData.Schedules)
  {
    existingIds.Add(row.Id);
    existingParts.Add(row.PartName);
  }

  //now add the new schedule
  foreach (JobPlan job in jobs)
  {
    for (int proc1path = 1; proc1path <= job.GetNumPaths(1); proc1path++)
    {
      if (job.GetPlannedCyclesOnFirstProcess(proc1path) <= 0) continue;

      //check if part exists downloaded
      int uid = -1;
      var partName = "";
      var comment = "";
      foreach (var partRow in mazakData.Parts)
      {
        if (!MazakPart.IsSailPart(partRow.PartName)) continue;
        MazakPart.ParseComment(partRow.Comment, out string u, out var ps, out bool m);
        if (u == job.UniqueStr && ps.PathForProc(proc: 1) == proc1path)
        {
          uid = MazakPart.ParseUID(partRow.PartName);
          partName = partRow.PartName;
          comment = partRow.Comment;
          break;
        }
      }

      if (uid < 0)
      {
        throw new BlackMaple.MachineFramework.BadRequestException(
          "Attempting to create schedule for " + job.UniqueStr + " but a part does not exist");
      }

      // Already scheduled; nothing to add for this path.
      if (existingParts.Contains(partName)) continue;

      int schid = FindNextScheduleId(existingIds);
      int earlierConflicts = CountEarlierConflicts(job, proc1path, jobs);
      SchedulePart(
        writeData, schid, partName, comment, job.NumProcesses,
        job, proc1path, earlierConflicts, UseStartingOffsetForDueDate);
    }
  }

  if (UseStartingOffsetForDueDate) SortSchedulesByDate(writeData);

  return writeData;
}
/// <summary>
/// Determines which Mazak schedules should have their planned quantity decremented.
/// Skips non-SeedTactics and manual schedules, schedules with no matching job, and
/// jobs already decremented.  The new plan quantity is the count already completed
/// or started plus any material currently being loaded at process 1.
/// </summary>
private List<DecrSchedule> JobsToDecrement(MazakSchedulesAndLoadActions schedules)
{
  var result = new List<DecrSchedule>();
  foreach (var schRow in schedules.Schedules)
  {
    //parse schedule
    if (!MazakPart.IsSailPart(schRow.PartName)) continue;
    if (string.IsNullOrEmpty(schRow.Comment)) continue;
    MazakPart.ParseComment(schRow.Comment, out string unique, out var procToPath, out bool manual);
    if (manual) continue;

    //load the job
    if (string.IsNullOrEmpty(unique)) continue;
    var job = _jobDB.LoadJob(unique);
    if (job == null) continue;

    // if already decremented, ignore
    if (_jobDB.LoadDecrementsForJob(unique).Any()) continue;

    // Count material in the process of being loaded at process 1 on this job's path.
    int loadingQty = 0;
    if (schedules.LoadActions != null)
    {
      foreach (var action in schedules.LoadActions)
      {
        bool isProc1LoadForJob =
          action.Unique == job.UniqueStr
          && action.Process == 1
          && action.LoadEvent
          && action.Path == procToPath.PathForProc(action.Process);
        if (isProc1LoadForJob)
        {
          loadingQty += action.Qty;
          Log.Debug("Found {uniq} is in the process of being loaded action {@action}", job.UniqueStr, action);
        }
      }
    }

    result.Add(new DecrSchedule()
    {
      Schedule = schRow,
      Job = job,
      Proc1Path = procToPath.PathForProc(proc: 1),
      NewPlanQty = CountCompletedOrMachiningStarted(schRow) + loadingQty
    });
  }
  return result;
}
// Resolves Mazak "pending load" records for a pallet into concrete material ids and
// completes the pallet cycle in the event log.  Material is found, in order of
// preference, in the job's input queue, by allocating fresh ids (process 1), or on the
// pallet from the previous process; new ids are allocated when nothing matches.
// Returns the existing cycle when this call itself was a pallet cycle, otherwise the
// refreshed pallet log.
private List<MWI.LogEntry> CheckPendingLoads(int pallet, DateTime t, string foreignID, bool palletCycle, List<MWI.LogEntry> cycle)
{
  var pending = _log.PendingLoads(pallet.ToString());
  if (pending.Count == 0)
  {
    if (palletCycle)
    {
      // Only record the pallet cycle if an unload actually completed; otherwise the
      // rotation is spurious and is skipped.
      bool hasCompletedUnload = false;
      foreach (var e in cycle)
      {
        if (e.LogType == LogType.LoadUnloadCycle && e.StartOfCycle == false && e.Result == "UNLOAD")
        {
          hasCompletedUnload = true;
        }
      }
      if (hasCompletedUnload)
      {
        _log.CompletePalletCycle(pallet.ToString(), t, foreignID);
      }
      else
      {
        Log.Debug("Skipping pallet cycle at time {time} because we detected a pallet cycle without unload", t);
      }
    }
    return (cycle);
  }

  var mat = new Dictionary<string, IEnumerable<JobLogDB.EventLogMaterial>>();

  foreach (var p in pending)
  {
    Log.Debug("Processing pending load {@pending}", p);
    // Pending-load key format: "<mazak part name>,<process>,<fix quantity>".
    var s = p.Key.Split(',');
    if (s.Length != 3) continue;
    string fullPartName = s[0];
    string jobPartName = MazakPart.ExtractPartNameFromMazakPartName(fullPartName);
    int proc;
    int fixQty;
    // Malformed numbers default to process 1 / quantity 1 rather than failing the load.
    if (!int.TryParse(s[1], out proc)) proc = 1;
    if (!int.TryParse(s[2], out fixQty)) fixQty = 1;

    _mazakSchedules.FindSchedule(fullPartName, proc, out string unique, out int path, out int numProc);

    JobPlan job = GetJob(unique);

    var mats = new List<JobLogDB.EventLogMaterial>();
    if (job != null && !string.IsNullOrEmpty(job.GetInputQueue(proc, path)))
    {
      // search input queue for material
      Log.Debug("Searching queue {queue} for {unique}-{proc} to load", job.GetInputQueue(proc, path), unique, proc);
      // TODO: filter paths
      var qs = _log.GetMaterialInQueue(job.GetInputQueue(proc, path)).Where(q => q.Unique == unique).ToList();
      for (int i = 1; i <= fixQty; i++)
      {
        // Single-piece fixtures use face "<proc>"; multi-piece use "<proc>-<i>".
        string face;
        if (fixQty == 1)
        {
          face = proc.ToString();
        }
        else
        {
          face = proc.ToString() + "-" + i.ToString();
        }
        if (i <= qs.Count)
        {
          var qmat = qs[i - 1];
          mats.Add(new JobLogDB.EventLogMaterial() { MaterialID = qmat.MaterialID, Process = proc, Face = face });
        }
        else
        {
          // not enough material in queue
          Log.Warning("Not enough material in queue {queue} for {part}-{proc}, creating new material for {@pending}",
            job.GetInputQueue(proc, path), fullPartName, proc, p);
          mats.Add(new JobLogDB.EventLogMaterial()
          {
            MaterialID = _log.AllocateMaterialID(unique, jobPartName, numProc), Process = proc, Face = face
          });
        }
      }
    }
    else if (proc == 1)
    {
      // create new material
      Log.Debug("Creating new material for unique {unique} process 1", unique);
      for (int i = 1; i <= fixQty; i += 1)
      {
        string face;
        if (fixQty == 1)
        {
          face = proc.ToString();
        }
        else
        {
          face = proc.ToString() + "-" + i.ToString();
        }
        mats.Add(new JobLogDB.EventLogMaterial()
        {
          MaterialID = _log.AllocateMaterialID(unique, jobPartName, numProc), Process = proc, Face = face
        });
      }
    }
    else
    {
      // search on pallet in the previous process for material
      Log.Debug("Searching on pallet for unique {unique} process {proc} to load into process {proc}", unique, proc - 1, proc);
      var byFace = ParseMaterialFromPreviousEvents(
        jobPartName: jobPartName,
        proc: proc - 1,
        fixQty: fixQty,
        isUnloadEnd: false,
        oldEvents: cycle);
      for (int i = 1; i <= fixQty; i += 1)
      {
        // Carry the material forward from the previous process face to the next one.
        string prevFace;
        string nextFace;
        if (fixQty == 1)
        {
          prevFace = (proc - 1).ToString();
          nextFace = proc.ToString();
        }
        else
        {
          prevFace = (proc - 1).ToString() + "-" + i.ToString();
          nextFace = proc.ToString() + "-" + i.ToString();
        }

        if (byFace.ContainsKey(prevFace))
        {
          var old = byFace[prevFace];
          mats.Add(new JobLogDB.EventLogMaterial()
          {
            MaterialID = old.MaterialID, Process = proc, Face = nextFace
          });
        }
        else
        {
          //something went wrong, must create material
          mats.Add(new JobLogDB.EventLogMaterial()
          {
            MaterialID = _log.AllocateMaterialID(unique, jobPartName, numProc), Process = proc, Face = nextFace
          });
          Log.Warning("Could not find material on pallet {pallet} for previous process {proc}, creating new material for {@pending}",
            pallet, proc - 1, p);
        }
      }
    }

    mat[p.Key] = mats;
  }

  _log.CompletePalletCycle(pallet.ToString(), t, foreignID, mat, generateSerials: true);

  if (palletCycle)
  {
    return (cycle);
  }
  else
  {
    return (_log.CurrentPalletLog(pallet.ToString()));
  }
}
// Process built from a template Mazak part-process row; the template row is retained
// so its columns can be reused when creating the new part's process rows.
public MazakProcessFromTemplate(MazakPart parent, MazakPartProcessRow template, int path) : base(parent, template.ProcessNumber, path)
{
  TemplateProcessRow = template;
}
// Process generated directly from the job plan with no template row; all state lives
// in the MazakProcess base class.
public MazakProcessFromJob(MazakPart parent, int process, int pth) : base(parent, process, pth)
{
}
// Base-class state shared by all process kinds: the owning part, the process number,
// and the path this process belongs to.
protected MazakProcess(MazakPart parent, int proc, int path)
{
  Part = parent;
  ProcessNumber = proc;
  Path = path;
}
// Builds the list of schedules whose input queues are safe to edit: only schedules
// with a known job, no in-progress load/unload action, and no pending load.
// Schedules are visited in due-date-then-priority order so that when a higher
// priority schedule for the same part is skipped, later schedules are flagged via
// LowerPriorityScheduleMatchingPartSkipped.
private IEnumerable<ScheduleWithQueues> LoadSchedules(MazakSchedulesAndLoadActions mazakData)
{
  var loadOpers = mazakData.LoadActions;
  var schs = new List<ScheduleWithQueues>();
  var pending = _log.AllPendingLoads();
  var skippedParts = new HashSet<string>();
  foreach (var schRow in mazakData.Schedules.OrderBy(s => s.DueDate).ThenBy(s => s.Priority))
  {
    if (!MazakPart.IsSailPart(schRow.PartName)) continue;

    MazakPart.ParseComment(schRow.Comment, out string unique, out var procToPath, out bool manual);

    var job = _jobDB.LoadJob(unique);
    if (job == null) continue;

    // only if no load or unload action is in process
    bool foundJobAtLoad = false;
    foreach (var action in loadOpers)
    {
      if (action.Unique == job.UniqueStr && action.Path == procToPath.PathForProc(action.Process))
      {
        foundJobAtLoad = true;
        skippedParts.Add(job.PartName);
        log.Debug("Not editing queued material because {uniq} is in the process of being loaded or unload with action {@action}", job.UniqueStr, action);
        break;
      }
    }
    foreach (var pendingLoad in pending)
    {
      // Pending-load key starts with the mazak part name.
      var s = pendingLoad.Key.Split(',');
      if (schRow.PartName == s[0])
      {
        skippedParts.Add(job.PartName);
        foundJobAtLoad = true;
        log.Debug("Not editing queued material because found a pending load {@pendingLoad}", pendingLoad);
        break;
      }
    }
    if (foundJobAtLoad) continue;

    // start building the schedule
    var sch = new ScheduleWithQueues()
    {
      SchRow = schRow,
      Unique = unique,
      Job = job,
      LowerPriorityScheduleMatchingPartSkipped = skippedParts.Contains(job.PartName),
      Procs = new Dictionary<int, ScheduleWithQueuesProcess>(),
    };
    bool missingProc = false;
    for (int proc = 1; proc <= job.NumProcesses; proc++)
    {
      // Find the schedule-process row for this process number.
      MazakScheduleProcessRow schProcRow = null;
      foreach (var row in schRow.Processes)
      {
        if (row.ProcessNumber == proc)
        {
          schProcRow = row;
          break;
        }
      }
      if (schProcRow == null)
      {
        log.Error("Unable to find process {proc} for job {uniq} and schedule {schid}", proc, job.UniqueStr, schRow.Id);
        missingProc = true;
        break;
      }
      var path = procToPath.PathForProc(proc);
      sch.Procs.Add(proc, new ScheduleWithQueuesProcess()
      {
        SchProcRow = schProcRow,
        PathGroup = job.GetPathGroup(process: proc, path: path),
        InputQueue = job.GetInputQueue(process: proc, path: path)
      });
    }
    // Schedules missing a process row are dropped entirely.
    if (!missingProc)
    {
      schs.Add(sch);
    }
  }
  return (schs);
}
/// <summary>
/// Allocates an unused download UID, removes completed/decremented schedules and old
/// part/pallet/fixture rows, then writes the new fixtures, programs, pallets, and parts
/// for the downloaded jobs to the Mazak cell controller.
/// </summary>
private void AddFixturesPalletsParts(NewJobs newJ)
{
  var mazakData = readDatabase.LoadAllData();

  //first allocate a UID to use for this download
  int UID = 0;
  while (UID < int.MaxValue)
  {
    // A UID is taken if any schedule's part name or any fixture name encodes it.
    bool inUse =
      mazakData.Schedules.Any(schRow => MazakPart.ParseUID(schRow.PartName) == UID)
      || mazakData.Fixtures.Any(fixRow => MazakPart.ParseUID(fixRow.FixtureName) == UID);
    if (!inUse) break;
    UID += 1;
  }
  if (UID == int.MaxValue)
  {
    throw new Exception("Unable to find unused UID");
  }
  Log.Debug("Creating new schedule with UID {uid}", UID);

  var (transSet, savedParts) = BuildMazakSchedules.RemoveCompletedAndDecrementSchedules(
    mazakData, UseStartingOffsetForDueDate
  );
  if (transSet.Schedules.Any())
  {
    writeDb.Save(transSet, "Update schedules");
  }

  Log.Debug("Saved Parts: {parts}", savedParts);

  var jobErrs = new List<string>();
  var mazakJobs = ConvertJobsToMazakParts.JobsToMazak(
    newJ.Jobs,
    UID,
    mazakData,
    savedParts,
    writeDb.MazakType,
    CheckPalletsUsedOnce,
    fmsSettings,
    LookupProgram,
    jobErrs);
  if (jobErrs.Any())
  {
    throw new BlackMaple.MachineFramework.BadRequestException(
      string.Join(Environment.NewLine, jobErrs)
    );
  }

  //delete everything
  transSet = mazakJobs.DeleteOldPartPalletRows();
  if (transSet.Parts.Any() || transSet.Pallets.Any())
  {
    try
    {
      writeDb.Save(transSet, "Delete Parts Pallets");
    }
    catch (ErrorModifyingParts e)
    {
      // Only fail if a part the Mazak complained about actually still exists.
      foreach (var partName in e.PartNames)
      {
        if (readDatabase.CheckPartExists(partName))
        {
          throw new Exception("Mazak returned an error when attempting to delete part " + partName);
        }
      }
    }
  }

  //have to delete fixtures after schedule, parts, and pallets are already deleted
  //also, add new fixtures
  transSet = mazakJobs.CreateDeleteFixtureAndProgramDatabaseRows(jobDB.LoadProgramContent, ProgramDirectory);
  writeDb.Save(transSet, "Fixtures");

  //now save the pallets and parts
  transSet = mazakJobs.CreatePartPalletDatabaseRows();
  writeDb.Save(transSet, "Add Parts");
}