Example #1
        public CincronBackend(IConfigurationSection config, FMSSettings cfg)
        {
            try
            {
                string msgFile;
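                // DEBUG builds read the sample messages checked into the test tree; release builds use the configured "Message File" path.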
#if DEBUG
                var path = System.Reflection.Assembly.GetExecutingAssembly().Location;
                msgFile = System.IO.Path.Combine(
                    System.IO.Path.GetDirectoryName(path), "..\\..\\..\\test\\Cincron\\parker-example-messages");
#else
                msgFile = config.GetValue <string>("Message File");
#endif

                Log.Information("Starting cincron backend with message file {file}", msgFile);

                if (!System.IO.File.Exists(msgFile))
                {
                    Log.Error("Message file {file} does not exist", msgFile);
                }

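                // Open the event log database stored in the configured data directory.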
                _log = new JobLogDB(cfg);

                _log.Open(
                    System.IO.Path.Combine(cfg.DataDirectory, "log.db"),
                    startingSerial: cfg.StartingSerial
                    );
                _msgWatcher = new MessageWatcher(msgFile, _log, cfg);
                _msgWatcher.Start();
            }
            catch (Exception ex)
            {
                Log.Error(ex, "Unhandled exception when initializing cincron backend");
            }
        }
Example #2
 public EventDBUpgradeSpec()
 {
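     // Copy the checked-in v17 database to a temporary file so the upgrade runs against a scratch copy.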
     _log      = new JobLogDB(new FMSSettings());
     _tempFile = System.IO.Path.GetTempFileName();
     System.IO.File.Copy("log.v17.db", _tempFile, overwrite: true);
     _log.Open(_tempFile);
 }
Example #3
        public MakinoBackend(IConfiguration config, FMSSettings st)
        {
            try
            {
                var cfg = config.GetSection("Makino");

                string adePath = cfg.GetValue <string>("ADE Path");
                if (string.IsNullOrEmpty(adePath))
                {
                    adePath = @"c:\Makino\ADE";
                }

                string dbConnStr = cfg.GetValue <string>("SQL Server Connection String");
                if (string.IsNullOrEmpty(dbConnStr))
                {
                    dbConnStr = DetectSqlConnectionStr();
                }

                bool downloadOnlyOrders = cfg.GetValue <bool>("Download Only Orders");

                Log.Information(
                    "Starting makino backend. Connection Str: {connStr}, ADE Path: {path}, DownloadOnlyOrders: {downOnlyOrders}",
                    dbConnStr, adePath, downloadOnlyOrders);

                _dataDirectory = st.DataDirectory;

                _log = new JobLogDB(st);
                _log.Open(
                    System.IO.Path.Combine(_dataDirectory, "log.db"),
                    System.IO.Path.Combine(_dataDirectory, "inspections.db"),
                    startingSerial: st.StartingSerial
                    );

                _jobDB = new BlackMaple.MachineFramework.JobDB();
                _jobDB.Open(System.IO.Path.Combine(_dataDirectory, "jobs.db"));

                _status = new StatusDB(System.IO.Path.Combine(_dataDirectory, "makino.db"));

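                // DEBUG builds use a local SQL database; release builds connect with the configured connection string.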
#if DEBUG
                _makinoDB = new MakinoDB(MakinoDB.DBTypeEnum.SqlLocal, "", _status, _log);
#else
                _makinoDB = new MakinoDB(MakinoDB.DBTypeEnum.SqlConnStr, dbConnStr, _status, _log);
#endif

                _logTimer = new LogTimer(_log, _jobDB, _makinoDB, _status, st);

                _jobs = new Jobs(_makinoDB, _jobDB, adePath, downloadOnlyOrders);

                _logTimer.LogsProcessed += OnLogsProcessed;
            }
            catch (Exception ex)
            {
                Log.Error(ex, "Error when initializing makino backend");
            }
        }
Example #4
        public MockServerBackend()
        {
            string path = null; // dataDir

            string dbFile(string f) => System.IO.Path.Combine(path, f + ".db");

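            // When a data directory is given, delete any stale database files and open fresh on-disk databases.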
            if (path != null)
            {
                if (System.IO.File.Exists(dbFile("log")))
                {
                    System.IO.File.Delete(dbFile("log"));
                }
                LogDB = new JobLogDB(new FMSSettings());
                LogDB.Open(dbFile("log"), dbFile("insp"));

                if (System.IO.File.Exists(dbFile("job")))
                {
                    System.IO.File.Delete(dbFile("job"));
                }
                JobDB = new JobDB();
                JobDB.Open(dbFile("job"));
            }
            else
            {
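                // Otherwise, back the log and job databases with in-memory SQLite connections.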
                var conn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
                conn.Open();
                LogDB = new JobLogDB(new FMSSettings(), conn);
                LogDB.CreateTables(firstSerialOnEmpty: null);

                conn = new Microsoft.Data.Sqlite.SqliteConnection("Data Source=:memory:");
                conn.Open();
                JobDB = new JobDB(conn);
                JobDB.CreateTables();
            }

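            // Configure JSON serializer settings: string enums, a TimeSpan converter, the default contract resolver, and non-public default constructors.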
            _jsonSettings = new JsonSerializerSettings();
            _jsonSettings.Converters.Add(new Newtonsoft.Json.Converters.StringEnumConverter());
            _jsonSettings.Converters.Add(new BlackMaple.MachineFramework.TimespanConverter());
            _jsonSettings.ContractResolver    = new Newtonsoft.Json.Serialization.DefaultContractResolver();
            _jsonSettings.ConstructorHandling = Newtonsoft.Json.ConstructorHandling.AllowNonPublicDefaultConstructor;

            var sampleDataPath = System.IO.Path.Combine(
                System.IO.Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location),
                "../../../sample-data/"
                );

            // sample data starts at Jan 1, 2018.  Need to offset to current month
            var jan1_18 = new DateTime(2018, 1, 1, 0, 0, 0, DateTimeKind.Utc);
            var offset  = DateTime.UtcNow.AddDays(-28).Subtract(jan1_18);

            LoadEvents(sampleDataPath, offset);
            LoadJobs(sampleDataPath, offset);
            LoadStatus(sampleDataPath, offset);
        }
Example #5
        public void StatusSnapshot(string scenario)
        {
            /*
             * Symlinks not supported on Windows
             * var newJobs = JsonConvert.DeserializeObject<NewJobs>(
             * File.ReadAllText(
             *  Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".jobs.json")),
             *  jsonSettings
             * );
             */
            NewJobs newJobs = null;

            if (scenario.Contains("basic"))
            {
                newJobs = JsonConvert.DeserializeObject <NewJobs>(
                    File.ReadAllText(
                        Path.Combine("..", "..", "..", "sample-newjobs", "fixtures-queues.json")),
                    jsonSettings
                    );
            }
            else if (scenario.Contains("multiface"))
            {
                newJobs = JsonConvert.DeserializeObject <NewJobs>(
                    File.ReadAllText(
                        Path.Combine("..", "..", "..", "sample-newjobs", "multi-face.json")),
                    jsonSettings
                    );
            }
            else if (scenario.Contains("pathgroups"))
            {
                newJobs = JsonConvert.DeserializeObject <NewJobs>(
                    File.ReadAllText(
                        Path.Combine("..", "..", "..", "sample-newjobs", "path-groups.json")),
                    jsonSettings
                    );
            }
            _jobDB.AddJobs(newJobs, null);

            var allData = JsonConvert.DeserializeObject <MazakAllData>(
                File.ReadAllText(
                    Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".data.json")),
                jsonSettings
                );

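            // Default to _emptyLog; if a scenario-specific log snapshot exists on disk, open it instead and close it afterwards.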
            var  logDb           = _emptyLog;
            bool close           = false;
            var  existingLogPath =
                Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".log.db");

            if (File.Exists(existingLogPath))
            {
                logDb = new JobLogDB(new FMSSettings());
                logDb.Open(existingLogPath);
                close = true;
            }

            if (scenario == "basic-no-material")
            {
                queueSyncFault.CurrentQueueMismatch.Returns(true);
            }

            CurrentStatus status;

            try
            {
                status = BuildCurrentStatus.Build(_jobDB, logDb, _settings, queueSyncFault, MazakDbType.MazakSmooth, allData,
                                                  new DateTime(2018, 7, 19, 20, 42, 3, DateTimeKind.Utc));
            }
            finally
            {
                if (close)
                {
                    logDb.Close();
                }
            }

            var expectedStatus = JsonConvert.DeserializeObject <CurrentStatus>(
                File.ReadAllText(
                    Path.Combine("..", "..", "..", "mazak", "read-snapshots", scenario + ".status.json")),
                jsonSettings
                );

            status.Should().BeEquivalentTo(expectedStatus, options =>
                                           options.Excluding(c => c.TimeOfCurrentStatusUTC)
                                           );
        }
Example #6
        public MazakBackend(IConfiguration configuration, FMSSettings st)
        {
            var    cfg         = configuration.GetSection("Mazak");
            string localDbPath = cfg.GetValue <string>("Database Path");

            MazakType = DetectMazakType(cfg, localDbPath);

            // database settings
            string sqlConnectString = cfg.GetValue <string>("SQL ConnectionString");
            string dbConnStr;

            if (MazakType == MazakDbType.MazakSmooth)
            {
                if (!string.IsNullOrEmpty(sqlConnectString))
                {
                    dbConnStr = sqlConnectString;
                }
                else if (!string.IsNullOrEmpty(localDbPath))
                {
                    // old installers put sql server computer name in localDbPath
                    dbConnStr = "Server=" + localDbPath + "\\pmcsqlserver;" +
                                "User ID=mazakpmc;Password=Fms-978";
                }
                else
                {
                    var b = new System.Data.SqlClient.SqlConnectionStringBuilder();
                    b.UserID     = "mazakpmc";
                    b.Password   = "******";
                    b.DataSource = "(local)";
                    dbConnStr    = b.ConnectionString;
                }
            }
            else
            {
                dbConnStr = localDbPath;
                if (string.IsNullOrEmpty(dbConnStr))
                {
                    dbConnStr = "c:\\Mazak\\NFMS\\DB";
                }
            }

            // log csv
            string logPath = cfg.GetValue <string>("Log CSV Path");

            if (logPath == null || logPath == "")
            {
                logPath = "c:\\Mazak\\FMS\\Log";
            }

            if (MazakType != MazakDbType.MazakVersionE && !System.IO.Directory.Exists(logPath))
            {
                Log.Error("Log CSV Directory {path} does not exist.  Set the directory in the config.ini file.", logPath);
            }
            else if (MazakType != MazakDbType.MazakVersionE)
            {
                Log.Information("Loading log CSV files from {logcsv}", logPath);
            }

            // general config
            string useStarting  = cfg.GetValue <string>("Use Starting Offset For Due Date");
            string useStarting2 = cfg.GetValue <string>("Use Starting Offset");

            if (string.IsNullOrEmpty(useStarting))
            {
                if (string.IsNullOrEmpty(useStarting2))
                {
                    UseStartingOffsetForDueDate = true;
                }
                else
                {
                    UseStartingOffsetForDueDate = Convert.ToBoolean(useStarting2);
                }
            }
            else
            {
                UseStartingOffsetForDueDate = Convert.ToBoolean(useStarting);
            }
            //Perhaps this should be a new setting, but if you don't check for pallets used once,
            //then you don't care whether all faces on a pallet are full, so you might as well use priority,
            //which causes pallet positions to go empty.
            CheckPalletsUsedOnce = !UseStartingOffsetForDueDate;

            ProgramDirectory = cfg.GetValue <string>("Program Directory");
            if (string.IsNullOrEmpty(ProgramDirectory))
            {
                ProgramDirectory = "C:\\NCProgs";
            }

            // serial settings
            string serialPerMaterial = cfg.GetValue <string>("Assign Serial Per Material");

            if (!string.IsNullOrEmpty(serialPerMaterial))
            {
                bool result;
                if (bool.TryParse(serialPerMaterial, out result))
                {
                    if (!result)
                    {
                        st.SerialType = SerialType.AssignOneSerialPerCycle;
                    }
                }
            }

            Log.Debug(
                "Configured UseStartingOffsetForDueDate = {useStarting}",
                UseStartingOffsetForDueDate);

            jobLog = new BlackMaple.MachineFramework.JobLogDB(st);
            jobLog.Open(
                System.IO.Path.Combine(st.DataDirectory, "log.db"),
                System.IO.Path.Combine(st.DataDirectory, "insp.db"),
                startingSerial: st.StartingSerial
                );

            jobDB = new BlackMaple.MachineFramework.JobDB();
            var jobInspName = System.IO.Path.Combine(st.DataDirectory, "jobinspection.db");

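            // Reuse jobinspection.db if it already exists; otherwise open (or create) mazakjobs.db.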
            if (System.IO.File.Exists(jobInspName))
            {
                jobDB.Open(jobInspName);
            }
            else
            {
                jobDB.Open(System.IO.Path.Combine(st.DataDirectory, "mazakjobs.db"));
            }

            _writeDB = new OpenDatabaseKitTransactionDB(dbConnStr, MazakType);

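            // Version E reads load operations from a file, with the file watcher enabled.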
            if (MazakType == MazakDbType.MazakVersionE)
            {
                loadOper = new LoadOperationsFromFile(cfg, enableWatcher: true);
            }
            else if (MazakType == MazakDbType.MazakWeb)
            {
                loadOper = new LoadOperationsFromFile(cfg, enableWatcher: false); // web instead watches the log csv files
            }
            else
            {
                loadOper = null; // smooth db doesn't use the load operations file
            }
            var openReadDb = new OpenDatabaseKitReadDB(dbConnStr, MazakType, loadOper);

            if (MazakType == MazakDbType.MazakSmooth)
            {
                _readDB = new SmoothReadOnlyDB(dbConnStr, openReadDb);
            }
            else
            {
                _readDB = openReadDb;
            }

            queues = new MazakQueues(jobLog, jobDB, _writeDB);
            var sendToExternal = new SendMaterialToExternalQueue();

            hold = new HoldPattern(_writeDB, _readDB, jobDB, true);
            var writeJobs = new WriteJobs(_writeDB, _readDB, hold, jobDB, jobLog, st, CheckPalletsUsedOnce, UseStartingOffsetForDueDate, ProgramDirectory);
            var decr      = new DecrementPlanQty(jobDB, _writeDB, _readDB);

            if (MazakType == MazakDbType.MazakWeb || MazakType == MazakDbType.MazakSmooth)
            {
                logDataLoader = new LogDataWeb(logPath, jobLog, jobDB, writeJobs, sendToExternal, _readDB, queues, st);
            }
            else
            {
#if USE_OLEDB
                logDataLoader = new LogDataVerE(jobLog, jobDB, sendToExternal, writeJobs, _readDB, queues, st);
#else
                throw new Exception("Mazak Web and VerE are not supported on .NET core");
#endif
            }

            routing = new RoutingInfo(_writeDB, writeJobs, _readDB, logDataLoader, jobDB, jobLog, writeJobs, queues, decr,
                                      CheckPalletsUsedOnce, st);

            logDataLoader.NewEntries += OnNewLogEntries;
            if (loadOper != null)
            {
                loadOper.LoadActions += OnLoadActions;
            }
        }