Example #1
        private static int loadData(IEbDocContext context, StreamReader file, bool IsRawData)
        {
            IEbDocContext db = context;

            try
            {
                int      counter = 0;
                string   ln;
                string[] headerItems = HEADER.Split("\t".ToCharArray());

                while ((ln = file.ReadLine()) != null)
                {
                    log.Debug($"row [{counter}]:\n{ln}");

                    //delimit content by tab into an array of strings
                    string[] content = ln.Split("\t".ToCharArray());
                    // if the filename is blank then continue
                    if (string.IsNullOrWhiteSpace(content.GetValue(Array.IndexOf(headerItems, "FILE NAME")).ToString()))
                    {
                        continue;
                    }

                    log.Debug($"load row [{counter}] into row and doc objects");
                    var row = new Record(
                        content.GetValue(Array.IndexOf(headerItems, "MODULE")).ToString(),
                        content.GetValue(Array.IndexOf(headerItems, "SERVICE REQUEST NO")).ToString(),
                        content.GetValue(Array.IndexOf(headerItems, "WORK ORDER NO")).ToString(),
                        content.GetValue(Array.IndexOf(headerItems, "HANSEN 7 WORK ORDER NO")).ToString(),
                        content.GetValue(Array.IndexOf(headerItems, "APPLICATION NO")).ToString()
                        );
                    var doc = GetDocument(
                        content.GetValue(Array.IndexOf(headerItems, "PATH")).ToString(),
                        content.GetValue(Array.IndexOf(headerItems, "FILE NAME")).ToString(),
                        IsRawData
                        );
                    log.Debug($"get record for [{row.B1_ALT_ID}]");
                    Record record = db.Records
                                    .Include(r => r.Documents)
                                    .FirstOrDefault(r => r.B1_ALT_ID == row.B1_ALT_ID && r.Hansen_Module == row.Hansen_Module);
                    if (record is null)
                    {
                        addAccelaData(db, row);
                        row.Documents.Add(doc);
                        db.Records.Add(row);
                        log.Debug($"record [{row.B1_ALT_ID}] not found in database.\n" +
                                  $"\t\tdocument [{row.Documents.Last().File_Name}] added to new record [{row.B1_ALT_ID}]");
                    }

                    // if the msd path does not exist in the list of documents then add the document
                    else if (!record.Documents.Any(d => d.MSD_path == doc.MSD_path))
                    {
                        record.Documents.Add(doc);
                        log.Debug($"[{record.B1_ALT_ID}] exists, [{doc.File_Name}] added to list of docuements");
                    }

                    if (record != null && string.IsNullOrWhiteSpace(record.Group))
                    {
                        addAccelaData(db, record);
                    }

                    log.Debug($"saving [{doc.File_Name}] to database");
                    db.SaveChanges();

                    counter++;
                    // debug line - count off every 500 records processed
                    if (counter > 0 && counter % 500 == 0)
                    {
                        log.Info($"[{counter}] files processed ");
                    }
                }
                file.Close();

                return(counter);
            }
            catch (Exception ex)
            {
                log.Error($"ERROR Found: {ex.Message}");
                throw;
            }
        }
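
Usage note: loadData is private and expects its caller to supply an open StreamReader over a tab-delimited export whose columns match the class's HEADER constant (loadData splits each row against HEADER, not against the file's first line). A minimal calling sketch from within the same class might look like the following; the concrete EbDocContext type, the skipped header row, and the export path parameter are assumptions for illustration only.

        // Hypothetical caller for Example #1. EbDocContext is an assumed concrete
        // implementation of IEbDocContext; adjust to the project's actual context type.
        public static int LoadFromExport(string exportPath, bool isRawData)
        {
            using (var db = new EbDocContext())              // assumed concrete context
            using (var reader = new StreamReader(exportPath))
            {
                // assumption: the export's first line is a header row and is skipped here,
                // since loadData maps columns from the HEADER constant rather than the file
                reader.ReadLine();
                return loadData(db, reader, isRawData);
            }
        }
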
Example #2
        public static int createArchive2(IEbDocContext context, string repo_source, string repo_target, int max_file_count)
        {
            IEbDocContext db = context;

            log.Info("begin createArchive process");
            int  files_archived = 0;
            long size           = 0;

            int           max_count = max_file_count + (max_file_count * 3 / 10); // pull 30% extra so documents skipped below do not shrink the batch
            long          max_size  = 2000000000; // 2GB
            List <string> metadata  = new List <string> {
                ARCHIVE_HEADER
            };


            // setup archive variables
            string archive_no   = GetNextArchiveNo(db);
            string archivePath  = Path.Combine(repo_target, string.Format(ARCHIVE_NAME, archive_no));
            string metadataPath = Path.Combine(repo_target, string.Format(METADATA_NAME, archive_no));

            log.Debug($"variables set\narchive_no:\t[{archive_no}" +
                      $"\narchivePath:\t[{archivePath}]" +
                      $"\nmetadataPath\t[{metadataPath}]"
                      );


            try
            {
                // tag files with archive no. (max files plus 30%)

                // pull records with docs that have no archive no. and that will migrate (not too large, not missing, and has a record in accela)

                // pull 500 records and documents.

                // build manifest in memory

                // archive files on manifest

                // write manifest to file

                var docs = db.Documents
                           .Where(d => d.ArchiveNo == null && d.File_Size < 5000000 && !d.Is_Missing)
                           .Take(max_count).ToList();

                if (docs.Count == 0)
                {
                    log.Info("no records available to process");
                    return(files_archived);
                }

                log.Debug("iterate through the docuemnts");
                foreach (var doc in docs)
                {
                    log.Debug($"file is [{doc.File_Path}]");

                    // stop if we reach max file count
                    if (files_archived == max_file_count || size > max_size)
                    {
                        break;
                    }

                    // mark document with archive_no to identify data as processed.
                    doc.ArchiveNo = archive_no;

                    var rec = db.Records.SingleOrDefault(r => r.RecordId == doc.RecordId && r.Group != null);
                    if (rec is null)
                    {
                        // if record does not migrate, continue
                        log.Debug($"[{doc.File_Path}] does not have a corresponding record in accela");
                        continue;
                    }

                    log.Debug("update document data");
                    doc.Target_Repository = repo_target;
                    doc.Zip_Date          = DateTime.Now;
                    doc.Zip_Path          = archivePath;
                    doc.ArchiveNo         = archive_no;
                    doc.Metadata_Path     = metadataPath;

                    //  "HANSEN_MODULE\tHANSEN_ID\tB1_ALT_ID\tFILE_NAME\tFILE_PATH\tFILE_SIZE";
                    metadata.Add($"{rec.Hansen_Module}\t{rec.Hansen_Id}\t{rec.B1_ALT_ID}\t{doc.File_Name}\t{doc.File_Path}\t{doc.File_Size}");

                    files_archived++;
                    size += doc.File_Size;

                    // debug line - count off every 100 records processed
                    if (files_archived % 100 == 0)
                    {
                        log.Info($"[{files_archived}] files processed ");
                    }
                }

                db.SaveChanges();

                log.Debug("create the metadata file");
                if (!Directory.Exists(repo_target))
                {
                    Directory.CreateDirectory(repo_target);
                }
                using (StreamWriter sw = File.CreateText(metadataPath))
                {
                    foreach (var row in metadata)
                    {
                        sw.WriteLine(row);
                    }
                }

                log.Debug("create zip file");
                zipArchive(db, archive_no, repo_source, archivePath);

                log.Info($"----- archive [{archive_no}] complete -----");
            }
            catch (Exception ex)
            {
                log.Error($"createArchive2 failure: {ex}");
                throw;
            }
            return(files_archived);
        }
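
Usage note: createArchive2 tags every document it pulls with the new archive number (even documents that never migrate) and returns the number of files it actually archived, so it can be driven in a loop to split a backlog into multiple archives. The driver below is a sketch only; EbDocContext, the paths, and the batch size are illustrative, and note that a batch consisting entirely of documents without matching Accela records also returns 0, which this simple loop treats as "done".

        // Hypothetical driver for Example #2: keep producing archives until a
        // batch archives nothing. EbDocContext and the parameters are assumptions.
        public static void ArchiveBacklog(string repoSource, string repoTarget, int batchSize)
        {
            int archived;
            do
            {
                // a fresh context per batch keeps the EF change tracker small
                using (var db = new EbDocContext())
                {
                    archived = createArchive2(db, repoSource, repoTarget, batchSize);
                }
            } while (archived > 0);
        }
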
Example #3
        public static int createArchive(IEbDocContext context, string repo_source, string repo_target, int max_file_count)
        {
            IEbDocContext db = context;

            log.Info("begin createArchive process");
            int files_archived = 0;

            try
            {
                var docs = db.Documents
                           .Where(d => d.ArchiveNo == null && d.File_Size < 5000000 && !d.Is_Missing)
                           .Take(max_file_count).ToList();

                if (docs.Count == 0)
                {
                    log.Info("no records available to process");
                    return(files_archived);
                }

                string archive_no   = GetNextArchiveNo(db);
                string archivePath  = Path.Combine(repo_target, string.Format(ARCHIVE_NAME, archive_no));
                string metadataPath = Path.Combine(repo_target, string.Format(METADATA_NAME, archive_no));

                log.Debug($"variables set\narchive_no:\t[{archive_no}" +
                          $"\narchivePath:\t[{archivePath}]" +
                          $"\nmetadataPath\t[{metadataPath}]"
                          );
                if (!Directory.Exists(repo_target))
                {
                    Directory.CreateDirectory(repo_target);
                }

                log.Debug("create the metadata file");
                using (StreamWriter sw = File.CreateText(metadataPath))
                {
                    sw.WriteLine(ARCHIVE_HEADER);
                }

                log.Debug("iterate through the docuemnts");
                foreach (var doc in docs)
                {
                    log.Debug($"file is [{doc.File_Path}]");

                    var rec = db.Records.SingleOrDefault(r => r.RecordId == doc.RecordId && r.Group != null);
                    if (rec is null)
                    {
                        // mark document with archive_no to identify data as processed.
                        log.Debug($"[{doc.File_Path}] does not have a corresponding record in accela");
                        doc.ArchiveNo = archive_no;
                        db.SaveChanges();
                        continue;
                    }

                    zipArchive(repo_source, doc, archivePath);

                    log.Debug($"add [{doc.File_Path} to metadata file [{metadataPath}]");
                    using (StreamWriter sw = File.AppendText(metadataPath))
                    {
                        //  "HANSEN_MODULE\tHANSEN_ID\tB1_ALT_ID\tFILE_NAME\tFILE_PATH\tFILE_SIZE";
                        string metadata = $"{rec.Hansen_Module}\t{rec.Hansen_Id}\t{rec.B1_ALT_ID}\t{doc.File_Name}\t{doc.File_Path}\t{doc.File_Size}";
                        sw.WriteLine(metadata);
                    }

                    log.Debug("update document data");
                    doc.Target_Repository = repo_target;
                    doc.Zip_Date          = DateTime.Now;
                    doc.Zip_Path          = archivePath;
                    doc.ArchiveNo         = archive_no;
                    doc.Metadata_Path     = metadataPath;

                    db.SaveChanges();
                    files_archived++;

                    // debug line - count off every 100 records processed
                    if (files_archived % 100 == 0)
                    {
                        log.Info($"[{files_archived}] files processed ");
                    }
                }
                log.Info($"[{archivePath}] contains [{files_archived}] files of [{max_file_count}] requested.");
            }
            catch (Exception ex)
            {
                log.Error($"Epic failure: {ex}");
                throw;
            }
            return(files_archived);
        }
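
Note on zipArchive: neither overload of zipArchive is shown above. Purely as an illustration of what the per-file overload used in Example #3 could look like, here is a sketch built on System.IO.Compression; the way the source path is derived from repo_source and doc.File_Path, the entry naming, and the Document type name are assumptions, not the project's actual implementation.

        // Hypothetical per-file zipArchive sketch (requires the System.IO and
        // System.IO.Compression namespaces plus a reference to System.IO.Compression.FileSystem).
        // Assumes doc.File_Path is relative to repo_source and is reused as the zip entry name.
        private static void zipArchive(string repo_source, Document doc, string archivePath)
        {
            string sourcePath = Path.Combine(repo_source, doc.File_Path);

            // Update mode opens an existing archive or creates a new one, so the same
            // call works for the first file and for every file appended afterwards.
            using (ZipArchive zip = ZipFile.Open(archivePath, ZipArchiveMode.Update))
            {
                zip.CreateEntryFromFile(sourcePath, doc.File_Path, CompressionLevel.Optimal);
            }
        }

Opening the archive in Update mode once per file keeps the sketch simple but re-reads the archive on every call; Example #2's zipArchive(db, archive_no, repo_source, archivePath) overload, which zips an entire manifest in one pass after the metadata is built, avoids that repeated cost.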