/// <summary>
/// Records every file in the current zip package in the unity SQL database,
/// dispatching on file extension (.xml, .zip, .con) to the appropriate
/// persistence service.
/// </summary>
/// <param name="xmlFileService">Service used to persist .xml file records.</param>
/// <param name="zipFileService">Service used to persist .zip file records (big or little).</param>
/// <param name="conFileService">Service used to persist .con file records.</param>
/// <param name="docTypeService">Lookup service for document types.</param>
/// <param name="manCoService">Lookup service for management companies.</param>
/// <param name="appInfo">Application context supplying the global run id and input path.</param>
/// <exception cref="Exception">Any failure is wrapped and re-thrown via <c>NexdoxMessaging.Exception</c>.</exception>
public void SaveToDatabase(IXmlFileService xmlFileService, IZipFileService zipFileService, IConFileService conFileService, IDocTypeService docTypeService, IManCoService manCoService, ApplicationInfo appInfo)
{
    try
    {
        NexdoxMessaging.SendMessage(" Adding Data to SQL Database", true, this);

        // Loop-invariant values hoisted out of the foreach: the input file's
        // timestamp and the document set id do not change while we iterate.
        DateTime inputLastWrite = File.GetLastWriteTime(appInfo.InputPath);
        string documentSetId = Statics.zipPackage.DocumentSetID.ToString();

        foreach (ZipPackage zp in Statics.zipPackage)
        {
            switch (Path.GetExtension(zp.FileName).ToLower())
            {
                case ".xml":
                    // XML files require the document type and man co to already
                    // exist in the unity database; a missing row is a hard error.
                    DocType docType = docTypeService.GetDocType(zp.DocumentType);
                    if (docType == null)
                    {
                        throw new InvalidOperationException(string.Format("Document type {0} not found in unity database", zp.DocumentType));
                    }

                    ManCo manCo = manCoService.GetManCo(zp.ManCoID);
                    if (manCo == null)
                    {
                        throw new InvalidOperationException(string.Format("Man Co {0} not found in unity database", zp.ManCoID));
                    }

                    xmlFileService.CreateXmlFile(
                        documentSetId,
                        zp.FileName,
                        zp.ParentZipFileName,
                        zp.Offshore,
                        docType.Id,
                        manCo.Id,
                        0,
                        string.Empty,
                        zp.FileName,
                        DateTime.Now,
                        appInfo.NexdoxGlobalRunID.ToString(),
                        inputLastWrite);
                    break;

                case ".zip":
                    // Big zips have no parent; little zips record the zip that contained them.
                    if (zp.IsBigZip)
                    {
                        zipFileService.CreateBigZipFile(documentSetId, zp.FileName, inputLastWrite);
                    }
                    else
                    {
                        zipFileService.CreateLittleZipFile(documentSetId, zp.FileName, zp.ParentZipFileName, inputLastWrite);
                    }
                    break;

                case ".con":
                    conFileService.CreateConFile(documentSetId, zp.FileName, zp.ParentZipFileName, inputLastWrite);
                    break;
            }
        }
    }
    catch (Exception e)
    {
        // Project convention: wrap and re-raise through the central messaging helper.
        throw NexdoxMessaging.Exception(e.Message, this);
    }
}
/// <summary>
/// Updates any new PDFs supplied into the shared resources folder:
/// converts the PDF to EPS via pdf2vec, strips "showpage"/"verydoc" lines
/// from each generated EPS (a stray showpage would emit an extra page),
/// then adds or updates the matching image resources in the DAM, marking
/// any stale resources that were not refreshed as deleted and promoting
/// the new versions into the configured regions.
/// </summary>
/// <param name="pdfFile">Full path of the insert PDF to process.</param>
private void ProcessInserts(string pdfFile)
{
    // Get filename of the insert and the directory it lives in.
    string pdfFileName = Path.GetFileName(pdfFile);
    string path = pdfFile.Replace(pdfFileName, "");

    NexdoxMessaging.SendMessage(" Processing pdf file " + pdfFileName + "...", true, this);

    try
    {
        string pdfInsert = pdfFile;
        string epsInsert = pdfInsert.Substring(0, pdfInsert.LastIndexOf("."));

        ProcessStartInfo processInfo = new ProcessStartInfo(
            _appInfo["PDFConversionAppPath"] + @"\pdf2vec.exe",
            "\"" + pdfInsert + "\" \"" + epsInsert + ".eps\"");
        processInfo.CreateNoWindow = true;
        processInfo.UseShellExecute = false;
        processInfo.RedirectStandardOutput = true;

        // using ensures the process handle is released even if an exception is thrown
        // (the original only Close()d on the success path).
        using (System.Diagnostics.Process process = System.Diagnostics.Process.Start(processInfo))
        {
            // Drain stdout BEFORE waiting: with RedirectStandardOutput enabled the
            // child can block on a full pipe buffer if we wait first, deadlocking
            // both processes (see Process.StandardOutput remarks in the .NET docs).
            using (StreamWriter writer = new StreamWriter(_appInfo.OutputPath + "test.txt"))
            {
                string line;
                while ((line = process.StandardOutput.ReadLine()) != null)
                {
                    writer.WriteLine(line);
                }
            }

            process.WaitForExit(10000); // allow up to 10 seconds for pdf2vec to finish
            int exitCode = process.ExitCode;
        }
    }
    catch (Exception e)
    {
        // Best-effort: a failed conversion is reported but does not abort the run.
        NexdoxMessaging.SendMessage("ERROR - When converting eps file - " + e.Message, false, null);
    }

    // Ok, now we need to make sure that there is no showpage. Otherwise our output
    // will create an additional page. blast!
    NexdoxResourceManager.ImageList allImages = Statics.CentralResources.Images.FindAllMatchingNames(Path.GetFileNameWithoutExtension(pdfFile));
    NexdoxResourceManager.ImageList updatedImages = new NexdoxResourceManager.ImageList();

    foreach (string file in Directory.GetFiles(path, Path.GetFileNameWithoutExtension(pdfFile) + "*.eps"))
    {
        string fileNameWithoutExtension = Path.GetFileNameWithoutExtension(file);
        // Resource names may not contain hyphens or spaces.
        string newFileNameWithoutExtension = fileNameWithoutExtension.Replace("-", "_").Replace(" ", "_");

        // Rename the raw EPS to .temp, then rewrite a clean .eps without the
        // offending lines ("verydoc" is the converter's watermark marker).
        File.Move(path + fileNameWithoutExtension + ".eps", path + newFileNameWithoutExtension + ".temp");
        using (StreamWriter writer = new StreamWriter(path + newFileNameWithoutExtension + ".eps", false))
        using (StreamReader reader = new StreamReader(path + newFileNameWithoutExtension + ".temp"))
        {
            string line;
            while ((line = reader.ReadLine()) != null)
            {
                if (line.Contains("showpage") || line.Contains("verydoc"))
                {
                    continue;
                }
                writer.WriteLine(line);
            }
        }

        // Dispatch into the DAM: update the exact-name match if one exists,
        // otherwise add the image as a new resource.
        NexdoxResourceManager.ImageList images = Statics.CentralResources.Images.FindAllMatchingNames(newFileNameWithoutExtension);
        if (images.Count > 1)
        {
            bool imageUploaded = false;
            foreach (NexdoxResourceManager.ImageResource image in images)
            {
                if (string.Compare(image.BaseName, newFileNameWithoutExtension, true) == 0)
                {
                    UpdateImageInDAM(path, updatedImages, newFileNameWithoutExtension, image);
                    imageUploaded = true;
                }
            }
            if (!imageUploaded)
            {
                AddNewImageToDAM(path, updatedImages, newFileNameWithoutExtension);
            }
        }
        else if (images.Count == 0)
        {
            AddNewImageToDAM(path, updatedImages, newFileNameWithoutExtension);
        }
        else
        {
            UpdateImageInDAM(path, updatedImages, newFileNameWithoutExtension, images[0]);
        }

        // BUG FIX: the temp file lives in 'path'; the original deleted a relative
        // name (current working directory) and left the .temp files behind.
        File.Delete(path + newFileNameWithoutExtension + ".temp");
    }

    // DV - I'm not sure why this bit is here, it looks to duplicate what happens above,
    // seemingly just here to catch any issues. Any image that matched the PDF name but
    // was NOT refreshed above is treated as stale: check it out, mark it deleted,
    // check it back in, and promote the version into the appropriate regions.
    foreach (NexdoxResourceManager.ImageResource image in allImages)
    {
        if (updatedImages.Contains(image))
        {
            continue;
        }

        NexdoxResourceManager.CheckOutResult result = image.ParentResourceManager.ResourceManagerAsset.CheckOut();
        if (result != NexdoxResourceManager.CheckOutResult.Success)
        {
            throw NexdoxMessaging.Exception(
                "Error checking out parent resource: " + image.ParentResourceManager.ResourceManagerAsset.Name + ". " + result.ToString(),
                this);
        }

        result = image.CheckOut();
        if (result != NexdoxResourceManager.CheckOutResult.Success)
        {
            throw NexdoxMessaging.Exception(
                "Error checking out image resource: " + image.Name + ". " + result.ToString(),
                this);
        }

        image.Deleted = true;
        image.Save("", false);
        image.CheckIn();
        image.ParentResourceManager.ResourceManagerAsset.Save("", false);
        image.ParentResourceManager.ResourceManagerAsset.CheckIn();

        // Region promotion: a Live run promotes into both Live and Test; a Test
        // run promotes into Test only.
        if (string.Compare(_appInfo["Region"], "Live", true) == 0)
        {
            image.SetCurrentRegionVersion("Live", image.VersionNo);
            image.SetCurrentRegionVersion("Test", image.VersionNo);
            image.ParentResourceManager.ResourceManagerAsset.SetCurrentRegionVersion(
                "Live", image.ParentResourceManager.ResourceManagerAsset.VersionNo);
            image.ParentResourceManager.ResourceManagerAsset.SetCurrentRegionVersion(
                "Test", image.ParentResourceManager.ResourceManagerAsset.VersionNo);
        }
        else if (string.Compare(_appInfo["Region"], "Test", true) == 0)
        {
            image.SetCurrentRegionVersion("Test", image.VersionNo);
            image.ParentResourceManager.ResourceManagerAsset.SetCurrentRegionVersion(
                "Test", image.ParentResourceManager.ResourceManagerAsset.VersionNo);
        }

        // Always into Dev
        image.SetCurrentRegionVersion("Dev", image.VersionNo);
        image.ParentResourceManager.ResourceManagerAsset.SetCurrentRegionVersion(
            "Dev", image.ParentResourceManager.ResourceManagerAsset.VersionNo);
    }
}