/// <summary>
/// Archive database files as zip
/// </summary>
/// <returns>true when successful, false when failed</returns>
public bool Archive(string zipFilePath, IProcessingCallback callback)
{
    // remove existing archive
    if (File.Exists(zipFilePath))
    {
        callback?.Info(string.Format("Deleting existing file {0}", zipFilePath));
        try
        {
            File.Delete(zipFilePath);
        }
        catch (Exception ex)
        {
            callback?.Error($"Failed to delete file {zipFilePath} with error {ex.Message}");
            return false;
        }
    }
    // open file stream + zip out stream
    FileStream fileStreamOut = new FileStream(zipFilePath, FileMode.CreateNew, FileAccess.Write);
    bool result = true;
    using (ZipOutputStream zipOutStream = new ZipOutputStream(fileStreamOut))
    {
        // database file
        result = ArchiveFile(DBFilePath, zipOutStream, callback);
        if (result)
        {
            // document directory
            foreach (string filePath in Directory.GetFiles(RepositoryPath))
            {
                if (!(result = ArchiveFile(filePath, zipOutStream, callback)))
                    break;
            }
        }
    }
    // closing
    fileStreamOut.Close();
    return result;
}
/// <summary>
/// Add zip entry to opened ZipOutputStream
/// </summary>
/// <returns>true if successful, false if failed</returns>
private bool ArchiveFile(string filePath, ZipOutputStream zipOutStream, IProcessingCallback callback)
{
    try
    {
        callback?.Info(string.Format("Adding zip entry {0}...", Path.GetFileName(filePath)));
        using (FileStream fileStreamIn = new FileStream(filePath, FileMode.Open, FileAccess.Read))
        {
            int size = BUFFER_SIZE;
            byte[] buffer = new byte[size];
            ZipEntry entry = new ZipEntry(Path.GetFileName(filePath));
            zipOutStream.PutNextEntry(entry);
            do
            {
                size = fileStreamIn.Read(buffer, 0, buffer.Length);
                zipOutStream.Write(buffer, 0, size);
            }
            while (size > 0);
        }
    }
    catch (Exception ex)
    {
        callback?.Error($"Zipping file {filePath} failed with error {ex.Message}");
        return false;
    }
    return true; // success
}
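The buffered read/write loop above can also be written with Stream.CopyTo, since SharpZipLib's ZipOutputStream derives from Stream. A minimal sketch under that assumption (same ICSharpCode.SharpZipLib.Zip types and callback as above; the method name is illustrative, not part of the original code):

// Sketch only: same behavior as ArchiveFile, but copying via Stream.CopyTo.
// Assumes: using System; using System.IO; using ICSharpCode.SharpZipLib.Zip;
private bool ArchiveFileCopyTo(string filePath, ZipOutputStream zipOutStream, IProcessingCallback callback)
{
    try
    {
        callback?.Info($"Adding zip entry {Path.GetFileName(filePath)}...");
        using (FileStream fileStreamIn = File.OpenRead(filePath))
        {
            zipOutStream.PutNextEntry(new ZipEntry(Path.GetFileName(filePath)));
            fileStreamIn.CopyTo(zipOutStream); // copies until end of input stream
            zipOutStream.CloseEntry();         // close the entry explicitly
        }
        return true;
    }
    catch (Exception ex)
    {
        callback?.Error($"Zipping file {filePath} failed with error {ex.Message}");
        return false;
    }
}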
public static bool BackupBranch(List<string> nodePath, string zipFilePath, IProcessingCallback callback)
{
    // remove zip path if it already exists
    if (System.IO.File.Exists(zipFilePath))
        System.IO.File.Delete(zipFilePath);
    // build destination database path
    DBDescriptor dbDescTo = DBDescriptor.CreateTemp();
    {
        // build data contexts
        PPDataContext dbFrom = new PPDataContext();
        using (PPDataContext dbTo = new PPDataContext(dbDescTo))
        {
            // copy format table
            CopyCardboardFormats(dbFrom, dbTo);
            // copy cardboard profiles
            CopyCardboardProfiles(dbFrom, dbTo);
            // copy document types
            CopyDocumentTypes(dbFrom, dbTo);
            // copy branch nodes recursively
            TreeNode nodeFrom = TreeNode.GetNodeByPath(dbFrom, null, nodePath, 0);
            TreeNode nodeTo = TreeNode.GetNodeByPath(dbTo, null, nodePath, 0);
            CopyTreeNodesRecursively(dbFrom, dbTo, nodeFrom, nodeTo, callback);
        }
        GC.Collect();
    }
    Thread.Sleep(1000);
    // archive temp database
    dbDescTo.Archive(zipFilePath, callback);
    return true;
}
private static void FillFromArchive(DBDescriptor desc, string zipFilePath, IProcessingCallback callback)
{
    FileStream fileStreamIn = new FileStream(zipFilePath, FileMode.Open, FileAccess.Read);
    using (ZipInputStream zipInStream = new ZipInputStream(fileStreamIn))
    {
        ZipEntry entry;
        while ((entry = zipInStream.GetNextEntry()) != null)
        {
            bool isDB = string.Equals("PicParam.db", entry.Name, StringComparison.CurrentCultureIgnoreCase);
            if (null != callback)
            {
                callback.Info($"Extracting zip entry : {entry.Name}");
            }
            string destFilePath = isDB ? desc.DBFilePath : Path.Combine(desc.RepositoryPath, entry.Name);
            if (!ExtractZipEntry(entry, zipInStream, destFilePath, callback))
            {
                break;
            }
        }
    }
    fileStreamIn.Close();
}
public static void Overwrite(string zipFilePath, IProcessingCallback callback)
{
    // check existence of zip archive
    if (!System.IO.File.Exists(zipFilePath))
    {
        throw new FileNotFoundException(string.Format("File {0} not found", zipFilePath), zipFilePath);
    }
    // extract zip archive to temp
    DBDescriptor dbDescFrom = DBDescriptor.CreateTempFromArchive(zipFilePath, callback);
    PPDataContext dbFrom = new PPDataContext(dbDescFrom);
    TreeNode nodeFrom = TreeNode.GetRootNodes(dbFrom)[0];
    PPDataContext dbTo = new PPDataContext();
    TreeNode nodeTo = TreeNode.GetRootNodes(dbTo)[0];
    // merge format table
    OverwriteCardboardFormats(dbFrom, dbTo, callback);
    // merge cardboard profiles
    OverwriteCardboardProfiles(dbFrom, dbTo, callback);
    // merge document types
    OverwriteDocumentTypes(dbFrom, dbTo, callback);
    // first clear existing documents
    ClearExistingDocumentsRecursively(dbFrom, nodeFrom, nodeTo, callback);
    // then merge
    using (PPDataContext dbTo1 = new PPDataContext())
    {
        MergeTreeNodesRecursively(dbFrom, dbTo, nodeFrom, nodeTo, callback);
    }
}
private void CopyTreeNodeRecursively(PPDataContext db, IProcessingCallback callback)
{
    if (null != callback)
    {
        callback.Info("Tree nodes...");
    }
    PLMPackSR.PLMPackServiceClient client = new PLMPackSR.PLMPackServiceClient();
    client.ClientCredentials.UserName.UserName = UserName;
    client.ClientCredentials.UserName.Password = Password;
    List<TreeNode> rootNodes = TreeNode.GetRootNodes(db);
    string offset = string.Empty;
    PLMPackSR.DCTreeNode userRootNode = client.GetUserRootNode();
    client.Close();
    foreach (TreeNode root in rootNodes)
    {
        List<TreeNode> rootChildrens = root.Childrens(db);
        foreach (TreeNode tn in rootChildrens)
        {
            RecursiveInsert(db, tn, userRootNode, string.Empty, callback);
        }
    }
}
public void test(IDocument doc, IProcessingCallback Processing)
{
    Processing.ReportMessage("Setting fc_Predefined:InvoicePredefinedVendorId from import folder...");
    IPage page = doc.Pages[0];
    string path = page.ImageSource;
    string pattern = @"\[(.*?)\]";
    Match m = Regex.Match(path, pattern, RegexOptions.IgnoreCase);
    Processing.ReportMessage("Testing RegEx...");
    if (m.Success)
    {
        string BUid = m.Groups[1].Value;
        Processing.ReportMessage("Regex successful, setting " + BUid + " as the BU ID!");
        doc.Properties.Set("fc_Predefined:InvoicePredefinedVendorId", BUid);
    }
    else
    {
        Processing.ReportWarning("Could not detect BU ID :-(");
    }
}
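A standalone check of the regular expression used above, which captures the text between the first pair of square brackets in the image path. The sample path is made up for illustration; only Regex from System.Text.RegularExpressions is required:

// Demonstrates what @"\[(.*?)\]" captures from a typical import path.
string samplePath = @"C:\Import\[ACME]\invoice_0001.tif"; // illustrative path, not from the original code
Match sampleMatch = Regex.Match(samplePath, @"\[(.*?)\]", RegexOptions.IgnoreCase);
Console.WriteLine(sampleMatch.Success ? sampleMatch.Groups[1].Value : "no match"); // prints: ACME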
public TreeNodeVisitorCollectParameterNames(IProcessingCallback callback)
{
    _callback = callback;
    // instantiate component loader
    _compLoader = new Pic.Plugin.ComponentLoader();
    _compLoader.SearchMethod = new ComponentSearchMethodDB();
}
public override void Execute(IProcessingCallback callback)
{
    using (TreeProcessor tnProcessor = new TreeProcessor())
    {
        tnProcessor.ProcessVisitor(new TreeNodeVisitorCollectParameterNames(callback));
    }
}
public Processor(string ApplicationId, string password, IProcessingCallback proc)
{
    this.Processing = proc;
    Processing.ReportMessage("Starting Receipt processing!");
    restClient = new RestServiceClient();
    restClient.Proxy.Credentials = CredentialCache.DefaultCredentials;

    //!!! Please provide your application id and password in Config.txt
    // To create an application and obtain a password,
    // register at http://cloud.ocrsdk.com/Account/Register
    // More info on getting your application id and password at
    // http://ocrsdk.com/documentation/faq/#faq3

    // Name of application you created
    restClient.ApplicationId = ApplicationId;
    // Password should be sent to your e-mail after application was created
    restClient.Password = password;

    // Display hint to provide credentials
    if (String.IsNullOrEmpty(restClient.ApplicationId) || String.IsNullOrEmpty(restClient.Password))
    {
        Processing.ReportError("Please provide access credentials to Cloud OCR SDK service!");
        throw new Exception("Please provide access credentials to Cloud OCR SDK service!");
    }
    Console.WriteLine(String.Format("Application id: {0}\n", restClient.ApplicationId));
}
/// <summary>
/// Create under new temp directory and fill with archive content
/// </summary>
/// <returns>created DBDescriptor</returns>
public static DBDescriptor CreateTempFromArchive(string zipFilePath, IProcessingCallback callback)
{
    DBDescriptor desc = CreateTemp(false);
    FillFromArchive(desc, zipFilePath, callback);
    return desc;
}
private void Upload(string filePath, Guid g, IProcessingCallback callback)
{
    if (null != callback)
    {
        callback.Info(string.Format(_actuallyUpload ? "Uploading {0}..." : "Not actually uploading {0}...", Path.GetFileName(filePath)));
    }
    FileTransferUtility.UploadFile(filePath, g);
}
private PLMPackSR.DCFile Upload(string filePath, IProcessingCallback callback, PLMPackSR.PLMPackServiceClient client)
{
    if (null != callback)
    {
        callback.Info(string.Format(_actuallyUpload ? "Uploading {0}..." : "Not actually uploading {0}...", Path.GetFileName(filePath)));
    }
    return client.CreateNewFile(
        _actuallyUpload ? FileTransferUtility.UploadFile(filePath) : Guid.NewGuid(),
        Path.GetExtension(filePath));
}
/// <summary>
/// Method to be executed by worker thread
/// </summary>
public override void Execute(IProcessingCallback callback)
{
    try
    {
        BackupRestore.BackupBranch(_treeNodePath, _zipFilePath, callback);
    }
    catch (Exception ex)
    {
        if (null != callback)
            callback.Error(ex.Message);
    }
}
/// <summary>
/// Creates a backup of the database
/// </summary>
public static bool BackupFull(string zipFilePath, IProcessingCallback callback)
{
    try
    {
        // build "Root" node path
        List<string> nodePathRoot = new List<string>();
        nodePathRoot.Add("Root");
        // backup branch "Root"
        BackupBranch(nodePathRoot, zipFilePath, callback);
    }
    catch (Exception ex)
    {
        _log.Error(ex.Message);
        return false;
    }
    return true;
}
public static void ClearDatabase(IProcessingCallback callback)
{
    string databaseFile = ApplicationConfiguration.CustomSection.DatabasePath;
    // other files
    Pic.DAL.SQLite.PPDataContext db = new Pic.DAL.SQLite.PPDataContext();
    // get root node
    List<TreeNode> rootNodes = TreeNode.GetRootNodes(db);
    TreeNode rootNode = rootNodes[0];
    // delete all children of root node
    foreach (Pic.DAL.SQLite.TreeNode tn in rootNode.Childrens(db))
    {
        tn.Delete(db, true, callback);
    }
    db.SubmitChanges();
}
private void ProcessTask(object status)
{
    IProcessingCallback callback = status as IProcessingCallback;
    callback.Begin();
    try
    {
        _task.Execute(callback);
    }
    catch (Exception ex)
    {
        if (null != callback)
        {
            callback.Error(string.Format("{0} failed with error: {1}", _task.Title, ex.Message));
        }
    }
    // ending
    callback.End();
}
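The callback contract itself never appears in these examples. From the calls made above (Begin, Info, Error, End in the Pic.DAL snippets; the FlexiCapture-style snippets use ReportMessage/ReportWarning/ReportError instead), it can be approximated as follows. This is an inferred sketch, not the library's actual declaration, and the console implementation is a hypothetical test double:

// Approximate shape of the callback used by the Pic.DAL examples (inferred from usage, not the real declaration).
public interface IProcessingCallback
{
    void Begin();               // called once before the task starts
    void Info(string message);  // informational / progress messages
    void Error(string message); // error messages
    void End();                 // called once when the task finishes
}

// Hypothetical console-based implementation, useful for exercising a task outside the UI.
public class ConsoleProcessingCallback : IProcessingCallback
{
    public void Begin() => Console.WriteLine("-- begin --");
    public void Info(string message) => Console.WriteLine("INFO  " + message);
    public void Error(string message) => Console.WriteLine("ERROR " + message);
    public void End() => Console.WriteLine("-- end --");
}

With such an implementation, ProcessTask can be queued on the thread pool, since its object parameter matches the WaitCallback signature: ThreadPool.QueueUserWorkItem(ProcessTask, new ConsoleProcessingCallback());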
/// <summary>
/// restores a backup database : callback version
/// </summary>
public static bool Restore(string zipFilePath, IProcessingCallback callback)
{
    try
    {
        // clear existing directories
        DBDescriptor dbDescTo = DBDescriptor.Current;
        if (!dbDescTo.Clear())
        {
            if (null != callback)
            {
                callback.Error("Failed to clear current database!");
            }
            return false;
        }
        // extract new database
        DBDescriptor dbDescFrom = DBDescriptor.CreateTempFromArchive(zipFilePath, callback);
        // build data contexts
        PPDataContext dbFrom = new PPDataContext(dbDescFrom);
        PPDataContext dbTo = new PPDataContext(dbDescTo);
        // copy format table
        CopyCardboardFormats(dbFrom, dbTo);
        // copy cardboard profiles
        CopyCardboardProfiles(dbFrom, dbTo);
        // copy document types
        CopyDocumentTypes(dbFrom, dbTo);
        // copy branch nodes recursively
        TreeNode nodeFrom = TreeNode.GetRootNodes(dbFrom)[0];
        TreeNode nodeTo = TreeNode.GetRootNodes(dbTo)[0];
        CopyTreeNodesRecursively(dbFrom, dbTo, nodeFrom, nodeTo, callback);
        GC.Collect();
    }
    catch (Exception ex)
    {
        if (null != callback)
        {
            callback.Error(ex.Message);
        }
        _log.Error(ex.ToString());
        return false;
    }
    return true;
}
private void ExtractZip(object status)
{
    IProcessingCallback callback = status as IProcessingCallback;
    callback.Begin();
    string sOpName = string.Empty;
    try
    {
        switch (MergeMode)
        {
            case Mode.Mode_Overwrite:
                sOpName = "Updating with file ";
                BackupRestore.Overwrite(LocalLibraryFile, this);
                break;
            case Mode.Mode_Merge:
                sOpName = "Merging with file ";
                BackupRestore.Merge(LocalLibraryFile, this);
                break;
            default:
                Debug.Assert(false);
                break;
        }
    }
    catch (Exception ex)
    {
        if (null != callback)
        {
            callback.Error(string.Format("{0} {1} failed with error: {2}", sOpName, LocalLibraryFile, ex.Message));
        }
    }
    // ending
    callback.End();
}
public Export(IDocument document, IProcessingCallback callback)
{
    if (callback != null)
    {
        log = callback;
    }
    else
    {
        throw new ArgumentNullException("callback", "The IProcessingCallback cannot be null.");
    }
    if (document != null)
    {
        doc = document;
    }
    else
    {
        log.ReportError("The IDocument cannot be null.");
        throw new ArgumentNullException("document", "The IDocument cannot be null.");
    }
}
public static bool BackupBranch(List<string> nodePath, string zipFilePath, IProcessingCallback callback)
{
    // remove zip path if it already exists
    if (System.IO.File.Exists(zipFilePath))
    {
        System.IO.File.Delete(zipFilePath);
    }
    // instantiate data context of current database
    PPDataContext dbFrom = new PPDataContext();
    // get node from
    TreeNode nodeFrom = TreeNode.GetNodeByPath(dbFrom, null, nodePath, 0);
    // build list of profiles referred by branch components
    List<string> profileNames = new List<string>();
    BuildListOfUsedCardboardProfiles(dbFrom, nodeFrom, ref profileNames);
    // build destination database path
    DBDescriptor dbDescTo = DBDescriptor.CreateTemp();
    {
        // build data context
        using (PPDataContext dbTo = new PPDataContext(dbDescTo))
        {
            // copy cardboard profiles
            MergeCardboardProfiles(dbFrom, dbTo, profileNames, callback);
            // copy document types
            CopyDocumentTypes(dbFrom, dbTo);
            // copy branch nodes recursively
            TreeNode nodeTo = TreeNode.GetNodeByPath(dbTo, null, nodePath, 0);
            CopyTreeNodesRecursively(dbFrom, dbTo, nodeFrom, nodeTo, callback);
        }
        GC.Collect();
    }
    Thread.Sleep(1000);
    // archive temp database
    dbDescTo.Archive(zipFilePath, callback);
    return true;
}
private void CopyCardboardFormat(PPDataContext db, IProcessingCallback callback)
{
    if (null != callback)
    {
        callback.Info("Cardboard formats...");
    }
    PLMPackSR.PLMPackServiceClient client = new PLMPackSR.PLMPackServiceClient();
    client.ClientCredentials.UserName.UserName = UserName;
    client.ClientCredentials.UserName.Password = Password;
    foreach (CardboardFormat cf in db.CardboardFormats)
    {
        if (null != callback)
        {
            callback.Info(string.Format("Cardboard format {0} ( {1} * {2} )", cf.Name, cf.Length, cf.Width));
        }
        if (!client.CardboardFormatExists(cf.Name))
        {
            client.CreateNewCardboardFormat(cf.Name, cf.Description, cf.Length, cf.Width);
        }
    }
    client.Close();
}
private void CopyCardboardProfiles(PPDataContext db, IProcessingCallback callback)
{
    if (null != callback)
    {
        callback.Info("Cardboard profiles...");
    }
    PLMPackSR.PLMPackServiceClient client = new PLMPackSR.PLMPackServiceClient();
    client.ClientCredentials.UserName.UserName = UserName;
    client.ClientCredentials.UserName.Password = Password;
    foreach (CardboardProfile cp in db.CardboardProfiles)
    {
        if (null != callback)
        {
            callback.Info(string.Format("Cardboard profile {0} ({1})", cp.Name, cp.Thickness));
        }
        if (!client.CardboardProfileExists(cp.Name))
        {
            client.CreateNewCardboardProfile(cp.Name, "", cp.Code, cp.Thickness);
        }
    }
    client.Close();
}
/// <summary>
/// Method to be executed by worker thread
/// </summary>
public override void Execute(IProcessingCallback callback)
{
    BackupRestore.Merge(_filePath, callback);
}
/// <summary>
/// Method to be executed by worker thread
/// </summary>
public override void Execute(IProcessingCallback callback)
{
    BackupRestore.ClearDatabase(callback);
}
public static void MergeDocumentTypes(PPDataContext dbFrom, PPDataContext dbTo, IProcessingCallback callback)
{
    foreach (DocumentType dt in dbFrom.DocumentTypes)
    {
        if (DocumentType.HasByName(dbTo, dt.Name))
        {
            if (null != callback)
                callback.Info(string.Format("Document type {0} already exists. Skipping...", dt.Name));
        }
        else
        {
            if (null != callback)
                callback.Info(string.Format("Creating document type {0}...", dt.Name));
            DocumentType.CreateNew(dbTo, dt.Name, dt.Description, dt.Application);
        }
    }
}
private void RecursiveInsert(PPDataContext db, TreeNode tn, PLMPackSR.DCTreeNode wsNode, string offset, IProcessingCallback callback)
{
    PLMPackSR.PLMPackServiceClient client = new PLMPackSR.PLMPackServiceClient();
    client.ClientCredentials.UserName.UserName = UserName;
    client.ClientCredentials.UserName.Password = Password;
    PLMPackSR.DCTreeNode wsNodeChild = null;
    string docType = string.Empty;
    try
    {
        // create node thumbnail
        string thumbPath = tn.Thumbnail.File.PathWRepo(RepositoryPath);
        DCFile thFile = Upload(thumbPath, callback, client);
        PLMPackSR.DCThumbnail wsThumbnail = client.CreateNewThumbnailFromFile(thFile);
        if (tn.IsDocument)
        {
            // get document
            Document doc = tn.Documents(db)[0];
            string docPath = doc.File.PathWRepo(RepositoryPath);
            // upload document
            PLMPackSR.DCFile wsDocFile = Upload(docPath, callback, client);
            if (tn.IsComponent)
            {
                docType = "COMPONENT";
                Component comp = doc.Components[0];
                // get majorations
                List<PLMPackSR.DCMajorationSet> majorationSets = new List<PLMPackSR.DCMajorationSet>();
                foreach (MajorationSet majoSet in comp.MajorationSets)
                {
                    DCCardboardProfile cbProfile = client.GetCardboardProfileByName(majoSet.CardboardProfile.Name);
                    string sMajo = string.Empty;
                    List<DCMajoration> dcMajorationList = new List<DCMajoration>();
                    foreach (Majoration majo in majoSet.Majorations)
                    {
                        sMajo += string.Format("({0}={1})", majo.Name, majo.Value);
                        dcMajorationList.Add(new DCMajoration() { Name = majo.Name, Value = majo.Value });
                    }
                    majorationSets.Add(new DCMajorationSet() { Profile = cbProfile, Majorations = dcMajorationList.ToArray() });
                    if (null != callback)
                    {
                        callback.Info(string.Format("{0} - {1}", majoSet.CardboardProfile.Name, sMajo));
                    }
                }
                // get default parameter values
                List<PLMPackSR.DCParamDefaultValue> paramDefaultValues = new List<PLMPackSR.DCParamDefaultValue>();
                foreach (ParamDefaultValue pdv in comp.ParamDefaultValues)
                {
                    paramDefaultValues.Add(new DCParamDefaultValue() { Name = pdv.Name, Value = pdv.Value });
                }
                PLMPackSR.DCTreeNode wsNodeComp = client.CreateNewNodeComponent(
                    wsNode, tn.Name, tn.Description,
                    wsThumbnail, wsDocFile, doc.Components[0].Guid,
                    majorationSets.ToArray(), paramDefaultValues.ToArray());
                client.ShareEveryone(wsNodeComp);
            }
            else
            {
                docType = "DOCUMENT";
                PLMPackSR.DCTreeNode wsNodeDocument = client.CreateNewNodeDocument(wsNode, tn.Name, tn.Description, wsThumbnail, wsDocFile);
                client.ShareEveryone(wsNodeDocument);
            }
        }
        else
        {
            docType = "BRANCH";
            wsNodeChild = client.CreateNewNodeBranch(wsNode, tn.Name, tn.Description, wsThumbnail);
            client.ShareEveryone(wsNodeChild);
        }
        client.Close();
    }
    catch (Exception ex)
    {
        client.Abort();
        if (null != callback)
        {
            callback.Error(ex.ToString());
        }
    }
    if (null == wsNodeChild)
    {
        return;
    }
    if (null != callback)
    {
        callback.Info(string.Format("{0}-> {1} ({2})", offset, tn.Name, docType));
    }
    offset += " ";
    foreach (TreeNode tnChild in tn.Childrens(db))
    {
        RecursiveInsert(db, tnChild, wsNodeChild, offset, callback);
    }
}
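RecursiveInsert repeats the same client setup, Close-on-success and Abort-on-failure pattern that also appears in CopyCardboardFormat, CopyCardboardProfiles and CopyTreeNodeRecursively. A sketch of a hypothetical helper that factors this out (WithClient is not part of the original code; UserName, Password and the PLMPackSR client are the same members used above):

// Sketch: run one action against a fresh PLMPackServiceClient, closing it on success and aborting on failure.
private T WithClient<T>(Func<PLMPackSR.PLMPackServiceClient, T> action, IProcessingCallback callback)
{
    PLMPackSR.PLMPackServiceClient client = new PLMPackSR.PLMPackServiceClient();
    client.ClientCredentials.UserName.UserName = UserName;
    client.ClientCredentials.UserName.Password = Password;
    try
    {
        T result = action(client);
        client.Close();
        return result;
    }
    catch (Exception ex)
    {
        client.Abort(); // never Close a faulted WCF channel
        callback?.Error(ex.ToString());
        return default(T);
    }
}

For example, the thumbnail creation at the top of RecursiveInsert could then read: PLMPackSR.DCThumbnail wsThumbnail = WithClient(c => c.CreateNewThumbnailFromFile(Upload(thumbPath, callback, c)), callback);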
/// <summary>
/// Method to be executed by worker thread
/// </summary>
public override void Execute(IProcessingCallback callback)
{
    BackupRestore.BackupFull(_destFilePath, callback);
}
public static void OverwriteDocumentTypes(PPDataContext dbFrom, PPDataContext dbTo, IProcessingCallback callback)
{
    foreach (DocumentType dt in dbFrom.DocumentTypes)
    {
        if (DocumentType.HasByName(dbTo, dt.Name))
        {
            if (null != callback)
                callback.Info(string.Format("Updating existing document type {0}...", dt.Name));
            DocumentType docType = DocumentType.GetByName(dbTo, dt.Name);
            docType.Description = dt.Description;
            docType.Application = dt.Application;
            dbTo.SubmitChanges();
        }
        else
        {
            if (null != callback)
                callback.Info(string.Format("Creating document type {0}...", dt.Name));
            DocumentType.CreateNew(dbTo, dt.Name, dt.Description, dt.Application);
        }
    }
}
public static void ClearExistingDocumentsRecursively(PPDataContext dbFrom, TreeNode nodeFrom, TreeNode nodeTo, IProcessingCallback callback)
{
    if (null != callback && !nodeFrom.IsDocument)
        callback.Info(string.Format("Processing branch {0}", nodeFrom.Name));
    // get thumbnail path of node to insert
    string thumbnailPath = nodeFrom.Thumbnail.File.Path(dbFrom);
    // handle children
    foreach (TreeNode childFrom in nodeFrom.Childrens(dbFrom))
    {
        // get thumbnail of node to insert
        thumbnailPath = childFrom.Thumbnail.File.Path(dbFrom);
        if (childFrom.IsDocument)
        {
            Document docFrom = childFrom.Documents(dbFrom)[0];
            string docTypeName = docFrom.DocumentType.Name;
            // delete existing document
            // use a new data context each time a tree node is deleted
            // in order to avoid foreign key violation exceptions
            using (PPDataContext dbTo0 = new PPDataContext())
            {
                if (nodeTo.HasChild(dbTo0, childFrom.Name))
                {
                    string documentName = childFrom.Name;
                    TreeNode childTo = nodeTo.GetChild(dbTo0, documentName);
                    if (null != childTo && childTo.IsDocument)
                    {
                        try
                        {
                            if (null != callback)
                                callback.Info(string.Format("Deleting tree node {0}...", childTo.Name));
                            childTo.Delete(dbTo0, true, callback);
                            dbTo0.SubmitChanges();
                        }
                        catch (Exception ex)
                        {
                            callback.Error(string.Format("Deleting document {0} failed with exception {1}", documentName, ex.Message));
                        }
                    }
                }
            }
        }
        else // childFrom is a branch
        {
            using (PPDataContext dbTo2 = new PPDataContext())
            {
                TreeNode childTo = null;
                if (nodeTo.HasChild(dbTo2, childFrom.Name))
                {
                    if (null != callback)
                        callback.Info(string.Format("Branch {0} already exists. Skipping...", childFrom.Name));
                    childTo = nodeTo.GetChild(dbTo2, childFrom.Name);
                    ClearExistingDocumentsRecursively(dbFrom, childFrom, childTo, callback);
                }
            }
        }
    }
}
public void Upload(IProcessingCallback callback)
{
    // check that the database actually exists
    if (!System.IO.File.Exists(_dbPath))
    {
        if (null != callback)
        {
            callback.Error(string.Format("Input database path ({0}) could not be found.", _dbPath));
        }
        return;
    }
    // begin
    if (null != callback)
    {
        callback.Begin();
    }
    // connect
    PLMPackSR.PLMPackServiceClient client = new PLMPackSR.PLMPackServiceClient();
    client.ClientCredentials.UserName.UserName = UserName;
    client.ClientCredentials.UserName.Password = Password;
    PLMPackSR.DCUser user = client.Connect();
    if (user != null)
    {
        if (null != callback)
        {
            callback.Info(string.Format("Connection successful: {0}", user.Email));
        }
    }
    else
    {
        if (null != callback)
        {
            callback.Info(string.Format("Failed to connect with user credentials ({0} + {1})", UserName, Password));
        }
        return;
    }
    // ### upload default thumbnails
    Dictionary<string, string> defNameDict = new Dictionary<string, string>()
    {
        { "AI", "Ai.png" },
        { "ARD", "ARD.png" },
        { "CALC", "Calc.png" },
        { "CCF2", "CFF2.png" },
        { "DXF", "DXF.png" },
        { "EPS", "EPS.png" },
        { "EXCEL", "Excel.png" },
        { "FOLDER", "Folder.png" },
        { "IMAGE", "Image.png" },
        { "PDF", "pdf.png" },
        { "DES3", "Picador3D.png" },
        { "DES", "PicGEOM.png" },
        { "PPT", "Powerpoint.png" },
        { "WORD", "Word.png" },
        { "WRITER", "Writer.png" }
    };
    foreach (KeyValuePair<string, string> entry in defNameDict)
    {
        string filePath = Path.Combine(RepositoryThumbnail, entry.Value);
        Guid fileGuid = client.UploadDefault(entry.Key, Path.GetExtension(filePath).Trim('.'));
        Upload(filePath, fileGuid, callback);
    }
    client.Close();
    // ### upload database content
    PPDataContext db = new PPDataContext(_dbPath, RepositoryPath);
    CopyCardboardFormat(db, callback);
    CopyCardboardProfiles(db, callback);
    CopyTreeNodeRecursively(db, callback);
    // end
    if (null != callback)
    {
        callback.End();
    }
}
public static void MergeTreeNodesRecursively(PPDataContext dbFrom, PPDataContext dbTo, TreeNode nodeFrom, TreeNode nodeTo, IProcessingCallback callback)
{
    if (null != callback && !nodeFrom.IsDocument)
        callback.Info(string.Format("Processing branch {0}", nodeFrom.Name));
    // get thumbnail path of node to insert
    string thumbnailPath = nodeFrom.Thumbnail.File.Path(dbFrom);
    // handle children
    foreach (TreeNode childFrom in nodeFrom.Childrens(dbFrom))
    {
        // get thumbnail of node to insert
        thumbnailPath = childFrom.Thumbnail.File.Path(dbFrom);
        if (childFrom.IsDocument)
        {
            Document docFrom = childFrom.Documents(dbFrom)[0];
            string docTypeName = docFrom.DocumentType.Name;
            if (nodeTo.HasChild(dbTo, childFrom.Name))
            {
                if (null != callback)
                    callback.Info(string.Format("Document {0} already exists...", childFrom.Name));
            }
            else
            {
                if (string.Equals("Parametric component", docTypeName, StringComparison.CurrentCultureIgnoreCase))
                {
                    if (null != callback)
                        callback.Info(string.Format("Processing parametric component {0}...", childFrom.Name));
                    // insert as component
                    Component compFrom = docFrom.Components[0];
                    Component compTo = Component.GetByGuid(dbTo, compFrom.Guid);
                    if (null == compTo)
                    {
                        if (null != callback)
                            callback.Info(string.Format("Inserting component {0}...", childFrom.Name));
                        compTo = nodeTo.InsertComponent(dbTo, docFrom.File.Path(dbFrom), compFrom.Guid, childFrom.Name, childFrom.Description, thumbnailPath);
                        // parameter default values
                        Dictionary<string, double> dictNameValues = compFrom.GetParamDefaultValues();
                        if (dictNameValues.Count > 0)
                        {
                            if (null != callback)
                            {
                                string sParameters = string.Empty;
                                foreach (string defParamName in dictNameValues.Keys)
                                {
                                    StringBuilder sb = new StringBuilder();
                                    sb.Append(defParamName);
                                    sb.Append("=");
                                    sb.Append(dictNameValues[defParamName]);
                                    sb.Append(", ");
                                    sParameters += sb.ToString();
                                }
                                sParameters = sParameters.Trim().TrimEnd(',');
                                callback.Info(string.Format("Default parameter values : {0}", sParameters));
                            }
                            compTo.InsertNewParamDefaultValues(dbTo, dictNameValues);
                        }
                        // majorations
                        foreach (MajorationSet mjset in compFrom.MajorationSets)
                        {
                            // retrieve profile
                            string profileName = mjset.CardboardProfile.Name;
                            CardboardProfile profileTo = CardboardProfile.GetByName(dbTo, profileName);
                            if (null == profileTo)
                            {
                                if (null != callback)
                                    callback.Error(string.Format("Failed to retrieve profile {0}", mjset.CardboardProfile.Name));
                                continue;
                            }
                            // get majorations
                            Dictionary<string, double> majorations = new Dictionary<string, double>();
                            string sMajo = string.Format("prof = {0} -> ", profileName);
                            foreach (Majoration mj in mjset.Majorations)
                            {
                                majorations.Add(mj.Name, mj.Value);
                                sMajo += string.Format("{0}={1}, ", mj.Name, mj.Value);
                            }
                            // insert
                            if (null != callback)
                                callback.Info(sMajo);
                            compTo.InsertNewMajorationSet(dbTo, profileTo.Name, majorations);
                        }
                    }
                    else
                    {
                        if (null != callback)
                            callback.Info(string.Format("Component with GUID {0} already exists...", compFrom.Guid));
                    }
                }
                else
                {
                    if (null != callback)
                        callback.Info(string.Format("Inserting document {0}...", childFrom.Name));
                    // insert as document
                    nodeTo.InsertDocument(dbTo, docFrom.File.Path(dbFrom), childFrom.Name, childFrom.Description, docTypeName, thumbnailPath);
                }
            }
        }
        else
        {
            TreeNode childTo = null;
            if (nodeTo.HasChild(dbTo, childFrom.Name))
            {
                if (null != callback)
                    callback.Info(string.Format("Branch {0} already exists. Skipping...", childFrom.Name));
                childTo = nodeTo.GetChild(dbTo, childFrom.Name);
            }
            else
            {
                if (null != callback)
                    callback.Info(string.Format("Inserting branch {0}...", childFrom.Name));
                childTo = nodeTo.CreateChild(dbTo, childFrom.Name, childFrom.Description, thumbnailPath);
            }
            MergeTreeNodesRecursively(dbFrom, dbTo, childFrom, childTo, callback);
        }
    }
}
public static void OverwriteCardboardFormats(PPDataContext dbFrom, PPDataContext dbTo, IProcessingCallback callback)
{
    foreach (CardboardFormat cf in dbFrom.CardboardFormats)
    {
        if (CardboardFormat.HasByName(dbTo, cf.Name))
        {
            if (null != callback)
                callback.Info(string.Format("Cardboard format {0} already exists. Skipping...", cf.Name));
            CardboardFormat cardboardFormat = CardboardFormat.GetByName(dbTo, cf.Name);
        }
        else
        {
            if (null != callback)
                callback.Info(string.Format("Creating cardboard format {0}...", cf.Name));
            CardboardFormat.CreateNew(dbTo, cf.Name, cf.Description, cf.Length, cf.Width);
        }
    }
}
public static void OverwriteCardboardProfiles(PPDataContext dbFrom, PPDataContext dbTo, IProcessingCallback callback)
{
    foreach (CardboardProfile cp in dbFrom.CardboardProfiles)
    {
        if (CardboardProfile.HasByName(dbTo, cp.Name))
        {
            if (null != callback)
                callback.Info(string.Format("Cardboard profile {0} already exists. Updating...", cp.Name));
            CardboardProfile cardboardProf = CardboardProfile.GetByName(dbTo, cp.Name);
            cardboardProf.Code = cp.Code;
            cardboardProf.Thickness = cp.Thickness;
            dbTo.SubmitChanges();
        }
        else
        {
            if (null != callback)
                callback.Info(string.Format("Creating cardboard profile {0}...", cp.Name));
            CardboardProfile.CreateNew(dbTo, cp.Name, cp.Code, cp.Thickness);
        }
    }
}
public abstract void Execute(IProcessingCallback callback);
private static bool ExtractZipEntry(ZipEntry entry, ZipInputStream zipInStream, string destFilePath, IProcessingCallback callback)
{
    if (!entry.IsFile)
    {
        return false;
    }
    // prevent overwrite
    if (File.Exists(destFilePath))
    {
        if (null != callback)
        {
            callback.Info($"{entry.Name} already exists : Skipping...");
        }
        return false;
    }
    // *** extract file : begin
    if (null != callback)
    {
        callback.Info($"Extracting {entry.Name}...");
    }
    // instantiate output stream
    FileStream fileStreamOut = new FileStream(destFilePath, FileMode.Create, FileAccess.Write);
    int size = BUFFER_SIZE;
    byte[] buffer = new byte[size];
    do
    {
        size = zipInStream.Read(buffer, 0, buffer.Length);
        fileStreamOut.Write(buffer, 0, size);
    }
    while (size > 0);
    fileStreamOut.Close();
    // *** extract file : end
    return true;
}
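ExtractZipEntry trusts entry.Name when the caller builds destFilePath, so a malicious archive whose entry names contain ".." could write outside the repository directory (zip-slip). A defensive check is sketched below under the assumption that callers pass the extraction root; the helper name is illustrative and not part of the original code:

// Sketch: resolve the destination path and verify it stays under the extraction root before writing.
private static bool IsSafeDestination(string rootDir, string entryName, out string destFilePath)
{
    string fullRoot = Path.GetFullPath(rootDir + Path.DirectorySeparatorChar);
    destFilePath = Path.GetFullPath(Path.Combine(fullRoot, entryName));
    return destFilePath.StartsWith(fullRoot, StringComparison.OrdinalIgnoreCase);
}

FillFromArchive could then call IsSafeDestination(desc.RepositoryPath, entry.Name, out string destFilePath) and skip any entry for which it returns false.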