/// <summary>
/// Resource-library node for a PDF document. The on-disk path is the supplied
/// directory concatenated with the stored file name (the directory is expected
/// to carry its trailing separator).
/// </summary>
/// <param name="pdfDirectory">Directory containing the library's PDF files.</param>
/// <param name="doc">Database record describing the document.</param>
public PDFNode(string pdfDirectory, GEN_FILE doc) : base(doc)
{
    Type = ResourceNodeType.PDFDoc;
    FileName = doc.File_Name;
    PathDoc = pdfDirectory + FileName;
}
/// <summary>
/// Resource-library node for a document rendered as XPS. The database stores
/// the original file name (e.g. a .docx); the file on disk is the same base
/// name with an .xps extension under the XPS directory.
/// </summary>
/// <param name="xpsDirectory">Directory containing the rendered .xps files.</param>
/// <param name="doc">Database record describing the document.</param>
public XPSNode(String xpsDirectory, GEN_FILE doc) : base(doc)
{
    Type = ResourceNodeType.XPSDoc;
    FileName = Path.GetFileNameWithoutExtension(doc.File_Name);
    PathDoc = xpsDirectory + FileName + ".xps";
}
/// <summary>
/// Maps a GEN_FILE database entity to the external-document DTO.
/// Note the field mapping: Title becomes Name and Doc_Num becomes ShortName.
/// </summary>
/// <param name="genFile">Source database record.</param>
/// <returns>A new <see cref="ExternalDocument"/> populated from the record.</returns>
public static ExternalDocument ToExternalDocument(this GEN_FILE genFile)
{
    return new ExternalDocument
    {
        Data = genFile.Data,
        FileSize = genFile.File_Size,
        Name = genFile.Title,
        ShortName = genFile.Doc_Num,
        FileName = genFile.File_Name
    };
}
/// <summary>
/// Maps an external document back onto a new GEN_FILE entity, marking it as
/// uploaded. The document's ShortName fans out to Name, Doc_Num and Short_Name.
/// </summary>
/// <param name="document">Source external document.</param>
/// <returns>A new <see cref="GEN_FILE"/> populated from the document.</returns>
public static GEN_FILE ToGenFile(this IExternalDocument document)
{
    return new GEN_FILE
    {
        Data = document.Data,
        Is_Uploaded = true,
        FileSize = document.FileSize,
        Title = document.Name,
        Name = document.ShortName,
        Doc_Num = document.ShortName,
        Short_Name = document.ShortName,
        File_Name = document.FileName
    };
}
/// <summary>
/// Finds the id of the most recently added GEN_FILE row with the given file
/// name (highest Gen_File_Id wins when names collide).
/// </summary>
/// <param name="fileName">Exact file name to look up.</param>
/// <returns>The matching Gen_File_Id, or 0 when no row matches.</returns>
internal int LookupGenFileId(string fileName)
{
    using (var db = new CSET_Context())
    {
        var match = db.GEN_FILE
            .Where(f => f.File_Name == fileName)
            .OrderByDescending(f => f.Gen_File_Id)
            .FirstOrDefault();
        return match?.Gen_File_Id ?? 0;
    }
}
/// <summary>
/// Initializes a resource node's display fields from a GEN_FILE record.
/// Optional fields (title, doc number, publish date, summary) are only set
/// when present on the record.
/// </summary>
/// <param name="doc">Database record backing this node.</param>
public ResourceNode(GEN_FILE doc) : this()
{
    ID = doc.Gen_File_Id;
    TreeTextNode = doc.Short_Name;

    if (doc.Title != null)
    {
        HeadingTitle = doc.Title;
    }

    // NOTE(review): guards on Doc_Num but assigns Short_Name — confirm
    // Doc_Num was not the intended value here.
    if (doc.Doc_Num != null)
    {
        HeadingTitle2 = doc.Short_Name;
    }

    if (doc.Publish_Date != null)
    {
        DatePublished = $"{doc.Publish_Date:ddd, MMM d, yyyy}";
    }

    if (doc.Summary != null)
    {
        HeadingText = doc.Summary;
    }
}
/// <summary>
/// Builds the resource-library tree shown in the UI:
///  1. the document library from REF_LIBRARY_PATH / GEN_FILE (PDF and DOCX nodes),
///  2. the "Cyber Security Procurement Language" section,
///  3. the "Catalog of Recommendations" section.
/// Populates <c>TopNodes</c> and <c>ResourceModelDictionary</c>.
/// </summary>
public void CreateResourceLibraryData()
{
    try
    {
        TopNodes = new ObservableCollection<ResourceNode>();
        ResourceModelDictionary = new Dictionary<int, ResourceNode>();
        Dictionary<int, ResourceNode> ResourceNodeDict = new Dictionary<int, ResourceNode>();

        var query = dbContext.REF_LIBRARY_PATH
            .Include(x => x.GEN_FILE_LIB_PATH_CORL)
            .ThenInclude(x => x.Gen_File_);

        // First pass: build one node per library path and its document children.
        foreach (var obj in query.ToList())
        {
            ResourceNode node = new NoneNode();
            node.ID = Convert.ToInt32(obj.Lib_Path_Id);
            // -1 marks a root node; resolved in the second pass below.
            node.ParentID = obj.Parent_Path_Id.HasValue ? Convert.ToInt32(obj.Parent_Path_Id) : -1;
            node.TreeTextNode = obj.Path_Name;
            node.Type = ResourceNodeType.None;
            node.Nodes = new ObservableCollection<ResourceNode>();

            List<ResourceNode> listItems = new List<ResourceNode>();
            foreach (GEN_FILE_LIB_PATH_CORL corl in obj.GEN_FILE_LIB_PATH_CORL)
            {
                GEN_FILE doc = corl.Gen_File_;

                // A document can appear under several paths; reuse its node.
                // (TryGetValue avoids the ContainsKey + indexer double lookup.)
                if (ResourceModelDictionary.TryGetValue(doc.Gen_File_Id, out ResourceNode existing))
                {
                    listItems.Add(existing);
                }
                else if (doc.File_Type_Id == 31) // pdf
                {
                    ResourceNode pdfNode = new PDFNode(pdfDirectory, doc);
                    ResourceModelDictionary.Add(pdfNode.ID, pdfNode);
                    listItems.Add(pdfNode);
                }
                else if (doc.File_Type_Id == 41) // docx
                {
                    ResourceNode docxNode = new XPSNode(xpsDirectory, doc);
                    ResourceModelDictionary.Add(docxNode.ID, docxNode);
                    listItems.Add(docxNode);
                }
                else
                {
                    Debug.Assert(false, "Invalid document type: " + doc.File_Type_.File_Type1);
                }
            }

            foreach (ResourceNode rn in listItems.OrderBy(x => x.TreeTextNode))
            {
                node.Nodes.Add(rn);
            }

            ResourceNodeDict.Add(node.ID, node);
        }

        // Second pass: wire children to parents now that every node exists.
        foreach (ResourceNode libDoc in ResourceNodeDict.Values)
        {
            if (libDoc.ParentID == -1)
            {
                TopNodes.Add(libDoc);
            }
            else
            {
                ResourceNodeDict[libDoc.ParentID].Nodes.Add(libDoc);
            }
        }

        // --- Cyber Security Procurement Language ---
        ResourceNode procTopicModel = new NoneNode("Cyber Security Procurement Language");
        TopNodes.Add(procTopicModel);

        // Group the language rows by parent heading for one-pass lookup below.
        Dictionary<int, List<PROCUREMENTLANGUAGEDATA>> dictionaryProcurementLanguageData =
            new Dictionary<int, List<PROCUREMENTLANGUAGEDATA>>();
        foreach (PROCUREMENT_LANGUAGE_DATA data in dbContext.PROCUREMENT_LANGUAGE_DATA.ToList())
        {
            List<PROCUREMENTLANGUAGEDATA> list;
            if (!dictionaryProcurementLanguageData.TryGetValue(data.Parent_Heading_Id.Value, out list))
            {
                list = new List<PROCUREMENTLANGUAGEDATA>();
                dictionaryProcurementLanguageData[data.Parent_Heading_Id.Value] = list;
            }
            list.Add(TinyMapper.Map<PROCUREMENTLANGUAGEDATA>(data));
        }

        foreach (PROCUREMENT_LANGUAGE_HEADINGS procHeading in dbContext.PROCUREMENT_LANGUAGE_HEADINGS.OrderBy(h => h.Heading_Num).ToList())
        {
            ResourceNode procHeadingModel = new NoneNode(procHeading.Heading_Name);
            procTopicModel.Nodes.Add(procHeadingModel);

            // A heading with no data rows previously threw KeyNotFoundException,
            // which the catch-all below silently swallowed, aborting the rest of
            // the load. Treat such a heading as empty instead.
            if (!dictionaryProcurementLanguageData.TryGetValue(procHeading.Id, out List<PROCUREMENTLANGUAGEDATA> procItems))
            {
                continue;
            }
            foreach (PROCUREMENTLANGUAGEDATA data in procItems.OrderBy(d => d.Section_Number))
            {
                procHeadingModel.Nodes.Add(new ProcurementLanguageTopicNode(data));
            }
        }

        // --- Catalog of Recommendations ---
        ResourceNode recCatTopicModel = new NoneNode("Catalog of Recommendations");
        TopNodes.Add(recCatTopicModel);

        Dictionary<int, List<CATALOGRECOMMENDATIONSDATA>> dictionaryCatalogRecommendations =
            new Dictionary<int, List<CATALOGRECOMMENDATIONSDATA>>();
        foreach (CATALOG_RECOMMENDATIONS_DATA data in dbContext.CATALOG_RECOMMENDATIONS_DATA.ToList())
        {
            List<CATALOGRECOMMENDATIONSDATA> list;
            if (!dictionaryCatalogRecommendations.TryGetValue(data.Parent_Heading_Id.Value, out list))
            {
                list = new List<CATALOGRECOMMENDATIONSDATA>();
                dictionaryCatalogRecommendations[data.Parent_Heading_Id.Value] = list;
            }
            list.Add(TinyMapper.Map<CATALOGRECOMMENDATIONSDATA>(data));
        }

        foreach (CATALOG_RECOMMENDATIONS_HEADINGS procHeading in dbContext.CATALOG_RECOMMENDATIONS_HEADINGS.OrderBy(h => h.Heading_Num).ToList())
        {
            ResourceNode procHeadingModel = new NoneNode(procHeading.Heading_Name);
            recCatTopicModel.Nodes.Add(procHeadingModel);

            // Same empty-heading tolerance as the procurement section above.
            if (!dictionaryCatalogRecommendations.TryGetValue(procHeading.Id, out List<CATALOGRECOMMENDATIONSDATA> catItems))
            {
                continue;
            }
            foreach (CATALOGRECOMMENDATIONSDATA data in catItems.OrderBy(d => d.Section_Short_Number))
            {
                procHeadingModel.Nodes.Add(new CatalogRecommendationsTopicNode(data));
            }
        }
    }
    catch (Exception ex)
    {
        // Swallowing keeps a partially-built library usable, but record the
        // failure instead of discarding it entirely.
        Debug.WriteLine("An exception occurred in loading resource library: " + ex);
        //CSET_Main.Common.CSETLogger.Fatal("An exception occurred in loading resource library.", ex);
    }
}
/// <summary>
/// Builds a Lucene index document for a PDF resource. Body text and PDF
/// metadata (author/keywords/subject) are extracted with PDFBox; name, title,
/// summary and user-entered keywords come from the GEN_FILE record.
/// Extraction failures are logged and indexing continues with DB fields only.
/// </summary>
/// <param name="filePath">Full path to the PDF on disk.</param>
/// <param name="doc">Database record describing the document.</param>
/// <returns>The populated Lucene <see cref="Document"/>.</returns>
public static Document ParseDocument(string filePath, GEN_FILE doc)
{
    string author = null;
    string keywords = null;
    string summary = null;
    string text = null;

    PDDocument document = null;
    try
    {
        document = PDDocument.load(filePath);
        text = new PDFTextStripper().getText(document);
        PDDocumentInformation info = document.getDocumentInformation();
        author = info.getAuthor();
        keywords = info.getKeywords();
        summary = info.getSubject();
    }
    catch (Exception ex)
    {
        Debug.WriteLine("Exception in reading file: " + filePath + " ex: " + ex.Message);
    }
    finally
    {
        // close() used to be skipped whenever extraction threw, leaking the
        // underlying file handle. Always close, and never let close() itself
        // mask the original failure.
        try { document?.close(); }
        catch (Exception ex) { Debug.WriteLine("Exception closing file: " + filePath + " ex: " + ex.Message); }
    }

    Document lucDoc = new Document();
    string filename = Path.GetFileNameWithoutExtension(doc.File_Name);
    string short_name = doc.Short_Name;
    string title = doc.Title;
    string header = doc.Summary;
    string doc_id = doc.Gen_File_Id.ToString();
    Debug.WriteLine("DocID: " + doc_id);

    // User-entered keywords from the database. These were previously built
    // but never indexed for PDFs (unlike the DOCX path); merge them with the
    // PDF's own keyword metadata so both are searchable.
    StringBuilder keyTextBuilder = new StringBuilder();
    foreach (FILE_KEYWORDS keywordobj in doc.FILE_KEYWORDS.ToList())
    {
        keyTextBuilder.Append(keywordobj.Keyword + " ");
    }
    string allKeywords = ((keywords ?? "") + " " + keyTextBuilder).Trim();

    lucDoc.Add(new Field(FieldNames.FILE_NAME, filename, Field.Store.YES, Field.Index.ANALYZED));
    if (author != null && author.Trim() != "")
    {
        lucDoc.Add(new Field(FieldNames.AUTHOR, author, Field.Store.YES, Field.Index.ANALYZED));
    }
    if (allKeywords != "")
    {
        lucDoc.Add(new Field(FieldNames.KEYWORDS, allKeywords, Field.Store.YES, Field.Index.ANALYZED));
    }
    if (summary != null && summary.Trim() != "")
    {
        lucDoc.Add(new Field(FieldNames.SUMMARY, summary, Field.Store.YES, Field.Index.ANALYZED));
    }
    lucDoc.Add(new Field(FieldNames.SHORT_NAME, short_name, Field.Store.YES, Field.Index.ANALYZED));
    lucDoc.Add(new Field(FieldNames.TITLE, title, Field.Store.YES, Field.Index.ANALYZED));
    if (!String.IsNullOrWhiteSpace(header))
    {
        lucDoc.Add(new Field(FieldNames.HEADER, header, Field.Store.YES, Field.Index.ANALYZED));
    }
    if (text != null && text.Trim() != "")
    {
        lucDoc.Add(new Field(FieldNames.TEXT, text, Field.Store.YES, Field.Index.ANALYZED));
    }
    // DOC_ID is stored for retrieval only, never searched.
    lucDoc.Add(new Field(FieldNames.DOC_ID, doc_id, Field.Store.YES, Field.Index.NO));
    lucDoc.Add(new Field(FieldNames.RESOURCE_TYPE, ResourceTypeEnum.Resource_Doc.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    return lucDoc;
}
/// <summary>
/// Builds a Lucene index document for a Word (.docx) resource. Paragraph text
/// is extracted with the OpenXML SDK; the remaining fields (name, title,
/// summary, keywords) come from the GEN_FILE record.
/// </summary>
/// <param name="filePath">Full path to the .docx file on disk.</param>
/// <param name="resdoc_Entity">Database record describing the document.</param>
/// <returns>The populated Lucene document.</returns>
public static Lucene.Net.Documents.Document ParseDocument(string filePath, GEN_FILE resdoc_Entity)
{
    // Pull the plain text out of every top-level paragraph in the body.
    StringBuilder textBuilder = new StringBuilder();
    using (WordprocessingDocument doc = WordprocessingDocument.Open(filePath, false))
    {
        OpenXmlElement root = doc.MainDocumentPart.Document;
        Body body = (Body)root.LogicalChildrenContent().First();
        foreach (OpenXmlElement blockLevelContentElement in body.LogicalChildrenContent())
        {
            if (blockLevelContentElement is Paragraph)
            {
                string pText = blockLevelContentElement
                    .LogicalChildrenContent(typeof(Run))
                    .LogicalChildrenContent(typeof(Text))
                    .OfType<Text>()
                    .Select(t => t.Text)
                    .StringConcatenate();
                textBuilder.Append(pText);
                textBuilder.Append(Environment.NewLine);
            }
        }
    }
    string text = textBuilder.ToString();

    Lucene.Net.Documents.Document lucDoc = new Lucene.Net.Documents.Document();
    string filename = Path.GetFileNameWithoutExtension(resdoc_Entity.File_Name);

    // User-entered keywords from the database.
    StringBuilder keyTextBuilder = new StringBuilder();
    foreach (FILE_KEYWORDS keywordobj in resdoc_Entity.FILE_KEYWORDS.ToList())
    {
        keyTextBuilder.Append(keywordobj.Keyword + " ");
    }
    string keywords = keyTextBuilder.ToString();

    string short_name = resdoc_Entity.Short_Name;
    string title = resdoc_Entity.Title;
    string header = resdoc_Entity.Summary;
    string doc_id = resdoc_Entity.Gen_File_Id.ToString();
    Debug.WriteLine("DocID: " + doc_id);

    lucDoc.Add(new Field(FieldNames.FILE_NAME, filename, Field.Store.YES, Field.Index.ANALYZED));
    // AUTHOR and SUMMARY are only available from PDF metadata, so (unlike the
    // PDF ParseDocument) they are never indexed here; the previous null-checked
    // branches for them were dead code and have been removed.
    if (!String.IsNullOrWhiteSpace(keywords))
    {
        lucDoc.Add(new Field(FieldNames.KEYWORDS, keywords, Field.Store.YES, Field.Index.ANALYZED));
    }
    lucDoc.Add(new Field(FieldNames.SHORT_NAME, short_name, Field.Store.YES, Field.Index.ANALYZED));
    lucDoc.Add(new Field(FieldNames.TITLE, title, Field.Store.YES, Field.Index.ANALYZED));
    // Guard against a null Summary: Lucene's Field constructor rejects a null
    // value, and the PDF ParseDocument already guards this same field.
    if (!String.IsNullOrWhiteSpace(header))
    {
        lucDoc.Add(new Field(FieldNames.HEADER, header, Field.Store.YES, Field.Index.ANALYZED));
    }
    if (!String.IsNullOrWhiteSpace(text))
    {
        lucDoc.Add(new Field(FieldNames.TEXT, text, Field.Store.YES, Field.Index.ANALYZED));
    }
    lucDoc.Add(new Field(FieldNames.RESOURCE_TYPE, ResourceTypeEnum.Resource_Doc.ToString(), Field.Store.YES, Field.Index.NOT_ANALYZED));
    // DOC_ID is stored for retrieval only, never searched.
    lucDoc.Add(new Field(FieldNames.DOC_ID, doc_id, Field.Store.YES, Field.Index.NO));
    return lucDoc;
}