public override bool Repair(bool geocachesOnly)
{
    bool result = false;

    //repair
    _fileCollection = new FileCollection(PluginSettings.Instance.ActiveDataFile);
    if (File.Exists(_fileCollection.DatabaseInfoFilename))
    {
        XmlDocument doc = new XmlDocument();
        doc.Load(_fileCollection.DatabaseInfoFilename);
        XmlElement root = doc.DocumentElement;
        int storageVersion = int.Parse(root.SelectSingleNode("StorageVersion").InnerText);
        if (storageVersion == 1)
        {
            _uniqueCheckList = new Hashtable();
            result = RepairV1();
        }
    }

    //load geocaches
    if (result)
    {
        result = Open(geocachesOnly);
    }
    return result;
}
/// <summary>
/// Create extra commits for files that have been marked as "head-only".
/// </summary>
public void CreateHeadOnlyCommits(IEnumerable<string> headOnlyBranches, BranchStreamCollection streams, FileCollection allFiles)
{
    var branches = SortBranches(headOnlyBranches, streams);
    var branchMerges = new Dictionary<string, string>();

    if (branches.Any())
    {
        m_log.DoubleRuleOff();
        m_log.WriteLine("Creating head-only commits");
    }

    using (m_log.Indent())
    {
        foreach (var branch in branches)
        {
            // record where this branch will be merged to
            if (streams[branch].Predecessor != null)
                branchMerges[streams[branch].Predecessor.Branch] = branch;

            string branchMergeFrom;
            branchMerges.TryGetValue(branch, out branchMergeFrom);

            CreateHeadOnlyCommit(branch, streams, allFiles, branchMergeFrom);
        }
    }
}
public bool Validate(FileCollection sourceFiles, Microsoft.SharePoint.Client.ClientContext ctx)
{
    int scount = 0;
    int tcount = 0;

    foreach (var sf in sourceFiles)
    {
        scount++;
        string fileName = sf.Src;
        string folderName = sf.Folder;
        string fileUrl = UrlUtility.Combine(ctx.Web.ServerRelativeUrl, folderName + "/" + fileName);
        var file = ctx.Web.GetFileByServerRelativeUrl(fileUrl);
        ctx.Load(file, f => f.Exists, f => f.Length);
        ctx.ExecuteQuery();

        if (file.Exists)
        {
            tcount++;

            #region File - Security
            if (sf.Security != null)
            {
                ctx.Load(file, f => f.ListItemAllFields);
                ctx.ExecuteQuery();
                bool isSecurityMatch = ValidateSecurityCSOM(ctx, sf.Security, file.ListItemAllFields);
                if (!isSecurityMatch)
                {
                    return false;
                }
            }
            #endregion

            #region Overwrite validation
            if (sf.Overwrite == false)
            {
                // look up the originally added file size; it should differ from the one retrieved from SharePoint since we opted NOT to overwrite
                var files = System.IO.Directory.GetFiles(@".\framework\functional\templates");
                foreach (var f in files)
                {
                    if (f.Contains(sf.Src))
                    {
                        if (new System.IO.FileInfo(f).Length == file.Length)
                        {
                            return false;
                        }
                    }
                }
            }
            #endregion
        }
        else
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Ends the current unity file and starts a new, empty collection. If the current unity file contains no files,
/// this method has no effect, i.e. you will not get an empty unity file.
/// </summary>
public void EndCurrentUnityFile()
{
    if (CurrentCollection.Files.Count == 0)
        return;

    UnityFiles.Add(CurrentCollection);
    CurrentCollection = new FileCollection();
}
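A minimal usage sketch for the method above: only UnityFiles, CurrentCollection and EndCurrentUnityFile come from the snippet; the AddFile helper, the Files.Add call and the maxFilesPerUnity threshold are illustrative assumptions about the surrounding builder type.

// Hypothetical helper on the same builder type; flushes a unity file once it reaches the assumed size limit.
public void AddFile(string path, int maxFilesPerUnity = 32)
{
    CurrentCollection.Files.Add(path);                 // assumes Files exposes Add(string)
    if (CurrentCollection.Files.Count >= maxFilesPerUnity)
        EndCurrentUnityFile();                         // the full collection becomes a unity file and a fresh one is started
}

// After the last file has been added, flush the remainder; this is a no-op when nothing is pending.
// EndCurrentUnityFile();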
public QueueDetailUI(ref FileCollection collection)
{
    this.collection = collection;
    InitializeComponent();

    // Set form title
    if (collection.name != null && !collection.name.Equals(""))
        this.Text = "Details for: " + collection.name;

    InitializeImages();
    InitializeTree();
    timer1.Start();
}
void IXmlSerializable.ReadXml(XmlReader reader)
{
    if (null == reader)
    {
        throw new ArgumentNullException("reader");
    }

    if (reader.IsEmptyElement)
    {
        reader.Read();
        return;
    }

    var name = reader.Name;
    while (reader.Read())
    {
#if NET20
        if (XmlReaderExtensionMethods.IsEndElement(reader, name))
#else
        if (reader.IsEndElement(name))
#endif
        {
            reader.Read();
            break;
        }

        while (XmlNodeType.Element == reader.NodeType)
        {
            switch (reader.Name)
            {
                case "directories":
#if NET20
                    Directories = XmlReaderExtensionMethods.Deserialize<DirectoryCollection>(reader);
#else
                    Directories = reader.Deserialize<DirectoryCollection>();
#endif
                    break;

                case "files":
#if NET20
                    Files = XmlReaderExtensionMethods.Deserialize<FileCollection>(reader);
#else
                    Files = reader.Deserialize<FileCollection>();
#endif
                    break;
            }
        }
    }
}
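A hedged sketch of driving the ReadXml implementation above: the "directories" and "files" element names come from the snippet, while the root element name, the Settings type name and the empty child payloads are illustrative assumptions.

// Hypothetical usage; Settings stands in for whatever type implements IXmlSerializable with the ReadXml above.
var xml = "<settings><directories /><files /></settings>";
using (var reader = System.Xml.XmlReader.Create(new System.IO.StringReader(xml)))
{
    reader.MoveToContent();                                    // position the reader on the root element
    var settings = new Settings();
    ((System.Xml.Serialization.IXmlSerializable)settings).ReadXml(reader);
    // settings.Directories / settings.Files are populated from the corresponding child elements
}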
private bool Export(string filename, Core.Storage.Database db, List<Core.Data.Geocache> gcList)
{
    bool result = false;
    try
    {
        using (FileCollection fileCollection = new FileCollection(filename))
        {
            result = Save(fileCollection, db, gcList);
        }
    }
    catch (Exception e)
    {
        Core.ApplicationData.Instance.Logger.AddLog(this, e);
    }
    return result;
}
public void opIndexer_string()
{
    using (var temp = new TempFile())
    {
        const string name = "example";
        var expected = temp.Info.FullName;

        var obj = new FileCollection
        {
            new FileItem { Name = name, Value = temp.Info.FullName }
        };

        var actual = obj[name].FullName;

        Assert.Equal(expected, actual);
    }
}
public MyMeetingInfosManagement()
{
    InitializeComponent();
    Utility.DisplayGridToolBarButton(ToolBar, "OAMYMEETING", true);
    PARENT.Children.Add(loadbar);

    ToolBar.Visibility = Visibility.Visible;
    ToolBar.btnAudit.Visibility = Visibility.Collapsed;
    ToolBar.btnNew.Visibility = Visibility.Collapsed;
    ToolBar.btnEdit.Visibility = Visibility.Collapsed;
    ToolBar.btnDelete.Visibility = Visibility.Collapsed;
    ToolBar.cbxCheckState.Visibility = Visibility.Collapsed;
    ToolBar.stpCheckState.Visibility = Visibility.Collapsed;

    _files = new FileCollection(_customParams, _maxUpload);

    ToolBar.btnRefresh.Click += new RoutedEventHandler(btnRefresh_Click);
    ToolBar.BtnView.Click += new RoutedEventHandler(MeetingDetailBtn_Click);
    LoadMeetingInfos();
    this.Loaded += new RoutedEventHandler(MyMeetingInfosManagement_Loaded);
    DaGr.SelectionChanged += new SelectionChangedEventHandler(DaGr_SelectionChanged);
    MeetingClient.GetMyMeetingInfosManagementCompleted += new EventHandler<GetMyMeetingInfosManagementCompletedEventArgs>(MeetingInfoClient_GetMyMeetingInfosManagementCompleted);
    ToolBar.ShowRect();
}
public Page(IDictionary<string, string> initParams)
{
    InitializeComponent();
    LoadConfiguration(initParams);

    _files = new FileCollection(_customParams, _maxUpload);

    HtmlPage.RegisterScriptableObject("Files", _files);
    HtmlPage.RegisterScriptableObject("Control", this);

    FileList.ItemsSource = _files;
    FilesCount.DataContext = _files;
    TotalProgress.DataContext = _files;
    PercentLabel.DataContext = _files;
    TotalKB.DataContext = _files;

    this.Loaded += new RoutedEventHandler(Page_Loaded);
    _files.CollectionChanged += new System.Collections.Specialized.NotifyCollectionChangedEventHandler(_files_CollectionChanged);
    _files.AllFilesFinished += new EventHandler(_files_AllFilesFinished);
    _files.TotalPercentageChanged += new EventHandler(_files_TotalPercentageChanged);
}
/// <summary>
/// Get the list of entities.
/// </summary>
/// <returns></returns>
public static List<FileInfo> GetList()
{
    string cacheKey = GetCacheKey();

    // If this entity type is registered as a cached entity and the cache exists, read it straight from the cache.
    if (CachedEntityCommander.IsTypeRegistered(typeof(FileInfo)) && CachedEntityCommander.GetCache(cacheKey) != null)
    {
        return CachedEntityCommander.GetCache(cacheKey) as List<FileInfo>;
    }
    else
    {
        List<FileInfo> list = new List<FileInfo>();
        FileCollection collection = new FileCollection();
        Query qry = new Query(File.Schema);
        collection.LoadAndCloseReader(qry.ExecuteReader());
        foreach (File file in collection)
        {
            FileInfo fileInfo = new FileInfo();
            LoadFromDAL(fileInfo, file);
            list.Add(fileInfo);
        }

        // Populate the cache.
        if (CachedEntityCommander.IsTypeRegistered(typeof(FileInfo)))
        {
            CachedEntityCommander.SetCache(cacheKey, list);
        }
        return list;
    }
}
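GetList follows a cache-aside pattern: serve from the cache when the FileInfo type is registered and a cached list exists, otherwise load through a FileCollection and repopulate the cache. A minimal hedged caller, assuming the hosting class is named FileInfoManager (the name is an assumption):

// Hypothetical usage of the cache-aside loader above.
List<FileInfo> first = FileInfoManager.GetList();   // first call reads the database and fills the cache
List<FileInfo> second = FileInfoManager.GetList();  // later calls are served from the cache while it is valid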
protected override void CopyToNewMethod()
{
    using (FileCollection fc = new FileCollection(_selectedCopyToFilename))
    {
        copyToSave(fc);
    }
}
private bool readFiles(FileCollection fc) { bool result = false; try { //todo: when version is not compatible anymore, do a check on storage version!! int lsize = sizeof(long); byte[] memBuffer = new byte[10 * 1024 * 1024]; using (MemoryStream ms = new MemoryStream(memBuffer)) using (BinaryReader br = new BinaryReader(ms)) using (Utils.ProgressBlock progress = new Utils.ProgressBlock(this, STR_LOADINGDATA, STR_LOADINGDATA, 1, 0)) { FileStream fs = fc._fsGeocaches; fs.Position = 0; long eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(42, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._geocachesInDB.Add(ri.ID, ri); } fs = fc._fsLogs; fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(32, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._logsInDB.Add(ri.ID, ri); } using (fs = File.Open(fc.WaypointsFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(32, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._wptsInDB.Add(ri.ID, ri); } } using (fs = File.Open(fc.UserWaypointsFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(32, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._usrwptsInDB.Add(ri.ID, ri); } } using (fs = File.Open(fc.LogImagesFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(100, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._logimgsInDB.Add(ri.ID, ri); 
} } using (fs = File.Open(fc.GeocacheImagesFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(64, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._geocacheimgsInDB.Add(ri.ID, ri); } } } result = true; } catch { } return result; }
public void ProcessYamlTocWithFolderShouldSucceed() { var file1 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent); var file2 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent, "sub"); var subToc = _fileCreator.CreateFile($@" #[Topic]({Path.GetFileName(file2)}) ", FileType.MarkdownToc, "sub"); var content = $@" - name: Topic1 href: {file1} items: - name: Topic1.1 href: {file1} homepage: {file2} - name: Topic1.2 href: sub/ homepage: {file1} - name: Topic2 href: sub/ "; var toc = _fileCreator.CreateFile(content, FileType.YamlToc); FileCollection files = new FileCollection(_inputFolder); files.Add(DocumentType.Article, new[] { file1, file2, toc, subToc }); BuildDocument(files); var outputRawModelPath = Path.Combine(_outputFolder, Path.ChangeExtension(toc, RawModelFileExtension)); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <TocItemViewModel>(outputRawModelPath); var expectedModel = new TocItemViewModel { Items = new TocViewModel { new TocItemViewModel { Name = "Topic1", Href = file1, TopicHref = file1, Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1", Href = file1, // For relative file, href keeps unchanged Homepage = file2, // Homepage always keeps unchanged TopicHref = file2, }, new TocItemViewModel { Name = "Topic1.2", Href = file1, // For relative folder, href should be overwritten by homepage Homepage = file1, TopicHref = file1, TocHref = "sub/toc.md", } } }, new TocItemViewModel { Name = "Topic2", Href = file2, TopicHref = file2, TocHref = "sub/toc.md", } } }; AssertTocEqual(expectedModel, model); }
public void ProcessYamlTocWithTocHrefShouldSucceed() { var file1 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent); var file2 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent, "sub1/sub2"); var referencedToc = _fileCreator.CreateFile($@" - name: Topic href: {Path.GetFileName(file2)} ", FileType.YamlToc, "sub1/sub2"); var content = $@" - name: Topic1 tocHref: /Topic1/ topicHref: /Topic1/index.html items: - name: Topic1.1 tocHref: /Topic1.1/ topicHref: /Topic1.1/index.html - name: Topic1.2 tocHref: /Topic1.2/ topicHref: /Topic1.2/index.html - name: Topic2 tocHref: {referencedToc} topicHref: {file2} "; var toc = _fileCreator.CreateFile(content, FileType.YamlToc); FileCollection files = new FileCollection(_inputFolder); files.Add(DocumentType.Article, new[] { file1, file2, toc, referencedToc }); BuildDocument(files); var outputRawModelPath = Path.Combine(_outputFolder, Path.ChangeExtension(toc, RawModelFileExtension)); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <TocItemViewModel>(outputRawModelPath); var expectedModel = new TocItemViewModel { Items = new TocViewModel { new TocItemViewModel { Name = "Topic1", Href = "/Topic1/", TocHref = "/Topic1/", Homepage = "/Topic1/index.html", TopicHref = "/Topic1/index.html", Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1", Href = "/Topic1.1/", TocHref = "/Topic1.1/", Homepage = "/Topic1.1/index.html", TopicHref = "/Topic1.1/index.html", }, new TocItemViewModel { Name = "Topic1.2", Href = "/Topic1.2/", TocHref = "/Topic1.2/", Homepage = "/Topic1.2/index.html", TopicHref = "/Topic1.2/index.html", } } }, new TocItemViewModel { Name = "Topic2", TocHref = referencedToc, Href = referencedToc, TopicHref = file2, Homepage = file2, } } }; AssertTocEqual(expectedModel, model); }
private static ITagResolver ResolveBranches(IEnumerable<Commit> commits, FileCollection includedFiles)
{
    ITagResolver branchResolver;
    var autoBranchResolver = new AutoBranchResolver(m_log, includedFiles)
    {
        PartialTagThreshold = m_config.PartialTagThreshold
    };
    branchResolver = autoBranchResolver;

    // if we're matching branchpoints, resolve those tags first
    if (m_config.BranchpointRule != null)
    {
        var tagResolver = new TagResolver(m_log, includedFiles)
        {
            PartialTagThreshold = m_config.PartialTagThreshold
        };

        var allBranches = includedFiles.SelectMany(f => f.AllBranches).Distinct();
        var rule = m_config.BranchpointRule;
        var branchpointTags = allBranches.Where(b => rule.IsMatch(b)).Select(b => rule.Apply(b));

        if (!tagResolver.Resolve(branchpointTags, commits))
        {
            var unresolvedTags = tagResolver.UnresolvedTags.OrderBy(i => i);
            m_log.WriteLine("Unresolved branchpoint tags:");

            using (m_log.Indent())
            {
                foreach (var tag in unresolvedTags)
                    m_log.WriteLine("{0}", tag);
            }
        }

        commits = tagResolver.Commits;
        branchResolver = new ManualBranchResolver(m_log, autoBranchResolver, tagResolver, m_config.BranchpointRule);
    }

    // resolve remaining branchpoints
    if (!branchResolver.Resolve(includedFiles.SelectMany(f => f.AllBranches).Distinct(), commits))
    {
        var unresolvedTags = branchResolver.UnresolvedTags.OrderBy(i => i);
        m_log.WriteLine("Unresolved branches:");

        using (m_log.Indent())
        {
            foreach (var tag in unresolvedTags)
                m_log.WriteLine("{0}", tag);
        }

        throw new ImportFailedException(String.Format("Unable to resolve all branches to a single commit: {0}",
                branchResolver.UnresolvedTags.StringJoin(", ")));
    }

    return branchResolver;
}
public static void CreateReport(string spUserName, string spUserPWD, string spListID, HtmlToPdf customerWeekly_html, HtmlToPdf customerWeekly_html_coverpage, string isDebugMode) { try { LogFile_CustomerWeekly.WriteBlankLine(); LogFile_CustomerWeekly.WriteLogMessage("=================================================================================================++++++++++"); LogFile_CustomerWeekly.WriteLogMessage("Starting PDF Generator for Customer Weekly Report for List ID: " + spListID.ToString()); //Get the list of records to process DataTable dt = GetDetailsList(spListID); if (dt.Rows.Count < 1) { string errMsg = "ERROR CreateReport: Unable to retrieve record from the DB for List ID: " + spListID.ToString(); LogFile_CustomerWeekly.WriteLogMessage(errMsg); PDF_Report_Generator.SaveErrorToDB(spListID, "LIST_ID", errMsg, 1, "CustomerWeeklyReport"); } foreach (DataRow row in dt.Rows) { int listItemID = int.Parse(row["list_id"].ToString()); string jobNumber = row["job_number"].ToString(); string jobGUID = row["jobGUID"].ToString(); LogFile_CustomerWeekly.WriteLogMessage("Sharepoint ListID: " + listItemID.ToString()); String siteUrl = "https://mySharePointLists.com/reports"; String listName = "Customer Weekly Report"; NetworkCredential credentials = new NetworkCredential(spUserName, spUserPWD, "test"); using (ClientContext clientContext = new ClientContext(siteUrl)) { LogFile_CustomerWeekly.WriteLogMessage("Started Attachment Download " + siteUrl); clientContext.Credentials = credentials; //Get the Site Collection Site oSite = clientContext.Site; clientContext.Load(oSite); clientContext.ExecuteQuery(); // Get the Web Web oWeb = clientContext.Web; clientContext.Load(oWeb); clientContext.ExecuteQuery(); CamlQuery query = new CamlQuery(); query.ViewXml = "<View><Query><Where><Geq><FieldRef Name='ID'/>" + "<Value Type='Number'>" + listItemID + "</Value></Geq></Where></Query><RowLimit>100</RowLimit></View>"; List oList = clientContext.Web.Lists.GetByTitle(listName); clientContext.Load(oList); clientContext.ExecuteQuery(); ListItemCollection items = oList.GetItems(query); clientContext.Load(items); clientContext.ExecuteQuery(); //create the header for each page WebClient wc_pdf_header = new WebClient(); string pdf_header = wc_pdf_header.DownloadString("http://mySiteAssest.com/HTML_Report_Templates/CustomerWeeklyReport/PDF_Header_Template.html"); StringBuilder sb_header = new StringBuilder(pdf_header); byte[] bannerImgAsByteArray; using (var webClient = new WebClient()) { bannerImgAsByteArray = webClient.DownloadData("http://mySiteAssest.com/Images/CreatePDFReport_banner.png"); } //replace the banner image placeholder sb_header.Replace("{banner_image}", photoURI(bannerImgAsByteArray)); //add the header html to all body pages customerWeekly_html.PrintOptions.Header = new HtmlHeaderFooter() { HtmlFragment = sb_header.ToString(), Height = 35 }; //create the footer for each page WebClient wc_pdf_footer = new WebClient(); string pdf_footer = wc_pdf_footer.DownloadString("http://mySiteAssest.com/HTML_Report_Templates/CustomerWeeklyReport/PDF_Footer_Template.html"); //replace the field placeholders StringBuilder sb_footer = new StringBuilder(pdf_footer); sb_footer.Replace("{job_number}", row["job_number"].ToString()); sb_footer.Replace("{project_name}", row["project_name"].ToString()); sb_footer.Replace("{location}", row["location"].ToString()); //add the footer html to all body pages customerWeekly_html.PrintOptions.Footer = new HtmlHeaderFooter() { HtmlFragment = sb_footer.ToString(), Height = 32 }; 
//Get the pdf coverpage template WebClient wc_pdf_coverpage = new WebClient(); string pdf_coverpage = wc_pdf_coverpage.DownloadString("http://mySiteAssest.com/HTML_Report_Templates/CustomerWeeklyReport/PDF_CoverPage_Template.html"); //create the background image for the cover page byte[] bgImgAsByteArray; using (var webClient = new WebClient()) { bgImgAsByteArray = webClient.DownloadData("http://mySiteAssest.com/Images/CreatePDFReport_Background_Fade.png"); } StringBuilder sb_coverpage = new StringBuilder(pdf_coverpage); sb_coverpage.Replace("{background_image}", photoURI(bgImgAsByteArray)); sb_coverpage.Replace("{cp_job_number}", row["job_number"].ToString()); sb_coverpage.Replace("{cp_project_name}", row["project_name"].ToString()); sb_coverpage.Replace("{cp_location}", row["location"].ToString()); sb_coverpage.Replace("{report_date}", row["week_ending"].ToString()); //add the header html to the coverpage customerWeekly_html_coverpage.PrintOptions.Header = new HtmlHeaderFooter() { HtmlFragment = sb_header.ToString(), Height = 35 }; //replace the coverpage footer placeholders - this page will not have any data in the footer StringBuilder sb_footer_coverpage = new StringBuilder(pdf_footer); sb_footer_coverpage.Replace("Job#: </strong>{job_number}", " "); sb_footer_coverpage.Replace("Project Name: </strong>{project_name}", " "); sb_footer_coverpage.Replace("Location: </strong>{location}", " "); //add the footer html to the coverpage customerWeekly_html_coverpage.PrintOptions.Footer = new HtmlHeaderFooter() { HtmlFragment = sb_footer_coverpage.ToString(), Height = 32 }; //Get the pdf body template WebClient wc_pdf_Body = new WebClient(); string pdf_body = wc_pdf_Body.DownloadString("http://mySiteAssest.com/HTML_Report_Templates/CustomerWeeklyReport/PDF_Body_Template.html"); StringBuilder sb_body = new StringBuilder(pdf_body); DataTable dtVP = GeNARContacts(); bool narContact1Flag = false; bool narContact2Flag = false; bool narContact3Flag = false; foreach (DataRow narContact in dtVP.Rows) { if (narContact["OrderBy"].ToString() == "1") { sb_body.Replace("{vp_name}", narContact["FullName"].ToString()); sb_body.Replace("{vp_title}", narContact["PositionIdName"].ToString()); sb_body.Replace("{vp_phone}", narContact["address1_telephone1"].ToString()); sb_body.Replace("{vp_email}", narContact["internalemailaddress"].ToString()); narContact1Flag = true; } if (narContact["OrderBy"].ToString() == "2") { sb_body.Replace("{sdr_name}", narContact["FullName"].ToString()); sb_body.Replace("{sdr_title}", narContact["PositionIdName"].ToString()); sb_body.Replace("{sdr_phone}", narContact["address1_telephone1"].ToString()); sb_body.Replace("{sdr_email}", narContact["internalemailaddress"].ToString()); narContact2Flag = true; } if (narContact["OrderBy"].ToString() == "3") { sb_body.Replace("{mdr_name}", narContact["FullName"].ToString()); sb_body.Replace("{mdr_title}", narContact["PositionIdName"].ToString()); sb_body.Replace("{mdr_phone}", narContact["address1_telephone1"].ToString()); sb_body.Replace("{mdr_email}", narContact["internalemailaddress"].ToString()); narContact3Flag = true; } } //clear out placeholders for the NAR contacts that we do not have if (!narContact1Flag) { sb_body.Replace("{vp_name}", ""); sb_body.Replace("{vp_title}", ""); sb_body.Replace("{vp_phone}", ""); sb_body.Replace("{vp_email}", ""); } if (!narContact2Flag) { sb_body.Replace("{sdr_name}", ""); sb_body.Replace("{sdr_title}", ""); sb_body.Replace("{sdr_phone}", ""); sb_body.Replace("{sdr_email}", ""); } if (!narContact3Flag) { 
sb_body.Replace("{mdr_name}", ""); sb_body.Replace("{mdr_title}", ""); sb_body.Replace("{mdr_phone}", ""); sb_body.Replace("{mdr_email}", ""); } //replace the field placeholders in the body sb_body.Replace("{pm_name}", row["pm_name"].ToString()); sb_body.Replace("{pm_phone}", row["pm_phone"].ToString()); sb_body.Replace("{pm_email}", row["pm_email"].ToString()); sb_body.Replace("{apm_name}", row["apm_name"].ToString()); sb_body.Replace("{apm_phone}", row["apm_phone"].ToString()); sb_body.Replace("{apm_email}", row["apm_email"].ToString()); sb_body.Replace("{job_type}", row["job_type"].ToString()); sb_body.Replace("{job_size}", row["job_size"].ToString()); sb_body.Replace("{est_completion_date}", row["est_completion_date"].ToString()); sb_body.Replace("{total_squares}", row["total_squares"].ToString()); sb_body.Replace("{percent_complete}", row["percent_complete"].ToString()); sb_body.Replace("{squares_installed}", row["squares_installed"].ToString()); sb_body.Replace("{total_squares}", row["total_squares"].ToString()); //Create the invdividual photo template WebClient wc_pdf_photopage = new WebClient(); string photopage_Template = wc_pdf_photopage.DownloadString("http://mySiteAssest.com/HTML_Report_Templates/CustomerWeeklyReport/PDF_PhotosPage_Template.html"); StringBuilder sp_photopage = new StringBuilder(photopage_Template); //Create the invdividual photo template WebClient wc_pdf_photo = new WebClient(); string photo_Template = wc_pdf_photo.DownloadString("http://mySiteAssest.com/HTML_Report_Templates/CustomerWeeklyReport/PDF_Photo_Template.html"); StringBuilder photoDivs = new StringBuilder(); bool hasImages = false; foreach (ListItem listItem in items) { if (Int32.Parse(listItem["ID"].ToString()) == listItemID) { string folderURL = oSite.Url + "/customerWeeklyRpts/Lists/" + listName + "/Attachments/" + listItem["ID"]; Folder folder = oWeb.GetFolderByServerRelativeUrl(folderURL); clientContext.Load(folder); try { clientContext.ExecuteQuery(); } catch (ServerException ex) { LogFile_CustomerWeekly.WriteLogMessage(ex.Message); LogFile_CustomerWeekly.WriteLogMessage("No Attachment for ID " + listItem["ID"].ToString()); LogFile_CustomerWeekly.WriteBlankLine(); LogFile_CustomerWeekly.WriteBlankLine(); } FileCollection attachments = folder.Files; clientContext.Load(attachments); clientContext.ExecuteQuery(); //Set the counts used to insert rows of images - 3 images per row int imageCnt = 0; int totalImgCnt = 0; string photo1 = ""; string photo2 = ""; //Loop through the photos attached to the sharepoint report foreach (Microsoft.SharePoint.Client.File oFile in folder.Files) { FileInfo fiPhoto = new FileInfo(oFile.Name); WebClient client1 = new WebClient(); client1.Credentials = credentials; LogFile_CustomerWeekly.WriteLogMessage("Downloading " + oFile.ServerRelativeUrl); if (IsImageExtension(fiPhoto.Extension)) { //convert the photo to a byte image and then resize it so that they are all uniform. 
byte[] fileContents = client1.DownloadData("https://mySharePointLists.com" + oFile.ServerRelativeUrl); byte[] resizedImage = CreateThumbnail(fileContents, 300); //Get the extension of the file - we can only use images on the PDF - jpg, jpeg, png, gif, bmp LogFile_CustomerWeekly.WriteLogMessage("Photo Extension " + fiPhoto.Extension); imageCnt++; totalImgCnt++; hasImages = true; //Add the image to the photo area template if (imageCnt == 2) { photoDivs.Append(photo_Template); //now convert the byte image to a DataUri stream so we can include it in the pdf photo1 = photoURI(resizedImage); photoDivs.Replace("{photo_number}", totalImgCnt.ToString()); photoDivs.Replace("{photo_source}", photo1); photoDivs.Append("</tr>"); //Reset our image variable for the next row imageCnt = 0; } else { photoDivs.Append("<tr>"); photoDivs.Append(photo_Template); //now convert the byte image to a DataUri stream so we can include it in the pdf photo2 = photoURI(resizedImage); photoDivs.Replace("{photo_number}", totalImgCnt.ToString()); photoDivs.Replace("{photo_source}", photo2); } } else { LogFile_CustomerWeekly.WriteLogMessage("Invalid Photo Extension " + fiPhoto.Extension); } } //Add the last row of images if (imageCnt < 2 && hasImages == true) { photoDivs.Append("</tr>"); } } } if (!hasImages) { //There was no valid image for this job so leave the photo are blank sb_body.Replace("{photos_page}", ""); } else { //There was at least one valid image for this job so insert the photopage template into the body sb_body.Replace("{photos_page}", sp_photopage.ToString()); //Then insert the photo template into the body template sb_body.Replace("{photos}", photoDivs.ToString()); } //Save the completed PDF document to the sharepoint doc location for the job bool spSave = PDF_Report_Generator.createCustomerWeeklyReport(sb_coverpage, customerWeekly_html_coverpage, sb_body, customerWeekly_html, jobNumber, jobGUID, spListID, "1", isDebugMode); var pdfFileName = "CustomerWeeklyReport_" + jobNumber + "_" + spListID.ToString() + "_" + DateTime.Now.ToString("yyyyMMdd") + ".pdf"; if (spSave) { LogFile_CustomerWeekly.WriteLogMessage("SUCCESS: File " + pdfFileName + " saved to sharepoint doc location for job #: " + jobNumber); } else { LogFile_CustomerWeekly.WriteLogMessage("ERROR: File " + pdfFileName + " was not saved to sharepoint doc location for job: " + jobNumber); } //for debugging only -we will not save the pdf to a file location //PDFDoc_Report.SaveAs(@"C:\Projects\Projects\Create_PDF_Reports\Reports\" + pdfFileName); LogFile_CustomerWeekly.WriteLogMessage("=================================================================================================++++++++++"); LogFile_CustomerWeekly.WriteBlankLine(); } } } catch (Exception e) { LogFile_CustomerWeekly.WriteLogMessage("ERROR CreateReport: Unable to create PDF report - " + e.Message); LogFile_CustomerWeekly.WriteBlankLine(); LogFile_CustomerWeekly.WriteBlankLine(); string errMsg = "ERROR CreateReport: Unable to create PDF report - " + e.Message; LogFile_CustomerWeekly.WriteLogMessage(errMsg); PDF_Report_Generator.SaveErrorToDB(spListID, "LIST_ID", errMsg, 1, "CustomerWeeklyReport"); } }
public static List<JObject> GetDocuments(ClientContext cc, FileCollection files, string foldername)
{
    List<JObject> SharePointDocs = new List<JObject>();

    for (int f = 0; f < files.Count; f++)
    {
        Microsoft.SharePoint.Client.File file = files[f];
        ListItem item = file.ListItemAllFields;

        var json = new JObject();
        json.Add(new JProperty("filename", file.Name));
        if (foldername != null)
        {
            json.Add(new JProperty("folder", foldername));
        }
        json.Add(new JProperty("uri", file.LinkingUri));

        foreach (KeyValuePair<string, Object> field in item.FieldValues)
        {
            if (field.Value != null)
            {
                Regex rg = new Regex(@"Microsoft\.SharePoint\.Client\..*");
                var match = rg.Match(field.Value.ToString());

                // Check for CSOM complex field value types (user, lookup and taxonomy fields)
                if (match.Success && field.Value.ToString().Equals("Microsoft.SharePoint.Client.FieldUserValue"))
                {
                    FieldUserValue fieldUserValue = field.Value as FieldUserValue;
                    var jsonUser = new JObject();
                    jsonUser.Add(new JProperty("Email", fieldUserValue.Email));
                    jsonUser.Add(new JProperty("LookupId", fieldUserValue.LookupId));
                    jsonUser.Add(new JProperty("LookupValue", fieldUserValue.LookupValue));
                    json.Add(new JProperty(field.Key, jsonUser));
                }
                else if (match.Success && field.Value.ToString().Equals("Microsoft.SharePoint.Client.FieldLookupValue"))
                {
                    FieldLookupValue fieldLookupValue = field.Value as FieldLookupValue;
                    var jsonfieldLookup = new JObject();
                    jsonfieldLookup.Add(new JProperty("LookupID", fieldLookupValue.LookupId));
                    jsonfieldLookup.Add(new JProperty("LookupValue", fieldLookupValue.LookupValue));
                    json.Add(new JProperty(field.Key, jsonfieldLookup));
                }
                else if (match.Success && field.Value.ToString().Equals("Microsoft.SharePoint.Client.Taxonomy.TaxonomyFieldValue"))
                {
                    TaxonomyFieldValue taxonomyFieldValue = field.Value as TaxonomyFieldValue;
                    var jsonTaxField = new JObject();
                    jsonTaxField.Add(new JProperty("WssId", taxonomyFieldValue.WssId));
                    jsonTaxField.Add(new JProperty("TermGuid", taxonomyFieldValue.TermGuid));
                    jsonTaxField.Add(new JProperty("Label", taxonomyFieldValue.Label));
                    json.Add(new JProperty(field.Key, jsonTaxField));
                }
                else
                {
                    json.Add(new JProperty(field.Key, field.Value.ToString()));
                }
            }
        }
        SharePointDocs.Add(json);
    }
    return SharePointDocs;
}
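A hedged example of obtaining the FileCollection that GetDocuments expects from a document library folder; the site URL and folder path are assumptions and authentication is omitted, while GetFolderByServerRelativeUrl, Load and ExecuteQuery are the same CSOM calls used elsewhere on this page.

// Hypothetical caller; replace the URL and folder path with real values and supply credentials.
using (var cc = new ClientContext("https://contoso.sharepoint.com/sites/demo"))
{
    Folder folder = cc.Web.GetFolderByServerRelativeUrl("/sites/demo/Shared Documents");
    FileCollection files = folder.Files;
    cc.Load(files);
    cc.ExecuteQuery();

    List<JObject> docs = GetDocuments(cc, files, "Shared Documents");
}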
static void Main(string[] args) { try { //Get site URL and credentials values from config Uri siteUri = new Uri(ConfigurationManager.AppSettings["SourceSite"].ToString()); //Connect to SharePoint Online using (ClientContext clientContext = new ClientContext(siteUri.ToString())) { SecureString passWord = new SecureString(); foreach (char c in ConfigurationManager.AppSettings["DestinationPassword"].ToCharArray()) { passWord.AppendChar(c); } clientContext.Credentials = new SharePointOnlineCredentials("*****@*****.**", passWord); if (clientContext != null) { //Source list List sourceList = clientContext.Web.Lists.GetByTitle(ConfigurationManager.AppSettings["SourceList"]); //Destination library List destinationLibrary = clientContext.Web.Lists.GetByTitle(ConfigurationManager.AppSettings["DestinationLibrary"]); // try to get all the list items // could get in sections if it exceeds List View Threshold CamlQuery camlQuery = new CamlQuery(); camlQuery.ViewXml = "<View><Query><OrderBy><FieldRef Name='Title' /></OrderBy></Query></View>"; ListItemCollection listItems = sourceList.GetItems(camlQuery); FieldCollection listFields = sourceList.Fields; clientContext.Load(sourceList); clientContext.Load(listFields); clientContext.Load(listItems); clientContext.ExecuteQuery(); // Download attachments for each list item and then upload to new list item foreach (ListItem item in listItems) { string attachmentURL = siteUri + "/Lists/" + ConfigurationManager.AppSettings["SourceList"].ToString() + "/Attachments/" + item["ID"]; Folder folder = clientContext.Web.GetFolderByServerRelativeUrl(attachmentURL); clientContext.Load(folder); try { clientContext.ExecuteQuery(); } catch (ServerException ex) { Console.WriteLine(ex.Message); Console.WriteLine("No Attachment for ID " + item["ID"].ToString()); } FileCollection attachments = folder.Files; clientContext.Load(attachments); clientContext.ExecuteQuery(); // write each file to local disk foreach (SP.File file in folder.Files) { if (clientContext.HasPendingRequest) { clientContext.ExecuteQuery(); } var fileRef = file.ServerRelativeUrl; var fileInfo = SP.File.OpenBinaryDirect(clientContext, fileRef); using (var memory = new MemoryStream()) { byte[] buffer = new byte[1024 * 64]; int nread = 0; while ((nread = fileInfo.Stream.Read(buffer, 0, buffer.Length)) > 0) { memory.Write(buffer, 0, nread); } memory.Seek(0, SeekOrigin.Begin); // at this point you have the contents of your file in memory // save to computer Microsoft.SharePoint.Client.File.SaveBinaryDirect(clientContext, string.Format("/{0}/{1}", ConfigurationManager.AppSettings["AttachmentLibrary"], System.IO.Path.GetFileName(file.Name)), memory, true); } // this call avoids potential problems if any requests are still pending if (clientContext.HasPendingRequest) { clientContext.ExecuteQuery(); } SP.File newFile = clientContext.Web.GetFileByServerRelativeUrl(string.Format("/{0}/{1}", ConfigurationManager.AppSettings["AttachmentLibrary"], System.IO.Path.GetFileName(file.Name))); clientContext.Load(newFile); clientContext.ExecuteQuery(); //check out to make sure not to create multiple versions newFile.CheckOut(); FieldLookupValue applicationName = item["Source"] as FieldLookupValue; // app name may be null if (applicationName == null) { applicationName = new FieldLookupValue(); } applicationName.LookupId = Convert.ToInt32(item["ID"]); ListItem newItem = newFile.ListItemAllFields; newItem["From_x0020_Source"] = applicationName; newItem.Update(); // use OverwriteCheckIn type to make sure not to create multiple versions 
newFile.CheckIn(string.Empty, CheckinType.OverwriteCheckIn); // Clear requests if any if pending if (clientContext.HasPendingRequest) { clientContext.ExecuteQuery(); } } Console.WriteLine("All list items and attachments copied over. Press any key to close"); Console.ReadKey(); } } } } catch (Exception ex) { Console.WriteLine("Failed: " + ex.Message); Console.WriteLine("Stack Trace: " + ex.StackTrace); Console.ReadKey(); } }
public void TestContextObjectSDP() { Environment.SetEnvironmentVariable("_op_systemMetadata", JsonUtility.ToJsonString(new Dictionary <string, object> { { "_op_siteHostName", "ppe.docs.microsoft.com" } })); using var listener = new TestListenerScope("TestContextObjectSDP"); var schemaFile = CreateFile("template/schemas/contextobject.schema.json", File.ReadAllText("TestData/schemas/contextobject.test.schema.json"), _templateFolder); var tocTemplate = CreateFile("template/toc.json.tmpl", "toc template", _templateFolder); // var coTemplate = CreateFile("template/contextobject.json.tmpl", "{{file_include2}}", _templateFolder); var inputFileName = "co/active.yml"; var includeFile = CreateFile("a b/inc.md", @"[root](../co/active.yml)", _inputFolder); var includeFile2 = CreateFile("c/d/inc.md", @"../../a b/toc.md", _inputFolder); var inputFile = CreateFile(inputFileName, @"### YamlMime:ContextObject breadcrumb_path: https://ppe.docs.microsoft.com/absolute/toc.json toc_rel: ../a b/toc.md file_include: ../a b/inc.md file_include2: ../c/d/inc.md uhfHeaderId: MSDocsHeader-DotNet empty: searchScope: - .NET ", _inputFolder); FileCollection files = new FileCollection(_defaultFiles); files.Add(DocumentType.Article, new[] { inputFile }, _inputFolder); BuildDocument(files); Assert.Equal(4, listener.Items.Count); Assert.Equal(2, listener.Items.Count(s => s.Message.StartsWith($"Invalid file link:(~/{_inputFolder}/a b/toc.md)."))); Assert.NotNull(listener.Items.FirstOrDefault(s => s.Message.StartsWith("There is no template processing document type(s): ContextObject"))); Assert.NotNull(listener.Items.FirstOrDefault(s => s.Message.StartsWith("Invalid file link"))); listener.Items.Clear(); var rawModelFilePath = GetRawModelFilePath(inputFileName); Assert.True(File.Exists(rawModelFilePath)); var rawModel = JsonUtility.Deserialize <JObject>(rawModelFilePath); Assert.Equal("Hello world!", rawModel["meta"].Value <string>()); Assert.Equal("/absolute/toc.json", rawModel["breadcrumb_path"].Value <string>()); Assert.Equal("../a b/toc.md", rawModel["toc_rel"].Value <string>()); Assert.Equal($"<p sourcefile=\"{includeFile}\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\" jsonPath=\"/file_include\"><a href=\"~/{inputFile}\" data-raw-source=\"[root](../co/active.yml)\" sourcefile=\"{includeFile}\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">root</a></p>\n", rawModel["file_include"].Value <string>()); Assert.Equal("../../a b/toc.md", rawModel["file_include2"].Value <string>()); Assert.Equal("MSDocsHeader-DotNet", rawModel["uhfHeaderId"].Value <string>()); Assert.Equal(".NET", rawModel["searchScope"][0].Value <string>()); files = new FileCollection(_defaultFiles); files.Add(DocumentType.Article, new[] { inputFile }, _inputFolder); var tocFile = CreateFile("a b/toc.md", "### hello", _inputFolder); files.Add(DocumentType.Article, new[] { tocFile }, _inputFolder); BuildDocument(files); Assert.Equal(2, listener.Items.Count); Assert.NotNull(listener.Items.FirstOrDefault(s => s.Message.StartsWith("There is no template processing document type(s): ContextObject"))); Assert.True(File.Exists(rawModelFilePath)); rawModel = JsonUtility.Deserialize <JObject>(rawModelFilePath); Assert.Equal("Hello world!", rawModel["meta"].Value <string>()); Assert.Equal("/absolute/toc.json", rawModel["breadcrumb_path"].Value <string>()); Assert.Equal("../a%20b/toc.json", rawModel["toc_rel"].Value <string>()); Assert.Equal("MSDocsHeader-DotNet", rawModel["uhfHeaderId"].Value <string>()); Assert.Equal(".NET", 
rawModel["searchScope"][0].Value <string>()); Assert.Equal("../a%20b/toc.json", rawModel["file_include2"].Value <string>()); }
private static ImmutableDictionary<string, ChangeKindWithDependency> GetIntersectChanges(FileCollection files, ChangeList changeList)
{
    if (changeList == null)
    {
        return null;
    }

    var dict = new OSPlatformSensitiveDictionary<ChangeKindWithDependency>();
    foreach (var file in files.EnumerateFiles())
    {
        string fileKey = ((RelativePath)file.File).GetPathFromWorkingFolder().ToString();
        dict[fileKey] = ChangeKindWithDependency.None;
    }

    foreach (ChangeItem change in changeList)
    {
        string fileKey = ((RelativePath)change.FilePath).GetPathFromWorkingFolder().ToString();

        // always put the change into dict because docfx could access files outside its own scope, like tokens.
        dict[fileKey] = change.Kind;
    }

    return dict.ToImmutableDictionary(FilePathComparer.OSPlatformSensitiveStringComparer);
}
public void TestUidWithPatternedTag() { using var listener = new TestListenerScope("TestUidWithPatternedTag"); var schemaFile = CreateFile("template/schemas/patterned.uid.test.schema.json", @" { ""$schema"": ""http://dotnet.github.io/docfx/schemas/v1.0/schema.json#"", ""version"": ""1.0.0"", ""title"": ""PatternedUid"", ""description"": ""A simple test schema for sdp's patterned uid"", ""type"": ""object"", ""properties"": { ""uid"": { ""type"": ""string"", ""tags"": [ ""patterned:uid"" ] } } } ", _templateFolder); var inputFile = CreateFile("PatternedUid.yml", @"### YamlMime:PatternedUid uid: azure.hello1 ", _inputFolder); FileCollection files = new FileCollection(_defaultFiles); files.Add(DocumentType.Article, new[] { inputFile }, _inputFolder); BuildDocument(files, new DocumentBuildParameters { Files = files, OutputBaseDir = _outputFolder, ApplyTemplateSettings = _applyTemplateSettings, TemplateManager = _templateManager, TagParameters = new Dictionary <string, JArray> { ["patterned:uid"] = JArray.FromObject(new List <string> { "^azure\\..*" }) }, }); Assert.Equal(2, listener.Items.Count); Assert.NotNull(listener.Items.FirstOrDefault(s => s.Message.StartsWith("There is no template processing document type(s): PatternedUid"))); listener.Items.Clear(); inputFile = CreateFile("PatternedUid2.yml", @"### YamlMime:PatternedUid uid: invalid.hello1 ", _inputFolder); files.Add(DocumentType.Article, new[] { inputFile }, _inputFolder); inputFile = CreateFile("PatternedUid3.yml", @"### YamlMime:PatternedUid uid: invalid.azure.hello2 ", _inputFolder); files.Add(DocumentType.Article, new[] { inputFile }, _inputFolder); var exception = Assert.Throws <DocumentException>(() => BuildDocument(files, new DocumentBuildParameters { Files = files, OutputBaseDir = _outputFolder, ApplyTemplateSettings = _applyTemplateSettings, TemplateManager = _templateManager, TagParameters = new Dictionary <string, JArray> { ["patterned:uid"] = JArray.FromObject(new List <string> { "^azure\\..*" }) }, })); Assert.Equal(2, listener.Items.Count(s => s.Code == ErrorCodes.Build.InvalidPropertyFormat)); }
public void TestValidMetadataReferenceWithIncremental() { using var listener = new TestListenerScope("TestGeneralFeaturesInSDP"); var schemaFile = CreateFile("template/schemas/mta.reference.test.schema.json", @" { ""$schema"": ""http://dotnet.github.io/docfx/schemas/v1.0/schema.json#"", ""version"": ""1.0.0"", ""title"": ""MetadataReferenceTest"", ""description"": ""A simple test schema for sdp"", ""type"": ""object"", ""properties"": { ""metadata"": { ""type"": ""object"" }, ""href"": { ""type"": ""string"", ""contentType"": ""href"" } }, ""metadata"": ""/metadata"" } ", _templateFolder); var inputFileName1 = "page1.yml"; var inputFile1 = CreateFile(inputFileName1, @"### YamlMime:MetadataReferenceTest title: Web Apps Documentation metadata: title: Azure Web Apps Documentation - Tutorials, API Reference meta.description: Learn how to use App Service Web Apps to build and host websites and web applications. ms.service: app-service ms.tgt_pltfrm: na ms.author: carolz href: toc.md sections: - title: 5-Minute Quickstarts toc_rel: ../a b/toc.md uhfHeaderId: MSDocsHeader-DotNet searchScope: - .NET ", _inputFolder); var dependentMarkdown = CreateFile("toc.md", "# Hello", _inputFolder); var inputFileName2 = "page2.yml"; var inputFile2 = CreateFile(inputFileName2, @"### YamlMime:MetadataReferenceTest title: Web Apps Documentation ", _inputFolder); FileCollection files = new FileCollection(_defaultFiles); files.Add(DocumentType.Article, new[] { inputFile1, inputFile2, dependentMarkdown }, _inputFolder); using (new LoggerPhaseScope("FirstRound")) { BuildDocument(files); } Assert.Equal(3, listener.Items.Count); Assert.NotNull(listener.Items.FirstOrDefault(s => s.Message.StartsWith("There is no template processing document type(s): MetadataReferenceTest,Toc"))); listener.Items.Clear(); var rawModelFilePath = GetRawModelFilePath(inputFileName1); Assert.True(File.Exists(rawModelFilePath)); var rawModel = JsonUtility.Deserialize <JObject>(rawModelFilePath); Assert.Equal("overwritten", rawModel["metadata"]["meta"].ToString()); Assert.Equal("postbuild1", rawModel["metadata"]["postMeta"].ToString()); Assert.Equal("1", rawModel["metadata"]["another"].ToString()); Assert.Equal("app-service", rawModel["metadata"]["ms.service"].ToString()); var rawModelFilePath2 = GetRawModelFilePath(inputFileName2); Assert.True(File.Exists(rawModelFilePath2)); var rawModel2 = JsonUtility.Deserialize <JObject>(rawModelFilePath2); Assert.Equal("Hello world!", rawModel2["metadata"]["meta"].ToString()); Assert.Equal("2", rawModel2["metadata"]["another"].ToString()); Assert.Equal("postbuild2", rawModel2["metadata"]["postMeta"].ToString()); // change dependent markdown UpdateFile("toc.md", new string[] { "# Updated" }, _inputFolder); using (new LoggerPhaseScope("SecondRound")) { BuildDocument(files); } rawModel = JsonUtility.Deserialize <JObject>(rawModelFilePath); Assert.Equal("overwritten", rawModel["metadata"]["meta"].ToString()); Assert.Equal("1", rawModel["metadata"]["another"].ToString()); Assert.Equal("app-service", rawModel["metadata"]["ms.service"].ToString()); Assert.Equal("postbuild1", rawModel["metadata"]["postMeta"].ToString()); rawModel2 = JsonUtility.Deserialize <JObject>(rawModelFilePath2); Assert.Equal("Hello world!", rawModel2["metadata"]["meta"].ToString()); Assert.Equal("2", rawModel2["metadata"]["another"].ToString()); Assert.Equal("postbuild2", rawModel2["metadata"]["postMeta"].ToString()); }
public void TestRef() { using var listener = new TestListenerScope("TestRef"); var schemaFile = CreateFile("template/schemas/general.test.schema.json", File.ReadAllText("TestData/schemas/general.test.schema.json"), _templateFolder); var templateFile = CreateFile("template/General.html.tmpl", @"{{#items}} {{#aggregatedExceptions}} {{{message}}} {{{inner.message}}} {{{inner.inner.message}}} {{/aggregatedExceptions}} {{#exception}} {{{message}}} {{/exception}} {{{description}}} {{/items}} ", _templateFolder); var inputFileName = "inputs/exp1.yml"; var inputFile = CreateFile(inputFileName, @"### YamlMime:General items: - exception: message: ""**Hello**"" - aggregatedExceptions: - message: ""1**Hello**"" inner: message: ""1.1**Hello**"" inner: message: ""1.1.1**Hello**"" - message: ""2**Hello**"" inner: message: ""2.1**Hello**"" inner: message: ""2.1.1**Hello**"" - message: ""3**Hello**"" inner: message: ""3.1**Hello**"" inner: message: ""3.1.1**Hello**"" - description: ""**outside**"" ", _inputFolder); FileCollection files = new FileCollection(_defaultFiles); files.Add(DocumentType.Article, new[] { inputFile }, _inputFolder); BuildDocument(files); Assert.Single(listener.Items); var xrefspec = Path.Combine(_outputFolder, "xrefmap.yml"); var xrefmap = YamlUtility.Deserialize <XRefMap>(xrefspec); Assert.Empty(xrefmap.References); var outputFileName = Path.ChangeExtension(inputFileName, ".html"); var outputFilePath = Path.Combine(_outputFolder, outputFileName); Assert.True(File.Exists(outputFilePath)); Assert.Equal(@" <p><strong>Hello</strong></p> <p>1<strong>Hello</strong></p> <p>1.1<strong>Hello</strong></p> <p>1.1.1<strong>Hello</strong></p> <p>2<strong>Hello</strong></p> <p>2.1<strong>Hello</strong></p> <p>2.1.1<strong>Hello</strong></p> <p>3<strong>Hello</strong></p> <p>3.1<strong>Hello</strong></p> <p>3.1.1<strong>Hello</strong></p> <p><strong>outside</strong></p> " .Split(new string[] { "\r\n", "\n" }, StringSplitOptions.RemoveEmptyEntries), File.ReadAllLines(outputFilePath).Where(s => !string.IsNullOrWhiteSpace(s)).Select(s => s.Trim()).ToArray()); }
/// <summary>
/// Batch load: copy each File entity in the collection into a FileInfo and append it to the target list.
/// </summary>
internal static void LoadFromDALPatch(List<FileInfo> pList, FileCollection pCollection)
{
    foreach (File file in pCollection)
    {
        FileInfo fileInfo = new FileInfo();
        LoadFromDAL(fileInfo, file);
        pList.Add(fileInfo);
    }
}
public void TestBuild() { const string documentsBaseDir = "db.documents"; const string outputBaseDir = "db.output"; const string RawModelFileExtension = ".raw.json"; #region Prepare test data if (Directory.Exists(documentsBaseDir)) { Directory.Delete(documentsBaseDir, true); } if (Directory.Exists(outputBaseDir)) { Directory.Delete(outputBaseDir, true); } Directory.CreateDirectory(documentsBaseDir); Directory.CreateDirectory(documentsBaseDir + "/test"); Directory.CreateDirectory(outputBaseDir); var tocFile = Path.Combine(documentsBaseDir, "toc.md"); var conceptualFile = Path.Combine(documentsBaseDir, "test.md"); var conceptualFile2 = Path.Combine(documentsBaseDir, "test/test.md"); var resourceFile = Path.GetFileName(typeof(DocumentBuilderTest).Assembly.Location); var resourceMetaFile = resourceFile + ".meta"; File.WriteAllLines( tocFile, new[] { "# [test1](test.md)", "## [test2](test/test.md)", "# Api", "## [Console](@System.Console)", "## [ConsoleColor](xref:System.ConsoleColor)", }); File.WriteAllLines( conceptualFile, new[] { "---", "uid: XRef1", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef1", "Test link: [link text](test/test.md)", "Test link: [link text 2](../" + resourceFile + ")", "<p>", "test", }); File.WriteAllLines( conceptualFile2, new[] { "---", "uid: XRef2", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef2", "Test link: [link text](../test.md)", "<p>", "test", }); File.WriteAllText(resourceMetaFile, @"{ abc: ""xyz"", uid: ""r1"" }"); File.WriteAllText(MarkdownSytleConfig.MarkdownStyleFileName, @"{ rules : [ ""foo"", { name: ""bar"", disable: true} ], tagRules : [ { tagNames: [""p""], behavior: ""Warning"", messageFormatter: ""Tag {0} is not valid."", openingTagOnly: true } ] }"); FileCollection files = new FileCollection(Environment.CurrentDirectory); files.Add(DocumentType.Article, new[] { tocFile, conceptualFile, conceptualFile2 }); files.Add(DocumentType.Article, new[] { "TestData/System.Console.csyml", "TestData/System.ConsoleColor.csyml" }, p => (((RelativePath)p) - (RelativePath)"TestData/").ToString()); files.Add(DocumentType.Resource, new[] { resourceFile }); #endregion Init(string.Join(".", nameof(DocumentBuilderTest), DocumentBuilder.PhaseName, MarkdownValidatorBuilder.MarkdownValidatePhaseName)); try { using (new LoggerPhaseScope(nameof(DocumentBuilderTest))) using (var builder = new DocumentBuilder()) { var applyTemplateSettings = new ApplyTemplateSettings(documentsBaseDir, outputBaseDir); applyTemplateSettings.RawModelExportSettings.Export = true; var parameters = new DocumentBuildParameters { Files = files, OutputBaseDir = Path.Combine(Environment.CurrentDirectory, outputBaseDir), ApplyTemplateSettings = applyTemplateSettings, Metadata = new Dictionary <string, object> { ["meta"] = "Hello world!", }.ToImmutableDictionary() }; builder.Build(parameters); } { // check log for markdown stylecop. Assert.Equal(2, Listener.Items.Count); Assert.Equal("Tag p is not valid.", Listener.Items[0].Message); Assert.Equal(LogLevel.Warning, Listener.Items[0].LogLevel); Assert.Equal("Tag p is not valid.", Listener.Items[1].Message); Assert.Equal(LogLevel.Warning, Listener.Items[1].LogLevel); } { // check toc. 
Assert.True(File.Exists(Path.Combine(outputBaseDir, Path.ChangeExtension(tocFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <TocViewModel>(Path.Combine(outputBaseDir, Path.ChangeExtension(tocFile, RawModelFileExtension))); Assert.NotNull(model); Assert.Equal("test1", model[0].Name); Assert.Equal("test.json", model[0].Href); Assert.NotNull(model[0].Items); Assert.Equal("test2", model[0].Items[0].Name); Assert.Equal("test/test.json", model[0].Items[0].Href); Assert.Equal("Api", model[1].Name); Assert.Null(model[1].Href); Assert.NotNull(model[1].Items); Assert.Equal("Console", model[1].Items[0].Name); Assert.Equal("../System.Console.json", model[1].Items[0].Href); Assert.Equal("ConsoleColor", model[1].Items[1].Name); Assert.Equal("../System.ConsoleColor.json", model[1].Items[1].Href); } { // check conceptual. Assert.True(File.Exists(Path.Combine(outputBaseDir, Path.ChangeExtension(conceptualFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(outputBaseDir, Path.ChangeExtension(conceptualFile, RawModelFileExtension))); Assert.Equal( "<h1 id=\"hello-world\">Hello World</h1>", model["rawTitle"]); Assert.Equal( "\n<p>Test XRef: <xref href=\"XRef1\" data-throw-if-not-resolved=\"False\" data-raw=\"@XRef1\"></xref>\n" + "Test link: <a href=\"~/db.documents/test/test.md\">link text</a>\n" + "Test link: <a href=\"~/" + resourceFile + "\">link text 2</a></p>\n" + "<p><p>\n" + "test</p>\n", model["conceptual"]); Assert.Equal("Conceptual", model["type"]); Assert.Equal("Hello world!", model["meta"]); Assert.Equal("b", model["a"]); } { // check mref. Assert.True(File.Exists(Path.Combine(outputBaseDir, Path.ChangeExtension("System.Console.csyml", RawModelFileExtension)))); Assert.True(File.Exists(Path.Combine(outputBaseDir, Path.ChangeExtension("System.ConsoleColor.csyml", RawModelFileExtension)))); } { // check resource. // as there is no template, resource file will not be copied Assert.True(!File.Exists(Path.Combine(outputBaseDir, resourceFile))); Assert.True(File.Exists(Path.Combine(outputBaseDir, resourceFile + RawModelFileExtension))); var meta = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(outputBaseDir, resourceFile + RawModelFileExtension)); Assert.Equal(3, meta.Count); Assert.True(meta.ContainsKey("meta")); Assert.Equal("Hello world!", meta["meta"]); Assert.True(meta.ContainsKey("abc")); Assert.Equal("xyz", meta["abc"]); Assert.True(meta.ContainsKey("uid")); Assert.Equal("r1", meta["uid"]); } } finally { CleanUp(); Directory.Delete(documentsBaseDir, true); Directory.Delete(outputBaseDir, true); File.Delete(resourceMetaFile); } }
public void TestMarkdownStyleInPlugins() { #region Prepare test data var resourceFile = Path.GetFileName(typeof(DocumentBuilderTest).Assembly.Location); var resourceMetaFile = resourceFile + ".meta"; CreateFile("conceptual.html.primary.tmpl", "{{{conceptual}}}", _templateFolder); var tocFile = CreateFile("toc.md", new[] { "# [test1](test.md)", "## [test2](test/test.md)", "# Api", "## [Console](@System.Console)", "## [ConsoleColor](xref:System.ConsoleColor)", }, _inputFolder); var conceptualFile = CreateFile("test.md", new[] { "---", "uid: XRef1", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef1", "Test link: [link text](test/test.md)", "Test link: [link text 2](../" + resourceFile + ")", "Test link style xref: [link text 3](xref:XRef2 \"title\")", "Test link style xref with anchor: [link text 4](xref:XRef2#anchor \"title\")", "Test encoded link style xref with anchor: [link text 5](xref:%58%52%65%66%32#anchor \"title\")", "Test invalid link style xref with anchor: [link text 6](xref:invalid#anchor \"title\")", "Test autolink style xref: <xref:XRef2>", "Test autolink style xref with anchor: <xref:XRef2#anchor>", "Test encoded autolink style xref with anchor: <xref:%58%52%65%66%32#anchor>", "Test invalid autolink style xref with anchor: <xref:invalid#anchor>", "Test short xref: @XRef2", "<p>", "test", }, _inputFolder); var conceptualFile2 = CreateFile("test/test.md", new[] { "---", "uid: XRef2", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef2", "Test link: [link text](../test.md)", "<p><div>", "test", }, _inputFolder); File.WriteAllText(resourceMetaFile, @"{ abc: ""xyz"", uid: ""r1"" }"); File.WriteAllText(MarkdownSytleConfig.MarkdownStyleFileName, @"{ settings : [ { category: ""div"", disable: true}, { category: ""p"", id: ""p-3"", disable: true} ], }"); CreateFile( MarkdownSytleDefinition.MarkdownStyleDefinitionFolderName + "/p" + MarkdownSytleDefinition.MarkdownStyleDefinitionFilePostfix, @"{ tagRules : { ""p-1"": { tagNames: [""p""], behavior: ""Warning"", messageFormatter: ""Tag {0} is not valid."", openingTagOnly: true }, ""p-2"": { tagNames: [""p""], behavior: ""Warning"", messageFormatter: ""Tag {0} is not valid."", openingTagOnly: false, disable: true }, ""p-3"": { tagNames: [""p""], behavior: ""Warning"", messageFormatter: ""Tag {0} is not valid."", openingTagOnly: false, } } } ", _templateFolder); CreateFile( MarkdownSytleDefinition.MarkdownStyleDefinitionFolderName + "/div" + MarkdownSytleDefinition.MarkdownStyleDefinitionFilePostfix, @"{ tagRules : { ""div-1"": { tagNames: [""div""], behavior: ""Warning"", messageFormatter: ""Tag {0} is not valid."", openingTagOnly: true } } } ", _templateFolder); FileCollection files = new FileCollection(Directory.GetCurrentDirectory()); files.Add(DocumentType.Article, new[] { tocFile, conceptualFile, conceptualFile2 }); files.Add(DocumentType.Article, new[] { "TestData/System.Console.csyml", "TestData/System.ConsoleColor.csyml" }, "TestData/", null); files.Add(DocumentType.Resource, new[] { resourceFile }); #endregion Init(MarkdownValidatorBuilder.MarkdownValidatePhaseName); try { using (new LoggerPhaseScope(nameof(DocumentBuilderTest))) { BuildDocument( files, new Dictionary <string, object> { ["meta"] = "Hello world!", }, templateFolder: _templateFolder); } { // check log for markdown stylecop. 
Assert.Equal(2, Listener.Items.Count); Assert.Equal("Tag p is not valid.", Listener.Items[0].Message); Assert.Equal(LogLevel.Warning, Listener.Items[0].LogLevel); Assert.Equal("Tag p is not valid.", Listener.Items[1].Message); Assert.Equal(LogLevel.Warning, Listener.Items[1].LogLevel); } } finally { CleanUp(); File.Delete(resourceMetaFile); } }
public void ProcessMarkdownTocWithAbsoluteHrefShouldSucceed() { var content = @" #[Topic1 Language](/href1) # ##Topic1.1 Language C# ###[Topic1.1.1](/href1.1.1) ### ##[Topic1.2]() ## #[Topic2](http://href.com) # "; var toc = _fileCreator.CreateFile(content, FileType.MarkdownToc); FileCollection files = new FileCollection(_inputFolder); files.Add(DocumentType.Article, new[] { toc }); BuildDocument(files); var outputRawModelPath = Path.GetFullPath(Path.Combine(_outputFolder, Path.ChangeExtension(toc, RawModelFileExtension))); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <TocItemViewModel>(outputRawModelPath); var expectedModel = new TocItemViewModel { Items = new TocViewModel { new TocItemViewModel { Name = "Topic1 Language", Href = "/href1", TopicHref = "/href1", Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1 Language C#", Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1.1", Href = "/href1.1.1", TopicHref = "/href1.1.1" } } }, new TocItemViewModel { Name = "Topic1.2", Href = string.Empty, TopicHref = string.Empty } } }, new TocItemViewModel { Name = "Topic2", Href = "http://href.com", TopicHref = "http://href.com" } } }; AssertTocEqual(expectedModel, model); }
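This test and ProcessMarkdownTocWithRelativeHrefShouldSucceed further below both lean on the markdown TOC convention visible in the content string: the count of leading '#' characters is the nesting depth, and an optional [name](href) link supplies the item's name and href (a bare trailing run of '#' is ignored). A self-contained sketch of that mapping, assuming well-formed input where each level is introduced by the level above it; the Node type and the regex are illustrative only, not docfx's TOC parser:

using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;

static class MarkdownTocSketch
{
    sealed class Node
    {
        public string Name = "";
        public string Href;                       // null when the heading has no link; empty when the link target is empty
        public List<Node> Items = new List<Node>();
    }

    static List<Node> Parse(IEnumerable<string> lines)
    {
        var roots = new List<Node>();
        var stack = new List<Node>();             // stack[i] holds the most recent node at depth i + 1

        foreach (var line in lines)
        {
            var m = Regex.Match(line.Trim(), @"^(#+)\s*(?:\[(.+?)\]\((.*?)\)|(.+?))\s*#*\s*$");
            if (!m.Success) continue;

            int depth = m.Groups[1].Value.Length;
            var node = new Node
            {
                Name = m.Groups[2].Success ? m.Groups[2].Value : m.Groups[4].Value,
                Href = m.Groups[3].Success ? m.Groups[3].Value : null,
            };

            if (depth == 1) roots.Add(node);
            else stack[depth - 2].Items.Add(node); // attach to the parent one level up

            if (stack.Count >= depth) stack.RemoveRange(depth - 1, stack.Count - depth + 1);
            stack.Add(node);
        }
        return roots;
    }

    static void Main()
    {
        var toc = Parse(new[] { "#[Topic1](/href1)", "##Topic1.1", "###[Topic1.1.1](/href1.1.1)", "#[Topic2](http://href.com)" });
        Console.WriteLine($"{toc.Count} top-level items; first child: {toc[0].Items[0].Name}"); // 2 top-level items; first child: Topic1.1
    }
}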
private static void Analyse() { var parser = new CvsLogParser(m_config.Sandbox, m_config.CvsLogFileName, m_config.BranchMatcher); var builder = new CommitBuilder(m_log, parser.Parse()); var exclusionFilter = new ExclusionFilter(m_log, m_config); IEnumerable<Commit> commits = builder.GetCommits() .SplitMultiBranchCommits() .FilterCommitsOnExcludedBranches() .FilterExcludedFiles(exclusionFilter) .AddCommitsToFiles() .Verify(m_log) .ToListIfNeeded(); // build lookup of all files var allFiles = new FileCollection(parser.Files); var includedFiles = new FileCollection(parser.Files.Where(f => m_config.IncludeFile(f.Name))); WriteAllCommitsLog(commits); WriteExcludedFileLog(parser); var branchResolver = ResolveBranches(commits, includedFiles); commits = branchResolver.Commits; var tagResolver = ResolveTags(commits, includedFiles); commits = tagResolver.Commits; WriteTagLog("allbranches.log", branchResolver, parser.ExcludedBranches, m_config.BranchRename); WriteTagLog("alltags.log", tagResolver, parser.ExcludedTags, m_config.TagRename); WriteUserLog("allusers.log", commits); var streams = commits.SplitBranchStreams(branchResolver.ResolvedTags); // resolve merges var mergeResolver = new MergeResolver(m_log, streams); mergeResolver.Resolve(); WriteBranchLogs(streams); // add any "head-only" files exclusionFilter.CreateHeadOnlyCommits(m_config.HeadOnlyBranches, streams, allFiles); // store data needed for import m_streams = streams; }
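One detail worth calling out in Analyse: two lookups are built from the same parse. allFiles keeps every file the CVS log parser saw (needed later when the head-only commits are created), while includedFiles applies the configured include filter before branch and tag resolution. A minimal sketch of that split, with a hypothetical predicate standing in for m_config.IncludeFile:

using System;
using System.Collections.Generic;
using System.Linq;

static class IncludeFilterSketch
{
    // Hypothetical stand-in for m_config.IncludeFile.
    static bool IncludeFile(string name) => !name.EndsWith(".bak", StringComparison.OrdinalIgnoreCase);

    static void Main()
    {
        var parsedFiles = new List<string> { "src/Program.cs", "src/Program.cs.bak", "README.md" };

        var allFiles = parsedFiles;                                   // everything, kept for head-only commits
        var includedFiles = parsedFiles.Where(IncludeFile).ToList();  // what branch/tag resolution works on

        Console.WriteLine($"all: {allFiles.Count}, included: {includedFiles.Count}"); // all: 3, included: 2
    }
}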
public void ProcessSwaggerShouldSucceed() { FileCollection files = new FileCollection(_defaultFiles); BuildDocument(files); var outputRawModelPath = GetRawModelFilePath("contacts.json"); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <RestApiRootItemViewModel>(outputRawModelPath); Assert.Equal("graph.windows.net/myorganization/Contacts/1.0", model.Uid); Assert.Equal("graph_windows_net_myorganization_Contacts_1_0", model.HtmlId); Assert.Equal(10, model.Children.Count); Assert.Equal("Hello world!", model.Metadata["meta"]); // Verify $ref in path var item0 = model.Children[0]; Assert.Equal("graph.windows.net/myorganization/Contacts/1.0/get contacts", item0.Uid); Assert.Equal("<p sourcefile=\"TestData/swagger/contacts.json\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">You can get a collection of contacts from your tenant.</p>\n", item0.Summary); Assert.Single(item0.Parameters); Assert.Equal("1.6", item0.Parameters[0].Metadata["default"]); Assert.Single(item0.Responses); Assert.Equal("200", item0.Responses[0].HttpStatusCode); // Verify tags of child Assert.Equal("contacts", item0.Tags[0]); var item1 = model.Children[1]; Assert.Equal("contacts", item1.Tags[0]); Assert.Equal("pet store", item1.Tags[1]); // Verify tags of root Assert.Equal(3, model.Tags.Count); var tag0 = model.Tags[0]; Assert.Equal("contact", tag0.Name); Assert.Equal("<p sourcefile=\"TestData/swagger/contacts.json\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">Everything about the <strong>contacts</strong></p>\n", tag0.Description); Assert.Equal("contact-bookmark", tag0.HtmlId); Assert.Single(tag0.Metadata); var externalDocs = (JObject)tag0.Metadata["externalDocs"]; Assert.NotNull(externalDocs); Assert.Equal("Find out more", externalDocs["description"]); Assert.Equal("http://swagger.io", externalDocs["url"]); var tag1 = model.Tags[1]; Assert.Equal("pet_store", tag1.HtmlId); // Verify path parameters // Path parameter applicable for get operation Assert.Equal(2, item1.Parameters.Count); Assert.Equal("object_id", item1.Parameters[0].Name); Assert.Equal("api-version", item1.Parameters[1].Name); Assert.Equal(true, item1.Parameters[1].Metadata["required"]); // Override ""api-version" parameters by $ref for patch operation var item2 = model.Children[2]; Assert.Equal(3, item2.Parameters.Count); Assert.Equal("object_id", item2.Parameters[0].Name); Assert.Equal("api-version", item2.Parameters[1].Name); Assert.Equal(false, item2.Parameters[1].Metadata["required"]); // Override ""api-version" parameters by self definition for delete operation var item3 = model.Children[3]; Assert.Equal(2, item3.Parameters.Count); Assert.Equal("object_id", item3.Parameters[0].Name); Assert.Equal("api-version", item3.Parameters[1].Name); Assert.Equal(false, item3.Parameters[1].Metadata["required"]); // When operation parameters is not set, inherit from th parameters for post operation var item4 = model.Children[4]; Assert.Single(item4.Parameters); Assert.Equal("api-version", item4.Parameters[0].Name); Assert.Equal(true, item4.Parameters[0].Metadata["required"]); // When 'definitions' has direct child with $ref defined, should resolve it var item5 = model.Children[6]; var parameter2 = (JObject)item5.Parameters[2].Metadata["schema"]; Assert.Equal("string", parameter2["type"]); Assert.Equal("uri", parameter2["format"]); // Verify markup result of parameters Assert.Equal("<p sourcefile=\"TestData/swagger/contacts.json\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">The request body <em>contains</em> a 
single property that specifies the URL of the user or contact to add as manager.</p>\n", item5.Parameters[2].Description); Assert.Equal("<p sourcefile=\"TestData/swagger/contacts.json\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\"><strong>uri</strong> description.</p>\n", ((string)parameter2["description"])); Assert.Equal("<p sourcefile=\"TestData/swagger/contacts.json\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">No Content. Indicates <strong>success</strong>. No response body is returned.</p>\n", item5.Responses[0].Description); // Verify for markup result of securityDefinitions var securityDefinitions = (JObject)model.Metadata.Single(m => m.Key == "securityDefinitions").Value; var auth = (JObject)securityDefinitions["auth"]; Assert.Equal("<p sourcefile=\"TestData/swagger/contacts.json\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">securityDefinitions <em>description</em>.</p>\n", auth["description"].ToString()); }
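Several of the assertions above hinge on Swagger $ref resolution: the "$ref in path" child, the api-version parameter overrides, and the definitions entry whose schema is pulled in by reference. As a reminder of what a local reference such as "#/definitions/..." means, here is a tiny standalone resolver over a Newtonsoft JObject; it is illustrative only, not the RestApi document processor:

using System;
using Newtonsoft.Json.Linq;

static class JsonRefSketch
{
    // Walks a local JSON reference such as "#/definitions/manager" from the document root.
    static JToken Resolve(JObject root, string reference)
    {
        JToken current = root;
        foreach (var segment in reference.TrimStart('#', '/').Split('/'))
        {
            current = current[segment];
            if (current == null) throw new ArgumentException($"Unresolvable $ref: {reference}");
        }
        return current;
    }

    static void Main()
    {
        var doc = JObject.Parse(@"{ ""definitions"": { ""manager"": { ""type"": ""string"", ""format"": ""uri"" } } }");
        Console.WriteLine(Resolve(doc, "#/definitions/manager")["format"]); // uri
    }
}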
public void ProcessMarkdownTocWithRelativeHrefShouldSucceed() { var file1 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent); var file2 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent, "a"); var content = $@" #[Topic1](/href1) ##[Topic1.1]({file1}) ###[Topic1.1.1]({file2}) ##[Topic1.2]() #[Topic2](http://href.com) #[Topic3](invalid.md) "; var toc = _fileCreator.CreateFile(content, FileType.MarkdownToc); FileCollection files = new FileCollection(_inputFolder); files.Add(DocumentType.Article, new[] { file1, file2, toc }); BuildDocument(files); var outputRawModelPath = Path.Combine(_outputFolder, Path.ChangeExtension(toc, RawModelFileExtension)); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <TocItemViewModel>(outputRawModelPath); var expectedModel = new TocItemViewModel { Items = new TocViewModel { new TocItemViewModel { Name = "Topic1", Href = "/href1", TopicHref = "/href1", Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1", Href = file1, TopicHref = file1, Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1.1", Href = file2, TopicHref = file2 } } }, new TocItemViewModel { Name = "Topic1.2", Href = string.Empty, TopicHref = string.Empty } } }, new TocItemViewModel { Name = "Topic2", Href = "http://href.com", TopicHref = "http://href.com" }, new TocItemViewModel { Name = "Topic3", Href = "invalid.md", TopicHref = "invalid.md" } } }; AssertTocEqual(expectedModel, model); }
public void Export(string path, FileCollection fileCollection, Object asset, ExportOptions options) { Export(path, fileCollection, new Object[] { asset }, options); }
public void ProcessYamlTocWithReferencedTocShouldSucceed() { var file1 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent); var file2 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent, "sub1"); var file3 = _fileCreator.CreateFile(string.Empty, FileType.MarkdownContent, "sub1/sub2"); var referencedToc = _fileCreator.CreateFile($@" - name: Topic href: {Path.GetFileName(file3)} ", FileType.YamlToc, "sub1/sub2"); var subToc = _fileCreator.CreateFile($@" #[Topic]({Path.GetFileName(file2)}) #[ReferencedToc](sub2/{Path.GetFileName(referencedToc)}) ", FileType.MarkdownToc, "sub1"); var content = $@" - name: Topic1 href: {file1} items: - name: Topic1.1 href: {subToc} items: - name: Topic1.1.1 - name: Topic1.1.2 - name: Topic1.2 href: {subToc} homepage: {file1} - name: Topic2 href: {referencedToc} "; var toc = _fileCreator.CreateFile(content, FileType.YamlToc); FileCollection files = new FileCollection(_inputFolder); files.Add(DocumentType.Article, new[] { file1, file2, file3, toc, subToc }); BuildDocument(files); var outputRawModelPath = Path.Combine(_outputFolder, Path.ChangeExtension(toc, RawModelFileExtension)); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <TocItemViewModel>(outputRawModelPath); var expectedModel = new TocItemViewModel { Items = new TocViewModel { new TocItemViewModel { Name = "Topic1", Href = file1, TopicHref = file1, Items = new TocViewModel { new TocItemViewModel { Name = "Topic1.1", Href = null, // For referenced toc, the content from the referenced toc is expanded as the items of current toc, and href is cleared TopicHref = null, Items = new TocViewModel { new TocItemViewModel { Name = "Topic", Href = file2, TopicHref = file2, }, new TocItemViewModel { Name = "ReferencedToc", Items = new TocViewModel { new TocItemViewModel { Name = "Topic", Href = file3, TopicHref = file3, } } } } }, new TocItemViewModel { Name = "Topic1.2", Href = file1, // For referenced toc, href should be overwritten by homepage TopicHref = file1, Homepage = file1, Items = new TocViewModel { new TocItemViewModel { Name = "Topic", Href = file2, TopicHref = file2, }, new TocItemViewModel { Name = "ReferencedToc", Items = new TocViewModel { new TocItemViewModel { Name = "Topic", Href = file3, TopicHref = file3, } } } } } } }, new TocItemViewModel { Name = "Topic2", Href = null, Items = new TocViewModel { new TocItemViewModel { Name = "Topic", Href = file3, TopicHref = file3, } } } } }; AssertTocEqual(expectedModel, model); // Referenced TOC File should not exist var referencedTocPath = Path.Combine(_outputFolder, Path.ChangeExtension(subToc, RawModelFileExtension)); Assert.False(File.Exists(referencedTocPath)); }
public void Export(string path, FileCollection fileCollection, IEnumerable <Object> assets, ExportOptions options) { EventExportPreparationStarted?.Invoke(); VirtualSerializedFile virtualFile = new VirtualSerializedFile(options); List <IExportCollection> collections = new List <IExportCollection>(); // speed up fetching a little bit List <Object> depList = new List <Object>(); HashSet <Object> depSet = new HashSet <Object>(); HashSet <Object> queued = new HashSet <Object>(); depList.AddRange(assets); depSet.UnionWith(depList); for (int i = 0; i < depList.Count; i++) { Object asset = depList[i]; if (!queued.Contains(asset)) { IExportCollection collection = CreateCollection(virtualFile, asset, options); foreach (Object element in collection.Assets) { queued.Add(element); } collections.Add(collection); } #warning TODO: if IsGenerateGUIDByContent set it should build collections and write actual references with persistent GUIS, but skip dependencies if (Config.IsExportDependencies) { foreach (Object dependency in asset.FetchDependencies(true)) { if (dependency == null) { continue; } if (!depSet.Contains(dependency)) { depList.Add(dependency); depSet.Add(dependency); } } } } depList.Clear(); depSet.Clear(); queued.Clear(); EventExportPreparationFinished?.Invoke(); EventExportStarted?.Invoke(); ProjectAssetContainer container = new ProjectAssetContainer(this, virtualFile, fileCollection.FetchAssets(), collections, options); for (int i = 0; i < collections.Count; i++) { IExportCollection collection = collections[i]; container.CurrentCollection = collection; bool isExported = collection.Export(container, path); if (isExported) { Logger.Log(LogType.Info, LogCategory.Export, $"'{collection.Name}' exported"); } EventExportProgressUpdated?.Invoke(i, collections.Count); } EventExportFinished?.Invoke(); }
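The preparation loop above is a worklist that grows while it is being iterated: assets pulled in as dependencies are appended to depList, and the hash sets give O(1) duplicate checks so each asset is collected exactly once. A self-contained sketch of that traversal pattern, collapsing the separate depSet/queued bookkeeping into a single HashSet for brevity; the Asset type is a stand-in, not the exporter's Object type:

using System;
using System.Collections.Generic;

static class DependencyWalkSketch
{
    // Hypothetical asset with a dependency list, standing in for Object.FetchDependencies.
    sealed class Asset
    {
        public string Name = "";
        public List<Asset> Dependencies = new List<Asset>();
    }

    static List<Asset> CollectWithDependencies(IEnumerable<Asset> roots)
    {
        var ordered = new List<Asset>(roots);     // plays the role of depList: preserves discovery order
        var seen = new HashSet<Asset>(ordered);   // plays the role of depSet: O(1) duplicate checks

        // index-based loop so items appended during iteration are still visited
        for (int i = 0; i < ordered.Count; i++)
        {
            foreach (var dep in ordered[i].Dependencies)
            {
                if (dep != null && seen.Add(dep))
                {
                    ordered.Add(dep);
                }
            }
        }
        return ordered;
    }

    static void Main()
    {
        var leaf = new Asset { Name = "leaf" };
        var root = new Asset { Name = "root", Dependencies = { leaf } };
        Console.WriteLine(CollectWithDependencies(new[] { root }).Count); // 2
    }
}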
public void TestBuild() { #region Prepare test data var resourceFile = Path.GetFileName(typeof(DocumentBuilderTest).Assembly.Location); var resourceMetaFile = resourceFile + ".meta"; CreateFile("conceptual.html.primary.tmpl", "{{{conceptual}}}", _templateFolder); var tocFile = CreateFile("toc.md", new[] { "# [test1](test.md)", "## [test2](test/test.md)", "# Api", "## [Console](@System.Console)", "## [ConsoleColor](xref:System.ConsoleColor)", }, _inputFolder); var conceptualFile = CreateFile("test.md", new[] { "---", "uid: XRef1", "a: b", "b:", " c: e", "---", "<!-- I'm comment -->", "<!-- I'm not title-->", "<!-- Raw title is in the line below -->", "# Hello World", "Test XRef: @XRef1", "Test link: [link text](test/test.md)", "Test link: [link text 2](../" + resourceFile + ")", "Test link style xref: [link text 3](xref:XRef2 \"title\")", "Test link style xref with anchor: [link text 4](xref:XRef2#anchor \"title\")", "Test encoded link style xref with anchor: [link text 5](xref:%58%52%65%66%32#anchor \"title\")", "Test invalid link style xref with anchor: [link text 6](xref:invalid#anchor \"title\")", "Test autolink style xref: <xref:XRef2>", "Test autolink style xref with anchor: <xref:XRef2#anchor>", "Test encoded autolink style xref with anchor: <xref:%58%52%65%66%32#anchor>", "Test invalid autolink style xref with anchor: <xref:invalid#anchor>", "Test short xref: @XRef2", "Test xref with query string: <xref href=\"XRef2?text=Foo%3CT%3E\"/>", "Test invalid xref with query string: <xref href=\"invalid?alt=Foo%3CT%3E\"/>", "Test xref with attribute: <xref href=\"XRef2\" text=\"Foo<T>\"/>", "Test xref with attribute: <xref href=\"XRef2\" name=\"Foo<T>\"/>", "Test invalid xref with attribute: <xref href=\"invalid\" alt=\"Foo<T>\"/>", "Test invalid xref with attribute: <xref href=\"invalid\" fullname=\"Foo<T>\"/>", "Test external xref with absolute URL and anchor: @str", "Test invalid autolink xref: <xref:?displayProperty=fullName>", "<p>", "test", }, _inputFolder); var conceptualFile2 = CreateFile("test/test.md", new[] { "---", "uid: XRef2", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef2", "Test link: [link text](../test.md)", "<p>", "test", }, _inputFolder); File.WriteAllText(resourceMetaFile, @"{ abc: ""xyz"", uid: ""r1"" }"); File.WriteAllText(MarkdownSytleConfig.MarkdownStyleFileName, @"{ rules : [ ""foo"", { name: ""bar"", disable: true} ], tagRules : [ { tagNames: [""p""], behavior: ""Warning"", messageFormatter: ""Tag {0} is not valid."", openingTagOnly: true } ] }"); FileCollection files = new FileCollection(Directory.GetCurrentDirectory()); files.Add(DocumentType.Article, new[] { tocFile, conceptualFile, conceptualFile2 }); files.Add(DocumentType.Article, new[] { "TestData/System.Console.csyml", "TestData/System.ConsoleColor.csyml" }, "TestData/", null); files.Add(DocumentType.Resource, new[] { resourceFile }); #endregion Init(MarkdownValidatorBuilder.MarkdownValidatePhaseName); try { using (new LoggerPhaseScope(nameof(DocumentBuilderTest))) { BuildDocument( files, new Dictionary <string, object> { ["meta"] = "Hello world!", }, templateFolder: _templateFolder); } { // check log for markdown stylecop. Assert.Equal(2, Listener.Items.Count); Assert.Equal("Tag p is not valid.", Listener.Items[0].Message); Assert.Equal(LogLevel.Warning, Listener.Items[0].LogLevel); Assert.Equal("Tag p is not valid.", Listener.Items[1].Message); Assert.Equal(LogLevel.Warning, Listener.Items[1].LogLevel); } { // check toc. 
Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension(tocFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <TocItemViewModel>(Path.Combine(_outputFolder, Path.ChangeExtension(tocFile, RawModelFileExtension))).Items; Assert.NotNull(model); Assert.Equal("test1", model[0].Name); Assert.Equal("test.html", model[0].Href); Assert.NotNull(model[0].Items); Assert.Equal("test2", model[0].Items[0].Name); Assert.Equal("test/test.html", model[0].Items[0].Href); Assert.Equal("Api", model[1].Name); Assert.Null(model[1].Href); Assert.NotNull(model[1].Items); Assert.Equal("Console", model[1].Items[0].Name); Assert.Equal("../System.Console.csyml", model[1].Items[0].Href); Assert.Equal("ConsoleColor", model[1].Items[1].Name); Assert.Equal("../System.ConsoleColor.csyml", model[1].Items[1].Href); } { // check conceptual. var conceptualOutputPath = Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, ".html")); Assert.True(File.Exists(conceptualOutputPath)); Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, RawModelFileExtension))); Assert.Equal( $"<h1 id=\"hello-world\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"10\" sourceendlinenumber=\"10\">Hello World</h1>", model["rawTitle"]); Assert.Equal( string.Join( "\n", "<!-- I'm comment -->", "<!-- I'm not title-->", "<!-- Raw title is in the line below -->", "", $"<p sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"11\" sourceendlinenumber=\"32\">Test XRef: <xref href=\"XRef1\" data-throw-if-not-resolved=\"False\" data-raw-source=\"@XRef1\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"11\" sourceendlinenumber=\"11\"></xref>", $"Test link: <a href=\"~/{_inputFolder}/test/test.md\" data-raw-source=\"[link text](test/test.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"12\" sourceendlinenumber=\"12\">link text</a>", $"Test link: <a href=\"~/{resourceFile}\" data-raw-source=\"[link text 2](../Microsoft.DocAsCode.Build.Engine.Tests.dll)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"13\" sourceendlinenumber=\"13\">link text 2</a>", $"Test link style xref: <a href=\"xref:XRef2\" title=\"title\" data-raw-source=\"[link text 3](xref:XRef2 "title")\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"14\" sourceendlinenumber=\"14\">link text 3</a>", $"Test link style xref with anchor: <a href=\"xref:XRef2#anchor\" title=\"title\" data-raw-source=\"[link text 4](xref:XRef2#anchor "title")\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"15\" sourceendlinenumber=\"15\">link text 4</a>", $"Test encoded link style xref with anchor: <a href=\"xref:%58%52%65%66%32#anchor\" title=\"title\" data-raw-source=\"[link text 5](xref:%58%52%65%66%32#anchor "title")\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"16\" sourceendlinenumber=\"16\">link text 5</a>", $"Test invalid link style xref with anchor: <a href=\"xref:invalid#anchor\" title=\"title\" data-raw-source=\"[link text 6](xref:invalid#anchor "title")\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"17\" sourceendlinenumber=\"17\">link text 6</a>", $"Test autolink style xref: <xref href=\"XRef2\" data-throw-if-not-resolved=\"True\" data-raw-source=\"<xref:XRef2>\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"18\" 
sourceendlinenumber=\"18\"></xref>", $"Test autolink style xref with anchor: <xref href=\"XRef2#anchor\" data-throw-if-not-resolved=\"True\" data-raw-source=\"<xref:XRef2#anchor>\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"19\" sourceendlinenumber=\"19\"></xref>", $"Test encoded autolink style xref with anchor: <xref href=\"%58%52%65%66%32#anchor\" data-throw-if-not-resolved=\"True\" data-raw-source=\"<xref:%58%52%65%66%32#anchor>\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"20\" sourceendlinenumber=\"20\"></xref>", $"Test invalid autolink style xref with anchor: <xref href=\"invalid#anchor\" data-throw-if-not-resolved=\"True\" data-raw-source=\"<xref:invalid#anchor>\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"21\" sourceendlinenumber=\"21\"></xref>", $"Test short xref: <xref href=\"XRef2\" data-throw-if-not-resolved=\"False\" data-raw-source=\"@XRef2\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"22\" sourceendlinenumber=\"22\"></xref>", "Test xref with query string: <xref href=\"XRef2?text=Foo%3CT%3E\"></xref>", "Test invalid xref with query string: <xref href=\"invalid?alt=Foo%3CT%3E\"></xref>", "Test xref with attribute: <xref href=\"XRef2\" text=\"Foo<T>\"></xref>", "Test xref with attribute: <xref href=\"XRef2\" name=\"Foo<T>\"></xref>", "Test invalid xref with attribute: <xref href=\"invalid\" alt=\"Foo<T>\"></xref>", "Test invalid xref with attribute: <xref href=\"invalid\" fullname=\"Foo<T>\"></xref>", $"Test external xref with absolute URL and anchor: <xref href=\"str\" data-throw-if-not-resolved=\"False\" data-raw-source=\"@str\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"29\" sourceendlinenumber=\"29\"></xref>", $"Test invalid autolink xref: <xref href=\"?displayProperty=fullName\" data-throw-if-not-resolved=\"True\" data-raw-source=\"<xref:?displayProperty=fullName>\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"30\" sourceendlinenumber=\"30\"></xref>", "<p>", @"test</p>", ""), model[Constants.PropertyName.Conceptual]); Assert.Equal( string.Join( "\n", "<!-- I'm comment -->", "<!-- I'm not title-->", "<!-- Raw title is in the line below -->", "", "<p>Test XRef: <a class=\"xref\" href=\"test.html\">Hello World</a>", "Test link: <a href=\"test/test.html\">link text</a>", "Test link: <a href=\"../Microsoft.DocAsCode.Build.Engine.Tests.dll\">link text 2</a>", "Test link style xref: <a class=\"xref\" href=\"test/test.html\" title=\"title\">link text 3</a>", "Test link style xref with anchor: <a class=\"xref\" href=\"test/test.html#anchor\" title=\"title\">link text 4</a>", "Test encoded link style xref with anchor: <a class=\"xref\" href=\"test/test.html#anchor\" title=\"title\">link text 5</a>", "Test invalid link style xref with anchor: <a href=\"xref:invalid#anchor\" title=\"title\">link text 6</a>", "Test autolink style xref: <a class=\"xref\" href=\"test/test.html\">Hello World</a>", "Test autolink style xref with anchor: <a class=\"xref\" href=\"test/test.html#anchor\">Hello World</a>", "Test encoded autolink style xref with anchor: <a class=\"xref\" href=\"test/test.html#anchor\">Hello World</a>", "Test invalid autolink style xref with anchor: <xref:invalid#anchor>", "Test short xref: <a class=\"xref\" href=\"test/test.html\">Hello World</a>", "Test xref with query string: <a class=\"xref\" href=\"test/test.html\">Foo<T></a>", "Test invalid xref with query string: <span class=\"xref\">Foo<T></span>", "Test xref with attribute: <a class=\"xref\" 
href=\"test/test.html\">Foo<T></a>", "Test xref with attribute: <a class=\"xref\" href=\"test/test.html\">Foo<T></a>", "Test invalid xref with attribute: <span class=\"xref\">Foo<T></span>", "Test invalid xref with attribute: <span class=\"xref\">Foo<T></span>", "Test external xref with absolute URL and anchor: <a class=\"xref\" href=\"https://docs.python.org/3.5/library/stdtypes.html#str\">str</a>", "Test invalid autolink xref: <xref:?displayProperty=fullName>", "<p>", "test</p>", ""), File.ReadAllText(conceptualOutputPath)); Assert.Equal("Conceptual", model["type"]); Assert.Equal("Hello world!", model["meta"]); Assert.Equal("b", model["a"]); } { // check mref. Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension("System.Console.csyml", RawModelFileExtension)))); Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension("System.ConsoleColor.csyml", RawModelFileExtension)))); } { // check resource. Assert.True(File.Exists(Path.Combine(_outputFolder, resourceFile))); Assert.True(File.Exists(Path.Combine(_outputFolder, resourceFile + RawModelFileExtension))); var meta = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(_outputFolder, resourceFile + RawModelFileExtension)); Assert.Equal(4, meta.Count); Assert.True(meta.ContainsKey("meta")); Assert.Equal("Hello world!", meta["meta"]); Assert.True(meta.ContainsKey("abc")); Assert.Equal("xyz", meta["abc"]); Assert.True(meta.ContainsKey(Constants.PropertyName.Uid)); Assert.Equal("r1", meta[Constants.PropertyName.Uid]); } } finally { CleanUp(); File.Delete(resourceMetaFile); } }
public async Task BindModelAsync(ModelBindingContext bindingContext) { if (bindingContext == null) { throw new ArgumentNullException(nameof(bindingContext)); } _logger.AttemptingToBindModel(bindingContext); var createFileCollection = bindingContext.ModelType == typeof(IFormFileCollection); if (!createFileCollection && !ModelBindingHelper.CanGetCompatibleCollection <IFormFile>(bindingContext)) { // Silently fail if unable to create an instance or use the current instance. return; } ICollection <IFormFile> postedFiles; if (createFileCollection) { postedFiles = new List <IFormFile>(); } else { postedFiles = ModelBindingHelper.GetCompatibleCollection <IFormFile>(bindingContext); } // If we're at the top level, then use the FieldName (parameter or property name). // This handles the fact that there will be nothing in the ValueProviders for this parameter // and so we'll do the right thing even though we 'fell-back' to the empty prefix. var modelName = bindingContext.IsTopLevelObject ? bindingContext.BinderModelName ?? bindingContext.FieldName : bindingContext.ModelName; await GetFormFilesAsync(modelName, bindingContext, postedFiles); // If ParameterBinder incorrectly overrode ModelName, fall back to OriginalModelName prefix. Comparisons // are tedious because e.g. top-level parameter or property is named Blah and it contains a BlahBlah // property. OriginalModelName may be null in tests. if (postedFiles.Count == 0 && bindingContext.OriginalModelName != null && !string.Equals(modelName, bindingContext.OriginalModelName, StringComparison.Ordinal) && !modelName.StartsWith(bindingContext.OriginalModelName + "[", StringComparison.Ordinal) && !modelName.StartsWith(bindingContext.OriginalModelName + ".", StringComparison.Ordinal)) { modelName = ModelNames.CreatePropertyModelName(bindingContext.OriginalModelName, modelName); await GetFormFilesAsync(modelName, bindingContext, postedFiles); } object value; if (bindingContext.ModelType == typeof(IFormFile)) { if (postedFiles.Count == 0) { // Silently fail if the named file does not exist in the request. _logger.DoneAttemptingToBindModel(bindingContext); return; } value = postedFiles.First(); } else { if (postedFiles.Count == 0 && !bindingContext.IsTopLevelObject) { // Silently fail if no files match. Will bind to an empty collection (treat empty as a success // case and not reach here) if binding to a top-level object. _logger.DoneAttemptingToBindModel(bindingContext); return; } // Perform any final type mangling needed. var modelType = bindingContext.ModelType; if (modelType == typeof(IFormFile[])) { Debug.Assert(postedFiles is List <IFormFile>); value = ((List <IFormFile>)postedFiles).ToArray(); } else if (modelType == typeof(IFormFileCollection)) { Debug.Assert(postedFiles is List <IFormFile>); value = new FileCollection((List <IFormFile>)postedFiles); } else { value = postedFiles; } } // We need to add a ValidationState entry because the modelName might be non-standard. Otherwise // the entry we create in model state might not be marked as valid. bindingContext.ValidationState.Add(value, new ValidationStateEntry() { Key = modelName, }); bindingContext.ModelState.SetModelValue( modelName, rawValue: null, attemptedValue: null); bindingContext.Result = ModelBindingResult.Success(value); _logger.DoneAttemptingToBindModel(bindingContext); }
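The binder ends with a small shape-selection step: the posted files collected into a List&lt;IFormFile&gt; are handed back as whatever concrete type the model declared, either the first file, an array, the FileCollection wrapper, or the list itself. A toy, self-contained illustration of that step, with string standing in for IFormFile and ReadOnlyCollection standing in for the FileCollection wrapper:

using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;

static class CollectionShapeSketch
{
    // Materialise the gathered items as the shape the target model asked for.
    static object Shape(List<string> items, Type modelType)
    {
        if (modelType == typeof(string[]))
            return items.ToArray();
        if (modelType == typeof(ReadOnlyCollection<string>))
            return new ReadOnlyCollection<string>(items);      // stand-in for wrapping in FileCollection
        return items;                                          // otherwise hand back the collection itself
    }

    static void Main()
    {
        var posted = new List<string> { "a.txt", "b.txt" };
        Console.WriteLine(Shape(posted, typeof(string[])).GetType().Name);                   // String[]
        Console.WriteLine(Shape(posted, typeof(ReadOnlyCollection<string>)).GetType().Name); // ReadOnlyCollection`1
    }
}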
protected override void CopyToExistingMethod() { using (FileCollection fc = new FileCollection(_selectedCopyToFilename)) { if (readFiles(fc)) { copyToSave(fc); } } }
public void ProcessMrefWithTocShouldSucceed() { var files = new FileCollection(Directory.GetCurrentDirectory()); files.Add(DocumentType.Article, new[] { "TestData/mref/CatLibrary.Cat`2.yml" }, "TestData/"); files.Add(DocumentType.Article, new[] { "TestData/mref/toc.yml" }, "TestData/"); BuildDocument(files); { var outputRawModelPath = GetRawModelFilePath("CatLibrary.Cat`2.yml"); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <ApiBuildOutput>(outputRawModelPath); Assert.NotNull(model); Assert.Equal("Hello world!", model.Metadata["meta"]); Assert.Equal(true, model.Metadata["_splitReference"]); Assert.Equal(20, model.Children.Count); } { var outputRawModelPath = GetRawModelFilePath("CatLibrary.Cat-2.-ctor.yml"); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <ApiBuildOutput>(outputRawModelPath); Assert.NotNull(model); Assert.Equal("Hello world!", model.Metadata["meta"]); Assert.Equal(true, model.Metadata["_splitReference"]); Assert.Equal(MemberType.Constructor, model.Type); Assert.Equal(3, model.Children.Count); Assert.Equal(new List <string> { "net2", "net46" }, model.Platform); } { var outputRawModelPath = GetRawModelFilePath("toc.yml"); Assert.True(File.Exists(outputRawModelPath)); var model = JsonUtility.Deserialize <TocItemViewModel>(outputRawModelPath); Assert.NotNull(model); Assert.Equal(1, model.Items.Count); Assert.Equal("CatLibrary.Cat%602.html", model.Items[0].TopicHref); Assert.Equal(13, model.Items[0].Items.Count); Assert.Equal("CatLibrary.Cat-2.op_Addition.html", model.Items[0].Items[0].TopicHref); Assert.Equal("Addition", model.Items[0].Items[0].Name); Assert.Equal("CatLibrary.Cat-2.op_Subtraction.html", model.Items[0].Items[12].TopicHref); Assert.Equal("Subtraction", model.Items[0].Items[12].Name); var ctor = model.Items[0].Items.FirstOrDefault(s => s.Name == "Cat"); Assert.NotNull(ctor); Assert.Equal("CatLibrary.Cat`2.#ctor*", ctor.TopicUid); Assert.Equal("Constructor", ctor.Metadata["type"].ToString()); Assert.Equal(new List <string> { "net2", "net46" }, JArray.FromObject(ctor.Metadata[Constants.PropertyName.Platform]).Select(s => s.ToString()).ToList()); Assert.Equal(new List <string> { "net2", "net46" }, JArray.FromObject(ctor.Metadata[Constants.MetadataName.Version]).Select(s => s.ToString()).ToList()); } { var manifestFile = Path.GetFullPath(Path.Combine(_outputFolder, "manifest.json")); var manifest = JsonUtility.Deserialize <Manifest>(manifestFile); Assert.Equal(17, manifest.Files.Count); // NOTE: split output files have the same source file path var groups = manifest.Files.GroupBy(s => s.SourceRelativePath).ToList().OrderByDescending(s => s.Count()).ToList(); Assert.Equal(1, groups.Count); } }
private bool copyToSave(FileCollection fc) { bool result = true; using (Utils.ProgressBlock progress = new Utils.ProgressBlock(this, STR_SAVING, STR_SAVINGGEOCACHES, CopyToList.Count, 0)) { byte[] memBuffer = new byte[10 * 1024 * 1024]; byte notFree = 1; byte notFreeF = 2; List<RecordInfo> freeGeocacheRecords = (from RecordInfo ri in fc._geocachesInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List<RecordInfo> freeLogImageRecords = (from RecordInfo ri in fc._logimgsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List<RecordInfo> freeLogRecords = (from RecordInfo ri in fc._logsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List<RecordInfo> freeWaypointRecords = (from RecordInfo ri in fc._wptsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List<RecordInfo> freeUserWaypointRecords = (from RecordInfo ri in fc._usrwptsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List<RecordInfo> freeGeocacheImageRecords = (from RecordInfo ri in fc._geocacheimgsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); using (MemoryStream ms = new MemoryStream(memBuffer)) using (BinaryWriter bw = new BinaryWriter(ms)) using (FileStream fsLogImages = File.Open(fc.LogImagesFilename, FileMode.OpenOrCreate, FileAccess.Write)) using (FileStream fsWaypoints = File.Open(fc.WaypointsFilename, FileMode.OpenOrCreate, FileAccess.Write)) using (FileStream fsUserWaypoints = File.Open(fc.UserWaypointsFilename, FileMode.OpenOrCreate, FileAccess.Write)) using (FileStream fsGeocacheImages = File.Open(fc.GeocacheImagesFilename, FileMode.OpenOrCreate, FileAccess.Write)) { //********************************************** // GEOCACHES //********************************************** long recordLength = 0; byte[] extraBuffer = new byte[200]; int index = 0; int procStep = 0; foreach (Framework.Data.Geocache gc in CopyToList) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(gc.Code); bw.Write(gc.Archived); WriteIntegerArray(bw, gc.AttributeIds); bw.Write(gc.Available); bw.Write(gc.City ?? ""); bw.Write(gc.Container.ID); bw.Write(gc.CustomCoords); bw.Write(gc.Country ?? ""); bw.Write(gc.ContainsCustomLatLon); if (gc.ContainsCustomLatLon) { bw.Write((double)gc.CustomLat); bw.Write((double)gc.CustomLon); } bw.Write(gc.Difficulty); bw.Write(gc.EncodedHints ?? ""); bw.Write(gc.Favorites); bw.Write(gc.Flagged); bw.Write(gc.Found); bw.Write(gc.GeocacheType.ID); bw.Write(gc.ID ?? ""); bw.Write(gc.Lat); bw.Write(gc.Lon); bw.Write(gc.MemberOnly); bw.Write(gc.Municipality ?? ""); bw.Write(gc.Name ?? ""); bw.Write(gc.Notes ?? ""); bw.Write(gc.Owner ?? ""); bw.Write(gc.OwnerId ?? ""); bw.Write(gc.PersonaleNote ?? ""); bw.Write(gc.PlacedBy ?? ""); bw.Write(((DateTime)gc.PublishedTime).ToString("s")); bw.Write(gc.State ?? ""); bw.Write(gc.Terrain); bw.Write(gc.Title ?? ""); bw.Write(gc.Url ?? ""); bw.Write(gc.DataFromDate.ToString("s")); bw.Write(gc.Locked); writeRecord(fc._geocachesInDB, gc.Code, ms, bw, fc._fsGeocaches, memBuffer, extraBuffer, freeGeocacheRecords); //other record string id = string.Concat("F_", gc.Code); //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFreeF); bw.Write(id); bw.Write(gc.ShortDescription ?? ""); bw.Write(gc.ShortDescriptionInHtml); bw.Write(gc.LongDescription ?? 
""); bw.Write(gc.LongDescriptionInHtml); writeRecord(fc._geocachesInDB, id, ms, bw, fc._fsGeocaches, memBuffer, extraBuffer, freeGeocacheRecords); List<Framework.Data.Log> lglist = Utils.DataAccess.GetLogs(Core.Logs, gc.Code); if (lglist.Count > 0) { recordLength = 0; extraBuffer = new byte[50]; foreach (Framework.Data.Log l in lglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(l.ID); bw.Write(l.DataFromDate.ToString("s")); bw.Write(l.Date.ToString("s")); bw.Write(l.Finder ?? ""); bw.Write(l.GeocacheCode ?? ""); bw.Write(l.ID); bw.Write(l.LogType.ID); writeRecord(fc._logsInDB, l.ID, ms, bw, fc._fsLogs, memBuffer, extraBuffer, freeLogRecords); id = string.Concat("F_", l.ID); //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFreeF); bw.Write(id); bw.Write(l.TBCode ?? ""); bw.Write(l.FinderId ?? ""); bw.Write(l.Text ?? ""); bw.Write(l.Encoded); writeRecord(fc._logsInDB, id, ms, bw, fc._fsLogs, memBuffer, extraBuffer, freeLogRecords); List<Framework.Data.LogImage> lgimglist = Utils.DataAccess.GetLogImages(Core.LogImages, l.ID); if (lgimglist.Count > 0) { recordLength = 0; extraBuffer = new byte[10]; foreach (Framework.Data.LogImage li in lgimglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(li.ID); bw.Write(li.DataFromDate.ToString("s")); bw.Write(li.LogID ?? ""); bw.Write(li.Name ?? ""); bw.Write(li.Url ?? ""); writeRecord(fc._logimgsInDB, li.ID, ms, bw, fsLogImages, memBuffer, extraBuffer, freeLogImageRecords); } } } } List<Framework.Data.Waypoint> wptlist = Utils.DataAccess.GetWaypointsFromGeocache(Core.Waypoints, gc.Code); if (wptlist.Count > 0) { recordLength = 0; extraBuffer = new byte[10]; foreach (Framework.Data.Waypoint wp in wptlist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(wp.Code); bw.Write(wp.Comment ?? ""); bw.Write(wp.DataFromDate.ToString("s")); bw.Write(wp.Description ?? ""); bw.Write(wp.GeocacheCode ?? ""); bw.Write(wp.ID ?? ""); if (wp.Lat == null || wp.Lon == null) { bw.Write(false); } else { bw.Write(true); bw.Write((double)wp.Lat); bw.Write((double)wp.Lon); } bw.Write(wp.Name ?? ""); bw.Write(wp.Time.ToString("s")); bw.Write(wp.Url ?? ""); bw.Write(wp.UrlName ?? ""); bw.Write(wp.WPType.ID); writeRecord(fc._wptsInDB, wp.Code, ms, bw, fsWaypoints, memBuffer, extraBuffer, freeWaypointRecords); } } List<Framework.Data.UserWaypoint> usrwptlist = Utils.DataAccess.GetUserWaypointsFromGeocache(Core.UserWaypoints, gc.Code); if (usrwptlist.Count > 0) { recordLength = 0; extraBuffer = new byte[10]; foreach (Framework.Data.UserWaypoint wp in usrwptlist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(wp.ID.ToString()); bw.Write(wp.Description ?? ""); bw.Write(wp.GeocacheCode ?? 
""); bw.Write(wp.Lat); bw.Write(wp.Lon); bw.Write(wp.Date.ToString("s")); writeRecord(fc._usrwptsInDB, wp.ID.ToString(), ms, bw, fsUserWaypoints, memBuffer, extraBuffer, freeUserWaypointRecords); } } List<Framework.Data.GeocacheImage> geocacheimglist = Utils.DataAccess.GetGeocacheImages(Core.GeocacheImages, gc.Code); if (geocacheimglist.Count > 0) { recordLength = 0; extraBuffer = new byte[100]; foreach (Framework.Data.GeocacheImage li in geocacheimglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(li.ID); bw.Write(li.DataFromDate.ToString("s")); bw.Write(li.GeocacheCode ?? ""); bw.Write(li.Description ?? ""); bw.Write(li.Name ?? ""); bw.Write(li.Url ?? ""); bw.Write(li.MobileUrl ?? ""); bw.Write(li.ThumbUrl ?? ""); writeRecord(fc._geocacheimgsInDB, li.ID.ToString(), ms, bw, fsGeocacheImages, memBuffer, extraBuffer, freeGeocacheImageRecords); } } index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGGEOCACHES, CopyToList.Count, index); procStep = 0; } } } } //********************************************** //fc.DatabaseInfoFilename //********************************************** XmlDocument doc = new XmlDocument(); XmlElement root = doc.CreateElement("info"); doc.AppendChild(root); XmlElement el = doc.CreateElement("IsLittleEndian"); XmlText txt = doc.CreateTextNode(BitConverter.IsLittleEndian.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GAPPVersion"); txt = doc.CreateTextNode(Core.Version.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("StorageVersion"); txt = doc.CreateTextNode("1"); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GeocacheCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._geocachesInDB.Values where !ri.FreeSlot && !ri.ID.StartsWith("F_") select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("LogCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._logsInDB.Values where !ri.FreeSlot && !ri.ID.StartsWith("F_") select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("LogImagesCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._logimgsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("WaypointCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._wptsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("UserWaypointCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._usrwptsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GeocacheImagesCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._geocacheimgsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); doc.Save(fc.DatabaseInfoFilename); return result; }
public TagResolver(ILogger log, FileCollection allFiles) : base(log: log, allFiles: allFiles) { m_log = log; }
/// <summary> /// Gets a paged list; data is always fetched directly from the database, regardless of whether the entity is cached. /// </summary> /// <param name="pPageIndex">Page index</param> /// <param name="pPageSize">Page size</param> /// <param name="pOrderBy">Sort direction</param> /// <param name="pSortExpression">Sort field</param> /// <param name="pRecordCount">Total record count</param> /// <returns>The requested page of data</returns> public static List<FileInfo> GetPagedList(int pPageIndex, int pPageSize, SortDirection pOrderBy, string pSortExpression, out int pRecordCount) { if (pPageIndex <= 1) pPageIndex = 1; List<FileInfo> list = new List<FileInfo>(); Query q = File.CreateQuery(); q.PageIndex = pPageIndex; q.PageSize = pPageSize; q.ORDER_BY(pSortExpression, pOrderBy.ToString()); FileCollection collection = new FileCollection(); collection.LoadAndCloseReader(q.ExecuteReader()); foreach (File file in collection) { FileInfo fileInfo = new FileInfo(); LoadFromDAL(fileInfo, file); list.Add(fileInfo); } pRecordCount = q.GetRecordCount(); return list; }
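GetPagedList pages at the database level through the SubSonic-style Query (PageIndex, PageSize, ORDER_BY) and reports the total row count through the out parameter. For reference, the same page arithmetic with plain LINQ as a self-contained, in-memory sketch; the real method leaves the paging to the database:

using System;
using System.Collections.Generic;
using System.Linq;

static class PagingSketch
{
    static List<T> GetPage<T>(IEnumerable<T> source, int pageIndex, int pageSize, out int recordCount)
    {
        if (pageIndex < 1) pageIndex = 1;
        var all = source.ToList();
        recordCount = all.Count;
        return all.Skip((pageIndex - 1) * pageSize).Take(pageSize).ToList();
    }

    static void Main()
    {
        var page = GetPage(Enumerable.Range(1, 25), pageIndex: 2, pageSize: 10, out int total);
        Console.WriteLine($"{page.First()}..{page.Last()} of {total}"); // 11..20 of 25
    }
}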
public Project() { Files = new FileCollection(); }
public void TestBuildConceptualWithTemplateShouldSucceed() { CreateFile("conceptual.html.js", @" exports.transform = function (model){ return JSON.stringify(model, null, ' '); }; exports.xref = null; ", _templateFolder); CreateFile("toc.tmpl.js", @" exports.getOptions = function (){ return { isShared: true }; }; ", _templateFolder); CreateFile("conceptual.html.tmpl", "{{.}}", _templateFolder); var conceptualFile = CreateFile("test.md", new[] { "---", "uid: XRef1", "---", "# Hello World", "Test link: [link text](test/test.md)", "test", }, _inputFolder); var conceptualFile2 = CreateFile("test/test.md", new[] { "---", "uid: XRef2", "---", "test", }, _inputFolder); var tocFile = CreateFile("toc.md", new[] { "#[Test](test.md)" }, _inputFolder); var tocFile2 = CreateFile("test/toc.md", new[] { "#[Test](test.md)" }, _inputFolder); FileCollection files = new FileCollection(Directory.GetCurrentDirectory()); files.Add(DocumentType.Article, new[] { conceptualFile, conceptualFile2, tocFile, tocFile2 }); BuildDocument( files, new Dictionary <string, object> { ["meta"] = "Hello world!", }, templateFolder: _templateFolder); { // check toc. Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension(tocFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(_outputFolder, Path.ChangeExtension(tocFile, RawModelFileExtension))); var expected = new Dictionary <string, object> { ["_lang"] = "csharp", ["_tocPath"] = $"{_inputFolder}/toc", ["_rel"] = "../", ["_path"] = $"{_inputFolder}/toc", ["_key"] = $"{_inputFolder}/toc.md", ["_tocRel"] = "toc", ["_tocKey"] = $"~/{_inputFolder}/toc.md", ["items"] = new object[] { new { name = "Test", href = "test.html", topicHref = "test.html" } }, ["__global"] = new { _shared = new Dictionary <string, object> { [$"~/{_inputFolder}/toc.md"] = new Dictionary <string, object> { ["_lang"] = "csharp", ["_tocPath"] = $"{_inputFolder}/toc", ["_rel"] = "../", ["_path"] = $"{_inputFolder}/toc", ["_key"] = $"{_inputFolder}/toc.md", ["_tocRel"] = "toc", ["_tocKey"] = $"~/{_inputFolder}/toc.md", ["items"] = new object[] { new { name = "Test", href = "test.html", topicHref = "test.html" } }, }, [$"~/{_inputFolder}/test/toc.md"] = new Dictionary <string, object> { ["_lang"] = "csharp", ["_tocPath"] = $"{_inputFolder}/test/toc", ["_rel"] = "../../", ["_path"] = $"{_inputFolder}/test/toc", ["_key"] = $"{_inputFolder}/test/toc.md", ["_tocRel"] = "toc", ["_tocKey"] = $"~/{_inputFolder}/test/toc.md", ["items"] = new object[] { new { name = "Test", href = "test.html", topicHref = "test.html" } }, } } } }; AssertMetadataEqual(expected, model); } { // check conceptual. 
var conceptualOutputPath = Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, ".html")); Assert.True(File.Exists(conceptualOutputPath)); Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, RawModelFileExtension))); var expected = new Dictionary <string, object> { ["_lang"] = "csharp", ["_tocPath"] = $"{_inputFolder}/toc", ["_rel"] = "../", ["_path"] = $"{_inputFolder}/test.html", ["_key"] = $"{_inputFolder}/test.md", ["_tocRel"] = "toc", ["_tocKey"] = $"~/{_inputFolder}/toc.md", ["_systemKeys"] = new[] { "conceptual", "type", "source", "path", "documentation", "title", "rawTitle", "wordCount" }, ["conceptual"] = $"\n<p sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"5\" sourceendlinenumber=\"6\">Test link: <a href=\"~/{_inputFolder}/test/test.md\" data-raw-source=\"[link text](test/test.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"5\" sourceendlinenumber=\"5\">link text</a>\ntest</p>\n", ["type"] = "Conceptual", ["source"] = model["source"], // reuse model's source, not testing this ["documentation"] = model["source"], ["path"] = $"{_inputFolder}/test.md", ["meta"] = "Hello world!", ["title"] = "Hello World", ["rawTitle"] = $"<h1 id=\"hello-world\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"4\" sourceendlinenumber=\"4\">Hello World</h1>", ["uid"] = "XRef1", ["wordCount"] = 5, ["__global"] = new { _shared = new Dictionary <string, object> { [$"~/{_inputFolder}/toc.md"] = new Dictionary <string, object> { ["_lang"] = "csharp", ["_tocPath"] = $"{_inputFolder}/toc", ["_rel"] = "../", ["_path"] = $"{_inputFolder}/toc", ["_key"] = $"{_inputFolder}/toc.md", ["_tocRel"] = "toc", ["_tocKey"] = $"~/{_inputFolder}/toc.md", ["items"] = new object[] { new { name = "Test", href = "test.html", topicHref = "test.html" } }, }, [$"~/{_inputFolder}/test/toc.md"] = new Dictionary <string, object> { ["_lang"] = "csharp", ["_tocPath"] = $"{_inputFolder}/test/toc", ["_rel"] = "../../", ["_path"] = $"{_inputFolder}/test/toc", ["_key"] = $"{_inputFolder}/test/toc.md", ["_tocRel"] = "toc", ["_tocKey"] = $"~/{_inputFolder}/test/toc.md", ["items"] = new object[] { new { name = "Test", href = "test.html", topicHref = "test.html" } }, } } } }; AssertMetadataEqual(expected, model); } }
public bool Save(FileCollection fc, bool forceFullData) { bool result = true; using (Utils.ProgressBlock fixpr = new Utils.ProgressBlock(this, STR_SAVING, STR_SAVINGDATA, 1, 0)) { byte[] memBuffer = new byte[10 * 1024 * 1024]; byte isFree = 0; byte notFree = 1; byte notFreeF = 2; using (MemoryStream ms = new MemoryStream(memBuffer)) using (BinaryWriter bw = new BinaryWriter(ms)) { //********************************************** //fc.DatabaseInfoFilename //********************************************** XmlDocument doc = new XmlDocument(); XmlElement root = doc.CreateElement("info"); doc.AppendChild(root); XmlElement el = doc.CreateElement("IsLittleEndian"); XmlText txt = doc.CreateTextNode(BitConverter.IsLittleEndian.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GAPPVersion"); txt = doc.CreateTextNode(Core.Version.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("StorageVersion"); txt = doc.CreateTextNode("1"); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GeocacheCount"); txt = doc.CreateTextNode(Core.Geocaches.Count.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("LogCount"); txt = doc.CreateTextNode(Core.Logs.Count.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("LogImagesCount"); txt = doc.CreateTextNode(Core.LogImages.Count.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("WaypointCount"); txt = doc.CreateTextNode(Core.Waypoints.Count.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("UserWaypointCount"); txt = doc.CreateTextNode(Core.UserWaypoints.Count.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GeocacheImagesCount"); txt = doc.CreateTextNode(Core.GeocacheImages.Count.ToString()); el.AppendChild(txt); root.AppendChild(el); doc.Save(fc.DatabaseInfoFilename); //********************************************** //********************************************** //********************************************** // GEOCACHES //********************************************** //delete geocaches that are not in the list anymore. 
List<RecordInfo> deletedRecords = (from RecordInfo ri in fc._geocachesInDB.Values where !ri.FreeSlot && ri.ID[0] != 'F' && Core.Geocaches.GetGeocache(ri.ID) == null select ri).ToList(); List<RecordInfo> deletedFRecords = new List<RecordInfo>(); foreach (RecordInfo ri in deletedRecords) { string id = ri.ID; //mark current record as free (change id) fc._geocachesInDB.Remove(ri.ID); ri.ID = string.Concat("_", ri.ID); ri.FreeSlot = true; fc._geocachesInDB.Add(ri.ID, ri); //scratch file to mark it as free fc._fsGeocaches.Position = ri.Offset + sizeof(long); fc._fsGeocaches.WriteByte(isFree); //get the F_ record too RecordInfo fri = fc._geocachesInDB[string.Concat("F_", id)] as RecordInfo; if (fri != null && !fri.FreeSlot) { //mark current record as free (change id) fc._geocachesInDB.Remove(fri.ID); fri.ID = string.Concat("_", fri.ID); fri.FreeSlot = true; fc._geocachesInDB.Add(fri.ID, fri); //scratch file to mark it as free fc._fsGeocaches.Position = fri.Offset + sizeof(long); fc._fsGeocaches.WriteByte(isFree); deletedFRecords.Add(fri); } } deletedRecords.AddRange(deletedFRecords); //now get all the selected and data changed geocaches List<Framework.Data.Geocache> gclist = (from Framework.Data.Geocache wp in Core.Geocaches where !wp.Saved select wp).ToList(); if (gclist.Count > 0) { using (Utils.ProgressBlock progress = new Utils.ProgressBlock(this, STR_SAVING, STR_SAVINGGEOCACHES, gclist.Count, 0)) { //fix block > ID = GC12345 //fulldata > ID = F_GC12345 long recordLength = 0; byte[] extraBuffer = new byte[200]; List<RecordInfo> freeRecords = (from RecordInfo ri in fc._geocachesInDB.Values where ri.FreeSlot select ri).OrderByDescending(x=>x.Length).ToList(); int index = 0; int procStep = 0; foreach (Framework.Data.Geocache gc in gclist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(gc.Code); bw.Write(gc.Archived); WriteIntegerArray(bw, gc.AttributeIds); bw.Write(gc.Available); bw.Write(gc.City ?? ""); bw.Write(gc.Container.ID); bw.Write(gc.CustomCoords); bw.Write(gc.Country ?? ""); bw.Write(gc.ContainsCustomLatLon); if (gc.ContainsCustomLatLon) { bw.Write((double)gc.CustomLat); bw.Write((double)gc.CustomLon); } bw.Write(gc.Difficulty); bw.Write(gc.EncodedHints ?? ""); bw.Write(gc.Favorites); bw.Write(gc.Flagged); bw.Write(gc.Found); bw.Write(gc.GeocacheType.ID); bw.Write(gc.ID ?? ""); bw.Write(gc.Lat); bw.Write(gc.Lon); bw.Write(gc.MemberOnly); bw.Write(gc.Municipality ?? ""); bw.Write(gc.Name ?? ""); bw.Write(gc.Notes ?? ""); bw.Write(gc.Owner ?? ""); bw.Write(gc.OwnerId ?? ""); bw.Write(gc.PersonaleNote ?? ""); bw.Write(gc.PlacedBy ?? ""); bw.Write(((DateTime)gc.PublishedTime).ToString("s")); bw.Write(gc.State ?? ""); bw.Write(gc.Terrain); bw.Write(gc.Title ?? ""); bw.Write(gc.Url ?? ""); bw.Write(gc.DataFromDate.ToString("s")); bw.Write(gc.Locked); writeRecord(fc._geocachesInDB, gc.Code, ms, bw, fc._fsGeocaches, memBuffer, extraBuffer, freeRecords); //other record if (forceFullData || gc.FullDataLoaded) { string id = string.Concat("F_", gc.Code); //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFreeF); bw.Write(id); bw.Write(gc.ShortDescription ?? ""); bw.Write(gc.ShortDescriptionInHtml); bw.Write(gc.LongDescription ?? 
""); bw.Write(gc.LongDescriptionInHtml); writeRecord(fc._geocachesInDB, id, ms, bw, fc._fsGeocaches, memBuffer, extraBuffer, freeRecords); } gc.Saved = true; index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGGEOCACHES, gclist.Count, index); procStep = 0; } } } fc._fsGeocaches.Flush(); } //********************************************** // LOGS //********************************************** //delete geocaches that are not in the list anymore. deletedRecords = (from RecordInfo ri in fc._logsInDB.Values where !ri.FreeSlot && ri.ID[0] != 'F' && Core.Logs.GetLog(ri.ID) == null select ri).ToList(); deletedFRecords.Clear(); foreach (RecordInfo ri in deletedRecords) { string id = ri.ID; //mark current record as free (change id) fc._logsInDB.Remove(ri.ID); ri.ID = string.Concat("_", ri.ID); ri.FreeSlot = true; fc._logsInDB.Add(ri.ID, ri); //scratch file to mark it as free fc._fsLogs.Position = ri.Offset + sizeof(long); fc._fsLogs.WriteByte(isFree); //get the F_ record too RecordInfo fri = fc._logsInDB[string.Concat("F_", id)] as RecordInfo; if (fri != null && !fri.FreeSlot) { //mark current record as free (change id) fc._logsInDB.Remove(fri.ID); fri.ID = string.Concat("_", fri.ID); fri.FreeSlot = true; fc._logsInDB.Add(fri.ID, fri); //scratch file to mark it as free fc._fsLogs.Position = fri.Offset + sizeof(long); fc._fsLogs.WriteByte(isFree); deletedFRecords.Add(fri); } } deletedRecords.AddRange(deletedFRecords); List<Framework.Data.Log> lglist = (from Framework.Data.Log wp in Core.Logs where !wp.Saved select wp).ToList(); if (lglist.Count > 0) { List<RecordInfo> freeRecords = (from RecordInfo ri in fc._logsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); int index = 0; int procStep = 0; using (Utils.ProgressBlock progress = new ProgressBlock(this, STR_SAVING, STR_SAVINGLOGS, lglist.Count, 0)) { long recordLength = 0; byte[] extraBuffer = new byte[50]; foreach (Framework.Data.Log l in lglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(l.ID); bw.Write(l.DataFromDate.ToString("s")); bw.Write(l.Date.ToString("s")); bw.Write(l.Finder ?? ""); bw.Write(l.GeocacheCode ?? ""); bw.Write(l.ID); bw.Write(l.LogType.ID); writeRecord(fc._logsInDB, l.ID, ms, bw, fc._fsLogs, memBuffer, extraBuffer, freeRecords); if (forceFullData || l.FullDataLoaded) { string id = string.Concat("F_", l.ID); //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFreeF); bw.Write(id); bw.Write(l.TBCode ?? ""); bw.Write(l.FinderId ?? ""); bw.Write(l.Text ?? ""); bw.Write(l.Encoded); writeRecord(fc._logsInDB, id, ms, bw, fc._fsLogs, memBuffer, extraBuffer, freeRecords); } l.Saved = true; index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGLOGS, lglist.Count, index); procStep = 0; } } } fc._fsLogs.Flush(); } //********************************************** // WAYPOINTS //********************************************** using (FileStream fs = File.Open(fc.WaypointsFilename, FileMode.OpenOrCreate, FileAccess.Write)) { //delete geocaches that are not in the list anymore. 
deletedRecords = (from RecordInfo ri in fc._wptsInDB.Values where !ri.FreeSlot && Core.Waypoints.getWaypoint(ri.ID) == null select ri).ToList(); foreach (RecordInfo ri in deletedRecords) { //mark current record as free (change id) fc._wptsInDB.Remove(ri.ID); ri.ID = string.Concat("_", ri.ID); ri.FreeSlot = true; fc._wptsInDB.Add(ri.ID, ri); //scratch file to mark it as free fs.Position = ri.Offset + sizeof(long); fs.WriteByte(isFree); } List<Framework.Data.Waypoint> wptlist = (from Framework.Data.Waypoint wp in Core.Waypoints where !wp.Saved select wp).ToList(); if (wptlist.Count > 0) { List<RecordInfo> freeRecords = (from RecordInfo ri in fc._wptsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); int index = 0; int procStep = 0; using (Utils.ProgressBlock progress = new ProgressBlock(this, STR_SAVING, STR_SAVINGWAYPOINTS, wptlist.Count, 0)) { long recordLength = 0; byte[] extraBuffer = new byte[10]; foreach (Framework.Data.Waypoint wp in wptlist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(wp.Code); bw.Write(wp.Comment ?? ""); bw.Write(wp.DataFromDate.ToString("s")); bw.Write(wp.Description ?? ""); bw.Write(wp.GeocacheCode ?? ""); bw.Write(wp.ID ?? ""); if (wp.Lat == null || wp.Lon == null) { bw.Write(false); } else { bw.Write(true); bw.Write((double)wp.Lat); bw.Write((double)wp.Lon); } bw.Write(wp.Name ?? ""); bw.Write(wp.Time.ToString("s")); bw.Write(wp.Url ?? ""); bw.Write(wp.UrlName ?? ""); bw.Write(wp.WPType.ID); writeRecord(fc._wptsInDB, wp.Code, ms, bw, fs, memBuffer, extraBuffer, freeRecords); wp.Saved = true; index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGWAYPOINTS, lglist.Count, index); procStep = 0; } } } } fs.Flush(); } //********************************************** // LOGIMAGES //********************************************** using (FileStream fs = File.Open(fc.LogImagesFilename, FileMode.OpenOrCreate, FileAccess.Write)) { //delete geocaches that are not in the list anymore. deletedRecords = (from RecordInfo ri in fc._logimgsInDB.Values where !ri.FreeSlot && Core.LogImages.GetLogImage(ri.ID) == null select ri).ToList(); foreach (RecordInfo ri in deletedRecords) { //mark current record as free (change id) fc._logimgsInDB.Remove(ri.ID); ri.ID = string.Concat("_", ri.ID); ri.FreeSlot = true; fc._logimgsInDB.Add(ri.ID, ri); //scratch file to mark it as free fs.Position = ri.Offset + sizeof(long); fs.WriteByte(isFree); } List<Framework.Data.LogImage> lgimglist = (from Framework.Data.LogImage wp in Core.LogImages where !wp.Saved select wp).ToList(); if (lgimglist.Count > 0) { List<RecordInfo> freeRecords = (from RecordInfo ri in fc._logimgsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); int index = 0; int procStep = 0; using (Utils.ProgressBlock progress = new ProgressBlock(this, STR_SAVING, STR_SAVINGLOGIMAGES, lgimglist.Count, 0)) { long recordLength = 0; byte[] extraBuffer = new byte[10]; foreach (Framework.Data.LogImage li in lgimglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(li.ID); bw.Write(li.DataFromDate.ToString("s")); bw.Write(li.LogID ?? ""); bw.Write(li.Name ?? ""); bw.Write(li.Url ?? 
""); writeRecord(fc._logimgsInDB, li.ID, ms, bw, fs, memBuffer, extraBuffer, freeRecords); li.Saved = true; index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGLOGIMAGES, lgimglist.Count, index); procStep = 0; } } } } fs.Flush(); } //********************************************** // GEOCACHEIMAGES //********************************************** using (FileStream fs = File.Open(fc.GeocacheImagesFilename, FileMode.OpenOrCreate, FileAccess.Write)) { //delete geocaches that are not in the list anymore. deletedRecords = (from RecordInfo ri in fc._geocacheimgsInDB.Values where !ri.FreeSlot && Core.GeocacheImages.GetGeocacheImage(ri.ID) == null select ri).ToList(); foreach (RecordInfo ri in deletedRecords) { //mark current record as free (change id) fc._geocacheimgsInDB.Remove(ri.ID); ri.ID = string.Concat("_", ri.ID); ri.FreeSlot = true; fc._geocacheimgsInDB.Add(ri.ID, ri); //scratch file to mark it as free fs.Position = ri.Offset + sizeof(long); fs.WriteByte(isFree); } List<Framework.Data.GeocacheImage> lgimglist = (from Framework.Data.GeocacheImage wp in Core.GeocacheImages where !wp.Saved select wp).ToList(); if (lgimglist.Count > 0) { List<RecordInfo> freeRecords = (from RecordInfo ri in fc._geocacheimgsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); int index = 0; int procStep = 0; using (Utils.ProgressBlock progress = new ProgressBlock(this, STR_SAVING, STR_SAVINGGEOCACHEIMAGES, lgimglist.Count, 0)) { long recordLength = 0; byte[] extraBuffer = new byte[100]; foreach (Framework.Data.GeocacheImage li in lgimglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(li.ID); bw.Write(li.DataFromDate.ToString("s")); bw.Write(li.GeocacheCode ?? ""); bw.Write(li.Description ?? ""); bw.Write(li.Name ?? ""); bw.Write(li.Url ?? ""); bw.Write(li.MobileUrl ?? ""); bw.Write(li.ThumbUrl ?? ""); writeRecord(fc._geocacheimgsInDB, li.ID, ms, bw, fs, memBuffer, extraBuffer, freeRecords); li.Saved = true; index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGGEOCACHEIMAGES, lgimglist.Count, index); procStep = 0; } } } } fs.Flush(); } //********************************************** // USER WAYPOINTS //********************************************** using (FileStream fs = File.Open(fc.UserWaypointsFilename, FileMode.OpenOrCreate, FileAccess.Write)) { //delete geocaches that are not in the list anymore. 
deletedRecords = (from RecordInfo ri in fc._usrwptsInDB.Values where !ri.FreeSlot && Core.UserWaypoints.getWaypoint(int.Parse(ri.ID)) == null select ri).ToList(); foreach (RecordInfo ri in deletedRecords) { //mark current record as free (change id) fc._usrwptsInDB.Remove(ri.ID); ri.ID = string.Concat("_", ri.ID); ri.FreeSlot = true; fc._usrwptsInDB.Add(ri.ID, ri); //scratch file to mark it as free fs.Position = ri.Offset + sizeof(long); fs.WriteByte(isFree); } List<Framework.Data.UserWaypoint> usrwptlist = (from Framework.Data.UserWaypoint wp in Core.UserWaypoints where !wp.Saved select wp).ToList(); if (usrwptlist.Count > 0) { List<RecordInfo> freeRecords = (from RecordInfo ri in fc._usrwptsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); long recordLength = 0; byte[] extraBuffer = new byte[10]; foreach (Framework.Data.UserWaypoint wp in usrwptlist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(wp.ID.ToString()); bw.Write(wp.Description ?? ""); bw.Write(wp.GeocacheCode ?? ""); bw.Write(wp.Lat); bw.Write(wp.Lon); bw.Write(wp.Date.ToString("s")); writeRecord(fc._usrwptsInDB, wp.ID.ToString(), ms, bw, fs, memBuffer, extraBuffer, freeRecords); wp.Saved = true; } } fs.Flush(); } } } return result; }
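// The save routines above all follow the same write pattern: a placeholder record length is written
// first ("overwrite afterwards"), then a status byte (notFree = 1, notFreeF = 2 for the "F_" full-data
// record, 0 appears to mark a free slot), then the id and the payload fields. The length is patched in
// later; writeRecord (not shown in this excerpt) also handles free-slot reuse. A minimal, simplified
// sketch of the length-patching part only, without the free-slot handling:
static void WriteLengthPrefixedRecord(System.IO.FileStream fs, byte flag, string id, Action<System.IO.BinaryWriter> writePayload)
{
    long start = fs.Position;
    using (var bw = new System.IO.BinaryWriter(fs, System.Text.Encoding.UTF8, leaveOpen: true))
    {
        bw.Write(0L);           // placeholder for the total record length
        bw.Write(flag);         // 1 = in use, 2 = "F_" full-data record, 0 = free slot
        bw.Write(id);           // length-prefixed string, as BinaryWriter.Write(string) produces
        writePayload(bw);       // the per-record fields written inline above
        long end = fs.Position;
        fs.Position = start;
        bw.Write(end - start);  // patch the real length over the placeholder
        fs.Position = end;
    }
}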
private List <Pollux.Entities.DocumentoItem> BuscarArquivosSharePoint(string urlSite, string urlFolderDetail, DateTime?DataCriacao, DateTime?DataInicial, DateTime?DataFinal) { List <Pollux.Entities.DocumentoItem> listaDocs = new List <Pollux.Entities.DocumentoItem>(); using (ClientContext spClientContext = new ClientContext(urlSite)) { spClientContext.Credentials = new NetworkCredential(usuarioSharePoint, senhaSharePoint, domain); var rootWeb = spClientContext.Web; Folder pastaPrincipal = rootWeb.GetFolderByServerRelativeUrl(urlFolderDetail); spClientContext.Load(pastaPrincipal, fs => fs.Files, p => p.Folders); spClientContext.ExecuteQuery(); FolderCollection folderCollection = pastaPrincipal.Folders; FileCollection fileCollection = pastaPrincipal.Files; foreach (var arquivo in fileCollection) { if (DataCriacao.HasValue) { if (arquivo.TimeCreated.Date == DataCriacao) { listaDocs.Add(MontarDocumento(arquivo, urlSite)); } } else if (DataInicial.HasValue && DataFinal.HasValue) { if ((arquivo.TimeCreated.Date >= DataInicial) && (arquivo.TimeCreated.Date <= DataFinal)) { listaDocs.Add(MontarDocumento(arquivo, urlSite)); } } else { listaDocs.Add(MontarDocumento(arquivo, urlSite)); } } } using (ClientContext spClientContext = new ClientContext(urlSite)) { spClientContext.Credentials = new NetworkCredential(usuarioSharePoint, senhaSharePoint, domain); var rootweb = spClientContext.Web; FolderCollection folderCollection = rootweb.GetFolderByServerRelativeUrl(urlFolderDetail).Folders; spClientContext.Load(folderCollection, fs => fs.Include(f => f.ListItemAllFields)); spClientContext.ExecuteQuery(); foreach (Folder folder in folderCollection) { var item = folder.ListItemAllFields; var datacriacao = (DateTime)item["Created"]; var nomedapasta = (string)item["Title"]; var urlrelativa = (string)item["FileRef"]; if (DataCriacao.HasValue) { if (datacriacao.Date == DataCriacao) { listaDocs.Add(MontarDocumentoPasta(datacriacao, nomedapasta, urlrelativa, urlSite)); } } else if (DataInicial.HasValue && DataFinal.HasValue) { if ((datacriacao.Date >= DataInicial) && (datacriacao <= DataFinal)) { listaDocs.Add(MontarDocumentoPasta(datacriacao, nomedapasta, urlrelativa, urlSite)); } } else { listaDocs.Add(MontarDocumentoPasta(datacriacao, nomedapasta, urlrelativa, urlSite)); } } } return(listaDocs); }
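// A hypothetical call from within the same class (the site URL and folder path below are placeholders).
// With only DataCriacao set, the method returns the files and the subfolders of the detail folder that
// were created on that date:
List<Pollux.Entities.DocumentoItem> docs = BuscarArquivosSharePoint(
    "https://intranet.example.com/sites/pollux",           // urlSite (placeholder)
    "/sites/pollux/Documentos/Detalhe",                    // urlFolderDetail (placeholder)
    DataCriacao: DateTime.Today,
    DataInicial: null,
    DataFinal: null);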
public override bool PrepareNew() { bool result = false; using (System.Windows.Forms.SaveFileDialog dlg = new System.Windows.Forms.SaveFileDialog()) { dlg.InitialDirectory = System.IO.Path.GetDirectoryName(PluginSettings.Instance.ActiveDataFile); dlg.Filter = "*.gpp|*.gpp"; dlg.FileName = ""; if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK) { try { if (System.IO.File.Exists(dlg.FileName)) { System.IO.File.Delete(dlg.FileName); } string fn = FileCollection.getFilename(dlg.FileName, EXT_GEOCACHES); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_LOGIMAGES); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_GEOCACHEIMAGES); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_LOGS); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_USERWAYPOINTS); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_WAYPPOINTS); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } PluginSettings.Instance.ActiveDataFile = dlg.FileName; SetDataSourceName(PluginSettings.Instance.ActiveDataFile); using (FrameworkDataUpdater upd = new FrameworkDataUpdater(Core)) { Core.Geocaches.Clear(); Core.Logs.Clear(); Core.LogImages.Clear(); Core.Waypoints.Clear(); Core.UserWaypoints.Clear(); Core.GeocacheImages.Clear(); } FileCollection newFileCollection = new FileCollection(PluginSettings.Instance.ActiveDataFile); if (_fileCollection != null) { _fileCollection.Dispose(); _fileCollection = null; } _fileCollection = newFileCollection; return true; } catch { } } } return result; }
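// The block above deletes any previously existing data file set by repeating the same Exists/Delete
// pair per extension, and PrepareCopyToNew later in this file repeats the pattern again. A possible
// helper that factors it out (a sketch only, using the EXT_* constants exactly as they are spelled
// in the code above):
private static void DeleteExistingDataFiles(string baseFileName)
{
    if (System.IO.File.Exists(baseFileName)) { System.IO.File.Delete(baseFileName); }
    foreach (string ext in new[] { EXT_GEOCACHES, EXT_LOGIMAGES, EXT_GEOCACHEIMAGES, EXT_LOGS, EXT_USERWAYPOINTS, EXT_WAYPPOINTS })
    {
        string fn = FileCollection.getFilename(baseFileName, ext);
        if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); }
    }
}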
private static ITagResolver ResolveTags(IEnumerable<Commit> commits, FileCollection includedFiles) { var tagResolver = new TagResolver(m_log, includedFiles) { PartialTagThreshold = m_config.PartialTagThreshold }; // resolve tags var allTags = includedFiles.SelectMany(f => f.AllTags).Where(t => m_config.TagMatcher.Match(t)); if (!tagResolver.Resolve(allTags.Distinct(), commits)) { // ignore branchpoint tags that are unresolved var unresolvedTags = tagResolver.UnresolvedTags.OrderBy(i => i); m_log.WriteLine("Unresolved tags:"); using (m_log.Indent()) { foreach (var tag in unresolvedTags) m_log.WriteLine("{0}", tag); } throw new ImportFailedException(String.Format("Unable to resolve all tags to a single commit: {0}", unresolvedTags.StringJoin(", "))); } m_resolvedTags = tagResolver.ResolvedTags; return tagResolver; }
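// The tag-gathering step above collects every tag from every included file, filters it through the
// configured matcher, and de-duplicates before resolving. The matcher type is not shown in this
// excerpt; in the sketch below a Regex stands in for m_config.TagMatcher:
var tagMatcher = new System.Text.RegularExpressions.Regex(@"^release-\d+(\.\d+)*$");  // stand-in pattern
var candidateTags = includedFiles
    .SelectMany(f => f.AllTags)            // every tag on every included file
    .Where(t => tagMatcher.IsMatch(t))     // keep only tags the configuration cares about
    .Distinct();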
public void TestBuildWithInvalidPath() { #region Prepare test data var resourceFile = Path.GetFileName(typeof(DocumentBuilderTest).Assembly.Location); var resourceMetaFile = resourceFile + ".meta"; CreateFile("conceptual.html.primary.tmpl", "{{{conceptual}}}", _templateFolder); var tocFile = CreateFile("toc.md", new[] { "# [test1](test.md)", "## [test2](test/test.md)", }, _inputFolder); var conceptualFile = CreateFile("test.md", new[] { "# Hello World", "Test link: [link 1](test/test.md)", "Test link: [link 2](http://www.microsoft.com)", "Test link: [link 3](a b c.md)", "Test link: [link 4](c:\\a.md)", "Test link: [link 5](\\a.md)", "Test link: [link 6](urn:a.md)", "Test link: [link 7](bad urn:a.md)", "Test link: [link 8](test/test.md#top)", "Test link: [link 9](a.md#top)", "Test link: [link 10](#top)", }, _inputFolder); var conceptualFile2 = CreateFile("test/test.md", new[] { "# Hello World", "Test link: [link 1](../test.md)", }, _inputFolder); FileCollection files = new FileCollection(Directory.GetCurrentDirectory()); files.Add(DocumentType.Article, new[] { tocFile, conceptualFile, conceptualFile2 }); #endregion try { using (new LoggerPhaseScope(nameof(DocumentBuilderTest))) { BuildDocument( files, new Dictionary <string, object> { ["meta"] = "Hello world!", }, templateFolder: _templateFolder); } { // check toc. Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension(tocFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <TocItemViewModel>(Path.Combine(_outputFolder, Path.ChangeExtension(tocFile, RawModelFileExtension))).Items; Assert.NotNull(model); Assert.Equal("test1", model[0].Name); Assert.Equal("test.html", model[0].Href); Assert.NotNull(model[0].Items); Assert.Equal("test2", model[0].Items[0].Name); Assert.Equal("test/test.html", model[0].Items[0].Href); } { // check conceptual. 
var conceptualOutputPath = Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, ".html")); Assert.True(File.Exists(conceptualOutputPath)); Assert.True(File.Exists(Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, RawModelFileExtension)))); var model = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(_outputFolder, Path.ChangeExtension(conceptualFile, RawModelFileExtension))); Assert.Equal( $"<h1 id=\"hello-world\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"1\" sourceendlinenumber=\"1\">Hello World</h1>", model["rawTitle"]); Assert.Equal( string.Join( "\n", "", $"<p sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"2\" sourceendlinenumber=\"11\">Test link: <a href=\"~/{_inputFolder}/test/test.md\" data-raw-source=\"[link 1](test/test.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"2\" sourceendlinenumber=\"2\">link 1</a>", $"Test link: <a href=\"http://www.microsoft.com\" data-raw-source=\"[link 2](http://www.microsoft.com)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"3\" sourceendlinenumber=\"3\">link 2</a>", $"Test link: <a href=\"a b c.md\" data-raw-source=\"[link 3](a b c.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"4\" sourceendlinenumber=\"4\">link 3</a>", $"Test link: <a href=\"c:\\a.md\" data-raw-source=\"[link 4](c:\\a.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"5\" sourceendlinenumber=\"5\">link 4</a>", $"Test link: <a href=\"\\a.md\" data-raw-source=\"[link 5](\\a.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"6\" sourceendlinenumber=\"6\">link 5</a>", $"Test link: <a href=\"urn:a.md\" data-raw-source=\"[link 6](urn:a.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"7\" sourceendlinenumber=\"7\">link 6</a>", $"Test link: <a href=\"bad urn:a.md\" data-raw-source=\"[link 7](bad urn:a.md)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"8\" sourceendlinenumber=\"8\">link 7</a>", $"Test link: <a href=\"~/{_inputFolder}/test/test.md#top\" data-raw-source=\"[link 8](test/test.md#top)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"9\" sourceendlinenumber=\"9\">link 8</a>", $"Test link: <a href=\"a.md#top\" data-raw-source=\"[link 9](a.md#top)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"10\" sourceendlinenumber=\"10\">link 9</a>", $"Test link: <a href=\"#top\" data-raw-source=\"[link 10](#top)\" sourcefile=\"{_inputFolder}/test.md\" sourcestartlinenumber=\"11\" sourceendlinenumber=\"11\">link 10</a></p>", ""), model[Constants.PropertyName.Conceptual]); Assert.Equal( string.Join( "\n", "", "<p>Test link: <a href=\"test/test.html\">link 1</a>", $"Test link: <a href=\"http://www.microsoft.com\">link 2</a>", $"Test link: <a href=\"a b c.md\">link 3</a>", $"Test link: <a href=\"c:\\a.md\">link 4</a>", $"Test link: <a href=\"\\a.md\">link 5</a>", $"Test link: <a href=\"urn:a.md\">link 6</a>", $"Test link: <a href=\"bad urn:a.md\">link 7</a>", $"Test link: <a href=\"test/test.html#top\">link 8</a>", $"Test link: <a href=\"a.md#top\">link 9</a>", $"Test link: <a href=\"#top\">link 10</a></p>", ""), File.ReadAllText(conceptualOutputPath)); Assert.Equal("Conceptual", model["type"]); Assert.Equal("Hello world!", model["meta"]); } } finally { } }
/// <summary> /// Constructs a new UnityFileBuilder. /// </summary> /// <param name="InSplitLength">The accumulated length at which to automatically split a unity file, or -1 to disable automatic splitting.</param> public UnityFileBuilder(int InSplitLength) { UnityFiles = new List<FileCollection>(); CurrentCollection = new FileCollection(); SplitLength = InSplitLength; }
public void TestBuild() { const string documentsBaseDir = "documents"; const string outputBaseDir = "output"; #region Prepare test data if (Directory.Exists(documentsBaseDir)) { Directory.Delete(documentsBaseDir, true); } if (Directory.Exists(outputBaseDir)) { Directory.Delete(outputBaseDir, true); } Directory.CreateDirectory(documentsBaseDir); Directory.CreateDirectory(documentsBaseDir + "/test"); Directory.CreateDirectory(outputBaseDir); var conceptualFile = Path.Combine(documentsBaseDir, "test.md"); var conceptualFile2 = Path.Combine(documentsBaseDir, "test/test.md"); var resourceFile = Path.GetFileName(typeof(DocumentBuilderTest).Assembly.Location); var resourceMetaFile = resourceFile + ".meta"; File.WriteAllLines( conceptualFile, new[] { "---", "uid: XRef1", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef1", "Test link: [link text](test/test.md)", "Test link: [link text 2](../" + resourceFile + ")", "<p>", "test", }); File.WriteAllLines( conceptualFile2, new[] { "---", "uid: XRef2", "a: b", "b:", " c: e", "---", "# Hello World", "Test XRef: @XRef2", "Test link: [link text](../test.md)", "<p>", "test", }); File.WriteAllText(resourceMetaFile, @"{ abc: ""xyz"", uid: ""r1"" }"); FileCollection files = new FileCollection(Environment.CurrentDirectory); files.Add(DocumentType.Article, new[] { conceptualFile, conceptualFile2 }); files.Add(DocumentType.Article, "TestData", new[] { "System.Console.csyml", "System.ConsoleColor.csyml" }); files.Add(DocumentType.Resource, new[] { resourceFile }); #endregion Init(); try { new DocumentBuilder().Build( new DocumentBuildParameters { Files = files, OutputBaseDir = Path.Combine(Environment.CurrentDirectory, outputBaseDir), Metadata = new Dictionary <string, object> { ["meta"] = "Hello world!", }.ToImmutableDictionary() }); } finally { Logger.UnregisterAllListeners(); } { // check conceptual. Assert.True(File.Exists(Path.Combine(outputBaseDir, Path.ChangeExtension(conceptualFile, ".json")))); var model = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(outputBaseDir, Path.ChangeExtension(conceptualFile, ".json"))); Assert.Equal( "<h1 id=\"hello-world\">Hello World</h1>\n" + "<p>Test XRef: <xref href=\"XRef1\"></xref>\n" + "Test link: <a href=\"~/documents/test/test.md\">link text</a>\n" + "Test link: <a href=\"~/" + resourceFile + "\">link text 2</a></p>\n" + "<p><p>\n" + "test</p>\n", model["conceptual"]); Assert.Equal("Conceptual", model["type"]); Assert.Equal("Hello world!", model["meta"]); Assert.Equal("b", model["a"]); } { // check resource. Assert.True(File.Exists(Path.Combine(outputBaseDir, resourceFile))); Assert.True(File.Exists(Path.Combine(outputBaseDir, resourceFile + ".json"))); var meta = JsonUtility.Deserialize <Dictionary <string, object> >(Path.Combine(outputBaseDir, resourceFile + ".json")); Assert.Equal(3, meta.Count); Assert.True(meta.ContainsKey("meta")); Assert.Equal("Hello world!", meta["meta"]); Assert.True(meta.ContainsKey("abc")); Assert.Equal("xyz", meta["abc"]); Assert.True(meta.ContainsKey("uid")); Assert.Equal("r1", meta["uid"]); } { // check manifest file. 
var filepath = Path.Combine(outputBaseDir, DocumentBuildContext.ManifestFileName); Assert.True(File.Exists(filepath)); var manifest = YamlUtility.Deserialize <List <Dictionary <string, object> > >(filepath); Assert.Equal(5, manifest.Count); Assert.Equal("Conceptual", manifest[0]["type"]); Assert.Equal(@"documents/test.json", manifest[0]["model"]); Assert.Equal("Conceptual", manifest[1]["type"]); Assert.Equal(@"documents/test/test.json", manifest[1]["model"]); Assert.Equal("ManagedReference", manifest[2]["type"]); Assert.Equal(@"System.Console.json", manifest[2]["model"]); Assert.Equal("ManagedReference", manifest[3]["type"]); Assert.Equal(@"System.ConsoleColor.json", manifest[3]["model"]); Assert.Equal("Resource", manifest[4]["type"]); Assert.Equal("Microsoft.DocAsCode.EntityModel.Tests.dll.json", manifest[4]["model"]); Assert.Equal("Microsoft.DocAsCode.EntityModel.Tests.dll", manifest[4]["resource"]); } { // check file map var filepath = Path.Combine(outputBaseDir, DocumentBuildContext.FileMapFileName); Assert.True(File.Exists(filepath)); var filemap = YamlUtility.Deserialize <Dictionary <string, string> >(filepath); Assert.Equal(5, filemap.Count); Assert.Equal("~/documents/test.json", filemap["~/documents/test.md"]); Assert.Equal("~/documents/test/test.json", filemap["~/documents/test/test.md"]); Assert.Equal("~/System.Console.json", filemap["~/System.Console.csyml"]); Assert.Equal("~/System.ConsoleColor.json", filemap["~/System.ConsoleColor.csyml"]); Assert.Equal("~/Microsoft.DocAsCode.EntityModel.Tests.dll", filemap["~/Microsoft.DocAsCode.EntityModel.Tests.dll"]); } { // check external xref spec var filepath = Path.Combine(outputBaseDir, DocumentBuildContext.ExternalXRefSpecFileName); Assert.True(File.Exists(filepath)); var xref = YamlUtility.Deserialize <List <XRefSpec> >(filepath); Assert.Equal(0, xref.Count); } { // check internal xref spec var filepath = Path.Combine(outputBaseDir, DocumentBuildContext.InternalXRefSpecFileName); Assert.True(File.Exists(filepath)); var xref = YamlUtility.Deserialize <List <XRefSpec> >(filepath); Assert.Equal(68, xref.Count); Assert.NotNull(xref.Single(s => s.Uid == "System.Console")); Assert.NotNull(xref.Single(s => s.Uid == "System.Console.BackgroundColor")); Assert.NotNull(xref.Single(s => s.Uid == "System.Console.SetOut(System.IO.TextWriter)")); Assert.NotNull(xref.Single(s => s.Uid == "System.Console.WriteLine(System.Int32)")); Assert.NotNull(xref.Single(s => s.Uid == "System.ConsoleColor")); Assert.NotNull(xref.Single(s => s.Uid == "System.ConsoleColor.Black")); } #region Cleanup Directory.Delete(documentsBaseDir, true); Directory.Delete(outputBaseDir, true); File.Delete(resourceMetaFile); #endregion }
/// <summary> /// Returns the list of built unity files. The UnityFileBuilder is unusable after this. /// </summary> /// <returns></returns> public List<FileCollection> GetUnityFiles() { EndCurrentUnityFile(); var Result = UnityFiles; // Null everything to ensure that failure will occur if you accidentally reuse this object. CurrentCollection = null; UnityFiles = null; return Result; }
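// A minimal usage sketch for this builder. How source files are added to CurrentCollection is not part
// of this excerpt, so AddFile below is an assumed method name:
var builder = new UnityFileBuilder(InSplitLength: 256 * 1024);
// foreach (var sourceFile in sourceFiles) { builder.AddFile(sourceFile); }   // assumed API
List<FileCollection> unityFiles = builder.GetUnityFiles();
// The builder is intentionally unusable afterwards: GetUnityFiles() nulls its internal state.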
private static ImmutableDictionary <string, ChangeKindWithDependency> GetIntersectChanges(FileCollection files, ChangeList changeList) { if (changeList == null) { return(null); } var dict = new Dictionary <string, ChangeKindWithDependency>(); foreach (var file in files.EnumerateFiles()) { string fileKey = ((RelativePath)file.File).GetPathFromWorkingFolder().ToString(); dict[fileKey] = ChangeKindWithDependency.None; } foreach (ChangeItem change in changeList) { string fileKey = ((RelativePath)change.FilePath).GetPathFromWorkingFolder().ToString(); if (dict.ContainsKey(fileKey)) { dict[fileKey] = change.Kind; } } return(dict.ToImmutableDictionary()); }
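// The same initialize-then-overlay idea in plain C#, detached from the DocFX-specific types used above:
// every known file starts out as "no change", and only changes whose key matches a known file overwrite
// that default (requires System.Linq and System.Collections.Generic):
static Dictionary<string, string> IntersectChanges(IEnumerable<string> knownFiles, IEnumerable<KeyValuePair<string, string>> changes)
{
    var result = knownFiles.ToDictionary(file => file, file => "None");
    foreach (var change in changes)
    {
        if (result.ContainsKey(change.Key))
        {
            result[change.Key] = change.Value;   // only files present in both sets are updated
        }
    }
    return result;
}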
public FileUploaderControl() { // Required to initialize variables InitializeComponent(); MaxFileSizeKB = -1; ChunkSizeMB = 3; MultiSelect = true; _files = new FileCollection { MaxUploads = 2 }; icFiles.ItemsSource = _files; progressPercent.DataContext = _files; txtUploadedBytes.DataContext = _files; txtPercent.DataContext = _files; this.Loaded += new RoutedEventHandler(FileUploaderControl_Loaded); _files.CollectionChanged += new System.Collections.Specialized.NotifyCollectionChangedEventHandler(_files_CollectionChanged); _files.PercentageChanged += new EventHandler(_files_PercentageChanged); _files.AllFilesFinished += new EventHandler(_files_AllFilesFinished); _files.ErrorOccurred += new EventHandler<UploadErrorOccurredEventArgs>(_files_ErrorOccurred); }
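// A hypothetical consumer of the control above, overriding the defaults the constructor sets (this
// assumes MaxFileSizeKB, ChunkSizeMB and MultiSelect are public settable properties, as the
// assignments above suggest):
var uploader = new FileUploaderControl
{
    MaxFileSizeKB = 10 * 1024,   // cap uploads at 10 MB instead of the unlimited default (-1)
    ChunkSizeMB = 5,
    MultiSelect = false
};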
public MailQueueRepository(IOptions <RepositoriesOptions> repositoryOptionsAccessor, ILogger <MailQueueRepository> logger) { _logger = logger; _mailQueueRepository = new FileCollection <MailModel>(Path.Combine(repositoryOptionsAccessor.Value.Path, nameof(MailQueueRepository))); }
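// A registration sketch for the repository above, assuming the standard Microsoft.Extensions
// options/DI pattern ("services" is an IServiceCollection) and that RepositoriesOptions exposes the
// Path property the constructor reads; the repository itself appends nameof(MailQueueRepository) as a
// subfolder of that path:
services.Configure<RepositoriesOptions>(options =>
{
    options.Path = @"C:\data\repositories";   // placeholder root folder
});
services.AddSingleton<MailQueueRepository>();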
public async override Task<bool> InitializeAsync(Framework.Interfaces.ICore core) { var p = new PluginSettings(core); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADING)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADINGDATA)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADINGGEOCACHES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADINGLOGS)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADINGLOGIMAGES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADINGGEOCACHEIMAGES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_LOADINGWAYPOINTS)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVING)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGDATA)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGGEOCACHES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGLOGS)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGLOGIMAGES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGGEOCACHEIMAGES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGLOGIMAGES)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_SAVINGWAYPOINTS)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_BACKINGUPDATA)); core.LanguageItems.Add(new Framework.Data.LanguageItem(STR_RESTORINGDATA)); core.LanguageItems.Add(new Framework.Data.LanguageItem(SettingsPanel.STR_BACKUPFOLDER)); core.LanguageItems.Add(new Framework.Data.LanguageItem(SettingsPanel.STR_MAXCOUNT)); core.LanguageItems.Add(new Framework.Data.LanguageItem(SettingsPanel.STR_MAXDAYS)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_TITLE)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_BACKUPFOLDER)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_BACKUPS)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_DATE)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_FILE)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_OK)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_PATH)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_RESTOREFOLDER)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_WARNING)); core.LanguageItems.Add(new Framework.Data.LanguageItem(RestoreForm.STR_OVERWRITE)); if (string.IsNullOrEmpty(PluginSettings.Instance.ActiveDataFile)) { PluginSettings.Instance.ActiveDataFile = System.IO.Path.Combine(core.PluginDataPath, "GAPPDataStorage.gpp" ); } try { _fileCollection = new FileCollection(PluginSettings.Instance.ActiveDataFile); } catch { } SetDataSourceName(PluginSettings.Instance.ActiveDataFile); core.Logs.LoadFullData += new Framework.EventArguments.LoadFullLogEventHandler(Logs_LoadFullData); core.Geocaches.LoadFullData += new Framework.EventArguments.LoadFullGeocacheEventHandler(Geocaches_LoadFullData); return await base.InitializeAsync(core); }
private bool readFiles(FileCollection fc) { bool result = false; try { //todo: when version is not compatible anymore, do a check on storage version!! int lsize = sizeof(long); byte[] memBuffer = new byte[10 * 1024 * 1024]; using (MemoryStream ms = new MemoryStream(memBuffer)) using (BinaryReader br = new BinaryReader(ms)) using (Utils.ProgressBlock progress = new Utils.ProgressBlock(this, STR_LOADINGDATA, STR_LOADINGDATA, 1, 0)) { FileStream fs = fc._fsGeocaches; fs.Position = 0; long eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(42, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._geocachesInDB.Add(ri.ID, ri); } fs = fc._fsLogs; fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(32, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._logsInDB.Add(ri.ID, ri); } using (fs = File.Open(fc.WaypointsFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(32, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._wptsInDB.Add(ri.ID, ri); } } using (fs = File.Open(fc.UserWaypointsFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(32, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._usrwptsInDB.Add(ri.ID, ri); } } using (fs = File.Open(fc.LogImagesFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(100, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._logimgsInDB.Add(ri.ID, ri); 
} } using (fs = File.Open(fc.GeocacheImagesFilename, FileMode.OpenOrCreate, FileAccess.Read)) { fs.Position = 0; eof = fs.Length; while (fs.Position < eof) { RecordInfo ri = new RecordInfo(); ri.Offset = fs.Position; fs.Read(memBuffer, 0, lsize + 1); ms.Position = 0; ri.Length = br.ReadInt64(); if (memBuffer[lsize] == 0) { //free ri.FreeSlot = true; ri.ID = string.Concat("_", ri.Offset.ToString()); } else { //lazy loading ri.FreeSlot = false; int readCount = Math.Min(64, (int)(ri.Length - lsize - 1)); fs.Read(memBuffer, 0, readCount); ms.Position = 0; ri.ID = br.ReadString(); } fs.Position = ri.Offset + ri.Length; fc._geocacheimgsInDB.Add(ri.ID, ri); } } } result = true; } catch { } return(result); }
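// Every loop in readFiles above parses the same record header: an Int64 total length, one status byte
// (0 = free slot), and, for records in use, a length-prefixed id string as written by
// BinaryWriter.Write(string). A simplified sketch of that header read (the real code goes through a
// reusable memory buffer instead of reading the stream directly):
static (long Length, bool FreeSlot, string Id) ReadRecordHeader(System.IO.FileStream fs)
{
    long offset = fs.Position;
    using (var br = new System.IO.BinaryReader(fs, System.Text.Encoding.UTF8, leaveOpen: true))
    {
        long length = br.ReadInt64();
        bool free = br.ReadByte() == 0;
        string id = free ? string.Concat("_", offset.ToString()) : br.ReadString();
        fs.Position = offset + length;   // skip the payload; it is lazy-loaded later
        return (length, free, id);
    }
}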
public override bool SaveAs() { bool result = false; FileCollection newFileCollection = new FileCollection(PluginSettings.Instance.ActiveDataFile); result = Save(newFileCollection, true); if (_fileCollection != null) { _fileCollection.Dispose(); _fileCollection = null; } _fileCollection = newFileCollection; return result; }
protected override bool PrepareCopyToNew() { bool result = false; using (System.Windows.Forms.SaveFileDialog dlg = new System.Windows.Forms.SaveFileDialog()) { if (string.Compare(PluginSettings.Instance.ActiveDataFile, dlg.FileName, true) != 0) { if (string.IsNullOrEmpty(_lastCopyToFolder)) { _lastCopyToFolder = System.IO.Path.GetDirectoryName(PluginSettings.Instance.ActiveDataFile); } dlg.InitialDirectory = _lastCopyToFolder; dlg.Filter = "*.gpp|*.gpp"; dlg.FileName = ""; if (dlg.ShowDialog() == System.Windows.Forms.DialogResult.OK) { try { if (System.IO.File.Exists(dlg.FileName)) { System.IO.File.Delete(dlg.FileName); } string fn = FileCollection.getFilename(dlg.FileName, EXT_GEOCACHES); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_LOGIMAGES); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_GEOCACHEIMAGES); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_LOGS); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_USERWAYPOINTS); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } fn = FileCollection.getFilename(dlg.FileName, EXT_WAYPPOINTS); if (System.IO.File.Exists(fn)) { System.IO.File.Delete(fn); } _selectedCopyToFilename = dlg.FileName; result = true; } catch { } } } } return(result); }
public override bool Open(bool geocachesOnly) { bool result = false; if (_fileCollection != null) { _fileCollection.Dispose(); _fileCollection = null; } _fileCollection = new FileCollection(PluginSettings.Instance.ActiveDataFile); result = Load(geocachesOnly); return result; }
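// Switching to another database therefore amounts to updating the setting and re-opening (a sketch;
// "storage" stands for an instance of this plugin class and the path is a placeholder):
PluginSettings.Instance.ActiveDataFile = @"C:\GAPP\OtherDatabase.gpp";
bool opened = storage.Open(geocachesOnly: false);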
private bool copyToSave(FileCollection fc) { bool result = true; using (Utils.ProgressBlock progress = new Utils.ProgressBlock(this, STR_SAVING, STR_SAVINGGEOCACHES, CopyToList.Count, 0)) { byte[] memBuffer = new byte[10 * 1024 * 1024]; byte notFree = 1; byte notFreeF = 2; List <RecordInfo> freeGeocacheRecords = (from RecordInfo ri in fc._geocachesInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List <RecordInfo> freeLogImageRecords = (from RecordInfo ri in fc._logimgsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List <RecordInfo> freeLogRecords = (from RecordInfo ri in fc._logsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List <RecordInfo> freeWaypointRecords = (from RecordInfo ri in fc._wptsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List <RecordInfo> freeUserWaypointRecords = (from RecordInfo ri in fc._usrwptsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); List <RecordInfo> freeGeocacheImageRecords = (from RecordInfo ri in fc._geocacheimgsInDB.Values where ri.FreeSlot select ri).OrderByDescending(x => x.Length).ToList(); using (MemoryStream ms = new MemoryStream(memBuffer)) using (BinaryWriter bw = new BinaryWriter(ms)) using (FileStream fsLogImages = File.Open(fc.LogImagesFilename, FileMode.OpenOrCreate, FileAccess.Write)) using (FileStream fsWaypoints = File.Open(fc.WaypointsFilename, FileMode.OpenOrCreate, FileAccess.Write)) using (FileStream fsUserWaypoints = File.Open(fc.UserWaypointsFilename, FileMode.OpenOrCreate, FileAccess.Write)) using (FileStream fsGeocacheImages = File.Open(fc.GeocacheImagesFilename, FileMode.OpenOrCreate, FileAccess.Write)) { //********************************************** // GEOCACHES //********************************************** long recordLength = 0; byte[] extraBuffer = new byte[200]; int index = 0; int procStep = 0; foreach (Framework.Data.Geocache gc in CopyToList) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(gc.Code); bw.Write(gc.Archived); WriteIntegerArray(bw, gc.AttributeIds); bw.Write(gc.Available); bw.Write(gc.City ?? ""); bw.Write(gc.Container.ID); bw.Write(gc.CustomCoords); bw.Write(gc.Country ?? ""); bw.Write(gc.ContainsCustomLatLon); if (gc.ContainsCustomLatLon) { bw.Write((double)gc.CustomLat); bw.Write((double)gc.CustomLon); } bw.Write(gc.Difficulty); bw.Write(gc.EncodedHints ?? ""); bw.Write(gc.Favorites); bw.Write(gc.Flagged); bw.Write(gc.Found); bw.Write(gc.GeocacheType.ID); bw.Write(gc.ID ?? ""); bw.Write(gc.Lat); bw.Write(gc.Lon); bw.Write(gc.MemberOnly); bw.Write(gc.Municipality ?? ""); bw.Write(gc.Name ?? ""); bw.Write(gc.Notes ?? ""); bw.Write(gc.Owner ?? ""); bw.Write(gc.OwnerId ?? ""); bw.Write(gc.PersonaleNote ?? ""); bw.Write(gc.PlacedBy ?? ""); bw.Write(((DateTime)gc.PublishedTime).ToString("s")); bw.Write(gc.State ?? ""); bw.Write(gc.Terrain); bw.Write(gc.Title ?? ""); bw.Write(gc.Url ?? ""); bw.Write(gc.DataFromDate.ToString("s")); bw.Write(gc.Locked); writeRecord(fc._geocachesInDB, gc.Code, ms, bw, fc._fsGeocaches, memBuffer, extraBuffer, freeGeocacheRecords); //other record string id = string.Concat("F_", gc.Code); //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFreeF); bw.Write(id); bw.Write(gc.ShortDescription ?? ""); bw.Write(gc.ShortDescriptionInHtml); bw.Write(gc.LongDescription ?? 
""); bw.Write(gc.LongDescriptionInHtml); writeRecord(fc._geocachesInDB, id, ms, bw, fc._fsGeocaches, memBuffer, extraBuffer, freeGeocacheRecords); List <Framework.Data.Log> lglist = Utils.DataAccess.GetLogs(Core.Logs, gc.Code); if (lglist.Count > 0) { recordLength = 0; extraBuffer = new byte[50]; foreach (Framework.Data.Log l in lglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(l.ID); bw.Write(l.DataFromDate.ToString("s")); bw.Write(l.Date.ToString("s")); bw.Write(l.Finder ?? ""); bw.Write(l.GeocacheCode ?? ""); bw.Write(l.ID); bw.Write(l.LogType.ID); writeRecord(fc._logsInDB, l.ID, ms, bw, fc._fsLogs, memBuffer, extraBuffer, freeLogRecords); id = string.Concat("F_", l.ID); //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFreeF); bw.Write(id); bw.Write(l.TBCode ?? ""); bw.Write(l.FinderId ?? ""); bw.Write(l.Text ?? ""); bw.Write(l.Encoded); writeRecord(fc._logsInDB, id, ms, bw, fc._fsLogs, memBuffer, extraBuffer, freeLogRecords); List <Framework.Data.LogImage> lgimglist = Utils.DataAccess.GetLogImages(Core.LogImages, l.ID); if (lgimglist.Count > 0) { recordLength = 0; extraBuffer = new byte[10]; foreach (Framework.Data.LogImage li in lgimglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(li.ID); bw.Write(li.DataFromDate.ToString("s")); bw.Write(li.LogID ?? ""); bw.Write(li.Name ?? ""); bw.Write(li.Url ?? ""); writeRecord(fc._logimgsInDB, li.ID, ms, bw, fsLogImages, memBuffer, extraBuffer, freeLogImageRecords); } } } } List <Framework.Data.Waypoint> wptlist = Utils.DataAccess.GetWaypointsFromGeocache(Core.Waypoints, gc.Code); if (wptlist.Count > 0) { recordLength = 0; extraBuffer = new byte[10]; foreach (Framework.Data.Waypoint wp in wptlist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(wp.Code); bw.Write(wp.Comment ?? ""); bw.Write(wp.DataFromDate.ToString("s")); bw.Write(wp.Description ?? ""); bw.Write(wp.GeocacheCode ?? ""); bw.Write(wp.ID ?? ""); if (wp.Lat == null || wp.Lon == null) { bw.Write(false); } else { bw.Write(true); bw.Write((double)wp.Lat); bw.Write((double)wp.Lon); } bw.Write(wp.Name ?? ""); bw.Write(wp.Time.ToString("s")); bw.Write(wp.Url ?? ""); bw.Write(wp.UrlName ?? ""); bw.Write(wp.WPType.ID); writeRecord(fc._wptsInDB, wp.Code, ms, bw, fsWaypoints, memBuffer, extraBuffer, freeWaypointRecords); } } List <Framework.Data.UserWaypoint> usrwptlist = Utils.DataAccess.GetUserWaypointsFromGeocache(Core.UserWaypoints, gc.Code); if (usrwptlist.Count > 0) { recordLength = 0; extraBuffer = new byte[10]; foreach (Framework.Data.UserWaypoint wp in usrwptlist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(wp.ID.ToString()); bw.Write(wp.Description ?? ""); bw.Write(wp.GeocacheCode ?? 
""); bw.Write(wp.Lat); bw.Write(wp.Lon); bw.Write(wp.Date.ToString("s")); writeRecord(fc._usrwptsInDB, wp.ID.ToString(), ms, bw, fsUserWaypoints, memBuffer, extraBuffer, freeUserWaypointRecords); } } List <Framework.Data.GeocacheImage> geocacheimglist = Utils.DataAccess.GetGeocacheImages(Core.GeocacheImages, gc.Code); if (geocacheimglist.Count > 0) { recordLength = 0; extraBuffer = new byte[100]; foreach (Framework.Data.GeocacheImage li in geocacheimglist) { //write to block ms.Position = 0; //block header bw.Write(recordLength); //overwrite afterwards bw.Write(notFree); bw.Write(li.ID); bw.Write(li.DataFromDate.ToString("s")); bw.Write(li.GeocacheCode ?? ""); bw.Write(li.Description ?? ""); bw.Write(li.Name ?? ""); bw.Write(li.Url ?? ""); bw.Write(li.MobileUrl ?? ""); bw.Write(li.ThumbUrl ?? ""); writeRecord(fc._geocacheimgsInDB, li.ID.ToString(), ms, bw, fsGeocacheImages, memBuffer, extraBuffer, freeGeocacheImageRecords); } } index++; procStep++; if (procStep >= 1000) { progress.UpdateProgress(STR_SAVING, STR_SAVINGGEOCACHES, CopyToList.Count, index); procStep = 0; } } } } //********************************************** //fc.DatabaseInfoFilename //********************************************** XmlDocument doc = new XmlDocument(); XmlElement root = doc.CreateElement("info"); doc.AppendChild(root); XmlElement el = doc.CreateElement("IsLittleEndian"); XmlText txt = doc.CreateTextNode(BitConverter.IsLittleEndian.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GAPPVersion"); txt = doc.CreateTextNode(Core.Version.ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("StorageVersion"); txt = doc.CreateTextNode("1"); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GeocacheCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._geocachesInDB.Values where !ri.FreeSlot && !ri.ID.StartsWith("F_") select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("LogCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._logsInDB.Values where !ri.FreeSlot && !ri.ID.StartsWith("F_") select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("LogImagesCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._logimgsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("WaypointCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._wptsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("UserWaypointCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._usrwptsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); el = doc.CreateElement("GeocacheImagesCount"); txt = doc.CreateTextNode((from RecordInfo ri in fc._geocacheimgsInDB.Values where !ri.FreeSlot select ri).Count().ToString()); el.AppendChild(txt); root.AppendChild(el); doc.Save(fc.DatabaseInfoFilename); return(result); }