/// <summary>
/// Loads every *.xml metadata document found in the configured text-work
/// folder, keeping only the ones flagged for the global index, and returns
/// them sorted by last revision date, newest first.
/// </summary>
/// <returns>The loaded documents; empty when the folder is unset or missing.</returns>
private static List<MetaDoc> LoadTextWorks()
{
    List<MetaDoc> works = new List<MetaDoc>();

    string folder = Globals.TextWorkFolder();
    if (folder.Length == 0 || !Directory.Exists(folder))
    {
        return works;
    }

    foreach (string path in Directory.GetFiles(folder, "*.xml"))
    {
        MetaDoc doc = new MetaDoc();
        if (doc.Load(path) && doc.IsShowInGlobalIndex())
        {
            works.Add(doc);
        }
    }

    // sort by last revision descending (newest first)
    works.Sort((a, b) => b.GetLastRevisionDate(true).CompareTo(a.GetLastRevisionDate(true)));
    return works;
}
/// <summary>
/// Uploads the generated HTML file of each globally-indexed document to the
/// remote FTP folder. When a document has archived revisions, the remote
/// current file is first renamed to the newest archived revision's hash so
/// that old links keep resolving.
/// </summary>
/// <param name="ftpClient">Connected FTP client used for rename/upload.</param>
/// <param name="textWorks">Documents whose HTML output must be synchronized.</param>
/// <returns>Always true; failures surface as exceptions from the FTP client.</returns>
public static bool SynchDocuments(Ftp ftpClient, List<MetaDoc> textWorks)
{
    // fix: original wrapped a constant message in a no-op string.Format
    Globals.m_Logger.Info("Publishing documents...");
    foreach (MetaDoc txtWK in textWorks)
    {
        if (!txtWK.IsShowInGlobalIndex())
        {
            continue;
        }
        string htmlFilePath = Utils.ChangePathExtension(Globals.IndexFolder() + txtWK.GetFileName(), Globals.HTML_EXT);
        string fileName = Path.GetFileName(htmlFilePath);
        if (txtWK.GetHashes().Count > 1)
        {
            // 1 because here 0 is the current one: archive the remote current
            // file under the previous revision's hash before overwriting it
            ftpClient.rename(Globals.FTPRemoteFolder() + "/" + fileName, txtWK.GetHashes()[1].GetHash() + Globals.HTML_EXT);
        }
        // TODO, move to revisions...
        ftpClient.upload(Globals.FTPRemoteFolder() + "/" + fileName, htmlFilePath);
        Globals.m_Logger.Info(string.Format("{0} added to remote ftp folder {1}", htmlFilePath, Globals.FTPRemoteFolder()));
    }
    return true;
}
/// <summary>
/// Renders a parsed text work to HTML and validates the generated markup.
/// </summary>
/// <param name="xmlTextWork">The document to convert.</param>
/// <returns>True only when both the save and the HTML validation succeed.</returns>
private static bool BuildHTMLFromTextWork(MetaDoc xmlTextWork)
{
    Globals.m_Logger.Info(string.Format("Converting {0} to html", xmlTextWork.GetFileName()));
    HTMLWriter writer = new HTMLWriter(xmlTextWork);

    bool saved = writer.Save();
    if (saved)
    {
        Globals.m_Logger.Info(string.Format("{0} generated", writer.GetOutFileName()));
    }
    else
    {
        Globals.m_Logger.Error(string.Format("Unable to save {0}!", writer.GetOutFileName()));
    }

    // non-short-circuit '&': validation runs even when the save failed,
    // matching the original control flow
    return saved & writer.ValidateHTML();
}
/// <project>Lettera22</project>
/// <copyright company="Claudio Tortorelli">
/// Copyright (c) 2019 All Rights Reserved
/// </copyright>
/// <author>Claudio Tortorelli</author>
/// <email>[email protected]</email>
/// <web>http://www.claudiotortorelli.it</web>
/// <date>Nov 2019</date>
/// <summary>
/// Entry point. In hash mode it prints the SHA256 of a document and exits;
/// otherwise it loads the text works, regenerates their HTML, rebuilds
/// (and optionally signs) the index, and synchronizes index, shared
/// resources and documents over FTP.
/// </summary>
/// https://choosealicense.com/licenses/mit/
static void Main(string[] args)
{
    // NOTE(review): presumably a bootstrap call into another class, not a
    // recursive call into this method — confirm the enclosing class is not
    // Lettera22Program.
    Lettera22Program.Main(args);
    // hash-only mode: print the document's SHA256 and exit
    if (cmdLine.IsHash())
    {
        if (!File.Exists(cmdLine.GetDocToProcess()))
        {
            Globals.m_Logger.Error("File not found: " + cmdLine.GetDocToProcess());
            // NOTE(review): the code below is still reachable unless Close()
            // terminates the process — verify Close() calls Environment.Exit
            Lettera22Program.Close();
        }
        Globals.m_Logger.Info("SHA256:");
        Globals.m_Logger.Info(Utils.GetHashSha256(cmdLine.GetDocToProcess()));
        Lettera22Program.Close();
    }
    try
    {
        List<MetaDoc> textWorks = LoadTextWorks();
        // by default process every loaded work; narrow to a single document
        // when one was requested on the command line
        List<MetaDoc> worksToProcess = textWorks;
        if (cmdLine.GetDocToProcess().Length > 0)
        {
            worksToProcess = new List<MetaDoc>();
            MetaDoc txtWK = new MetaDoc();
            if (txtWK.Load(cmdLine.GetDocToProcess()) && txtWK.IsShowInGlobalIndex())
            {
                Globals.m_Logger.Warn("Linking specific doc: " + cmdLine.GetDocToProcess());
                worksToProcess.Add(txtWK);
            }
        }
        if (worksToProcess.Count == 0)
        {
            throw new Exception("No textwork found: index is not processed");
        }
        // regenerate HTML into the publish folder; only changed works are
        // returned unless the rebuild is forced
        List<MetaDoc> updatedWorks = UpgradeHTMLToPublishFolder(worksToProcess, cmdLine.IsForced());
        if (updatedWorks.Count > 0)
        {
            // the index is rebuilt over ALL works, not just the updated ones
            string indexHash = Index.ProcessIndex(textWorks);
            if (indexHash.Length == 0)
            {
                throw new Exception("Unable to rebuild index");
            }
            if (!Utils.IsConnectionAvailable())
            {
                Globals.m_Logger.Warn("No connection. No more to do...");
                Lettera22Program.Close();
            }
            if (!Globals.IsFTPEnabled())
            {
                Globals.m_Logger.Warn("FTP is not enabled in options. No more action is possible");
                Lettera22Program.Close();
            }
            if (Globals.IsIndexSignatureEnabled())
            {
                string indexFilePath = Globals.IndexFolder() + Globals.IndexFileName();
                // a forced run tolerates a failed signature
                if (!Index.SignIndex(indexFilePath, indexHash) && !cmdLine.IsForced())
                {
                    throw new Exception("Unable to sign index");
                }
            }
            // upload index first, then shared resources, then the documents
            Ftp ftpClient = new Ftp(Globals.FTPHost(), Globals.FTPUser(), Globals.FTPPassword());
            if (!SynchIndex(ftpClient))
            {
                throw new Exception("Unable to update remote index");
            }
            if (!SynchSharedRes(ftpClient))
            {
                throw new Exception("Unable to update remote resources");
            }
            if (!SynchDocuments(ftpClient, updatedWorks))
            {
                throw new Exception("Unable to update remote documents");
            }
            if (cmdLine.IsShowResult())
            {
                // open the published site in the default browser
                System.Diagnostics.Process.Start(Globals.GetMainUrl());
            }
        }
        else
        {
            Globals.m_Logger.Info("Nothing to update...");
        }
    }
    catch (Exception ex)
    {
        Globals.m_Logger.Error(ex.Message, ex);
    }
    finally
    {
        Lettera22Program.Close();
    }
}
/// <summary>
/// Creates a writer bound to the given text work, starting with an empty
/// bibliographic reference list.
/// </summary>
/// <param name="textWork">The parsed document to render as HTML.</param>
public HTMLWriter(MetaDoc textWork)
{
    m_BiblioRefs = new List<string>();
    m_TextWork = textWork;
}
/**
 * Parses the input text file line by line and allocates the related units
 * and their contents into m_ParsedWork.
 *
 * The file has two sections: a header of attribute tags (title, author,
 * intro, ...) up to the "start" tag, then the body, where every content
 * line must live inside a unit opened by the unit-start tag.
 *
 * Returns NO_ERR on success, or ERR_WRONGPATH / ERR_FILENOTEXIST /
 * ERR_FILEEMPTY / ERR_EXCEPTION / ERR_INVALIDTREE on failure.
 */
protected RetCode Parse()
{
    // input validation
    if (m_FileInPath.Length == 0) { return(RetCode.ERR_WRONGPATH); }
    if (!File.Exists(m_FileInPath)) { return(RetCode.ERR_FILENOTEXIST); }
    string[] lines = File.ReadAllLines(m_FileInPath);
    if (lines.Length == 0) { return(RetCode.ERR_FILEEMPTY); }

    m_ParsedWork = new MetaDoc();
    int curLevel = 1;                 // indentation level of the current line
    bool bInsideContent = false;      // true once the "start" tag has been seen
    bool bParsingIntro = false;       // true while accumulating multi-line intro text
    int unitContentCount = 1;         // progressive content counter within the current unit
    Unit curUnit = null;              // unit being filled; null before the first unit tag
    List <Unit> unitList = new List <Unit>();
    string line = ""; // line currently read
    string prevLine = "";
    string introText = "";
    try
    {
        for (int iLine = 0; iLine < lines.Length; iLine++)
        {
            prevLine = line;
            line = lines[iLine];
            // skip comments
            if (line.StartsWith(m_Tags.Comment)) { continue; }
            if (line.Length == 0 && !bInsideContent) // keep empty line inside units only
            {
                continue;
            }
            if (!bInsideContent)
            {
                // here parse the document's general attribute tags.
                // NOTE(review): tags are matched with IndexOf anywhere in the
                // line, so a tag occurring mid-line also matches and tags that
                // are substrings of one another could collide — confirm intended.
                if (line.IndexOf(m_Tags.Header) >= 0) { m_ParsedWork.SetHeader(StripTag(line, m_Tags.Header)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Title) >= 0) { m_ParsedWork.SetTitle(StripTag(line, m_Tags.Title)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Subtitle) >= 0) { m_ParsedWork.SetSubTitle(StripTag(line, m_Tags.Subtitle)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Author) >= 0) { m_ParsedWork.SetAuthor(StripTag(line, m_Tags.Author)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Place) >= 0) { m_ParsedWork.SetPlace(StripTag(line, m_Tags.Place)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.PublishDate) >= 0) { m_ParsedWork.SetShowPublishDate(true); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Revision) >= 0) { m_ParsedWork.SetShowRevision(true); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.RebuildDate) >= 0) { m_ParsedWork.SetShowRebuildDate(true); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Abstract) >= 0) { m_ParsedWork.SetAbstract(StripTag(line, m_Tags.Abstract)); bParsingIntro = false; }
                // NOTE(review): the Summary tag DISABLES the summary —
                // presumably it is a "no summary" marker; confirm tag semantics
                else if (line.IndexOf(m_Tags.Summary) >= 0) { m_ParsedWork.SetSummaryEnabled(false); bParsingIntro = false; }
                // intro may span several lines: keep accumulating until the
                // next recognized tag resets bParsingIntro
                else if (line.IndexOf(m_Tags.Intro) >= 0) { introText += line; bParsingIntro = true; }
                else if (line.IndexOf(m_Tags.Category) >= 0) { m_ParsedWork.SetCategory(StripTag(line, m_Tags.Category)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.NoGlobal) >= 0) { m_ParsedWork.SetShowInGlobalIndex(false); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.NoProcess) >= 0) { m_ParsedWork.SetNoProcess(true); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Dedication) >= 0) { m_ParsedWork.SetDedication(StripTag(line, m_Tags.Dedication)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Creation) >= 0) { m_ParsedWork.SetCreationDate(StripTag(line, m_Tags.Creation)); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.ShowUnitNumber) >= 0) { m_ParsedWork.SetShowUnitNumber(true); bParsingIntro = false; }
                else if (line.IndexOf(m_Tags.Start) >= 0) // now start!
                {
                    // header section is over: flush the accumulated intro and
                    // switch to body parsing
                    bParsingIntro = false;
                    m_ParsedWork.SetIntro(StripTag(introText, m_Tags.Intro));
                    bInsideContent = true;
                }
                else
                {
                    // unrecognized line: only meaningful as an intro continuation
                    if (bParsingIntro) { introText += ("\n" + line); }
                }
                continue;
            }
            //// from here start parsing the text
            // normalize the line stripping the indentation level
            curLevel = GetIndentationLevel(line);
            line = StripIndentation(line);
            if (curUnit == null && line.Length == 0)
            {
                continue; // skip starting empty lines
            }
            if (line.StartsWith(m_Tags.UnitStart)) // new unit is found
            {
                // store previous unit
                if (curUnit != null) { unitList.Add(curUnit); }
                // create a new unit (1-based progressive id)
                curUnit = new Unit(unitList.Count + 1);
                curUnit.SetLevel(curLevel);
                curUnit.SetTitleVisible(!line.Contains(m_Tags.UnitNoTitle));
                curUnit.SetVisibleInSummary(!line.Contains(m_Tags.UnitNoSummary));
                // remove the option markers so only the title text remains
                line = line.Replace(m_Tags.UnitNoTitle, "");
                line = line.Replace(m_Tags.UnitNoSummary, "");
                line = line.Replace(m_Tags.UnitStart, "");
                curUnit.SetTitle(line);
                unitContentCount = 0;
                continue;
            }
            //// parse contents
            if (curUnit == null)
            {
                throw new Exception("No unit defined in the body text! Every content must be included inside a unit");
            }
            if (prevLine.Length == 0 && line.Length == 0)
            {
                continue; // avoid multiple empty lines
            }
            // guess the content type from the line and the unit's last content
            Content predictedContent = TestLineContentType(line, curUnit.GetLastContent());
            if (predictedContent == null) { continue; }
            ParseCode parseRet = ParseCode.NO_PARSE;
            // dispatch to the specific line parser for the predicted type
            if (predictedContent.GetType() == typeof(Quote)) { parseRet = LineParseQuote(ref line, ref unitContentCount, ref curUnit); }
            else if (predictedContent.GetType() == typeof(MetaImage)) { parseRet = LineParseImage(ref line, ref unitContentCount, ref curUnit); }
            else if (predictedContent.GetType() == typeof(ItemList)) { parseRet = LineParseItemList(ref line, ref unitContentCount, ref curUnit); }
            else if (predictedContent.GetType() == typeof(Paragraph)) { parseRet = LineParseParagraph(ref line, ref unitContentCount, ref curUnit); }
            else { throw new Exception("Not handled content detection"); }
            // NOTE(review): iLine is 0-based, so the logged line number is off
            // by one relative to editor line numbers
            if (parseRet == ParseCode.ERROR) { Globals.m_Logger.Error(string.Format("Content parser error parsing line nr. {0}", iLine)); }
            else if (parseRet == ParseCode.NO_PARSE) { Globals.m_Logger.Warn(string.Format("Content parser warning: nothing to parse at line {0}", iLine)); }
        }
        // handle last unit
        if (curUnit != null) { unitList.Add(curUnit); }
    }
    catch (Exception ex)
    {
        Globals.m_Logger.Error(ex.ToString());
        return(RetCode.ERR_EXCEPTION);
    }
    // ok, parsed. Now we organize the units' hierarchy
    Unit root = BuildUnitTree(unitList);
    if (root == null) { return(RetCode.ERR_INVALIDTREE); }
    m_ParsedWork.SetUnit(root);
    // store the source file's hash so later runs can detect changes
    m_ParsedWork.SetCurrentHash(Utils.GetHashSha256(m_FileInPath));
    return(RetCode.NO_ERR);
}