bool WriteFirmwarePalette(Document.Document tempDocument, string CPCBitmapFirmwareFilename)
{
    int[] firmwarePalette = new int[16];
    for (int i = 0; i < 16; i++)
    {
        int palIndex = 0; // black as default color (unused)
        if (i < tempDocument.GetCPCPaletteIndices().Count)
        {
            palIndex = tempDocument.GetCPCPaletteIndices()[i];
        }
        int firmwareColor = palIndex;
        int finalIndex = i + tempDocument.StartIndex;
        if (finalIndex < 16)
        {
            firmwarePalette[finalIndex] = firmwareColor;
        }
    }
    var bitmapCompiler = BitmapCompiler.CreateCompiler(tempDocument.VideoMode);
    if (!bitmapCompiler.WritePalette(CPCBitmapFirmwareFilename, firmwarePalette, null))
    {
        return false;
    }
    return true;
}
public void UpdateDependencies(PhactoryHost.Database.Resource resource)
{
    FileInfo fileInfo = Host.GetFileInfo(resource);
    if (fileInfo == null)
    {
        return;
    }
    if (!fileInfo.Exists)
    {
        return;
    }
    if (!IsResourceSupported(resource))
    {
        return;
    }
    Document.Document tempDocument = Host.XMLRead<Document.Document>(Host.GetFileInfo(resource).FullName);
    // note: tempDocument is only read here to validate the file; the dependency list is refreshed empty
    List<PhactoryHost.Database.Resource> dependentResources = new List<PhactoryHost.Database.Resource>();
    Host.RefreshDependencies(resource, dependentResources);
}
public List<int> GetBrokenResourceIDs(PhactoryHost.Database.Resource resource)
{
    FileInfo fileInfo = Host.GetFileInfo(resource);
    if (fileInfo == null)
    {
        return null;
    }
    if (!fileInfo.Exists)
    {
        return null;
    }
    List<int> brokenResourceIDs = new List<int>();
    Document.Document document = Host.XMLRead<Document.Document>(Host.GetFileInfo(resource).FullName);
    if (document != null)
    {
        foreach (Document.Item item in document.Items)
        {
            if (Host.GetResource(item.ResourceID) == null)
            {
                brokenResourceIDs.Add(item.ResourceID);
            }
        }
    }
    return brokenResourceIDs;
}
protected override void Convert(Document.Document document, int startLine, int endLine)
{
    // collect the selected lines
    List<string> lines = new List<string>();
    for (int i = startLine; i <= endLine; ++i)
    {
        LineSegment line = document.GetLineSegment(i);
        lines.Add(document.GetText(line.Offset, line.Length));
    }

    // drop empty lines (the last collected line is kept as-is)
    for (int i = 0; i < lines.Count - 1; ++i)
    {
        if (lines[i] == "")
        {
            lines.RemoveAt(i);
            --i;
        }
    }

    // write the remaining lines back over the start of the range
    for (int i = 0; i < lines.Count; ++i)
    {
        LineSegment line = document.GetLineSegment(startLine + i);
        document.Replace(line.Offset, line.Length, lines[i]);
    }

    // remove the lines left over at the end of the range
    for (int i = startLine + lines.Count; i <= endLine; ++i)
    {
        LineSegment line = document.GetLineSegment(startLine + lines.Count);
        document.Remove(line.Offset, line.TotalLength);
    }
}
public void UpdateDependencies(PhactoryHost.Database.Resource resource)
{
    FileInfo fileInfo = Host.GetFileInfo(resource);
    if (fileInfo == null)
    {
        return;
    }
    if (!fileInfo.Exists)
    {
        return;
    }
    if (!IsResourceSupported(resource))
    {
        return;
    }
    Document.Document tempDocument = Host.XMLRead<Document.Document>(Host.GetFileInfo(resource).FullName);
    List<PhactoryHost.Database.Resource> dependentResources = new List<PhactoryHost.Database.Resource>();
    foreach (Document.File file in tempDocument.Files)
    {
        PhactoryHost.Database.Resource depResource = Host.GetResource(file.ResourceID);
        if ((depResource != null) && (resource != depResource))
        {
            dependentResources.Add(depResource);
        }
    }
    Host.RefreshDependencies(resource, dependentResources);
}
public bool Compile(PhactoryHost.Database.Resource resource)
{
    FileInfo fileInfo = Host.GetFileInfo(resource);
    if (fileInfo == null)
    {
        return false;
    }
    if (!IsResourceSupported(resource))
    {
        return false;
    }

    List<string> resourceFilenames = new List<string>();
    List<string> paddingFilenames = new List<string>();
    List<int> address = new List<int>();

    Document.Document document = Host.XMLRead<Document.Document>(Host.GetFileInfo(resource).FullName);
    if (document == null)
    {
        return false;
    }
    document.Expand();

    foreach (Document.File file in document.Files)
    {
        PhactoryHost.Database.Resource iResource = Host.GetResource(file.ResourceID);
        if (iResource == null)
        {
            Host.Log("Unknown resource identifier : " + file.ResourceID);
            return false;
        }
        FileInfo iFileInfo = Host.GetFileInfo(iResource);
        resourceFilenames.Add(iFileInfo.FullName);
        paddingFilenames.Add(file.Pad256 ? "true" : "false");
        address.Add(file.SetAddress ? file.Address : 0);
    }

    var compiler = new Phactory.Modules.BigFile.Compiler.BigFileCompiler();
    string resourceRelativePathNoExt = resource.RelativePath.Replace(".cpcbigfile", "");

    App.Controller.View.AppDoEvents();

    string baseFilename = Host.MakeFullPath(resourceRelativePathNoExt);
    string headerFilename = Host.MakeFullPath(resourceRelativePathNoExt + ".H");
    if (!compiler.Compile(baseFilename, headerFilename, resourceFilenames, paddingFilenames, address,
                          document.TruncateFiles, document.FilesInBank, document.BaseAddress))
    {
        return false;
    }
    return true;
}
public (bool Saved, ushort Id) TryAddContent(byte[] content, uint typeId)
{
    // the data in a page are specific to one data type
    if (typeId != Header.PageTypeId)
    {
        return (false, 0);
    }

    var compressedContent = content;
    if (compressedContent.Length > this.Header.PageNumberOfFreeBytes)
    {
        return (false, 0);
    }

    // there is enough space in this page; now find the place to save
    var sliceIterator = _pageMemorySlice.Slice(0);
    var foundEmptySlot = FindEmptySlot(ref sliceIterator);
    if (!foundEmptySlot)
    {
        return (false, 0);
    }

    Document.Document document = new Document.Document(sliceIterator, this.Header.PageNextDocumentId);
    document.Update(content);
    //this._cachedDocuments.Add(document.DocumentId, document);
    this.IsDirty = true;

    // the original snippet is truncated after obtaining the header reference; the
    // bookkeeping and return below are a plausible completion, not verbatim source
    ref PageHeader header = ref Header;
    header.PageNextDocumentId++;
    header.PageNumberOfFreeBytes -= (ushort)compressedContent.Length;

    return (true, document.DocumentId);
}
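// A hypothetical caller sketch for TryAddContent (the Page type, member names and the
// retry policy are assumptions for illustration, not taken from the original source):
ushort SaveIntoPage(Page page, byte[] payload, uint typeId)
{
    var (saved, id) = page.TryAddContent(payload, typeId);
    if (saved)
    {
        return id;
    }

    // wrong type id or not enough free bytes: a real caller would pick or allocate
    // another page and retry instead of throwing
    throw new InvalidOperationException("Payload does not fit into this page.");
}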
protected override void Convert(Document.Document document, int startOffset, int length)
{
    string what = document.GetText(startOffset, length);
    string spaces = new string(' ', Shared.TEP.TabIndent);
    document.Replace(startOffset, length, what.Replace(spaces, "\t"));
}
public void RefreshOutput(PhactoryHost.Database.Resource resource)
{
    FileInfo fileInfo = Host.GetFileInfo(resource);
    if (fileInfo == null)
    {
        return;
    }
    if (!fileInfo.Exists)
    {
        return;
    }
    if (!IsResourceSupported(resource))
    {
        return;
    }
    Document.Document document = Host.XMLRead<Document.Document>(Host.GetFileInfo(resource).FullName);
    if (document != null)
    {
        List<string> outputFilenames = new List<string>();
        string outputFilename = fileInfo.Name + ".bin";
        outputFilenames.Add(outputFilename);
        Host.RefreshOutput(resource, outputFilenames);
    }
}
bool WritePalette(Document.Document tempDocument, string CPCBitmapFilename)
{
    int[] gateArrayPalette = new int[16];
    for (int i = 0; i < 16; i++)
    {
        int palIndex = 0; // black as default color (unused)
        if (i < tempDocument.GetCPCPaletteIndices().Count)
        {
            palIndex = tempDocument.GetCPCPaletteIndices()[i];
        }
        int gateArrayColor = tempDocument.GetCPCPaletteGateArray()[palIndex];
        int finalIndex = i + tempDocument.StartIndex;
        if (finalIndex < 16)
        {
            gateArrayPalette[finalIndex] = gateArrayColor;
        }
    }
    var bitmapCompiler = BitmapCompiler.CreateCompiler(tempDocument.VideoMode);
    if (!bitmapCompiler.WritePalette(CPCBitmapFilename, gateArrayPalette, tempDocument.CPCAsicPalette))
    {
        return false;
    }
    return true;
}
protected override void Convert(Document.Document document, int y1, int y2)
{
    for (int i = y2; i >= y1; --i)
    {
        LineSegment line = document.GetLineSegment(i);
        if (line.Length > 0)
        {
            // count how many whitespace characters there are at the start
            int whiteSpace;
            for (whiteSpace = 0;
                 whiteSpace < line.Length && char.IsWhiteSpace(document.GetCharAt(line.Offset + whiteSpace));
                 whiteSpace++)
            {
                // deliberately empty
            }
            if (whiteSpace > 0)
            {
                string newLine = document.GetText(line.Offset, whiteSpace);
                string newPrefix = newLine.Replace("\t", new string(' ', Shared.TEP.TabIndent));
                document.Replace(line.Offset, whiteSpace, newPrefix);
            }
        }
    }
}
public IActionResult Edit(Document.Document entity)
{
    entity.Author = _fn.User().UserName;
    entity.IdUser = (int)_fn.User().Id;
    _documentService.Update(entity);
    return Json("");
}
private int CopyFieldsNoDeletions(FieldsWriter fieldsWriter, IndexReader reader, FieldsReader matchingFieldsReader)
{
    int maxDoc = reader.MaxDoc;
    int docCount = 0;
    if (matchingFieldsReader != null)
    {
        // We can bulk-copy because the fieldInfos are "congruent"
        while (docCount < maxDoc)
        {
            int len = System.Math.Min(MAX_RAW_MERGE_DOCS, maxDoc - docCount);
            IndexInput stream = matchingFieldsReader.RawDocs(rawDocLengths, docCount, len);
            fieldsWriter.AddRawDocuments(stream, rawDocLengths, len);
            docCount += len;
            checkAbort.Work(300 * len);
        }
    }
    else
    {
        for (; docCount < maxDoc; docCount++)
        {
            // NOTE: it's very important to first assign to doc then pass it to
            // termVectorsWriter.addAllDocVectors; see LUCENE-1282
            Document.Document doc = reader.Document(docCount);
            fieldsWriter.AddDocument(doc);
            checkAbort.Work(300);
        }
    }
    return docCount;
}
protected override void Convert(Document.Document document, int startOffset, int length)
{
    StringBuilder sb = new StringBuilder();
    string sin = document.GetText(startOffset, length);
    char lastChar = '_';

    // Fix any errant spaces.
    sin = sin.Replace(' ', '_');

    if (sin != null)
    {
        foreach (char c in sin)
        {
            if (c == '_')
            {
                sb.Append(' ');
            }
            else if (lastChar == '_')
            {
                sb.Append(char.ToUpper(c));
            }
            else
            {
                sb.Append(char.ToLower(c));
            }
            lastChar = c;
        }
    }
    document.Replace(startOffset, length, sb.ToString());
}
// append fields from storedFieldReaders
public override Document.Document Document(int n, FieldSelector fieldSelector)
{
    EnsureOpen();
    Document.Document result = new Document.Document();
    foreach (IndexReader reader in storedFieldReaders)
    {
        bool include = (fieldSelector == null);
        if (!include)
        {
            var fields = readerToFields[reader];
            foreach (var field in fields)
            {
                if (fieldSelector.Accept(field) != FieldSelectorResult.NO_LOAD)
                {
                    include = true;
                    break;
                }
            }
        }
        if (include)
        {
            var fields = reader.Document(n, fieldSelector).GetFields();
            foreach (var field in fields)
            {
                result.Add(field);
            }
        }
    }
    return result;
}
public bool Run(PhactoryHost.Database.Resource resource)
{
    PhactoryHost.Database.Resource parentResource = Host.GetResource(resource.IdDependencies[0]);
    Document.Document document = Host.XMLRead<Document.Document>(Host.GetFileInfo(parentResource).FullName);

    string fileToExecute = "";
    foreach (Document.Item item in document.Items)
    {
        if (item.ExecAdress != 0)
        {
            fileToExecute = item.AmsdosFilename;
        }
    }

    string DSKFilename = Host.GetFileInfo(resource).FullName;
    DSKFilename = DSKFilename.Replace(".cpcdsk", ".dsk");
    FileInfo DSKFileInfo = new FileInfo(DSKFilename);

    string WinAPEFullPath = Host.GetPluginsPath() + "WinAPE.exe";
    string arguments = "\"" + DSKFileInfo.FullName + "\" /A";
    if (Host.IsVerboseOutput())
    {
        Host.Log(WinAPEFullPath + " " + arguments);
    }
    return Host.StartAndWaitAfterProcess(WinAPEFullPath, arguments, DSKFileInfo.DirectoryName);
}
private int CopyFieldsWithDeletions(FieldsWriter fieldsWriter, IndexReader reader, FieldsReader matchingFieldsReader)
{
    int docCount = 0;
    int maxDoc = reader.MaxDoc;
    if (matchingFieldsReader != null)
    {
        // We can bulk-copy because the fieldInfos are "congruent"
        for (int j = 0; j < maxDoc;)
        {
            if (reader.IsDeleted(j))
            {
                // skip deleted docs
                ++j;
                continue;
            }
            // We can optimize this case (doing a bulk byte copy) since the field
            // numbers are identical
            int start = j, numDocs = 0;
            do
            {
                j++;
                numDocs++;
                if (j >= maxDoc)
                {
                    break;
                }
                if (reader.IsDeleted(j))
                {
                    j++;
                    break;
                }
            }
            while (numDocs < MAX_RAW_MERGE_DOCS);

            IndexInput stream = matchingFieldsReader.RawDocs(rawDocLengths, start, numDocs);
            fieldsWriter.AddRawDocuments(stream, rawDocLengths, numDocs);
            docCount += numDocs;
            checkAbort.Work(300 * numDocs);
        }
    }
    else
    {
        for (int j = 0; j < maxDoc; j++)
        {
            if (reader.IsDeleted(j))
            {
                // skip deleted docs
                continue;
            }
            // NOTE: it's very important to first assign to doc then pass it to
            // termVectorsWriter.addAllDocVectors; see LUCENE-1282
            Document.Document doc = reader.Document(j);
            fieldsWriter.AddDocument(doc);
            docCount++;
            checkAbort.Work(300);
        }
    }
    return docCount;
}
public IActionResult _Edit(Document.Document entity)
{
    entity.Author = _fn.User().UserName;
    entity.IdUser = (int)_fn.User().Id;
    _documentService.Update(entity);
    //return View("Index");
    return RedirectToAction("Index", "Document");
}
public /*internal*/ Document.Document Doc(int n, FieldSelector fieldSelector)
{
    SeekIndex(n);
    long position = indexStream.ReadLong();
    fieldsStream.Seek(position);

    var doc = new Document.Document();
    int numFields = fieldsStream.ReadVInt();
    for (int i = 0; i < numFields; i++)
    {
        int fieldNumber = fieldsStream.ReadVInt();
        FieldInfo fi = fieldInfos.FieldInfo(fieldNumber);
        FieldSelectorResult acceptField = fieldSelector == null ? FieldSelectorResult.LOAD : fieldSelector.Accept(fi.name);

        byte bits = fieldsStream.ReadByte();
        System.Diagnostics.Debug.Assert(bits <= FieldsWriter.FIELD_IS_COMPRESSED + FieldsWriter.FIELD_IS_TOKENIZED + FieldsWriter.FIELD_IS_BINARY);

        bool compressed = (bits & FieldsWriter.FIELD_IS_COMPRESSED) != 0;
        System.Diagnostics.Debug.Assert(
            (!compressed || (format < FieldsWriter.FORMAT_LUCENE_3_0_NO_COMPRESSED_FIELDS)),
            "compressed fields are only allowed in indexes of version <= 2.9");
        bool tokenize = (bits & FieldsWriter.FIELD_IS_TOKENIZED) != 0;
        bool binary = (bits & FieldsWriter.FIELD_IS_BINARY) != 0;

        //TODO: Find an alternative approach here if this list continues to grow beyond the
        //list of 5 or 6 currently here. See Lucene 762 for discussion
        if (acceptField.Equals(FieldSelectorResult.LOAD))
        {
            AddField(doc, fi, binary, compressed, tokenize);
        }
        else if (acceptField.Equals(FieldSelectorResult.LOAD_AND_BREAK))
        {
            AddField(doc, fi, binary, compressed, tokenize);
            break; //Get out of this loop
        }
        else if (acceptField.Equals(FieldSelectorResult.LAZY_LOAD))
        {
            AddFieldLazy(doc, fi, binary, compressed, tokenize);
        }
        else if (acceptField.Equals(FieldSelectorResult.SIZE))
        {
            SkipField(binary, compressed, AddFieldSize(doc, fi, binary, compressed));
        }
        else if (acceptField.Equals(FieldSelectorResult.SIZE_AND_BREAK))
        {
            AddFieldSize(doc, fi, binary, compressed);
            break;
        }
        else
        {
            SkipField(binary, compressed);
        }
    }
    return doc;
}
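// A minimal, hedged sketch of how a FieldSelector drives the LOAD / LAZY_LOAD / SIZE
// branches in Doc() above: MapFieldSelector loads only the named stored fields and lets
// the rest take the SkipField path. Illustrative usage only, not part of the original source.
using Lucene.Net.Documents;
using Lucene.Net.Index;

static class StoredFieldExample
{
    public static Document LoadTitleOnly(IndexReader reader, int docId)
    {
        // only the "title" field is materialized; other stored fields are skipped
        var selector = new MapFieldSelector("title");
        return reader.Document(docId, selector);
    }
}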
/// <summary>
/// Shows the new form
/// </summary>
/// <param name="sender">event sender</param>
/// <param name="e">event arguments</param>
private void ShowNewForm(object sender, EventArgs e)
{
    Document.Document document = new Document.Document(640, 480);
    // make it a child of this MDI form before showing it
    document.MdiParent = this;
    document.Text = "Document " + m_ChildFormNumber;
    document.Show();
    ++m_ChildFormNumber;
}
protected override void Convert(Document.Document document, int startOffset, int length)
{
    StringBuilder what = new StringBuilder(document.GetText(startOffset, length));
    for (int i = 0; i < what.Length; ++i)
    {
        what[i] = char.IsUpper(what[i]) ? char.ToLower(what[i]) : char.ToUpper(what[i]);
    }
    document.Replace(startOffset, length, what.ToString());
}
// Add the size of field as a byte[] containing the 4 bytes of the integer byte size (high order byte first; char = 2 bytes)
// Read just the size -- caller must skip the field content to continue reading fields
// Return the size in bytes or chars, depending on field type
private int AddFieldSize(Document.Document doc, FieldInfo fi, bool binary, bool compressed)
{
    int size = fieldsStream.ReadVInt(), bytesize = binary || compressed ? size : 2 * size;
    var sizebytes = new byte[4];
    sizebytes[0] = (byte)(Number.URShift(bytesize, 24));
    sizebytes[1] = (byte)(Number.URShift(bytesize, 16));
    sizebytes[2] = (byte)(Number.URShift(bytesize, 8));
    sizebytes[3] = (byte)bytesize;
    doc.Add(new Field(fi.name, sizebytes, Field.Store.YES));
    return size;
}
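// A small companion sketch (an assumption for illustration, not from the original source):
// decoding the big-endian 4-byte size written by AddFieldSize back into an int,
// mirroring the URShift(24/16/8) encoding above.
static int DecodeFieldSize(byte[] sizebytes)
{
    return (sizebytes[0] << 24) | (sizebytes[1] << 16) | (sizebytes[2] << 8) | sizebytes[3];
}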
/// <summary>Adds field info for a Document. </summary>
public void Add(Document.Document doc)
{
    lock (this)
    {
        System.Collections.Generic.IList<IFieldable> fields = doc.GetFields();
        foreach (IFieldable field in fields)
        {
            Add(field.Name, field.IsIndexed, field.IsTermVectorStored, field.IsStorePositionWithTermVector,
                field.IsStoreOffsetWithTermVector, field.OmitNorms, false, field.OmitTermFreqAndPositions);
        }
    }
}
protected override void Convert(Document.Document document, int startOffset, int length)
{
    StringBuilder what = new StringBuilder(document.GetText(startOffset, length));
    for (int i = 0; i < what.Length; ++i)
    {
        if (!char.IsLetter(what[i]) && i < what.Length - 1)
        {
            what[i + 1] = char.ToUpper(what[i + 1]);
        }
    }
    document.Replace(startOffset, length, what.ToString());
}
private void AddFieldLazy(Document.Document doc, FieldInfo fi, bool binary, bool compressed, bool tokenize)
{
    if (binary)
    {
        int toRead = fieldsStream.ReadVInt();
        long pointer = fieldsStream.FilePointer;
        //was: doc.add(new Fieldable(fi.name, b, Fieldable.Store.YES));
        doc.Add(new LazyField(this, fi.name, Field.Store.YES, toRead, pointer, binary, compressed));
        //Need to move the pointer ahead by toRead positions
        fieldsStream.Seek(pointer + toRead);
    }
    else
    {
        const Field.Store store = Field.Store.YES;
        Field.Index index = FieldExtensions.ToIndex(fi.isIndexed, tokenize);
        Field.TermVector termVector = FieldExtensions.ToTermVector(fi.storeTermVector, fi.storeOffsetWithTermVector, fi.storePositionWithTermVector);

        AbstractField f;
        if (compressed)
        {
            int toRead = fieldsStream.ReadVInt();
            long pointer = fieldsStream.FilePointer;
            f = new LazyField(this, fi.name, store, toRead, pointer, binary, compressed);
            //skip over the part that we aren't loading
            fieldsStream.Seek(pointer + toRead);
            f.OmitNorms = fi.omitNorms;
            f.OmitTermFreqAndPositions = fi.omitTermFreqAndPositions;
        }
        else
        {
            int length = fieldsStream.ReadVInt();
            long pointer = fieldsStream.FilePointer;
            //Skip ahead of where we are by the length of what is stored
            if (format >= FieldsWriter.FORMAT_VERSION_UTF8_LENGTH_IN_BYTES)
            {
                fieldsStream.Seek(pointer + length);
            }
            else
            {
                fieldsStream.SkipChars(length);
            }
            f = new LazyField(this, fi.name, store, index, termVector, length, pointer, binary, compressed)
            {
                OmitNorms = fi.omitNorms,
                OmitTermFreqAndPositions = fi.omitTermFreqAndPositions
            };
        }
        doc.Add(f);
    }
}
/// <summary>
/// Get the object, which was inserted under the keyword (line, at offset, with length length),
/// returns null, if no such keyword was inserted.
/// </summary>
public object this[Document.Document document, LineSegment line, int offset, int length]
{
    get
    {
        if (length == 0)
        {
            return null;
        }
        Node next = root;
        int wordOffset = line.Offset + offset;
        if (casesensitive)
        {
            for (int i = 0; i < length; ++i)
            {
                int index = ((int)document.GetCharAt(wordOffset + i)) % 256;
                next = next[index];
                if (next == null)
                {
                    return null;
                }
                if (next.color != null && RegionMatches(document, wordOffset, length, next.word))
                {
                    return next.color;
                }
            }
        }
        else
        {
            for (int i = 0; i < length; ++i)
            {
                int index = ((int)Char.ToUpper(document.GetCharAt(wordOffset + i))) % 256;
                next = next[index];
                if (next == null)
                {
                    return null;
                }
                if (next.color != null && RegionMatches(document, casesensitive, wordOffset, length, next.word))
                {
                    return next.color;
                }
            }
        }
        return null;
    }
}
public bool Compile(PhactoryHost.Database.Resource resource)
{
    FileInfo fileInfo = Host.GetFileInfo(resource);
    if (fileInfo == null)
    {
        return false;
    }
    if (!IsResourceSupported(resource))
    {
        return false;
    }

    var compiler = new Phactory.Modules.Cloud.Compiler.CloudCompiler();
    Document.Document tempDocument = Host.XMLRead<Document.Document>(Host.GetFileInfo(resource).FullName);
    if (!tempDocument.CompileInternal())
    {
        return false;
    }

    foreach (Document.Item item in tempDocument.Items)
    {
        PhactoryHost.Database.Resource resItem = Host.GetResource(item.ResourceID);
        if (resItem == null)
        {
            Host.Log("Unknown resource identifier : " + item.ResourceID);
            return false;
        }
        FileInfo resFileInfo = Host.GetFileInfo(resItem);
        string outputFilename = resFileInfo.FullName;
        if (Host.IsVerboseOutput())
        {
            Host.Log(outputFilename);
        }
        string outputFilenameBin = outputFilename + ".bin";
        if (!compiler.WriteCloudBitmap(outputFilenameBin, item.CloudMaskData, item.CloudMaskWidth, item.CloudMaskHeight))
        {
            return false;
        }
    }
    return true;
}
protected override void Convert(Document.Document document, int y1, int y2)
{
    for (int i = y2; i >= y1; --i)
    {
        LineSegment line = document.GetLineSegment(i);
        if (line.Length > 0)
        {
            // note: some users may prefer a more radical ConvertLeadingSpacesToTabs that
            // means there can be no spaces before the first character even if the spaces
            // didn't add up to a whole number of tabs
            string newLine = TextUtilities.LeadingWhiteSpaceToTabs(document.GetText(line.Offset, line.Length), Shared.TEP.TabIndent);
            document.Replace(line.Offset, line.Length, newLine);
        }
    }
}
public static bool RegionMatches(Document.Document document, int offset, int length, char[] word)
{
    if (length != word.Length || document.TextLength < offset + length)
    {
        return false;
    }
    for (int i = 0; i < length; ++i)
    {
        if (document.GetCharAt(offset + i) != word[i])
        {
            return false;
        }
    }
    return true;
}
/// <summary>
/// Creates a new instance of <see cref="UndoableInsert"/>
/// </summary>
public UndoableInsert(Document.Document document, int offset, string text)
{
    if (document == null)
    {
        throw new ArgumentNullException("document");
    }
    if (offset < 0 || offset > document.TextLength)
    {
        throw new ArgumentOutOfRangeException("offset");
    }
    //Debug.Assert(text != null, "text can't be null");
    //oldCaretPos = document.Caret.Offset;
    this.document = document;
    this.offset = offset;
    this.text = text;
}
public void Clear()
{
    // don't hold onto doc nor analyzer, in case it is largish:
    doc = null;
    analyzer = null;
}
public bool CreateEmptyResource(PhactoryHost.Database.Resource resource)
{
    Document.Document newDoc = new Document.Document();
    return Host.XMLWrite(Host.GetFileInfo(resource).FullName, newDoc);
}
public static Document.Document DisplayDocument(DataModels.League.Documents.LeagueDocument docsDb, bool pullComments)
{
    try
    {
        Document.Document doc = new Document.Document();
        doc.DocumentId = docsDb.Document.DocumentId;
        doc.DocumentName = docsDb.Name;
        doc.OwnerDocId = docsDb.DocumentId;
        doc.SizeOfDocument = docsDb.Document.DocumentSize;
        if (docsDb.League != null)
            doc.OwnerId = docsDb.League.LeagueId;
        doc.IsRemoved = docsDb.IsRemoved;
        doc.MimeType = GetDocumentMimeType(docsDb.Name);
        doc.SaveLocation = docsDb.Document.SaveLocation;
        doc.FullText = docsDb.Document.FullText;
        doc.HasScannedText = docsDb.Document.HasScannedText;
        if (docsDb.Group != null)
        {
            doc.GroupId = docsDb.Group.Id;
            doc.GroupName = docsDb.Group.GroupName;
        }
        doc.UploadedHuman = RDN.Portable.Util.DateTimes.DateTimeExt.RelativeDateTime(docsDb.Created);
        doc.Created = docsDb.Created;
        if (docsDb.Category != null)
        {
            doc.Folder = new Document.Folder();
            doc.Folder.FolderId = docsDb.Category.CategoryId;
            doc.Folder.FolderName = docsDb.Category.CategoryName;
            if (docsDb.Category.Group != null)
                doc.Folder.GroupId = docsDb.Category.Group.Id;
            if (docsDb.Category.ParentFolder != null)
                doc.Folder.ParentFolderId = docsDb.Category.ParentFolder.CategoryId;
        }

        var comments = docsDb.Comments.Where(x => x.IsRemoved == false).OrderByDescending(x => x.Created);
        doc.CommentCount = comments.Count();
        if (pullComments)
        {
            try
            {
                foreach (var co in comments)
                {
                    var comment = new Document.CommentForDocument()
                    {
                        CreatedHuman = RDN.Portable.Util.DateTimes.DateTimeExt.RelativeDateTime(co.Created),
                        Created = co.Created,
                        Comment = co.Comment,
                        CommentId = co.CommentId,
                    };
                    if (co.Commentor != null)
                    {
                        comment.Commentor = new MemberDisplay()
                        {
                            MemberId = co.Commentor.MemberId,
                            DerbyName = co.Commentor.DerbyName,
                            PlayerNumber = co.Commentor.PlayerNumber
                        };
                    }
                    doc.Comments.Add(comment);
                }
            }
            catch (Exception exception)
            {
                ErrorDatabaseManager.AddException(exception, exception.GetType());
            }
        }
        return doc;
    }
    catch (Exception exception)
    {
        ErrorDatabaseManager.AddException(exception, exception.GetType());
    }
    return null;
}