static async Task WalkDirectoryTree(DirectoryInfo root, CdmCorpusDefinition corpus, CdmFolderDefinition folder, IList<string> ignoreFolders, string endMatch)
{
    FileInfo[] files = null;
    DirectoryInfo[] subDirs = null;

    // First, process all the files directly under this folder.
    try
    {
        files = root.GetFiles("*.*", SearchOption.TopDirectoryOnly);
    }
    catch (UnauthorizedAccessException e)
    {
        Console.WriteLine(e.Message);
    }
    catch (DirectoryNotFoundException e)
    {
        Console.WriteLine(e.Message);
    }

    if (files != null)
    {
        foreach (FileInfo fi in files)
        {
            // Skip files without an extension; IndexOf returns -1 for them and Substring would throw.
            int dotIndex = fi.Name.IndexOf(".");
            if (dotIndex < 0)
            {
                continue;
            }

            string postfix = fi.Name.Substring(dotIndex);
            if (postfix == endMatch)
            {
                using (var reader = File.OpenText(fi.FullName))
                {
                    string content = await reader.ReadToEndAsync().ConfigureAwait(false);
                    string name = folder.FolderPath + fi.Name;
                    CdmDocumentDefinition doc = DocumentPersistence.FromData(corpus.Ctx, name, folder.Namespace, folder.FolderPath, JsonConvert.DeserializeObject<DocumentContent>(content));
                    corpus.Documents.Add(doc);
                    Console.WriteLine($"Loading {fi.FullName}");
                }
            }
        }

        // Now find all the subdirectories under this directory.
        subDirs = root.GetDirectories();
        foreach (DirectoryInfo dirInfo in subDirs)
        {
            // Recursive call for each subdirectory.
            await LoadCorpusFolder(corpus, folder.ChildFolders.Add(dirInfo.Name), ignoreFolders, endMatch).ConfigureAwait(false);
        }
    }
}
static async Task LoadDocument(CdmCorpusDefinition corpus, CdmFolderDefinition folder, FileInfo fi, string endMatch)
{
    // Skip files without an extension; IndexOf returns -1 for them and Substring would throw.
    int dotIndex = fi.Name.IndexOf(".");
    if (dotIndex < 0)
    {
        return;
    }

    string postfix = fi.Name.Substring(dotIndex);
    if (postfix == endMatch)
    {
        using (var reader = File.OpenText(fi.FullName))
        {
            string content = await reader.ReadToEndAsync().ConfigureAwait(false);
            CdmDocumentDefinition doc = DocumentPersistence.FromData(corpus.Ctx, fi.Name, folder.Namespace, folder.FolderPath, JsonConvert.DeserializeObject<DocumentContent>(content));
            folder.Documents.Add(doc);
            Console.WriteLine($"Loading {fi.FullName}");
        }
    }
}
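// A minimal usage sketch, not part of the original source: one way the directory walk above
// might be kicked off for a corpus rooted on the local file system. The "local" namespace,
// the LocalAdapter mount, the root path parameter, and the ".cdm.json" extension filter are
// assumptions for illustration only.
static async Task LoadLocalCorpus(string rootPath)
{
    var corpus = new CdmCorpusDefinition();
    corpus.Storage.Mount("local", new LocalAdapter(rootPath)); // assumed local storage mount
    CdmFolderDefinition rootFolder = corpus.Storage.FetchRootFolder("local");

    // Walk every file under the root, loading documents whose names end with ".cdm.json".
    await WalkDirectoryTree(new DirectoryInfo(rootPath), corpus, rootFolder, new List<string>(), ".cdm.json");
}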
public static async Task<CdmDocumentDefinition> LoadDocumentFromPathAsync(CdmFolderDefinition folder, string docName)
{
    // This makes sure date values are consistently parsed exactly as they appear.
    // Default behavior auto formats date values.
    JsonConvert.DefaultSettings = () => new JsonSerializerSettings
    {
        DateParseHandling = DateParseHandling.None
    };

    CdmDocumentDefinition docContent = null;
    string jsonData = null;
    DateTimeOffset? fsModifiedTime = null;
    CdmCorpusContext ctx = folder.Ctx;
    string docPath = folder.FolderPath + docName;
    StorageAdapter adapter = ctx.Corpus.Storage.FetchAdapter(folder.Namespace);

    try
    {
        if (adapter.CanRead())
        {
            jsonData = await adapter.ReadAsync(docPath);
            fsModifiedTime = await adapter.ComputeLastModifiedTimeAsync(docPath);
            Logger.Info(nameof(CdmFolderDefinition), ctx, $"read file: {docPath}", "LoadDocumentFromPathAsync");
        }
    }
    catch (Exception e)
    {
        Logger.Error(nameof(CdmFolderDefinition), (ResolveContext)ctx, $"Could not read '{docPath}' from the '{ctx.Corpus.Namespace}' namespace. Reason '{e.Message}'", "LoadDocumentFromPathAsync");
        return null;
    }

    try
    {
        // Check the file extension with a case-insensitive ordinal string comparison.
        if (docPath.EndsWith(CdmCorpusDefinition.FetchManifestExtension(), StringComparison.OrdinalIgnoreCase)
            || docPath.EndsWith(CdmCorpusDefinition.FetchFolioExtension(), StringComparison.OrdinalIgnoreCase))
        {
            docContent = ManifestPersistence.FromData(ctx, docName, folder.Namespace, folder.FolderPath, JsonConvert.DeserializeObject<ManifestContent>(jsonData)) as CdmDocumentDefinition;
        }
        else if (docPath.EndsWith(CdmCorpusDefinition.FetchModelJsonExtension(), StringComparison.OrdinalIgnoreCase))
        {
            docContent = await ModelJson.ManifestPersistence.FromData(ctx, JsonConvert.DeserializeObject<Model>(jsonData), folder);
        }
        else
        {
            docContent = DocumentPersistence.FromData(ctx, docName, folder.Namespace, folder.FolderPath, JsonConvert.DeserializeObject<DocumentContent>(jsonData));
        }
    }
    catch (Exception e)
    {
        Logger.Error(nameof(CdmFolderDefinition), (ResolveContext)ctx, $"Could not convert '{docPath}'. Reason '{e.Message}'", "LoadDocumentFromPathAsync");
        return null;
    }

    // Add the document to the folder; this sets all the folder/path state, caches the
    // name-to-content association, and may trigger indexing of the content.
    if (docContent != null)
    {
        folder.Documents.Add(docContent, docName);
        docContent._fileSystemModifiedTime = fsModifiedTime;
        docContent.IsDirty = false;
    }

    return docContent;
}
public static async Task<CdmDocumentDefinition> LoadDocumentFromPathAsync(CdmFolderDefinition folder, string docName, CdmDocumentDefinition docContainer)
{
    // This makes sure date values are consistently parsed exactly as they appear.
    // Default behavior auto formats date values.
    JsonConvert.DefaultSettings = () => new JsonSerializerSettings
    {
        DateParseHandling = DateParseHandling.None
    };

    CdmDocumentDefinition docContent = null;
    string jsonData = null;
    DateTimeOffset? fsModifiedTime = null;
    CdmCorpusContext ctx = folder.Ctx;
    string docPath = folder.FolderPath + docName;
    StorageAdapter adapter = ctx.Corpus.Storage.FetchAdapter(folder.Namespace);

    try
    {
        if (adapter.CanRead())
        {
            jsonData = await adapter.ReadAsync(docPath);
            fsModifiedTime = await adapter.ComputeLastModifiedTimeAsync(docPath);
            Logger.Info(nameof(CdmFolderDefinition), ctx, $"read file: {docPath}", "LoadDocumentFromPathAsync");
        }
    }
    catch (Exception e)
    {
        Logger.Error(nameof(CdmFolderDefinition), (ResolveContext)ctx, $"Could not read '{docPath}' from the '{folder.Namespace}' namespace. Reason '{e.Message}'", "LoadDocumentFromPathAsync");
        return null;
    }

    try
    {
        // Check the file extension with a case-insensitive ordinal string comparison.
        if (docPath.EndWithOrdinalIgnoreCase(CdmCorpusDefinition.FetchManifestExtension())
            || docPath.EndWithOrdinalIgnoreCase(CdmCorpusDefinition.FetchFolioExtension()))
        {
            docContent = ManifestPersistence.FromData(ctx, docName, folder.Namespace, folder.FolderPath, JsonConvert.DeserializeObject<ManifestContent>(jsonData)) as CdmDocumentDefinition;
        }
        else if (docPath.EndWithOrdinalIgnoreCase(CdmCorpusDefinition.FetchModelJsonExtension()))
        {
            if (!docName.EqualsWithOrdinalIgnoreCase(CdmCorpusDefinition.FetchModelJsonExtension()))
            {
                Logger.Error(nameof(PersistenceLayer), (ResolveContext)ctx, $"Failed to load '{docName}', as it's not an acceptable file name. It must be model.json.", "LoadDocumentFromPathAsync");
                return null;
            }

            docContent = await ModelJson.ManifestPersistence.FromData(ctx, JsonConvert.DeserializeObject<Model>(jsonData), folder);
        }
        else
        {
            docContent = DocumentPersistence.FromData(ctx, docName, folder.Namespace, folder.FolderPath, JsonConvert.DeserializeObject<DocumentContent>(jsonData));
        }
    }
    catch (Exception e)
    {
        Logger.Error(nameof(CdmFolderDefinition), (ResolveContext)ctx, $"Could not convert '{docPath}'. Reason '{e.Message}'", "LoadDocumentFromPathAsync");
        return null;
    }

    // Add the document to the folder; this sets all the folder/path state, caches the
    // name-to-content association, and may trigger indexing of the content.
    if (docContent != null)
    {
        if (docContainer != null)
        {
            // There are situations where a previously loaded document must be re-loaded.
            // The end of that chain of work is here: the old version of the document has been removed
            // from the corpus, a new document has been created and loaded from storage, and after this
            // call it will probably be added to the corpus, indexed, etc.
            // It would be rude to just kill that old object and replace it with this replicant,
            // especially because the caller has no idea this happened. So instead of returning the new
            // object, return the one that was just killed off, but make it contain everything the new
            // document loaded.
            docContent = docContent.Copy(new ResolveOptions(docContainer), docContainer) as CdmDocumentDefinition;
        }

        folder.Documents.Add(docContent, docName);
        docContent._fileSystemModifiedTime = fsModifiedTime;
        docContent.IsDirty = false;
    }

    return docContent;
}
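// A minimal usage sketch, not part of the original source: loading a single document through the
// path-based loader above. The "customAccount.cdm.json" document name is an assumption for
// illustration only, and passing null for docContainer assumes no previously loaded document is
// being replaced.
static async Task<CdmDocumentDefinition> LoadSingleDocument(CdmFolderDefinition folder)
{
    CdmDocumentDefinition doc = await LoadDocumentFromPathAsync(folder, "customAccount.cdm.json", docContainer: null);
    if (doc == null)
    {
        // The read or conversion failed; the loader has already logged the reason.
        Console.WriteLine("Document could not be loaded.");
    }

    return doc;
}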