public async Task LoadBulk_NoStoredProcedure_ThrowsException()
{
    // Arrange
    var mockClient = new Mock<IDocumentClient>();
    mockClient.Setup(c => c.ExecuteStoredProcedureAsync<bool>(
            It.IsAny<string>(),
            It.IsAny<RequestOptions>(),
            It.IsAny<Object>()))
        .ThrowsAsync(new ArgumentNullException());

    DocumentLoader sut = new DocumentLoader(
        mockClient.Object,
        "", "");

    List<IAmDocument> docs = new List<IAmDocument>()
    {
        new DocumentBase() { PartitionKey = "a" },
        new DocumentBase() { PartitionKey = "a" }
    };

    // Act
    var exception = await Record.ExceptionAsync(() => sut.LoadBulk(docs));

    // Assert
    Assert.Contains("Error initializing tasks for bulk import on groupedDoc a with 2 measurements.", exception.Message);
}
public async Task LoadBulk_ExceptionWithTwoFailedGroups_MessageIndicatesTwoExceptions()
{
    // Arrange
    var mockClient = new Mock<IDocumentClient>();
    mockClient.Setup(c => c.ExecuteStoredProcedureAsync<bool>(
            It.IsAny<string>(),
            It.IsAny<RequestOptions>(),
            It.IsAny<Object>()))
        .ThrowsAsync(new ArgumentNullException());

    DocumentLoader sut = new DocumentLoader(
        mockClient.Object,
        "", "");

    List<IAmDocument> docs = new List<IAmDocument>()
    {
        new DocumentBase() { PartitionKey = "b" },
        new DocumentBase() { PartitionKey = "bb" }
    };

    // Act
    var exception = await Record.ExceptionAsync(() => sut.LoadBulk(docs));

    // Assert
    //Assert.Contains("2 stored procedures failed out of a total of 2 sprocs for 2 measurements.", exception.Message);
    Assert.Contains("Error initializing tasks for bulk import on groupedDoc b with 1 measurements.", exception.Message);
}
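// A hedged sketch of the grouping behavior the two tests above exercise:
// the asserted error messages suggest LoadBulk groups documents by
// PartitionKey and runs one bulkImport stored-procedure call per group, so
// two documents sharing key "a" form one group of 2, while keys "b" and "bb"
// form two groups of 1. This is inferred from the asserted messages, not
// taken from the actual DocumentLoader source.
using System;
using System.Collections.Generic;
using System.Linq;

class PartitionGroupingSketch
{
    static void Main()
    {
        var keys = new List<string> { "b", "bb" };
        foreach (var group in keys.GroupBy(k => k))
        {
            // Two distinct keys -> two groups of 1, matching the second test.
            Console.WriteLine($"groupedDoc {group.Key} with {group.Count()} measurements");
        }
    }
}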
public override void Given()
{
    _fileStream = null;
    FileName = null;

    BlobStorageService.DownloadFileAsync(Arg.Any<BlobStorageData>()).Returns(_fileStream);
    Loader = new DocumentLoader(Logger, BlobStorageService);
}
public async Task<DocumentLoader> SaveDocumentLoader(FileRtf fileRtf)
{
    var documentLoader = new DocumentLoader();
    documentLoader.Name = fileRtf.Caption;
    documentLoader.Category = fileRtf.Category;
    documentLoader.FileSize = fileRtf.FileSize;
    documentLoader.VidDoc = fileRtf.VidDoc;
    documentLoader.ProfName = fileRtf.ProfName;
    await _wordDbContext.AddAsync(documentLoader);

    var query = fileRtf.Items.Where(f => f.ParentId == null);
    foreach (var item in query)
    {
        var documentItem = new DocumentItem();
        documentItem.DocumentLoader = documentLoader;
        documentItem.Number = item.Number;
        documentItem.IsRootItem = item.IsRoot;
        documentItem.TextContent = item.TextContent;
        await _wordDbContext.AddAsync(documentItem);
        await CreateModel(fileRtf.Items, item, documentItem, documentItem, documentLoader);
    }

    await _wordDbContext.SaveChangesAsync();

    var queryA = await _wordDbContext.DocumentItem
        .Where(f => f.DocumentLoaderId == documentLoader.Id)
        .ToListAsync();
    foreach (var item in queryA.Where(f => f.ParentId == null))
    {
        await UpdateParagraph(queryA, item, item);
    }

    return documentLoader;
}
public bool DoOpenDocumentWorkflow()
{
    var dialog = new OpenFileDialog
    {
        DefaultExt = ".pose",
        Filter = "Pose Document (*.pose)|*.pose|All files (*.*)|*.*",
        FilterIndex = 0
    };
    if (dialog.ShowDialog(System.Windows.Application.Current.MainWindow) != true)
    {
        return false;
    }
    if (!DoCloseDocumentWorkflow())
    {
        return false;
    }

    var document = DocumentLoader.LoadFromFile(MessageBus.Default, dialog.FileName);
    _editor.LoadDocument(document);
    ValidateAssetFolder();
    return true;
}
public async Task<bool> Parse(string text, DocumentLoader documentLoader, DocumentItem documentItem)
{
    if (text.Length < 40)
    {
        return true;
    }

    var mass = text.Trim('.').Split('.');
    foreach (var sentence in mass)
    {
        // Skip fragments that are just a single space.
        if (sentence == " ")
        {
            continue;
        }

        var models = await _loaderInfoAotRu.LoaderAotModel(sentence);
        await _iAiSentence.SaveDescription(models);
        foreach (var model in models)
        {
            var aiSentence = await _iAiSentence.Create(model);
            aiSentence.DocumentLoader = documentLoader;
            aiSentence.DocumentItem = documentItem;
        }
    }

    await _wordDbContext.SaveChangesAsync();
    return true;
}
public override void Given()
{
    _folderName = BlobStorageConstants.ResultsFolderName;
    _fileName = DocumentResource.TlevelDataFormatAndRulesGuide.Results_Data_Format_And_Rules_Guide_File_Name_Text;
    DocumentLoader.GetTechSpecFileAsync(_folderName, _fileName)
        .Returns(new MemoryStream(Encoding.ASCII.GetBytes("Test File for assessment entries tech spec")));
}
public async Task ShouldLoadRemoteDocument()
{
    var docLoader = new DocumentLoader();
    var remoteDoc = await docLoader.LoadDocumentAsync("https://dpzr6corrg.execute-api.us-east-1.amazonaws.com/dev/context/default");

    remoteDoc.DocumentUrl.Should().NotBeNullOrWhiteSpace();
    remoteDoc.Document.Should().NotBeNull();
    remoteDoc.ContextUrl.Should().NotBeNullOrWhiteSpace();
    remoteDoc.Context.Should().NotBeNull();
}
protected void InsertXML(object sender, WriterCommandEventArgs args)
{
    if (args.Mode == WriterCommandEventMode.QueryState)
    {
        args.Enabled = args.DocumentControler != null
            && args.Document != null
            && args.DocumentControler.CanInsertElementAtCurrentPosition(typeof(DomElement));
    }
    else if (args.Mode == WriterCommandEventMode.Invoke)
    {
        args.Result = false;
        DomDocument document = null;
        if (args.Parameter is string)
        {
            System.IO.StringReader reader = new System.IO.StringReader((string)args.Parameter);
            document = DocumentLoader.LoadXmlFileWithCreateDocument(reader, args.Document);
            reader.Close();
        }
        else if (args.Parameter is System.IO.Stream)
        {
            document = DocumentLoader.LoadXmlFileWithCreateDocument((System.IO.Stream)args.Parameter, args.Document);
        }
        else if (args.Parameter is System.IO.TextReader)
        {
            document = DocumentLoader.LoadXmlFileWithCreateDocument((System.IO.TextReader)args.Parameter, args.Document);
        }
        else if (args.Parameter is System.Xml.XmlReader)
        {
            document = DocumentLoader.LoadXmlFileWithCreateDocument((System.Xml.XmlReader)args.Parameter, args.Document);
        }

        if (document != null && document.Body != null && document.Body.Elements.Count > 0)
        {
            DomElementList list = document.Body.Elements;
            args.Document.ImportElements(list);
            args.DocumentControler.InsertElements(list);
            args.Result = list;
        }
    }
}
public LoggerNetToa5ToCosmosDBSqlApiMeasurement(
    EtlEvent etlEvent,
    string contents,
    DocumentClient client)
{
    this.etlEvent = etlEvent;
    this.contents = contents;
    this.client = client;

    // The original created the loader as a local variable that immediately
    // went out of scope; assigning it to a field (assumed to exist on this
    // class) makes it usable by the rest of the pipeline.
    this.loader = new DocumentLoader(
        client,
        "cafdb",
        "items");
}
public async Task<bool> RunAotParser(DocumentLoader documentLoader)
{
    var items = _wordDbContext.DocumentItem.Where(f => f.DocumentLoader == documentLoader);
    foreach (var item in items)
    {
        if (item.IsRootItem == true)
        {
            continue;
        }
        await _aiParser.Parse(item.TextContent, documentLoader, item);
    }
    return true;
}
private void OpenDocumentFromFile()
{
    var dialog = new OpenFileDialog
    {
        DefaultExt = ".ged",
        Filter = "GlyphEdit Document (*.ged)|*.ged|All files (*.*)|*.*",
        FilterIndex = 0
    };
    if (dialog.ShowDialog(App.Current.MainWindow) == true)
    {
        var document = DocumentLoader.Load(dialog.FileName);
        OpenDocument(document, dialog.FileName);
    }
}
private void OpenDocument(ViewerState state)
{
    // Follow the link.
    var file = string.IsNullOrEmpty(Aux) ? Tag : Aux;
    if (!File.Exists(file))
    {
        return;
    }

    using (var fs = File.Open(file, FileMode.Open))
    {
        var document = DocumentLoader.Load(fs, file);
        state.LoadDocument(document, true);
    }
}
public async Task LoadNoReplace_RecordExists_ReturnsOldRecord()
{
    // ARRANGE
    var e = CosmosDBSqlApiArranger.GetEtlEventMock("EtlEvent_2018-05-22T01:00:00.000000Z");
    var datetime = DateTime.UtcNow;
    DocumentLoader sut = new DocumentLoader(
        client,
        "cafdb",
        "items");

    // ACT
    ResourceResponse<Document> result = await sut.LoadNoReplace(e);

    // ASSERT
    Assert.True(result.StatusCode == HttpStatusCode.OK);
    Assert.True(result.Resource.Timestamp < datetime);
}
protected void InsertHtml(object sender, WriterCommandEventArgs args)
{
    if (args.Mode == WriterCommandEventMode.QueryState)
    {
        args.Enabled = args.DocumentControler != null
            && args.Document != null
            && args.DocumentControler.CanInsertElementAtCurrentPosition(typeof(DomElement));
    }
    else if (args.Mode == WriterCommandEventMode.Invoke)
    {
        args.Result = false;
        DomDocument document = null;
        if (args.Parameter is string)
        {
            System.IO.StringReader reader = new System.IO.StringReader((string)args.Parameter);
            document = (DomDocument)System.Activator.CreateInstance(args.Document.GetType());
            DocumentLoader.LoadHtmlFile(reader, document, null);
            reader.Close();
        }
        else if (args.Parameter is System.IO.Stream)
        {
            document = (DomDocument)Activator.CreateInstance(args.Document.GetType());
            document.Load((System.IO.Stream)args.Parameter, FileFormat.Html);
        }
        else if (args.Parameter is System.IO.TextReader)
        {
            document = (DomDocument)System.Activator.CreateInstance(args.Document.GetType());
            DocumentLoader.LoadHtmlFile((System.IO.TextReader)args.Parameter, document, null);
        }

        if (document != null && document.Body != null && document.Body.Elements.Count > 0)
        {
            DomElementList list = document.Body.Elements;
            args.Document.ImportElements(list);
            args.DocumentControler.InsertElements(list);
            args.Result = list;
        }
    }
}
public async Task LoadReplace_RecordExists_ReturnsNewRecord()
{
    // ARRANGE
    var datetime = DateTime.UtcNow;
    DocumentClient client = new DocumentClient(
        new Uri("https://localhost:8081"),
        "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==");
    var e = CosmosDBSqlApiArranger.GetEtlEventMock("EtlEvent_2018-06-22T01:00:00.000000Z");
    DocumentLoader sut = new DocumentLoader(
        client,
        "cafdb",
        "items");

    // ACT
    ResourceResponse<Document> result = await sut.LoadReplace(e);

    // ASSERT
    Assert.True(result.StatusCode == HttpStatusCode.OK);
    Assert.True(result.Resource.Timestamp > datetime);
}
public async Task LoadNoReplace_RecordDoesNotExist_ReturnsNewRecord()
{
    // ARRANGE
    var datetime = DateTime.UtcNow;
    var e = CosmosDBSqlApiArranger.GetEtlEventMock($"EtlEvent_{DateTime.UtcNow.ToString("o")}");
    DocumentLoader sut = new DocumentLoader(
        client,
        "cafdb",
        "items");

    // ACT
    ResourceResponse<Document> result = await sut.LoadNoReplace(e);

    // ASSERT
    Assert.True(result.StatusCode == HttpStatusCode.Created);
    Assert.InRange<DateTime>(
        result.Resource.Timestamp,
        datetime.Add(new TimeSpan(0, -1, 0)),
        datetime.Add(new TimeSpan(0, 1, 0)));
}
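// A minimal sketch of where the shared "client" used by the LoadNoReplace /
// LoadReplace integration tests above might come from: the local Cosmos DB
// emulator, using the same endpoint and well-known emulator key that
// LoadReplace_RecordExists_ReturnsNewRecord constructs inline. The class and
// field names here are assumptions; the actual test setup is not shown in
// the source.
using System;
using Microsoft.Azure.Documents.Client;

public class DocumentLoaderTestsBase
{
    protected static readonly DocumentClient client = new DocumentClient(
        new Uri("https://localhost:8081"),
        "C2y6yDjf5/R+ob0N8A7Cgv30VRDJIWEHLM+4QDU5DE2nQ9nDuVTqobD4b8mGGyPMbIZnqyMsEcaGQy67XIw/Jw==");
}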
public async Task SoilGridPointSurveyV1ToCosmos_ActualData_CreatesExpectedRecords()
{
    // Arrange
    var extractor = new TidyDataCsvExtractor(
        pathToFileWithValidSoilGridPointSurveyV1Data,
        pathToFileWithValidSoilGridPointSurveyV1Dictionary);

    EtlEvent etlEvent = new EtlEvent(
        "EtlEvent",
        "LocalProcess",
        "http://files.cafltar.org/data/schema/documentDb/v2/etlEvent.json",
        "CookEastSoilGridPointSurvey",
        "0.1",
        "",
        DateTime.UtcNow);

    var transformer = new CosmosDBSqlApiSampleV2Transformer<SoilGridPointSurveyV1, SoilSample>(
        new MapFromSoilGridPointSurveyToSoilSample(),
        "http://files.cafltar.org/data/schema/documentDb/v2/sample.json",
        etlEvent.Id,
        "CookEastSoilGridPointSurvey",
        "CookEast",
        "SoilSample");

    var loader = new DocumentLoader(
        client,
        "cafdb",
        "items");

    // Act
    TidyData extracted = extractor.Extract<SoilGridPointSurveyV1>();
    List<SoilSample> transformed = transformer.Transform(extracted);
    StoredProcedureResponse<bool>[] results = await loader.LoadBulk(transformed);

    Assert.Equal(30, transformed.Count);
    Assert.NotEmpty(results);
}
public BoxEntry GetDocuments()
{
    if (_rootEntry == null)
    {
        // Download the XML listing.
        var treeResponse = GetClient().DownloadData(string.Format(TreeUrl, ApiKey, AuthKey));
        var response = DocumentLoader.GetDoc(treeResponse).Element("response");

        // Check status.
        if (response.Element("status").Value != "listing_ok")
        {
            throw new InvalidOperationException("bad listing status " + response.Element("status").Value);
        }

        var rootFolders = response.Element("tree").Elements("folder");

        // Create the root entry.
        _rootEntry = new BoxEntry();
        foreach (var rootFolder in rootFolders)
        {
            RetrieveTree(_rootEntry, rootFolder);
        }
    }
    return _rootEntry;
}
/// <summary>
/// Loads a dictionary and locks the file
/// </summary>
/// <param name="filePath">The path of the file which holds the dictionary data</param>
/// <param name="efsSystem">The system for which this dictionary is loaded</param>
/// <returns></returns>
public static Dictionary load(String filePath, EFSSystem efsSystem)
{
    Dictionary retVal = DocumentLoader<Dictionary>.loadFile(filePath);
    if (retVal != null)
    {
        retVal.FilePath = filePath;
        efsSystem.AddDictionary(retVal);

        // Loads the dependencies for this .efs file
        LoadDepends loadDepends = new LoadDepends(retVal.BasePath);
        loadDepends.visit(retVal);

        // Updates the contents of this .efs file
        Updater updater = new Updater();
        updater.visit(retVal);

        if (retVal.Specifications != null)
        {
            retVal.Specifications.ManageTypeSpecs();
        }
    }
    return retVal;
}
public CustomLoader(DocumentLoader defaultLoader, ResourceScriptFactory resourceScriptFactory, IResourceManager resourceManager)
{
    _defaultLoader = defaultLoader;
    _resourceManager = resourceManager;
    _resourceScriptFactory = resourceScriptFactory;
}
/// <summary>
/// Loads a translation dictionary and locks the file
/// </summary>
/// <param name="filePath"></param>
/// <param name="dictionary"></param>
/// <returns></returns>
public static TranslationDictionary loadTranslationDictionary(string filePath, DataDictionary.Dictionary dictionary)
{
    TranslationDictionary retVal = DocumentLoader<TranslationDictionary>.loadFile(filePath, dictionary);
    return retVal;
}
/// <summary>
/// Loads a specification and locks the file
/// </summary>
/// <param name="filePath">The name of the file which holds the specification data</param>
/// <param name="dictionary">The dictionary for which the specification is loaded</param>
/// <returns></returns>
public static Specification.Specification loadSpecification(String filePath, Dictionary dictionary)
{
    Specification.Specification retVal = DocumentLoader<Specification.Specification>.loadFile(filePath, dictionary);
    return retVal;
}
public static async Task Run(
    [BlobTrigger("ectower-cookeast/raw/Flux/{name}", Connection = "ltarcafdatastreamConnectionString")] Stream myBlob,
    string name,
    TraceWriter log,
    ExecutionContext context)
{
    log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    //var config = new ConfigurationBuilder()
    //    .SetBasePath(context.FunctionAppDirectory)
    //    .AddJsonFile("local.settings.json", optional: true, reloadOnChange: true)
    //    .AddEnvironmentVariables()
    //    .Build();

    EtlEvent etlEvent = new EtlEvent(
        "EtlEvent",
        "AzureFunction",
        "http://files.cafltar.org/data/schema/documentDb/v2/etlEvent.json",
        "CafMeteorologyEcTower",
        "1.1",
        "LoggerNetFluxToCosmosDBSqlApiMeasurementCookEast",
        DateTime.UtcNow);

    etlEvent.Outputs = null;
    etlEvent.Inputs.Add($"ectower-cookeast/raw/Flux/{name}");
    etlEvent.Logs.Add($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes");

    StreamReader reader = new StreamReader(myBlob);
    string contents = "";

    log.Info("About to read contents");
    try
    {
        contents = reader.ReadToEnd();
    }
    catch (Exception e)
    {
        etlEvent.Logs.Add($"Error reading Blob: {e.Message}");
    }

    //DocumentClient client = new DocumentClient(
    //    new Uri(config["Values:AzureCosmosDBUri"]),
    //    config["Values:AzureCosmosDBKey"]);

    DocumentClient client;
    try
    {
        client = new DocumentClient(
            new Uri(ConfigurationManager.AppSettings["AzureCosmosDBUri"]),
            ConfigurationManager.AppSettings["AzureCosmosDBKey"]);
    }
    catch (Exception e)
    {
        etlEvent.Logs.Add($"Error creating DocumentClient: {e.Message}");
        log.Error($"Error creating DocumentClient: {e.Message}");
        throw new Exception("Error creating DocumentClient", e);
    }

    DocumentLoader loader = new DocumentLoader(
        client,
        "cafdb",
        "items");

    log.Info("Created client and loader");

    if (!String.IsNullOrEmpty(contents))
    {
        try
        {
            log.Info("Attempting extract and transform");
            TOA5Extractor extractor = new TOA5Extractor(
                name,
                contents,
                -8);

            TOA5 fluxTable = extractor.GetTOA5<Flux>();

            // TODO: Move strings and such to settings file
            DocumentDbMeasurementV2Transformer transformer = new DocumentDbMeasurementV2Transformer(
                new MapFromFluxDataTableToCafStandards(),
                "http://files.cafltar.org/data/schema/documentDb/v2/measurement.json",
                etlEvent.Id,
                "Measurement",
                "CafMeteorologyEcTower",
                1800);

            List<MeasurementV2> measurements = transformer.ToMeasurements(fluxTable);

            log.Info("Attempting load");
            // Using the bulkImport sproc doesn't provide much benefit since
            // most data tables will only have a few measurements with the
            // same partition key. But it's better than nothing.
            StoredProcedureResponse<bool>[] results = await loader.LoadBulk(measurements);
            log.Info($"Loaded {results.Length.ToString()} measurements");
        }
        catch (Exception e)
        {
            etlEvent.Logs.Add($"Error in ETL pipeline: {e.Message}");
            log.Error($"Error in ETL pipeline: {e.Message}");
            throw new Exception("Error in ETL pipeline", e);
        }
        finally
        {
            log.Info("Loading etlEvent to db");
            etlEvent.DateTimeEnd = DateTime.UtcNow;
            ResourceResponse<Document> result = await loader.LoadNoReplace(etlEvent);
            log.Info($"Result of writing EtlEvent: {result.StatusCode.ToString()}");
        }
        log.Info("Function completed");
    }
}
private async Task CreateModel(List<ItemDto> items, ItemDto parentId, DocumentItem parent, DocumentItem paragraph, DocumentLoader loader)
{
    var query = items.Where(f => f.ParentId == parentId);
    foreach (var item in query)
    {
        var documentItem = new DocumentItem();
        documentItem.DocumentLoader = loader;
        documentItem.Number = item.Number;
        documentItem.IsRootItem = item.IsRoot;
        documentItem.TextContent = item.TextContent;
        documentItem.Parent = parent;
        documentItem.Paragraph = paragraph;
        await _wordDbContext.AddAsync(documentItem);
        await CreateModel(items, item, documentItem, paragraph, loader);
    }
}
public void Then_GetBulkUploadAssessmentEntriesTechSpecFileAsync_Method_Is_Called()
{
    DocumentLoader.Received(1).GetBulkUploadAssessmentEntriesTechSpecFileAsync(_fileName);
}
public override void Given()
{
    _fileName = DocumentResource.TlevelDataFormatAndRulesGuide.Assessment_Entry_Data_Format_And_Rules_Guide_File_Name_Text;
    DocumentLoader.GetBulkUploadAssessmentEntriesTechSpecFileAsync(_fileName)
        .Returns(new MemoryStream(Encoding.ASCII.GetBytes("Test File for assessment entries tech spec")));
}
/// <summary>
/// Loads a namespace and locks the file
/// </summary>
/// <param name="filePath"></param>
/// <param name="enclosing"></param>
/// <returns></returns>
public static NameSpace loadNameSpace(string filePath, ModelElement enclosing)
{
    NameSpace retVal = DocumentLoader<NameSpace>.loadFile(filePath, enclosing);
    return retVal;
}
/// <summary>
/// Loads a frame and locks the file
/// </summary>
/// <param name="filePath"></param>
/// <param name="enclosing"></param>
/// <returns></returns>
public static Frame loadFrame(string filePath, ModelElement enclosing)
{
    Frame retVal = DocumentLoader<Frame>.loadFile(filePath, enclosing);
    return retVal;
}
public void Test()
{
    // Dispose the file stream when done; the original opened it without a using block.
    using (var stream = File.Open("D:\\sampleSource.xml", FileMode.Open))
    {
        DocumentLoader loader = new DocumentLoader(stream);

        // Now names contains the value of the name element.
        List<KeyValuePair<string, string>> names = loader.GetNames();
    }
}
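// A hedged sketch of how a loader like the one in the test above could be
// implemented with System.Xml.Linq. The class name and the element names
// ("item", "id", "name") are assumptions for illustration; the real
// DocumentLoader's schema and implementation are not shown in the source.
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml.Linq;

public class XmlNameLoader
{
    private readonly XDocument _document;

    public XmlNameLoader(Stream stream)
    {
        _document = XDocument.Load(stream);
    }

    public List<KeyValuePair<string, string>> GetNames()
    {
        // Collect each item's id/name pair from the loaded document;
        // a missing element casts to null rather than throwing.
        return _document
            .Descendants("item")
            .Select(item => new KeyValuePair<string, string>(
                (string)item.Element("id"),
                (string)item.Element("name")))
            .ToList();
    }
}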