public ICollector GetOrAdd(ICollector collector)
{
    var collectorToUse = _collectors.GetOrAdd(collector.Name, collector);

    if (!collector.LabelNames.SequenceEqual(collectorToUse.LabelNames))
        throw new InvalidOperationException("Collector with same name must have same label names");

    return collectorToUse;
}
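// A hedged sketch of the get-or-add semantics above (not from the
// original source). StubCollector and CollectorRegistry are
// hypothetical names; ICollector is assumed to expose only the
// members used here (Name, LabelNames).
class StubCollector : ICollector
{
    public string Name { get; }
    public string[] LabelNames { get; }

    public StubCollector(string name, params string[] labelNames)
    {
        Name = name;
        LabelNames = labelNames;
    }
}

static void GetOrAddDemo()
{
    var registry = new CollectorRegistry();
    var first = registry.GetOrAdd(new StubCollector("requests_total", "method"));
    var second = registry.GetOrAdd(new StubCollector("requests_total", "method"));
    // first and second are the same instance: the second call returned
    // the collector already registered under "requests_total".

    // Same name but different label names throws InvalidOperationException:
    // registry.GetOrAdd(new StubCollector("requests_total", "path"));
}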
public void SetUp_for_tests()
{
    _collector = new Collector(new DatabaseContext());
    _testItem = new Item() { ItemId = 1, Name = "TestItem" };
}
public JsonCodeWriter(CodeVariableReferenceExpression variable, ICollector collector)
{
    if (variable == null)
        throw new ArgumentNullException("variable");
    if (collector == null)
        throw new ArgumentNullException("collector");

    _variable = variable;
    _collector = collector;
}
public void SetupForTests()
{
    _testCollector = new Collector(new DatabaseContext());
    _testItem = new Item() { ItemId = 100, Name = "TestItem", IsBought = true };
    _testIngredient = new Ingredient() { ItemId = 101, Name = "TestIngredient", Amount = 10 };
    _testRecipe = new Recipe()
    {
        RecipeId = 100,
        RecipeName = "TestOpskrift",
        Ingredients = new List<Ingredient>() { _testIngredient, new Ingredient() },
        AddedTime = DateTime.Now
    };
    _testFoodplan = new Foodplan()
    {
        FoodPlanId = 100,
        CreatedDate = DateTime.Now,
        Recipies = new List<RecipesInFoodplan>()
        {
            new RecipesInFoodplan() { Recipe = _testRecipe, RecipeDate = DateTime.Now }
        },
        FoodplanStartTime = DateTime.Now,
        FoodplanEndTime = DateTime.MaxValue,
        LastModified = DateTime.Now
    };
    _testShoppinglist = new Shoppinglist() { ShoppingListId = 100, Items = new List<Item>() { _testItem } };
    _testUser = new User()
    {
        UserId = 100,
        UserName = "******",
        UserPassword = "******",
        UserFoodplan = _testFoodplan,
        UserShoppingList = _testShoppinglist
    };
}
public void SetupForTests()
{
    _testCollector = new Collector(new DatabaseContext());
    _testItem = new Item() { ItemId = 100, Name = "TestItem", IsBought = true };
    _testIngredient = new Ingredient() { ItemId = 101, Name = "TestIngredient", Amount = 10 };
    _testRecipe = new Recipe()
    {
        RecipeId = 100,
        RecipeName = "TestOpskrift",
        Ingredients = new List<Ingredient>() { _testIngredient, new Ingredient() }
    };
    _testFoodplan = new Foodplan()
    {
        FoodPlanId = 100,
        CreatedDate = DateTime.Now,
        RecipeList = new List<Recipe>() { _testRecipe }
    };
    _testShoppinglist = new Shoppinglist() { ShoppingListId = 100, ShoppingItems = new List<Item>() { _testItem } };
    _testUser = new User()
    {
        UserId = 100,
        UserName = "******",
        UserPassword = "******",
        UserFoodplan = _testFoodplan,
        UserShoppinglist = _testShoppinglist
    };
}
public void SetUp_for_tests()
{
    // Create mock for unit of work
    mockData = Substitute.For<IDatabaseContext>();
    mockData.DbSetIngredients.Returns(inMemoryIngredients);
    _testCollector = new Collector(mockData);
}
public void GetReturnsProductWithSameId()
{
    _mockCollector = Substitute.For<ICollector>();
    _controller = new ItemsController(_mockCollector);

    _controller.GetItem(1);

    // Assert that the controller forwarded the call to the collector.
    // Received() must wrap the member being asserted; calling it on the
    // returned Item (as the original did) asserts nothing.
    _mockCollector.Items.Received().GetWithId(1);

    /*
    _mockCollector.Items.GetWithId(1).Returns(new Item() { IsBought = true, Name = "Abc", ItemId = 1 });
    Item returnItem = _controller.GetItem(1);
    Assert.That(returnItem.IsBought && returnItem.Name == "Abc" && returnItem.ItemId == 1);
    */
}
public ICollector Merge(ICollector coll) { if (!this.IsMergeable(coll)) throw new InvalidProgramException("Algorithmic error"); IImageProvider ic = coll as IImageProvider; if (ic != null) { List<string> Images = new List<string>(_ListImage); Images.AddRange(ic.Image); return new ImageCollector(Images); } return coll.Merge(this); }
public static async Task<IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Anonymous, "get", "post", Route = null)] HttpRequest req,
    [Queue("zwiftresultrequests", Connection = "zwiftresultsstorage_STORAGE")] ICollector<string> resultWorkerQueue,
    ILogger log)
{
    log.LogInformation($"Received Request on {nameof(SlackZwiftResultsTrigger)}");
    string requestBody = await new StreamReader(req.Body).ReadToEndAsync();
    log.LogInformation("Request Body:" + requestBody);

    NameValueCollection queryString;
    try
    {
        queryString = HttpUtility.ParseQueryString(requestBody);
    }
    catch (System.Exception ex)
    {
        log.LogError($"Error parsing slack request.\nRequest Body: {requestBody}\n Exception: {ex.Message}");
        return BuildResponse("Oops, something went wrong with the slack framework.");
    }

    var queryDictionary = queryString.AllKeys.Cast<string>()
        .Where(key => !string.IsNullOrWhiteSpace(key))
        .ToDictionary(key => key, key => queryString[key]);
    var jsonQueueMessage = JsonConvert.SerializeObject(queryDictionary);

    string input = queryDictionary["text"];
    var result = CommandParser.TryParse(
        input,
        (CommandNames.Event, _ => { }),
        (CommandNames.Team, _ => { }));
    if (!result)
    {
        return BuildResponse($"Sorry, I don't understand `{input}`. Take a look at the usage hints and try again.");
    }

    log.LogInformation("Adding request to the queue");
    try
    {
        resultWorkerQueue.Add(jsonQueueMessage);
    }
    catch (System.Exception ex)
    {
        log.LogError($"Error queuing work request.\nRequest text: {jsonQueueMessage}\n Exception: {ex.Message}");
        return BuildResponse("Oops, something went wrong with the slack framework.");
    }

    return BuildResponse("Retrieving Results...");

    IActionResult BuildResponse(string message)
    {
        return new JsonResult(new { response_type = "ephemeral", text = message });
    }
}
public static void Run(
    [ServiceBusTrigger("site-updates-topic", "apply-template-subscription", AccessRights.Manage, Connection = "ManageTopicConnection")] BrokeredMessage updateMsg,
    [ServiceBus("new-sites-topic", Connection = "ManageTopicConnection")] ICollector<BrokeredMessage> newSitesTopic,
    ExecutionContext executionContext,
    TraceWriter log)
{
    log.Info($"C# Service Bus trigger function '{FunctionName}' processed message: {updateMsg.MessageId} (Label: '{updateMsg.Label}')");

    /*
     * The following line should work, but doesn't, so small workaround here...
     */
    //var applyProvisioningTemplateJobAsJson = updateMsg.GetBody<ApplyProvisioningTemplateJob>();
    var stream = updateMsg.GetBody<Stream>();
    StreamReader streamReader = new StreamReader(stream);
    string applyProvisioningTemplateJobAsJson = streamReader.ReadToEnd();
    var applyProvisioningTemplateJob = JsonConvert.DeserializeObject<ApplyProvisioningTemplateJob>(applyProvisioningTemplateJobAsJson);

    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("AzureWebJobsStorage"));
    CloudBlobClient blobClient = storageAccount.CreateCloudBlobClient();
    CloudBlobContainer container = blobClient.GetContainerReference(CloudConfigurationManager.GetSetting("JobFilesContainer"));
    var blob = container.GetBlobReference(applyProvisioningTemplateJob.FileNameWithExtension);
    var blobStream = new MemoryStream();
    blob.DownloadToStream(blobStream);
    streamReader = new StreamReader(blobStream);
    blobStream.Position = 0;
    string blobContent = streamReader.ReadToEnd();

    JObject provisioningJobFile = JObject.Parse(blobContent);
    var provisioningTemplateUrl = provisioningJobFile["ProvisioningTemplateUrl"].Value<string>();
    var relativeUrl = provisioningJobFile["RelativeUrl"].Value<string>();

    // Get JSON result objects into a list.
    IList<JToken> parameters = provisioningJobFile["TemplateParameters"].Children().ToList();
    // Serialize JSON results into .NET objects.
    IDictionary<string, string> templateParameters = new Dictionary<string, string>();
    foreach (JProperty parameter in parameters)
    {
        templateParameters.Add(parameter.Name, parameter.Value.ToObject<string>());
    }

    var clientContextManager = new ClientContextManager(new BaseConfiguration(), new CertificateManager());
    var provisioningSiteUrl = CloudConfigurationManager.GetSetting("ProvisioningSite");
    using (var ctx = clientContextManager.GetAzureADAppOnlyAuthenticatedContext(provisioningSiteUrl))
    {
        // Todo: get list title from configuration.
        // Assume that the web has a list named "PnPProvisioningJobs".
        List provisioningJobsList = ctx.Web.Lists.GetByTitle("PnPProvisioningJobs");
        ListItem listItem = provisioningJobsList.GetItemById(applyProvisioningTemplateJob.ListItemID);

        // Write a new value to the PnPProvisioningJobStatus field of
        // the PnPProvisioningJobs item.
        listItem["PnPProvisioningJobStatus"] = "Running (applying template)";
        listItem.Update();
        ctx.ExecuteQuery();

        var templateContainer = blobClient.GetContainerReference(CloudConfigurationManager.GetSetting("TemplateFilesContainer"));
        var templateFileName = Path.GetFileName(provisioningTemplateUrl);
        var templateBlob = templateContainer.GetBlobReference(templateFileName);
        var templateBlobStream = new MemoryStream();
        templateBlob.DownloadToStream(templateBlobStream);
        var provisioningTemplate = new SiteTemplate(templateBlobStream).ProvisioningTemplate;
        log.Info($"(id {executionContext.InvocationId}) Retrieved template {templateFileName} from blob storage.");

        foreach (var parameter in templateParameters)
        {
            provisioningTemplate.Parameters[parameter.Key] = parameter.Value;
        }

        var ptai = new ProvisioningTemplateApplyingInformation
        {
            ProgressDelegate = (string message, int progress, int total) =>
            {
                log.Info($"(id {executionContext.InvocationId})[Progress]: {progress:00}/{total:00} - {message}");
            },
            MessagesDelegate = (string message, ProvisioningMessageType messageType) =>
            {
                log.Info($"(id {executionContext.InvocationId})[{messageType.ToString()}]: {message}");
            },
        };

        var tenantUrl = new Uri(CloudConfigurationManager.GetSetting("TenantUrl"));
        Uri.TryCreate(tenantUrl, relativeUrl, out Uri fullSiteUrl);

        var templateAppliedWithoutAnyErrors = false;
        log.Info($"Opening ctx to {fullSiteUrl.AbsoluteUri}");
        using (var newSiteContext = clientContextManager.GetAzureADAppOnlyAuthenticatedContext(fullSiteUrl.AbsoluteUri))
        {
            int tryCount = 0;
            const int maxTries = 3;
            do
            {
                tryCount++;
                try
                {
                    log.Info($"Applying the provisioning template {provisioningTemplateUrl} to {fullSiteUrl.AbsoluteUri}.");
                    newSiteContext.Web.ApplyProvisioningTemplate(provisioningTemplate, ptai);
                    log.Info($"Provisioning template has been applied to {fullSiteUrl.AbsoluteUri}.");
                    templateAppliedWithoutAnyErrors = true;
                }
                catch (Exception ex)
                {
                    log.Error($"Error occurred while applying the provisioning template to {fullSiteUrl.AbsoluteUri}.", ex);
                    templateAppliedWithoutAnyErrors = false;
                    if (tryCount <= maxTries)
                    {
                        log.Warning($"An error occurred while applying the provisioning template, but will try to apply the provisioning template to {fullSiteUrl.AbsoluteUri} once more. (max {maxTries} times, this was attempt number {tryCount}.)");
                    }
                    else
                    {
                        log.Warning($"Tried {maxTries} times to apply the provisioning template without success.");
                    }
                }
            } while (templateAppliedWithoutAnyErrors == false && tryCount <= maxTries);
        }

        if (templateAppliedWithoutAnyErrors)
        {
            var setDefaultColumnValuesMsg = new BrokeredMessage(applyProvisioningTemplateJob, new DataContractJsonSerializer(typeof(ApplyProvisioningTemplateJob)))
            {
                ContentType = "application/json",
                Label = "SetDefaultColumnValues"
            };
            newSitesTopic.Add(setDefaultColumnValuesMsg);
            listItem["PnPProvisioningJobStatus"] = "Provisioned";
        }
        else
        {
            listItem["PnPProvisioningJobStatus"] = "Failed (error while applying template)";
        }

        listItem.Update();
        ctx.ExecuteQuery();
    }
}
/// <summary>
/// Replays the cached doc IDs (and scores) to the given <see cref="ICollector"/>. If this
/// instance does not cache scores, then the <see cref="Scorer"/> is not set via
/// <c>other.SetScorer(Scorer)</c> and scores are not replayed.
/// </summary>
/// <exception cref="InvalidOperationException">
/// If this collector is not cached (i.e., if the RAM limits were too
/// low for the number of documents + scores to cache). </exception>
/// <exception cref="ArgumentException">
/// If the given collector does not support out-of-order collection,
/// while the collector passed to the ctor does. </exception>
public abstract void Replay(ICollector other);
public SlowMotionCooldownSystem(ICollector<InputEntity> collector) : base(collector) { }
internal NoScoreCachingCollector(ICollector other, double maxRAMMB) : base(other, maxRAMMB, false) { }
/// <summary>
/// Create a new <see cref="CachingCollector"/> that wraps the given collector and
/// caches documents and scores up to the specified RAM threshold.
/// </summary>
/// <param name="other">
/// The <see cref="ICollector"/> to wrap and delegate calls to. </param>
/// <param name="cacheScores">
/// Whether to cache scores in addition to document IDs. Note that
/// this increases the RAM consumed per doc. </param>
/// <param name="maxRAMMB">
/// The maximum RAM in MB to consume for caching the documents and
/// scores. If the collector exceeds the threshold, no documents and
/// scores are cached. </param>
public static CachingCollector Create(ICollector other, bool cacheScores, double maxRAMMB)
{
    return cacheScores
        ? (CachingCollector)new ScoreCachingCollector(other, maxRAMMB)
        : new NoScoreCachingCollector(other, maxRAMMB);
}
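// A hedged usage sketch for the factory above (not from the original
// source): run one search through a CachingCollector, then replay the
// cached hits into a second collector without re-executing the query.
// `searcher` and `query` are assumed to already exist.
static void CachingCollectorDemo(IndexSearcher searcher, Query query)
{
    ICollector topDocs = TopScoreDocCollector.Create(10, true);
    CachingCollector cache = CachingCollector.Create(topDocs, true /* cacheScores */, 64.0 /* maxRAMMB */);
    searcher.Search(query, cache);

    if (cache.IsCached)
    {
        // Second pass over the same hits, e.g. to count them,
        // served from the cache instead of the index.
        var countCollector = new TotalHitCountCollector();
        cache.Replay(countCollector);
    }
}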
/// <summary>
/// Utility method, to search and also collect all hits
/// into the provided <see cref="ICollector"/>.
/// </summary>
public static TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, bool doDocScores, bool doMaxScore, ICollector fc)
{
    if (sort == null)
    {
        throw new System.ArgumentException("sort must not be null");
    }
    return DoSearch(searcher, after, q, filter, n, sort, doDocScores, doMaxScore, fc);
}
public bool IsMergeable(ICollector coll) { return false; }
/// <summary>
/// Utility method, to search and also collect all hits
/// into the provided <see cref="ICollector"/>.
/// </summary>
public virtual TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, int n, ICollector fc)
{
    return DoSearch(searcher, after, q, null, n, null, false, false, fc);
}
/// <summary>
/// Utility method, to search and also collect all hits
/// into the provided <see cref="ICollector"/>.
/// </summary>
public static TopDocs SearchAfter(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, ICollector fc)
{
    return DoSearch(searcher, after, q, filter, n, null, false, false, fc);
}
/// <summary>
/// Utility method, to search and also collect all hits
/// into the provided <see cref="ICollector"/>.
/// </summary>
public static TopFieldDocs Search(IndexSearcher searcher, Query q, Filter filter, int n, Sort sort, ICollector fc)
{
    if (sort == null)
    {
        throw new System.ArgumentException("sort must not be null");
    }
    return (TopFieldDocs)DoSearch(searcher, null, q, filter, n, sort, false, false, fc);
}
/// <summary>
/// Utility method, to search and also collect all hits
/// into the provided <see cref="ICollector"/>.
/// </summary>
public static TopDocs Search(IndexSearcher searcher, Query q, int n, ICollector fc)
{
    return DoSearch(searcher, null, q, null, n, null, false, false, fc);
}
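// A hedged usage sketch for the simplest overload above (not from the
// original source): the extra collector sees every hit while the top-N
// results are gathered in the same pass. `searcher` and `query` are
// assumed to already exist.
static void SearchAndCountDemo(IndexSearcher searcher, Query query)
{
    var countAll = new TotalHitCountCollector();
    TopDocs top10 = Search(searcher, query, 10, countAll);
    // top10 holds the ranked hits; countAll.TotalHits counted every match.
}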
public static void Run([QueueTrigger("numberToBeFactored", Connection = "AzureWebJobsStorage")] string numberToBeFactored, ILogger log, [Queue("factorednumbers", Connection = "AzureWebJobsStorage")] ICollector <string> outQueueItem) { log.LogInformation($"C# Queue trigger function processed: {numberToBeFactored}"); var number = Convert.ToInt64(numberToBeFactored); var factors = Factor(number); var returnString = "Hello " + number + " you have " + factors.Count + " factors. The factors are: "; factors.OrderBy(b => b).ToList().ForEach(a => returnString += a.ToString() + ", "); returnString = returnString.Trim(' ').Trim(','); log.LogInformation(returnString); outQueueItem.Add(returnString); }
/**
 * @param use visitor combinator that collects used entities.
 * @param def visitor combinator that collects defined entities.
 */
public DefUse(ICollector use, ICollector def) : base(new Sequence(use, def))
{
    this.use = use;
    this.def = def;
}
private static TopDocs DoSearch(IndexSearcher searcher, ScoreDoc after, Query q, Filter filter, int n, Sort sort, bool doDocScores, bool doMaxScore, ICollector fc)
{
    if (filter != null)
    {
        q = new FilteredQuery(q, filter);
    }

    int limit = searcher.IndexReader.MaxDoc;
    if (limit == 0)
    {
        limit = 1;
    }
    n = Math.Min(n, limit);

    if (after != null && after.Doc >= limit)
    {
        throw new System.ArgumentException("after.doc exceeds the number of documents in the reader: after.doc=" + after.Doc + " limit=" + limit);
    }

    if (sort != null)
    {
        if (after != null && !(after is FieldDoc))
        {
            // TODO: if we fix type safety of TopFieldDocs we can
            // remove this
            throw new System.ArgumentException("after must be a FieldDoc; got " + after);
        }
        const bool fillFields = true;
        var hitsCollector = TopFieldCollector.Create(sort, n, (FieldDoc)after, fillFields, doDocScores, doMaxScore, false);
        searcher.Search(q, MultiCollector.Wrap(hitsCollector, fc));
        return hitsCollector.GetTopDocs();
    }
    else
    {
        // TODO: can we pass the right boolean for
        // in-order instead of hardwired to false...? we'd
        // need access to the protected IS.search methods
        // taking Weight... could use reflection...
        var hitsCollector = TopScoreDocCollector.Create(n, after, false);
        searcher.Search(q, MultiCollector.Wrap(hitsCollector, fc));
        return hitsCollector.GetTopDocs();
    }
}
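// The MultiCollector.Wrap pattern used in DoSearch also works on its
// own; a minimal sketch (not from the original source): both collectors
// receive every hit in a single pass over the index. `searcher` and
// `query` are assumed to already exist.
static void MultiCollectorDemo(IndexSearcher searcher, Query query)
{
    var top = TopScoreDocCollector.Create(10, true);
    var count = new TotalHitCountCollector();
    searcher.Search(query, MultiCollector.Wrap(top, count));
    TopDocs hits = top.GetTopDocs();
    int totalMatches = count.TotalHits;
}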
public bool IsMergeable(ICollector coll)
{
    if (IsSealed)
        return false;
    IImageProvider mp = coll as IImageProvider;
    return (mp != null);
}
public static void Run([QueueTrigger("myqueue-items", Connection = "zzzz")] string myQueueItem, ILogger log, ICollector <string> collector) { log.LogInformation($"C# Queue trigger function processed: {myQueueItem}"); }
public static void FuncWithICollectorNoop([Queue(QueueName)] ICollector<PocoMessage> queue)
{
    Assert.NotNull(queue);
}
public override void Search(Query query, Filter filter, ICollector results)
{
    CheckExplanations(query);
    base.Search(query, filter, results);
}
internal NoScoreCachingCollector(ICollector other, int maxDocsToCache) : base(other, maxDocsToCache) { }
public override void Process(Context context, IEnumerable<TInput> elements, ICollector<TOutput> output)
{
    throw new System.NotImplementedException();
}
/// <summary>
/// Create a new <see cref="CachingCollector"/> that wraps the given collector and
/// caches documents and scores up to the specified max docs threshold.
/// </summary>
/// <param name="other">
/// The <see cref="ICollector"/> to wrap and delegate calls to. </param>
/// <param name="cacheScores">
/// Whether to cache scores in addition to document IDs. Note that
/// this increases the RAM consumed per doc. </param>
/// <param name="maxDocsToCache">
/// The maximum number of documents for caching the documents and
/// possibly the scores. If the collector exceeds the threshold,
/// no documents and scores are cached. </param>
public static CachingCollector Create(ICollector other, bool cacheScores, int maxDocsToCache)
{
    return cacheScores
        ? (CachingCollector)new ScoreCachingCollector(other, maxDocsToCache)
        : new NoScoreCachingCollector(other, maxDocsToCache);
}
public bool Remove(ICollector collector)
{
    return _collectors.TryRemove(collector.Name, out _);
}
public static void Run(
    [TimerTrigger("0 10 3/3 1/1 * *", RunOnStartup = true)] TimerInfo myTimer,
    [Queue("downloadandunpacksnodas", Connection = "AzureWebJobsStorage")] ICollector<FileReadyToDownloadQueueMessage> outputQueueItem,
    TraceWriter log)
{
    log.Info($"C# Timer trigger function executed at: {DateTime.Now}");
    string partitionName = "snodas-westus-v1";
    log.Info($"DetectSnodasReadyForDownload Timer trigger function executed at UTC: {DateTime.UtcNow}");

#if DEBUG
    int numberOfDaysToCheck = 67;
#else
    int numberOfDaysToCheck = 5;
#endif

    // Retrieve storage account from connection string.
    var storageAccount = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting("AzureWebJobsStorage"));
    CloudTableClient tableClient = storageAccount.CreateCloudTableClient();
    CloudTable table = tableClient.GetTableReference("snodasdownloadtracker");
    table.CreateIfNotExists();

    // Look back numberOfDaysToCheck days and fill in any missing values;
    // I believe they store files on this server for 7 days.
    TableQuery<FileProcessedTracker> dateQuery = new TableQuery<FileProcessedTracker>().Where(
        TableQuery.CombineFilters(
            TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, partitionName),
            TableOperators.And,
            TableQuery.GenerateFilterConditionForDate("ForecastDate", QueryComparisons.GreaterThan, DateTime.UtcNow.AddDays(-1 * numberOfDaysToCheck))
        )
    );
    var results = table.ExecuteQuery(dateQuery);

    // 1. Are there any missing dates for the last n days we should backfill?
    var currentDate = DateTime.UtcNow.AddDays(-1 * numberOfDaysToCheck);
    var checkDate = currentDate;
    var listOfDatesToDownload = new List<DateTime>();
    while (checkDate < DateTime.UtcNow)
    {
        string fileName = checkDate.ToString("yyyyMMdd") + "Snodas.csv";
        if (results.Where(r => r.RowKey == fileName).Count() == 0)
        {
            // If the file doesn't exist, enter a new item.
            log.Info($"snodas backfill: adding item {fileName} to download queue");
            listOfDatesToDownload.Add(checkDate.Date);
        }
        else
        {
            log.Info($"Skipping item {fileName} as it already exists");
        }
        checkDate = checkDate.AddDays(1);
    }

    foreach (var date in listOfDatesToDownload)
    {
        // Get the object used to communicate with the server.
        var urlBase = @"ftp://sidads.colorado.edu/pub/DATASETS/NOAA/G02158/masked/";
        urlBase += date.ToString("yyyy/");
        urlBase += date.ToString("MM_MMM/");
        urlBase += "SNODAS_" + date.ToString("yyyyMMdd") + ".tar";

        FtpWebRequest request = (FtpWebRequest)WebRequest.Create(urlBase);
        request.Method = WebRequestMethods.Ftp.GetDateTimestamp;
        // This FTP site uses anonymous logon.
        request.Credentials = new NetworkCredential("anonymous", "");

        try
        {
            FtpWebResponse response = (FtpWebResponse)request.GetResponse();
            // File exists; add it to the download queue.
            log.Info($"Adding snodas file with {date} to download queue.");
            // Enter a new queue item.
            outputQueueItem.Add(new FileReadyToDownloadQueueMessage
            {
                FileName = "SNODAS_" + date.ToString("yyyyMMdd") + ".tar",
                FileDate = date.ToString("yyyyMMdd"),
                Url = urlBase,
                Filetype = partitionName
            });
        }
        catch (WebException ex)
        {
            FtpWebResponse response = (FtpWebResponse)ex.Response;
            if (response.StatusCode == FtpStatusCode.ActionNotTakenFileUnavailable)
            {
                // File does not exist.
                log.Info($"SNODAS File for date {date} not available, skipping.");
                continue;
            }
            else
            {
                log.Error($"Error attempting to see if snodas file with date {date} exists on ftp server.");
            }
        }
    }
}
public override bool Score(ICollector collector, int max)
{
    bool more;
    Bucket tmp;
    FakeScorer fs = new FakeScorer();

    // The internal loop will set the score and doc before calling collect.
    collector.SetScorer(fs);
    do
    {
        bucketTable.first = null;

        while (current != null) // more queued
        {
            // check prohibited & required
            if ((current.Bits & PROHIBITED_MASK) == 0)
            {
                // TODO: re-enable this if BQ ever sends us required
                // clauses
                //&& (current.bits & requiredMask) == requiredMask) {

                // NOTE: Lucene always passes max =
                // Integer.MAX_VALUE today, because we never embed
                // a BooleanScorer inside another (even though
                // that should work)... but in theory an outside
                // app could pass a different max so we must check
                // it:
                if (current.Doc >= max)
                {
                    tmp = current;
                    current = current.Next;
                    tmp.Next = bucketTable.first;
                    bucketTable.first = tmp;
                    continue;
                }

                if (current.Coord >= minNrShouldMatch)
                {
                    fs.score = (float)(current.Score * coordFactors[current.Coord]);
                    fs.doc = current.Doc;
                    fs.freq = current.Coord;
                    collector.Collect(current.Doc);
                }
            }

            current = current.Next; // pop the queue
        }

        if (bucketTable.first != null)
        {
            current = bucketTable.first;
            bucketTable.first = current.Next;
            return true;
        }

        // refill the queue
        more = false;
        end += BucketTable.SIZE;
        for (SubScorer sub = scorers; sub != null; sub = sub.Next)
        {
            if (sub.More)
            {
                sub.More = sub.Scorer.Score(sub.Collector, end);
                more |= sub.More;
            }
        }
        current = bucketTable.first;
    } while (current != null || more);

    return false;
}
internal static IEnumerable<Metric> CollectAllMetrics(this ICollector collector, bool excludeUnlabeled = false)
{
    return collector.Collect().Single().metric.Where(x => !excludeUnlabeled || x.label.Count > 0);
}
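// A hedged usage sketch for the helper above (not from the original
// source): `requestCounter` is assumed to be an ICollector with labeled
// children, and the lowercase label fields are assumed to follow the
// protobuf-generated model that CollectAllMetrics operates on.
static void CollectAllMetricsDemo(ICollector requestCounter)
{
    var labeledSamples = requestCounter.CollectAllMetrics(excludeUnlabeled: true);
    foreach (var metric in labeledSamples)
    {
        // Each Metric carries its label pairs alongside the sampled value.
        Console.WriteLine(string.Join(",", metric.label.Select(l => $"{l.name}={l.value}")));
    }
}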
public bool IsMergeable(ICollector coll)
{
    IImageProvider ic = coll as IImageProvider;
    if (ic != null)
        return true;
    return coll.IsMergeable(this);
}
public void GetPayments(ICollector collector)
{
    try
    {
        log.InfoFormat("Start collecting with collector {0}", collector.GetType());
        IList<Payment> payments = collector.GetPayments();
        log.InfoFormat("Finished collecting with collector {0}. {1} transactions collected.", collector.GetType(), payments.Count);
        AddPayments(payments);
    }
    catch (Exception ex)
    {
        log.Error(String.Format("error in collector {0}.", collector.GetType()), ex);
    }
}
public bool Remove(ICollector collector)
{
    ICollector dummy;
    return _collectors.TryRemove(collector.Name, out dummy);
}
public void SetUp()
{
    target = new TestRunnerImpl();

    args = Stub<ITestRunnerArgs>();
    parser = Stub<IParser>();
    cleaner = Stub<ICleaner>();
    runDataBuilder = Stub<IRunDataBuilder>();
    runDataListBuilder = Stub<IRunDataListBuilder>();
    executorLauncher = Stub<IExecutorLauncher>();
    trxWriter = Stub<ITrxWriter>();
    breaker = Stub<IBreaker>();
    enumerator = Stub<IRunDataEnumerator>();
    windowsFileHelper = Stub<IWindowsFileHelper>();
    collector = Stub<ICollector>();

    target.Args = args;
    target.Parser = parser;
    target.Cleaner = cleaner;
    target.RunDataBuilder = runDataBuilder;
    target.RunDataListBuilder = runDataListBuilder;
    target.ExecutorLauncher = executorLauncher;
    target.TrxWriter = trxWriter;
    target.Breaker = breaker;
    target.Collector = collector;
    target.WindowsFileHelper = windowsFileHelper;
}
public ApplicationGCProfiler(ICollector collector, IOptionAccessor<ApplicationOptions> optionAccessor)
{
    _collector = collector ?? throw new ArgumentNullException(nameof(collector));
    _applicationOptions = optionAccessor.Value;
}
public ICollector Merge(ICollector coll) { if (!this.IsMergeable(coll)) throw new InvalidProgramException("Algorithmic error"); IImageProvider mp = coll as IImageProvider; if (mp == null) throw new InvalidProgramException("Algorithmic error"); return Clone(mp.Image); }
public static void BindToICollectorInt(
    [Queue(OutputQueueName)] ICollector<int> output)
{
    // not supported
}
public ICollector Merge(ICollector coll)
{
    throw new InvalidProgramException("Algorithmic error");
}
public virtual void Search(Weight weight, ICollector collector)
{
    Search(Ctx, weight, collector);
}
public static async Task Run(
    [QueueTrigger("stage1", Connection = "AzureWebJobsStorage")] Chunk inputChunk,
    [Queue("stage2", Connection = "AzureWebJobsStorage")] ICollector<Chunk> outputQueue,
    Binder binder,
    TraceWriter log)
{
    //log.Info($"C# Queue trigger function processed: {inputChunk}");

    if (inputChunk.Length < MAX_CHUNK_SIZE)
    {
        outputQueue.Add(inputChunk);
        return;
    }

    string nsgSourceDataAccount = Util.GetEnvironmentVariable("nsgSourceDataAccount");
    if (nsgSourceDataAccount.Length == 0)
    {
        log.Error("Value for nsgSourceDataAccount is required.");
        throw new ArgumentNullException("nsgSourceDataAccount", "Please supply in this setting the name of the connection string from which NSG logs should be read.");
    }

    var attributes = new Attribute[]
    {
        new BlobAttribute(inputChunk.BlobName),
        new StorageAccountAttribute(nsgSourceDataAccount)
    };

    byte[] nsgMessages = new byte[inputChunk.Length];
    try
    {
        CloudBlockBlob blob = await binder.BindAsync<CloudBlockBlob>(attributes);
        await blob.DownloadRangeToByteArrayAsync(nsgMessages, 0, inputChunk.Start, inputChunk.Length);
    }
    catch (Exception ex)
    {
        log.Error(string.Format("Error binding blob input: {0}", ex.Message));
        throw; // rethrow without resetting the stack trace
    }

    int startingByte = 0;
    var chunkCount = 0;

    var newChunk = GetNewChunk(inputChunk, chunkCount++, log, 0);

    //long length = FindNextRecord(nsgMessages, startingByte);
    var nsgMessagesString = System.Text.Encoding.Default.GetString(nsgMessages);
    int endingByte = FindNextRecordRecurse(nsgMessagesString, startingByte, 0, log);
    int length = endingByte - startingByte + 1;

    while (length != 0)
    {
        if (newChunk.Length + length > MAX_CHUNK_SIZE)
        {
            //log.Info($"Chunk starts at {newChunk.Start}, length is {newChunk.Length}, next start is {newChunk.Start + newChunk.Length}");
            outputQueue.Add(newChunk);
            newChunk = GetNewChunk(inputChunk, chunkCount++, log, newChunk.Start + newChunk.Length);
        }
        newChunk.Length += length;
        startingByte += length;
        endingByte = FindNextRecordRecurse(nsgMessagesString, startingByte, 0, log);
        length = endingByte - startingByte + 1;
    }

    if (newChunk.Length > 0)
    {
        outputQueue.Add(newChunk);
        //log.Info($"Chunk starts at {newChunk.Start}, length is {newChunk.Length}");
    }
}
public static void FuncWithICollector([Queue(OutputQueueName)] ICollector<string> queue)
{
    Assert.NotNull(queue);
    queue.Add(TestQueueMessage);
}
internal override void Init(ICollector parent, LabelValues labelValues)
{
    Init(parent, labelValues, DateTime.UtcNow);
}
public static QueueItem Queue2(
    [QueueTrigger("queue2")] QueueItem myQueueItem,
    [Queue("queue3")] ICollector<string> queue3,
    TraceWriter log)
{
    var json = JsonConvert.SerializeObject(myQueueItem);
    log.Info($"Queue2 called with message: {json}");

    if (string.IsNullOrEmpty(myQueueItem.Body))
    {
        throw new Exception();
    }

    queue3.Add(json);
    return myQueueItem;
}
internal virtual void Init(ICollector parent, LabelValues labelValues)
{
    _labelValues = labelValues;
}
public void Register(ICollector collector)
{
    if (!_collectors.TryAdd(collector.Name, collector))
        throw new InvalidOperationException(string.Format("A collector with name '{0}' has already been registered!", collector.Name));
}
public void SetUp_for_tests()
{
    testItem = new Item { Name = "CorrectItem", IsBought = false };
    _collector = Substitute.For<ICollector>();
}