/// <summary>
/// Reads image URLs from a local JSON file, runs each image through the
/// prediction service, and indexes the top prediction per image into the
/// "csindex" Azure Search index.
/// </summary>
public static void DoBlob()
{
    // Load the list of image URLs from the local JSON file.
    var imagesUrls = JsonConvert.DeserializeObject<Paths>(
        System.IO.File.ReadAllText("../../../Ficheros/Path.json"));

    // Keep only the highest-ranked prediction for each image (if any).
    var searchList = new List<AzureSearchModel>();
    foreach (var url in imagesUrls.data)
    {
        var prediction = AIServices.PredictionImageByUrl(url.Signature).FirstOrDefault();
        if (prediction != null)
        {
            searchList.Add(new AzureSearchModel()
            {
                Tag = prediction.TagName,
                Scoring = prediction.Probability.ToString(),
                Url = url.Signature
            });
        }
    }

    // NOTE(review): admin API key is hard-coded in source — move it to
    // configuration/environment and rotate the exposed key.
    string searchServiceName = "dotnetconfsearch";
    string adminApiKey = "162ED4F1F498A366C066F1A1B1430F5D";

    // Create the index, then upload all documents in a single batch.
    // GetAwaiter().GetResult() instead of .Result so a failure surfaces as the
    // original exception rather than being wrapped in an AggregateException.
    var azureSearch = new AzureSearchService(searchServiceName, adminApiKey);
    var indexCreate = azureSearch
        .CreateIndexAsync<AzureSearchModel>("csindex", false, null)
        .GetAwaiter().GetResult();
    var uploadDocument = azureSearch
        .UploadDocuments<AzureSearchModel>("csindex", searchList.ToArray())
        .GetAwaiter().GetResult();
}
/// <summary>
/// Integration test: creates an Azure Search index, wires an indexer that maps
/// ERC-20 Transfer event logs to a custom search DTO, processes a fixed block
/// range from the blockchain, and asserts the expected document count landed
/// in the index. The index is deleted in the finally block regardless of outcome.
/// </summary>
public async Task OneEventWithMapping()
{
    const string INDEX_NAME = "one-event-with-mapping";
    using (var azureSearchService = new AzureSearchService(AzureSearchServiceName, _azureSearchApiKey))
    {
        try
        {
            //setup
            //create a definition of an index -
            //it describes the fields, key and data types
            //at this stage - it is only an in-memory schema
            var index = CreateAzureIndexDefinition(INDEX_NAME);

            //create the index in azure
            index = await azureSearchService.CreateIndexAsync(index);

            // create a processor for a specific event and map to a custom DTO for the search
            // documentsPerBatch: 1 means every event is pushed to the index
            // immediately instead of being buffered.
            var indexer = azureSearchService.CreateIndexerForEventLog<TransferEvent_ERC20, CustomTransferSearchDocumentDto>(
                index.Name,
                (e) => new CustomTransferSearchDocumentDto
                {
                    From = e.Event.From,
                    To = e.Event.To,
                    Value = e.Event.Value.ToString(),
                    BlockNumber = e.Log.BlockNumber.Value.ToString(),
                    TxHash = e.Log.TransactionHash,
                    LogAddress = e.Log.Address,
                    LogIndex = (int)e.Log.LogIndex.Value,
                    // tx hash + log index uniquely identifies a log entry,
                    // so it serves as the search document key
                    DocumentKey = $"{e.Log.TransactionHash}_{e.Log.LogIndex.Value}"
                },
                documentsPerBatch: 1);

            var web3 = new Web3.Web3(BlockchainUrl);

            // every matching Transfer log is forwarded straight to the indexer
            var blockchainProcessor = web3.Processing.Logs.CreateProcessor<TransferEvent_ERC20>(
                transfer => indexer.IndexAsync(transfer));

            var cancellationTokenSource = new CancellationTokenSource();

            //execute
            // process the fixed historical block range 3146684..3146694
            await blockchainProcessor.ExecuteAsync(3146694, cancellationTokenSource.Token, 3146684);

            //assert
            await Task.Delay(5000); // allow time to index
            Assert.Equal(19, await azureSearchService.CountDocumentsAsync(INDEX_NAME));
        }
        finally
        {
            // clean up the index even if the test failed
            await azureSearchService.DeleteIndexAsync(INDEX_NAME);
        }
    }
}
/// <summary>
/// HTTP-triggered Azure Function: given a SharePoint list name, item id and
/// site in the request body, analyzes the item's photo for celebrities,
/// indexes the result in Azure Search and writes the tags back to SharePoint.
/// Returns 400 when <c>Lista</c> or <c>Id</c> is missing.
/// </summary>
public static async Task<HttpResponseMessage> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)] HttpRequestMessage req,
    TraceWriter log)
{
    log.Info("C# HTTP trigger function processed a request.");

    // parse query parameter
    dynamic data = await req.Content.ReadAsAsync<object>();
    log.Info("Data:" + data.Lista + data.Id + data.site);

    // Set name to query string or body data
    string lista = data?.Lista;
    string id = data?.Id;
    string site = data?.site;

    if (lista != null && id != null)
    {
        log.Info("Lista " + lista + " id " + id + " site " + site);

        // fetch the photo from SharePoint and run celebrity analysis on it
        SPOService spoM = new SPOService(site);
        var image = spoM.GetPhotoInfo(lista, id);
        CelebrityService cService = new CelebrityService();
        var content = cService.MakeAnalysisCelebrity(image);
        var celebrityName = cService.GetCelebrity(content.celebrity);
        log.Info("Obteniendo el celebrity");

        var azureSearch = new AzureSearchService();
        // FIX: was .Result inside an async method — blocking on the task risks
        // deadlocking the function host's synchronization context; await instead.
        var indexCreate = await azureSearch.CreateIndexAsync<AzureSearchModel>("newsindex", false, null);

        var contentIndex = new AzureSearchModel()
        {
            IdSharepoint = id,
            Name = celebrityName,
            Tags = content.tags,
            Id = id
        };

        // FIX: the upload task was never awaited, so upload failures were
        // silently dropped and the OK response could be sent before the
        // document was actually indexed.
        var uploadDocument = await azureSearch.UploadDocuments<AzureSearchModel>(
            "newsindex", new List<AzureSearchModel>() { contentIndex }.ToArray());
        log.Info("Creado el search");

        // persist the extracted tags back onto the SharePoint item
        spoM.SetResultNews(lista, id, JsonConvert.SerializeObject(content.tags));
        return req.CreateResponse(HttpStatusCode.OK, "Noticia categorizada");
    }
    else
    {
        return req.CreateResponse(HttpStatusCode.BadRequest, "Faltan parametros");
    }
}
/// <summary>
/// Console entry point: loads Oscars data from a local JSON file, creates the
/// "compartimossearch" index, and uploads the documents grouped by year
/// (one upload batch per year).
/// </summary>
static void Main(string[] args)
{
    // NOTE(review): fill these in (or better, read them from configuration /
    // environment) before running — empty values will fail at the service call.
    string searchServiceName = "";
    string adminApiKey = "";

    //Get data
    string text = System.IO.File.ReadAllText(@"../../../Data/dataOscars.json");
    List<AzureSearchModel> data = JsonConvert.DeserializeObject<List<AzureSearchModel>>(text);

    //Create index
    // GetAwaiter().GetResult() instead of .Result so a failure throws the
    // original exception rather than an AggregateException (Main is sync,
    // so blocking here is unavoidable).
    var azureSearch = new AzureSearchService(searchServiceName, adminApiKey);
    var indexCreate = azureSearch
        .CreateIndexAsync<AzureSearchModel>("compartimossearch", false, null)
        .GetAwaiter().GetResult();

    // Group by year - upload one batch of documents per year
    var groupByYear = data.GroupBy(e => e.Year).ToList();
    foreach (var element in groupByYear)
    {
        var currentData = element.ToList();
        var uploadDocument = azureSearch
            .UploadDocuments<AzureSearchModel>("compartimossearch", currentData.ToArray())
            .GetAwaiter().GetResult();
    }
}
/// <summary>
/// Integration test: wires an end-to-end event-processing pipeline (Azure
/// Search indexers, Azure Storage queues and table repositories, mocked
/// subscription configuration), processes a fixed Rinkeby block range, then
/// asserts the aggregated subscription state values produced by three
/// different event subscriptions.
/// </summary>
public async Task WebJobExample()
{
    var config = TestConfiguration.LoadConfig();
    string azureStorageConnectionString = config["AzureStorageConnectionString"];
    string azureSearchKey = config["AzureSearchApiKey"];

    // mocked configuration: defines subscribers/subscriptions for PARTITION
    var configurationContext = EventProcessingConfigMock.Create(PARTITION, out IdGenerator idGenerator);
    IEventProcessingConfigurationRepository configurationRepository =
        configurationContext.CreateMockRepository(idGenerator);

    var web3 = new Web3.Web3(TestConfiguration.BlockchainUrls.Infura.Rinkeby);

    // search components
    var searchService = new AzureSearchService(
        serviceName: AZURE_SEARCH_SERVICE_NAME, searchApiKey: azureSearchKey);

    // lazily creates the index on first use, then returns an indexer that maps
    // decoded events to generic search documents
    var subscriberSearchIndexFactory = new SubscriberSearchIndexFactory(async indexName =>
    {
        if (await searchService.IndexExistsAsync(indexName) == false)
        {
            //TODO: REPLACE THIS WITH Nethereum.BlockchainStore.Search.Azure.EventToGenericSearchDocMapper
            await searchService.CreateIndexAsync(
                EventToGenericSearchDocMapper.CreateAzureIndexDefinition(indexName));
        }

        return (searchService.CreateIndexer<DecodedEvent, GenericSearchDocument>(
            indexName,
            decodedEvent => EventToGenericSearchDocMapper.Map(decodedEvent, decodedEvent.State)));
    });

    // queue components
    //AzureStorageQueueFactory
    var azureQueueFactory = new AzureStorageQueueFactory(azureStorageConnectionString);
    var subscriberQueueFactory = new SubscriberQueueFactory(
        queueName => azureQueueFactory.GetOrCreateQueueAsync(queueName));

    // subscriber repository
    var repositoryFactory = new AzureTablesSubscriberRepositoryFactory(azureStorageConnectionString);

    // load subscribers and event subscriptions
    var eventSubscriptionFactory = new EventSubscriptionFactory(
        web3, configurationRepository, subscriberQueueFactory, subscriberSearchIndexFactory, repositoryFactory);
    var eventSubscriptions = await eventSubscriptionFactory.LoadAsync(PARTITION);

    // progress repo (dictates which block ranges to process next)
    // maintain separate progress per partition via a prefix
    var storageCloudSetup = new AzureTablesRepositoryFactory(
        azureStorageConnectionString, prefix: $"Partition{PARTITION}");
    var blockProgressRepo = storageCloudSetup.CreateBlockProgressRepository();

    var logProcessor = web3.Processing.Logs.CreateProcessor(
        logProcessors: eventSubscriptions, blockProgressRepository: blockProgressRepo);

    // execute
    try
    {
        var ctx = new System.Threading.CancellationTokenSource();
        await logProcessor.ExecuteAsync(BLOCK_TO, ctx.Token, BLOCK_FROM);
    }
    finally
    {
        // tear down cloud resources even if processing failed
        await ClearDown(configurationContext, storageCloudSetup, searchService, azureQueueFactory, repositoryFactory);
    }

    // save event subscription state
    await configurationRepository.EventSubscriptionStates.UpsertAsync(eventSubscriptions.Select(s => s.State));

    // assertions
    var subscriptionState1 = configurationContext.GetEventSubscriptionState(eventSubscriptionId: 1); // interested in transfers with contract queries and aggregations
    var subscriptionState2 = configurationContext.GetEventSubscriptionState(eventSubscriptionId: 2); // interested in transfers with simple aggregation
    var subscriptionState3 = configurationContext.GetEventSubscriptionState(eventSubscriptionId: 3); // interested in any event for a specific address

    Assert.Equal("4009000000002040652615", subscriptionState1.Values["RunningTotalForTransferValue"].ToString());
    Assert.Equal((uint)19, subscriptionState2.Values["CurrentTransferCount"]);

    var txForSpecificAddress = (List<string>)subscriptionState3.Values["AllTransactionHashes"];
    Assert.Equal("0x362bcbc78a5cc6156e8d24d95bee6b8f53d7821083940434d2191feba477ae0e", txForSpecificAddress[0]);
    Assert.Equal("0xe63e9422dedf84d0ce13f9f75ebfd86333ce917b2572925fbdd51b51caf89b77", txForSpecificAddress[1]);

    var blockNumbersForSpecificAddress = (List<HexBigInteger>)subscriptionState3.Values["AllBlockNumbers"];
    Assert.Equal(4063362, blockNumbersForSpecificAddress[0].Value);
    Assert.Equal(4063362, blockNumbersForSpecificAddress[1].Value);
}