public ActionResult Index()
{
    this.ViewBag.CenterLatitude = MappyData.Constants.SeattleLatitude;
    this.ViewBag.CenterLongitude = MappyData.Constants.SeattleLongitude;
    this.ViewBag.MapsId = AzureUtilities.FromConfiguration("OpenStreetMapId");
    this.ViewBag.MapsAccessToken = AzureUtilities.FromConfiguration("OpenStreetMapAccessToken");
    return View();
}
public Task<ReturnConfigModel> SetConfig(ConfigModel model)
{
    try
    {
        var provider = new AzureUtilities(model);
        return provider.SetConfig(model);
    }
    catch (Exception)
    {
        // Rethrow with a bare "throw" so the original stack trace is preserved
        // ("throw ex" would reset it).
        throw;
    }
}
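These snippets lean on AzureUtilities.FromConfiguration to pull named settings. Its body isn't shown here; a minimal sketch, assuming it is a thin wrapper over CloudConfigurationManager (which the functions below already use) that fails fast on missing keys:

using System.Configuration;
using Microsoft.Azure; // CloudConfigurationManager

public static class AzureUtilitiesSketch
{
    // Hypothetical stand-in for AzureUtilities.FromConfiguration: read a named setting,
    // throwing early so a missing key surfaces at startup rather than as a null later.
    public static string FromConfiguration(string key)
    {
        var value = CloudConfigurationManager.GetSetting(key);
        if (string.IsNullOrEmpty(value))
        {
            throw new ConfigurationErrorsException($"Missing configuration setting '{key}'.");
        }
        return value;
    }
}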
protected void Application_Start()
{
    AreaRegistration.RegisterAllAreas();
    GlobalConfiguration.Configure(WebApiConfig.Register);
    FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters);
    RouteConfig.RegisterRoutes(RouteTable.Routes);
    BundleConfig.RegisterBundles(BundleTable.Bundles);

    // Fire and forget: start the factory based on a configuration entry, and have the
    // callback send each incoming point through our SignalR hub.
    // Note the casts to float - SignalR seems to have issues sending doubles to JavaScript,
    // possibly because JS doesn't support that precision.
    RoutePointSourceFactory.StartAsync(
        AzureUtilities.FromConfiguration("RoutePointSource"),
        pt => RouteHub.Send(RouteHub.Hub(), pt.UserID, (float)pt.Latitude, (float)pt.Longitude));
}
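RouteHub.Hub() and RouteHub.Send aren't shown. A plausible sketch, assuming a standard SignalR 2.x hub where Hub() resolves the hub context via GlobalHost and Send broadcasts the point; the client-side method name addPoint is an assumption:

using Microsoft.AspNet.SignalR;

public class RouteHub : Hub
{
    // Resolve the hub context outside a hub invocation (e.g. from Application_Start).
    public static IHubContext Hub()
    {
        return GlobalHost.ConnectionManager.GetHubContext<RouteHub>();
    }

    // Push a point to connected clients; "addPoint" is a hypothetical client handler name.
    public static void Send(IHubContext context, string userId, float latitude, float longitude)
    {
        context.Clients.All.addPoint(userId, latitude, longitude);
    }
}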
public async Task<IEnumerable<UploadedImage>> Post()
{
    if (!Request.Content.IsMimeMultipartContent("form-data"))
    {
        throw new HttpResponseException(HttpStatusCode.UnsupportedMediaType);
    }

    var multipartStreamProvider = new AzureBlobMultipartProvider(
        await AzureUtilities.GetImageBlobContainerAsync("ImageDataConnectionString"),
        await AzureUtilities.GetImageDataTableAsync("ImageDataConnectionString"));
    var results = await Request.Content.ReadAsMultipartAsync<AzureBlobMultipartProvider>(multipartStreamProvider);
    var imageDescription = results.FormFields["ImageDescription"];
    return await results.SaveAllAsync(imageDescription);
}
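GetImageBlobContainerAsync is another AzureUtilities helper whose body isn't shown. A sketch under the assumption that it resolves a connection string from configuration and ensures the container exists; the container name "images" is made up:

using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Blob;

// Hypothetical shape of AzureUtilities.GetImageBlobContainerAsync.
public static async Task<CloudBlobContainer> GetImageBlobContainerAsync(string connectionStringName)
{
    var account = CloudStorageAccount.Parse(CloudConfigurationManager.GetSetting(connectionStringName));
    var container = account.CreateCloudBlobClient().GetContainerReference("images"); // container name is an assumption
    await container.CreateIfNotExistsAsync();
    return container;
}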
static void WriteToEventHub()
{
    var ehConnStr = AzureUtilities.ServiceBusConnectionString(
        AzureUtilities.FromConfiguration("MappyServiceBusNamespace"),
        AzureUtilities.FromConfiguration("MappyEventHubSASName"),
        AzureUtilities.FromConfiguration("MappyEventHubSASKey"));
    var eventHubName = AzureUtilities.FromConfiguration("MappyEventHubName");
    var eventHubClient = EventHubClient.CreateFromConnectionString(ehConnStr, eventHubName);

    new RandomRoutePointSource(pt =>
    {
        Console.WriteLine("Sending {0}", pt);
        var data = new EventData(Encoding.UTF8.GetBytes(JsonConvert.SerializeObject(pt)));
        data.PartitionKey = pt.UserID;
        eventHubClient.Send(data);
    }).StartAsync().Wait();
}
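ServiceBusConnectionString presumably assembles an Event Hubs connection string from the namespace and SAS credentials. A sketch using ServiceBusConnectionStringBuilder from the Microsoft.ServiceBus package; the helper's signature is inferred from the call site above:

using System;
using Microsoft.ServiceBus;

// Hypothetical implementation matching the three-argument call above.
public static string ServiceBusConnectionString(string serviceBusNamespace, string sasName, string sasKey)
{
    Uri endpoint = ServiceBusEnvironment.CreateServiceUri("sb", serviceBusNamespace, string.Empty);
    return ServiceBusConnectionStringBuilder.CreateUsingSharedAccessKey(endpoint, sasName, sasKey);
}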
public static FileProcessedTracker Run(
    [QueueTrigger("downloadandunpacksnodas", Connection = "AzureWebJobsStorage")] FileReadyToDownloadQueueMessage myQueueItem,
    TraceWriter log)
{
    log.Info($"C# Queue trigger function processed snodas date: {myQueueItem.FileDate}");
    string partitionName = myQueueItem.Filetype;
    var urlToDownload = myQueueItem.Url;
    log.Info($"Downloading Url {urlToDownload}");

    FtpWebRequest request = (FtpWebRequest)WebRequest.Create(urlToDownload);
    request.Method = WebRequestMethods.Ftp.DownloadFile;
    // This example assumes the FTP site uses anonymous logon.
    request.Credentials = new NetworkCredential("anonymous", "");

    List<string> listOfUnpackedFiles = null;
    FtpWebResponse response = (FtpWebResponse)request.GetResponse();
    log.Info($"File {urlToDownload} downloaded");
    Stream responseStream = response.GetResponseStream();
    listOfUnpackedFiles = SnodasUtilities.UnpackSnodasStream(responseStream);
    log.Info($"File {urlToDownload} unpacked");

    // Fix the bard codes in the .hdr files.
    foreach (var f in listOfUnpackedFiles.Where(s => s.ToLower().Contains(".hdr")))
    {
        SnodasUtilities.RemoveBardCodesFromHdr(f);
    }

    log.Info($"Attempting to sign in to AD for Data Lake upload");
    var adlsAccountName = CloudConfigurationManager.GetSetting("ADLSAccountName");
    // Auth secrets
    var domain = CloudConfigurationManager.GetSetting("Domain");
    var webApp_clientId = CloudConfigurationManager.GetSetting("WebAppClientId");
    var clientSecret = CloudConfigurationManager.GetSetting("ClientSecret");
    var clientCredential = new ClientCredential(webApp_clientId, clientSecret);
    var creds = ApplicationTokenProvider.LoginSilentAsync(domain, clientCredential).Result;

    // Create client objects and set the subscription ID.
    var adlsFileSystemClient = new DataLakeStoreFileSystemManagementClient(creds);
    log.Info($"Attempting to upload unpacked files to ADLS");
#if DEBUG
    // listOfUnpackedFiles = listOfUnpackedFiles.Where(f => f.Contains(".Hdr")).Take(1).ToList();
#endif
    foreach (var file in listOfUnpackedFiles)
    {
        try
        {
            adlsFileSystemClient.FileSystem.UploadFile(adlsAccountName, file,
                "/snodas-dat-us-v1/" + file.Split('\\').Last(), uploadAsBinary: true, overwrite: true);
            log.Info($"Uploaded file: {file}");
        }
        catch (Exception e)
        {
            log.Error($"Upload failed: {e.Message}");
        }
    }

    // 1: Get values for lat/lon.
    var locations = AzureUtilities.DownloadLocations(log);
#if DEBUG
    var executingAssemblyFile = new Uri(Assembly.GetExecutingAssembly().GetName().CodeBase).LocalPath;
    var executingDirectory = Path.GetDirectoryName(executingAssemblyFile);
    if (string.IsNullOrEmpty(executingDirectory))
    {
        throw new InvalidOperationException("cannot get executing directory");
    }
    executingDirectory = Directory.GetParent(executingDirectory).FullName;
    var gdalPath = Path.Combine(executingDirectory, "gdal");
    log.Info($"Have gdal path {gdalPath}");
#endif
    log.Info($"Configuring gdal");
    GdalConfiguration.ConfigureGdal();
    var results = SnodasUtilities.GetValuesForCoordinates(locations,
        listOfUnpackedFiles.Where(f => f.Contains(".Hdr")).ToList());
    log.Info($"Have {results.Count} results for coordinates.");

    DateTime fileDate;
    string fileName;
    using (MemoryStream s = new MemoryStream())
    using (StreamWriter csvWriter = new StreamWriter(s, Encoding.UTF8))
    {
        csvWriter.WriteLine(SnodasRow.GetHeader);
        foreach (var row in results)
        {
            csvWriter.WriteLine(row.ToString());
        }
        csvWriter.Flush();
        s.Position = 0;
        fileDate = results[0].Date;
        fileName = fileDate.ToString("yyyyMMdd") + "Snodas.csv";
        try
        {
            adlsFileSystemClient.FileSystem.Create(adlsAccountName, "/snodas-csv-westus-v1/" + fileName, s, overwrite: true);
            log.Info($"Uploaded csv stream: {fileName}");
        }
        catch (Exception e)
        {
            log.Info($"Upload failed: {e.Message}");
        }
    }

    log.Info($"Removing unpacked files");
    foreach (var f in listOfUnpackedFiles)
    {
        // Delete the local temp file.
        File.Delete(f);
    }

    return new FileProcessedTracker { ForecastDate = fileDate, PartitionKey = "snodas-westus-v1", RowKey = fileName, Url = "unknown" };
}
public static FileProcessedTracker Run([BlobTrigger("snotel-csv-westus-v1/{name}", Connection = "AzureWebJobsStorage")] Stream myBlob, string name, TraceWriter log) { log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes"); log.Info($"Double Checking if {name} already exists."); var exists = AzureUtilities.CheckIfFileProcessedRowExistsInTableStorage(Constants.SnotelTrackerTable, Constants.SnotelTrackerPartitionKey, name, log); if (exists) { log.Info($"{name} Already exists in double check, skipping"); return(null); } //1. Remove header from stream var s = new MemoryStream(); StreamWriter csvWriter = new StreamWriter(s, Encoding.UTF8); bool firstLine = true; using (StreamReader sr = new StreamReader(myBlob)) { string line = null; while ((line = sr.ReadLine()) != null) { if (line.StartsWith("#")) { //throw out this header continue; } else { //Dates in the file are local times; need to change them to UTC which is +8 //These dates don't adjust for daylight savings time var splitLine = line.Split(','); if (firstLine == false && splitLine.Length > 1) { var localTimeOfForecast = DateTime.Parse(splitLine[0]); var utcTimeOfForecast = localTimeOfForecast.AddHours(+8); splitLine[0] = utcTimeOfForecast.ToString("yyyyMMdd HH:00"); line = String.Join(",", splitLine); } firstLine = false; csvWriter.WriteLine(line); } } } csvWriter.Flush(); s.Position = 0; //refactoring the below code to a shared method can cause an .net issue //related to binding redirect to arise; leave this here for now. See AzureUtilities.cs //for more info log.Info($"Attempting to sign in to ad for datalake upload"); var adlsAccountName = CloudConfigurationManager.GetSetting("ADLSAccountName"); //auth secrets var domain = CloudConfigurationManager.GetSetting("Domain"); var webApp_clientId = CloudConfigurationManager.GetSetting("WebAppClientId"); var clientSecret = CloudConfigurationManager.GetSetting("ClientSecret"); var clientCredential = new ClientCredential(webApp_clientId, clientSecret); var creds = ApplicationTokenProvider.LoginSilentAsync(domain, clientCredential).Result; // Create client objects and set the subscription ID var adlsFileSystemClient = new DataLakeStoreFileSystemManagementClient(creds); //string subId = CloudConfigurationManager.GetSetting("SubscriptionId"); try { adlsFileSystemClient.FileSystem.Create(adlsAccountName, "/snotel-csv-westus-v1/" + name, s, overwrite: true); log.Info($"Uploaded csv stream: {name}"); } catch (Exception e) { log.Info($"Upload failed: {e.Message}"); } var splitFileName = name.Split('.'); DateTime date = DateTime.ParseExact(splitFileName[0], "yyyyMMdd", null).AddHours(int.Parse(splitFileName[1])); return(new FileProcessedTracker { ForecastDate = date, PartitionKey = "snotel-csv-westus-v1", RowKey = name, Url = "unknown" }); }
public static FileProcessedTracker Run([BlobTrigger("nam-grib-westus-v1/{name}", Connection = "AzureWebJobsStorage")] Stream myBlob, string name, TraceWriter log) { log.Info($"C# Blob trigger function Processed blob\n Name:{name} \n Size: {myBlob.Length} Bytes"); log.Info($"Double Checking if {name} already exists."); var exists = AzureUtilities.CheckIfFileProcessedRowExistsInTableStorage(Constants.NamTrackerTable, Constants.NamTrackerPartitionKey, name, log); if (exists) { log.Info($"{name} Already exists in double check, skipping"); return(null); } log.Info($"Have env: {Environment.GetEnvironmentVariable("GRIB_API_DIR_ROOT")}"); log.Info($"In dir: {Assembly.GetExecutingAssembly().Location}"); string attemptPath = ""; GribUtilities.TryFindBootstrapLibrary(out attemptPath); log.Info($"Attemping to find lib: {attemptPath}"); GribEnvironment.Init(); #if DEBUG == false GribEnvironment.DefinitionsPath = @"D:\home\site\wwwroot\bin\Grib.Api\definitions"; #endif //1. Download stream to temp //TODO: there is supposedly now an ability to read a stream direction in GRIBAPI.Net; investigate to see if its better than storing a temp file string localFileName = AzureUtilities.DownloadBlobToTemp(myBlob, name, log); var rowList = new List <NamTableRow>(); //2. Get values from file using (GribFile file = new GribFile(localFileName)) { log.Info($"Parsing file {name}"); rowList = GribUtilities.ParseNamGribFile(file); } //3. Format in correct table format log.Info($"Attempting to sign in to ad for datalake upload"); var adlsAccountName = CloudConfigurationManager.GetSetting("ADLSAccountName"); //auth secrets var domain = CloudConfigurationManager.GetSetting("Domain"); var webApp_clientId = CloudConfigurationManager.GetSetting("WebAppClientId"); var clientSecret = CloudConfigurationManager.GetSetting("ClientSecret"); var clientCredential = new ClientCredential(webApp_clientId, clientSecret); var creds = ApplicationTokenProvider.LoginSilentAsync(domain, clientCredential).Result; // Create client objects and set the subscription ID var adlsFileSystemClient = new DataLakeStoreFileSystemManagementClient(creds); try { adlsFileSystemClient.FileSystem.UploadFile(adlsAccountName, localFileName, "/nam-grib-westus-v1/" + name, uploadAsBinary: true, overwrite: true); log.Info($"Uploaded file: {localFileName}"); } catch (Exception e) { log.Error($"Upload failed: {e.Message}"); } MemoryStream s = new MemoryStream(); StreamWriter csvWriter = new StreamWriter(s, Encoding.UTF8); csvWriter.WriteLine(NamTableRow.Columns); MemoryStream sLocations = new MemoryStream(); StreamWriter csvLocationsWriter = new StreamWriter(sLocations, Encoding.UTF8); csvLocationsWriter.WriteLine("Lat, Lon"); string fileName = null; foreach (var row in rowList) { if (fileName == null) { fileName = row.PartitionKey + ".csv"; } csvLocationsWriter.WriteLine(row.Lat + "," + row.Lon); csvWriter.WriteLine(row.ToString()); } csvWriter.Flush(); csvLocationsWriter.Flush(); s.Position = 0; sLocations.Position = 0; AzureUtilities.UploadLocationsFile(sLocations, log); sLocations.Flush(); sLocations.Close(); log.Info($"Completed csv creation--attempting to upload to ADLS"); try { adlsFileSystemClient.FileSystem.Create(adlsAccountName, "/nam-csv-westus-v1/" + fileName, s, overwrite: true); log.Info($"Uploaded csv stream: {localFileName}"); } catch (Exception e) { log.Info($"Upload failed: {e.Message}"); } s.Flush(); s.Close(); //delete local temp file File.Delete(localFileName); DateTime date = DateTime.ParseExact(name.Split('.')[0], "yyyyMMdd", null); return(new 
FileProcessedTracker { ForecastDate = date, PartitionKey = "nam-grib-westus-v1", RowKey = name, Url = "unknown" }); }
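DownloadBlobToTemp just needs to persist the trigger stream so GribFile can open it from disk. A minimal sketch; the temp-directory location is an assumption:

using System.IO;

// Hypothetical implementation: copy the blob stream to a temp file and return its path.
public static string DownloadBlobToTemp(Stream blob, string name, TraceWriter log)
{
    var localFileName = Path.Combine(Path.GetTempPath(), name);
    using (var fileStream = File.Create(localFileName))
    {
        blob.CopyTo(fileStream);
    }
    log.Info($"Downloaded blob to {localFileName}");
    return localFileName;
}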
public void TestMethod1()
{
    var test = AzureUtilities.GetAzureBlob().InsertFilesTest();
    Assert.IsNotNull(test);
}