/// <summary>
/// Reads configuration keys for the weather (GHCND) API service and builds
/// the request URL. The window starts at the configured start date unless the
/// database records a previous successful run, in which case that last-run
/// date is used instead.
/// </summary>
private void getConfigValues()
{
    _weatherApiURlApiUrl = ConfigurationManager.AppSettings[APIUrlConfigKey].ToString();
    var configuredStart = Convert.ToDateTime(ConfigurationManager.AppSettings[DateToGetDataFromConfigKey].ToString());

    _startDate = configuredStart.ToString("yyyy-MM-ddTHH:mm:ss");
    _endDate = DateTime.Now.ToString("yyyy-MM-ddTHH:mm:ss");

    using (var db = new DataLakeEntities())
    {
        // Sentinel the stored procedure returns when no previous run exists.
        // (The original added DateTime.MinValue.TimeOfDay, which is always zero.)
        DateTime noPreviousRun = new DateTime(1900, 1, 1);
        var lastRunDate = db.uspGetlastrundate("GHCND").FirstOrDefault();

        // Guard against a null result as well as the sentinel: the original
        // dereferenced .Value unconditionally in the else branch (NRE risk
        // when the stored procedure returns no rows).
        if (lastRunDate == null || lastRunDate == noPreviousRun)
        {
            _url = "api/v2/data?datasetid=GHCND&startdate=" + _startDate + "&enddate=" + _endDate + "&limit=50";
        }
        else
        {
            string lastRun = lastRunDate.Value.ToString("yyyy-MM-ddTHH:mm:ss");
            _url = "api/v2/data?datasetid=GHCND&startdate=" + lastRun + "&enddate=" + _endDate + "&limit=50";
        }
    }
}
/// <summary>
/// Updates the cut-off date/time of a workflow run by calling the
/// uspUpdateWorkflowRun stored procedure.
/// </summary>
/// <param name="workflowDefinitionId">Identifier of the workflow definition whose run is updated.</param>
/// <param name="datetime">The cut-off date/time to record.</param>
public void UpdateWorkFlowRun(int workflowDefinitionId, DateTime datetime)
{
    using (var db = new DataLakeEntities())
    {
        int rowsAffected = db.uspUpdateWorkflowRun(workflowDefinitionId, datetime);
    }
}
/// <summary>
/// Reads configuration keys for the storm API service and builds the request
/// URL for the first configured storm dataset. The window starts at the
/// configured start date unless the database records a previous successful
/// run, in which case that last-run date is used instead.
/// </summary>
private void getConfigValues()
{
    _stormApiURlApiUrl = ConfigurationManager.AppSettings[APIUrlConfigKey].ToString();
    var configuredStart = Convert.ToDateTime(ConfigurationManager.AppSettings[DateToGetDataFromConfigKey].ToString());

    // NOTE(review): "hhmm" is the 12-hour clock without an AM/PM designator,
    // so afternoon hours collapse onto morning ones. If the storm API expects
    // a 24-hour timestamp this should be "HHmm" -- confirm against the API docs.
    _startDate = configuredStart.ToString("yyyyMMddhhmm");
    _endDate = DateTime.Now.ToString("yyyyMMddhhmm");

    using (var db = new DataLakeEntities())
    {
        // Sentinel the stored procedure returns when no previous run exists.
        // (The original added DateTime.MinValue.TimeOfDay, which is always zero,
        // and declared an unused DataTable.)
        DateTime noPreviousRun = new DateTime(1900, 1, 1);
        _datasetname = db.uspGetallStormdatasets().FirstOrDefault();
        var lastRunDate = db.uspGetlastrundate(_datasetname).FirstOrDefault();

        // Guard against a null result as well as the sentinel: the original
        // dereferenced .Value unconditionally in the else branch (NRE risk).
        if (lastRunDate == null || lastRunDate == noPreviousRun)
        {
            _url = "json/" + _datasetname + "/" + _startDate + ":" + _endDate;
        }
        else
        {
            string lastRun = lastRunDate.Value.ToString("yyyyMMddhhmm");
            _url = "json/" + _datasetname + "/" + lastRun + ":" + _endDate;
        }
    }
}
/// <summary>
/// Inserts a workflow-run row via the uspInsertWorkflowRun stored procedure.
/// </summary>
/// <param name="workflowDefinitionId">Identifier of the workflow definition the run belongs to.</param>
/// <param name="status">Status code recorded for the run.</param>
/// <param name="startedOn">When the run started.</param>
/// <param name="completedOn">When the run completed, or null if still running.</param>
/// <param name="hasError">Whether the run has encountered an error.</param>
/// <param name="cutOffValue">Cut-off value recorded for the run, if any.</param>
/// <param name="cutOffDateTime">Cut-off date/time recorded for the run, if any.</param>
public void InsertWorkFlowRun(int workflowDefinitionId, int status, DateTime startedOn, DateTime? completedOn, bool hasError, string cutOffValue, DateTime? cutOffDateTime)
{
    using (var db = new DataLakeEntities())
    {
        int rowsAffected = db.uspInsertWorkflowRun(workflowDefinitionId, status, startedOn, completedOn, hasError, cutOffValue, cutOffDateTime);
    }
}
/// <summary>
/// Writes a workflow log row: looks up the current run/task identifiers for
/// the given workflow definition, then calls uspInsertWorkflowLog with a UTC
/// timestamp.
/// </summary>
/// <param name="workflowDefinitionID">Workflow Definition ID Parameter</param>
/// <param name="logLevel">LogLevel Parameter</param>
/// <param name="message">Message Parameter</param>
/// <param name="exceptionType">ExceptionType Parameter</param>
/// <param name="exceptionJSON">ExceptionJSON Parameter</param>
public void InsertWorkFlowLog(int workflowDefinitionID, int logLevel, string message, string exceptionType, string exceptionJSON)
{
    using (var db = new DataLakeEntities())
    {
        var logParameter = db.uspGetLogParameter(workflowDefinitionID).FirstOrDefault();

        // FirstOrDefault can return null when no run/task row exists for this
        // definition; the original dereferenced it unconditionally (NRE risk
        // inside a logging path). Skip logging rather than crash.
        if (logParameter == null)
        {
            return;
        }

        int insertWorkflowLog = db.uspInsertWorkflowLog(logParameter.WorkFlowRunID, logParameter.WorkflowTaskID, logLevel, message, DateTime.UtcNow, exceptionType, exceptionJSON);
    }
}
// Please set the following connection strings in app.config for this WebJob to run:
// AzureWebJobsDashboard and AzureWebJobsStorage
/// <summary>
/// Storm subscriber entry point: listens on the "Storm"/"StormSubcription"
/// subscription, deserializes each warning message, converts its SHAPE to a
/// WGS84 polygon, and inserts it via uspInsertWarningData.
/// </summary>
static void Main()
{
    var host = new JobHost();
    Console.WriteLine("Subscriber Start");

    string connectionString = CloudConfigurationManager.GetSetting("Microsoft.ServiceBus.ConnectionString");
    SubscriptionClient client = SubscriptionClient.CreateFromConnectionString(connectionString, "Storm", "StormSubcription");

    // Messages are completed manually after a successful insert.
    OnMessageOptions options = new OnMessageOptions();
    options.AutoComplete = false;

    client.OnMessage((message) =>
    {
        try
        {
            var data = message.GetBody<string>();
            WarnDataset warnDataset = JsonConvert.DeserializeObject<WarnDataset>(data);

            // 4326 = WGS84 spatial reference system identifier.
            System.Data.Entity.Spatial.DbGeography shape = DbGeography.PolygonFromText(warnDataset.SHAPE.ToString(), 4326);

            using (var db = new DataLakeEntities())
            {
                int insertWeatherDatasets = db.uspInsertWarningData(warnDataset.WARNINGTYPE, warnDataset.MESSAGEID, shape, warnDataset.ZTIME_END, warnDataset.ZTIME_START, warnDataset.ID, warnDataset.ISSUEWFO);
            }

            // Remove message from subscription.
            message.Complete();
        }
        catch (Exception ex)
        {
            // The original swallowed the exception silently; surface it so
            // poison messages are diagnosable, then unlock the message.
            Console.WriteLine(ex.Message);
            message.Abandon();
        }
    }, options);

    Console.ReadLine();

    // The following code ensures that the WebJob will be running continuously
    host.RunAndBlock();
}
/// <summary>
/// Inserts a data-log row for the given dataset via the uspInsertDatalog
/// stored procedure and returns the procedure's result.
/// </summary>
/// <param name="logdata">Dataset name and URL to record.</param>
/// <param name="isComplete">Whether the associated operation finished successfully.</param>
/// <param name="description">Free-text description stored with the entry.</param>
/// <returns>The value returned by uspInsertDatalog.</returns>
public int AddLog(LogData logdata, bool isComplete, string description)
{
    using (var db = new DataLakeEntities())
    {
        return db.uspInsertDatalog(logdata.DatasetName, logdata.DatasetURL, DateTime.Now, DateTime.Now, isComplete, description);
    }
}
// Please set the following connection strings in app.config for this WebJob to run:
// AzureWebJobsDashboard and AzureWebJobsStorage
/// <summary>
/// Weather subscriber entry point: listens on the "Weather"/"WeatherSubcription"
/// subscription, deserializes each message, and inserts the observation via
/// uspInsertWeatherDatasetsData.
/// </summary>
static void Main()
{
    var host = new JobHost();
    Console.WriteLine("Subscriber Start");

    string connectionString = CloudConfigurationManager.GetSetting("Microsoft.ServiceBus.ConnectionString");
    SubscriptionClient client = SubscriptionClient.CreateFromConnectionString(connectionString, "Weather", "WeatherSubcription");

    // Messages are completed manually after a successful insert.
    OnMessageOptions options = new OnMessageOptions();
    options.AutoComplete = false;

    client.OnMessage((message) =>
    {
        try
        {
            var data = message.GetBody<string>();
            WeatherDatasets weatherDatasets = JsonConvert.DeserializeObject<WeatherDatasets>(data);

            using (var db = new DataLakeEntities())
            {
                int insertWeatherDatasets = db.uspInsertWeatherDatasetsData(weatherDatasets.date, weatherDatasets.datatype, weatherDatasets.station, weatherDatasets.attributes, weatherDatasets.value);
            }

            // Remove message from subscription.
            message.Complete();
        }
        catch (Exception ex)
        {
            // The original swallowed the exception silently; surface it so
            // poison messages are diagnosable, then unlock the message.
            Console.WriteLine(ex.Message);
            message.Abandon();
        }
    }, options);

    // The following code ensures that the WebJob will be running continuously
    host.RunAndBlock();
}
/// <summary>
/// This is publisher start up point: for each workflow GUID configured in
/// app.config (comma-separated), records a workflow-run and log row, creates
/// the topic, and publishes the GUID as a message.
/// </summary>
/// <param name="args">Unused command-line arguments.</param>
static void Main(string[] args)
{
    DateTime startedOn;
    try
    {
        var publisher = new Publisher();
        Console.WriteLine(LogMessage.WorkflowStarted);
        string WorkFlowGUID = LogMessage.WorkFlowGUID;
        string workflowId = ConfigurationManager.AppSettings[WorkFlowGUID].ToString();
        string[] SpiltworkflowIds = workflowId.Split(',');
        IRun run = new Run();
        ILog log = new Log();

        foreach (var spiltworkflowId in SpiltworkflowIds)
        {
            using (var db = new DataLakeEntities())
            {
                startedOn = DateTime.UtcNow;
                var workflowIds = db.uspGetWorkFlowParameter(spiltworkflowId).FirstOrDefault();

                // Guard against an unknown GUID: FirstOrDefault can return
                // null, which the original dereferenced unconditionally.
                if (workflowIds != null && workflowIds.WorkflowDefinationID != null)
                {
                    run.InsertWorkFlowRun(Convert.ToInt32(workflowIds.WorkflowDefinationID), 3, startedOn, null, false, null, null);
                    log.InsertWorkFlowLog(Convert.ToInt32(workflowIds.WorkflowDefinationID), 3, LogMessage.PublishStartLogMsg, null, null);
                    publisher.CreateTopic();
                    publisher.SendMessage(spiltworkflowId);
                }
            }
        }
        Console.WriteLine(LogMessage.WorkflowFinished);
    }
    catch (Exception ex)
    {
        // The original empty catch hid all startup/configuration failures;
        // at minimum surface the message on the console.
        Console.WriteLine(ex.Message);
    }
    Console.ReadLine();
}
// Please set the following connection strings in app.config for this WebJob to run:
// AzureWebJobsDashboard and AzureWebJobsStorage
/// <summary>
/// EarthQuake fetch loop: every five minutes, determines the fetch window
/// (from the last logged run, or a fixed default), loads features from the
/// REST API, publishes them to the "EarthQuake" topic, and logs the outcome.
/// </summary>
static void Main()
{
    var host = new JobHost();
    while (true)
    {
        IManageLog manageLog = new ManageLog();
        RestClient restClient = new RestClient();
        var logdata = restClient.GetLogData();

        // Default window start, used only when no previous run is logged.
        DateTime startdate = Convert.ToDateTime("2016-08-12T13:00:00");
        DateTime enddate = DateTime.Now;

        using (var db = new DataLakeEntities())
        {
            var lastRunDate = db.uspGetlastrundate("EarthQuake").FirstOrDefault();

            // Sentinel the stored procedure returns when no previous run exists.
            // (The original added DateTime.MinValue.TimeOfDay, which is always zero.)
            DateTime noPreviousRun = new DateTime(1900, 1, 1);

            // Only advance the window when a real last-run date exists. The
            // original converted a possibly-null nullable, which yields
            // DateTime.MinValue and would request the entire history.
            if (lastRunDate.HasValue && lastRunDate.Value != noPreviousRun)
            {
                startdate = lastRunDate.Value;
            }
        }

        List<Feature> feature = restClient.LoadData(startdate, enddate);
        EarthQuakePublisher earthQuakePublisher = new EarthQuakePublisher();
        if (feature.Count > 0)
        {
            earthQuakePublisher.AddMessageToTopic(feature, "EarthQuake", "EarthQuakeSubscriptin");
            manageLog.AddLog(logdata, true, "Ok");
        }
        else
        {
            manageLog.AddLog(logdata, false, "No data found for this set of parameter");
        }

        // Poll every five minutes.
        Thread.Sleep(5 * 60 * 1000);
    }
    // The following code ensures that the WebJob will be running continuously
    // host.RunAndBlock();
}
// Please set the following connection strings in app.config for this WebJob to run:
// AzureWebJobsDashboard and AzureWebJobsStorage
/// <summary>
/// EarthQuake subscriber entry point: listens on the
/// "EarthQuake"/"EarthQuakeSubscriptin" subscription, deserializes the feature
/// list from each message, and inserts each feature plus its geometry,
/// geo-serve regions, and nearby cities via the corresponding stored procedures.
/// </summary>
static void Main()
{
    var host = new JobHost();
    Console.WriteLine("Subscriber Start");

    string connectionString = CloudConfigurationManager.GetSetting("Microsoft.ServiceBus.ConnectionString");
    SubscriptionClient client = SubscriptionClient.CreateFromConnectionString(connectionString, "EarthQuake", "EarthQuakeSubscriptin");

    // Messages are completed manually after all inserts succeed.
    OnMessageOptions options = new OnMessageOptions();
    options.AutoComplete = false;

    client.OnMessage((message) =>
    {
        try
        {
            var data = message.GetBody<string>();
            List<Feature> features = JsonConvert.DeserializeObject<List<Feature>>(data);

            foreach (var feature in features)
            {
                using (var db = new DataLakeEntities())
                {
                    // Composite key: source network id + identification code.
                    var id = feature.PreferredSourceNetworkId + feature.IdentificationCode;

                    // -9999 is the "magnitude unknown" sentinel. (The original
                    // also contained a no-op self-assignment when a value existed.)
                    if (!feature.Magnitude.HasValue)
                    {
                        feature.Magnitude = -9999;
                    }

                    int insertFeatureInfo = db.uspinsertfeatureinfo(id, feature.Magnitude, feature.Place, feature.Time, feature.LastUpdated, feature.EventDateTime, feature.LastUpdatedDateTime, feature.TimeZoneOffset, feature.Detail, Convert.ToInt32(feature.ComputedFeltIntesity), feature.NumOfFeltReported, Convert.ToInt32(feature.MaxInstrumentalIntesity), feature.AlertLevel, feature.TsunamiFlag, Convert.ToInt16(feature.Significancy), feature.PreferredSourceNetworkId, feature.CommaSeparatedSourceNetworkIds, feature.CommaSeparatedProductTypes, feature.NumOfSeismicStations, feature.HorizontalDistance, feature.RmsTravelTime, feature.Title, feature.TypeOfSeismicEvent, feature.HumanReviewedStatus, feature.USGEventPageUrl);

                    if (feature.Geometry != null)
                    {
                        int insertFeatureGeometryInfo = db.uspinsertfeaturegeometryinfo(id, feature.Geometry.Coordinates[0], feature.Geometry.Coordinates[1], feature.Geometry.Coordinates[2], feature.Geometry.Type);
                    }

                    foreach (var geoServes in feature.GeoServes)
                    {
                        // Normalize missing region fields to empty strings for the insert.
                        if (geoServes.Region.Country == null)
                        {
                            geoServes.Region.Country = string.Empty;
                        }
                        if (geoServes.Region.State == null)
                        {
                            geoServes.Region.State = string.Empty;
                        }

                        ObjectParameter oId = new ObjectParameter("OId", typeof(int));
                        var geoservesId = db.GetmaxGeoServeId().FirstOrDefault();
                        int outId = db.uspinsertFeatureGeoServeInfo(geoservesId, id, geoServes.Region.Country, geoServes.Region.State, geoServes.Id, oId);

                        foreach (var city in geoServes.Cities)
                        {
                            var locgeoservesId = db.GetmaxGeoServeIdlOCATION().FirstOrDefault();
                            db.uspinsertFeatureGeoLocationInfo(locgeoservesId, city.Distance, city.Latitude, city.Longitude, city.Name, city.Direction, city.Population);
                        }
                    }
                }
            }

            // Remove message from subscription.
            message.Complete();
        }
        catch (Exception ex)
        {
            // The original swallowed the exception silently; surface it so
            // poison messages are diagnosable, then unlock the message.
            Console.WriteLine(ex.Message);
            message.Abandon();
        }
    }, options);

    // The following code ensures that the WebJob will be running continuously
    host.RunAndBlock();
}
/// <summary>
/// Parses the buffered API response (field APIResponse) as JSON and, based on
/// the selector <paramref name="i"/>, bulk-inserts one of the weather master
/// datasets (1 = dataset types, 2 = data categories, 3 = data types,
/// 4 = location categories, 5 = locations, 6 = stations), then writes a
/// success row to the data log.
/// </summary>
/// <param name="i">Selector for which master dataset the response contains (1-6; other values are ignored).</param>
/// <param name="dataset">Dataset name recorded in the log entry.</param>
/// <param name="url">Dataset URL recorded in the log entry.</param>
public void GetWeatheMasterData(int i, string dataset, string url)
{
    // NOTE(review): blocks on .Result and reads the shared APIResponse field;
    // assumes the caller has already populated the response.
    var jsonData = APIResponse.Content.ReadAsStringAsync().Result;
    IManageLog manageLog = new ManageLog();
    // TODO deserialize json into generic collection
    List<dynamic> data = new List<dynamic>();
    var finaldata = (JObject)JsonConvert.DeserializeObject(jsonData);
    switch (i)
    {
        case 1:
            // Map each entry of the "results" array onto a WeatherDatasetTypes row.
            var weatherDatasetTypesResult = finaldata["results"].Select(item => new WeatherDatasetTypes
            {
                uid = item["uid"].ToString(),
                mindate = Convert.ToDateTime(item["mindate"]),
                maxdate = Convert.ToDateTime(item["maxdate"]),
                name = item["name"].ToString(),
                datacoverage = item["datacoverage"].ToString(),
                id = item["id"].ToString(),
            }).ToList();
            using (var db = new DataLakeEntities())
            {
                foreach (var item in weatherDatasetTypesResult)
                {
                    db.uspInsertWeatherDatasetTypesData(item.uid, item.mindate, item.maxdate, item.name, item.datacoverage, item.id);
                }
                LogData logdata = new LogData();
                logdata.DatasetName = dataset;
                logdata.DatasetURL = url;
                manageLog.AddLog(logdata, true, "Ok");
            }
            Console.WriteLine("Datset Updated");
            // NOTE(review): this blocks until console input arrives -- only
            // case 1 does this; looks like debug leftover, confirm intent.
            Console.ReadLine();
            break;
        case 2:
            var weatherDataCategoriesResult = finaldata["results"].Select(item => new WeatherDataCategories
            {
                id = item["id"].ToString(),
                name = item["name"].ToString(),
            }).ToList();
            using (var db = new DataLakeEntities())
            {
                foreach (var item in weatherDataCategoriesResult)
                {
                    db.uspInsertWeatherDataCategoriesData(item.id, item.name);
                }
                LogData logdata = new LogData();
                logdata.DatasetName = dataset;
                logdata.DatasetURL = url;
                manageLog.AddLog(logdata, true, "Ok");
            }
            break;
        case 3:
            var weatherDataTypesResult = finaldata["results"].Select(item => new WeatherDataTypes
            {
                mindate = Convert.ToDateTime(item["mindate"]),
                maxdate = Convert.ToDateTime(item["maxdate"]),
                name = item["name"].ToString(),
                datacoverage = Convert.ToDecimal(item["datacoverage"]),
                id = item["id"].ToString(),
            }).ToList();
            using (var db = new DataLakeEntities())
            {
                foreach (var item in weatherDataTypesResult)
                {
                    db.uspInsertWeatherDataTypesData(item.mindate, item.maxdate, item.name, item.datacoverage, item.id);
                }
                LogData logdata = new LogData();
                logdata.DatasetName = dataset;
                logdata.DatasetURL = url;
                manageLog.AddLog(logdata, true, "Ok");
            }
            break;
        case 4:
            var weatherLocationCategoriesResult = finaldata["results"].Select(item => new WeatherLocationCategories
            {
                id = item["id"].ToString(),
                name = item["name"].ToString(),
            }).ToList();
            using (var db = new DataLakeEntities())
            {
                foreach (var item in weatherLocationCategoriesResult)
                {
                    db.uspInsertWeatherLocationCategoriesData(item.id, item.name);
                }
                LogData logdata = new LogData();
                logdata.DatasetName = dataset;
                logdata.DatasetURL = url;
                manageLog.AddLog(logdata, true, "Ok");
            }
            break;
        case 5:
            var weatherLocationsResult = finaldata["results"].Select(item => new WeatherLocations
            {
                mindate = Convert.ToDateTime(item["mindate"]),
                maxdate = Convert.ToDateTime(item["maxdate"]),
                name = item["name"].ToString(),
                datacoverage = Convert.ToDecimal(item["datacoverage"]),
                id = item["id"].ToString(),
            }).ToList();
            using (var db = new DataLakeEntities())
            {
                foreach (var item in weatherLocationsResult)
                {
                    db.uspInsertWeatherLocationsData(item.mindate, item.maxdate, item.name, item.datacoverage, item.id);
                }
                LogData logdata = new LogData();
                logdata.DatasetName = dataset;
                logdata.DatasetURL = url;
                manageLog.AddLog(logdata, true, "Ok");
            }
            break;
        case 6:
            var weatherStationsResult = finaldata["results"].Select(item => new WeatherStations
            {
                // "elevation" may be absent; 0 is used as its fallback value.
                elevation = item["elevation"] != null ? Convert.ToDecimal(item["elevation"]) : 0,
                mindate = Convert.ToDateTime(item["mindate"]),
                maxdate = Convert.ToDateTime(item["maxdate"]),
                latitude = Convert.ToDecimal(item["latitude"]),
                name = item["name"].ToString(),
                datacoverage = Convert.ToDecimal(item["datacoverage"]),
                id = item["id"].ToString(),
                elevationUnit = Convert.ToString(item["elevationUnit"]),
                longitude = Convert.ToDecimal(item["longitude"]),
            }).ToList();
            using (var db = new DataLakeEntities())
            {
                foreach (var item in weatherStationsResult)
                {
                    db.uspInsertWeatherStationsData(item.elevation, item.mindate, item.maxdate, item.latitude, item.name, item.datacoverage, item.id, item.elevationUnit, item.longitude);
                }
                LogData logdata = new LogData();
                logdata.DatasetName = dataset;
                logdata.DatasetURL = url;
                manageLog.AddLog(logdata, true, "Ok");
            }
            break;
    }
    //dynamic d = JObject.Parse(jsonData);
    // WeatherDatasets account = JsonConvert.DeserializeObject<WeatherDatasets>(jsonData);
    //dynamic obj = finaldata["results"];
    // return obj;
    //data = (JObject)JsonConvert.DeserializeObject <dynamic>(jsonData);
    // Iterating each element in generic collection
    // Pass each entry to queue or topic
}
/// <summary>
/// This is subscriber entry point: it reads a workflow-GUID message from the
/// subscription, resolves the workflow's service URL and invocation settings
/// from the database, invokes the configured method by reflection, and
/// publishes the API response via the event stream publisher.
/// </summary>
static void Main()
{
    var host = new JobHost();
    ILog log = new Log();
    ReflectionHelper reflectionHelper = new ReflectionHelper();
    EventStreamPublisher eventStreamPublisher = new EventStreamPublisher();
    Console.WriteLine(LogMessage.SubscriberStarted);
    string connectionString = CloudConfigurationManager.GetSetting(LogMessage.ServiceBusConnectionString);
    SubscriptionClient Client = SubscriptionClient.CreateFromConnectionString(connectionString, LogMessage.TopicName, LogMessage.SubscriptionName);
    // Messages are completed manually after successful processing.
    OnMessageOptions options = new OnMessageOptions();
    options.AutoComplete = false;
    Client.OnMessage((message) =>
    {
        // Captured so the catch block can attribute the failure to a workflow;
        // stays 0 when the failure happens before the lookup completes.
        int workflowDefinitionID = 0;
        try
        {
            // Message body is the workflow GUID published by the publisher job.
            var data = message.GetBody<string>();
            using (var db = new DataLakeEntities())
            {
                DateTime startedOn = DateTime.UtcNow;
                string workflowGuid = Convert.ToString(data);
                var workFlowURL = db.GetWorkFlowServiceURL(workflowGuid).FirstOrDefault();
                // NOTE(review): FirstOrDefault may return null for an unknown
                // GUID; that throws here and is handled by the catch below.
                workflowDefinitionID = Convert.ToInt32(workFlowURL.WorkflowDefinitionID);
                log.InsertWorkFlowLog(workflowDefinitionID, 3, LogMessage.OnMessageCall, null, null);
                var getURLParameter = db.GetWorkFlowURLParameter(workflowGuid).ToList();
                IDictionary<string, object> parameters = new Dictionary<string, object>();
                // "Type" and "Method" configuration rows select the target
                // class and method for the reflective invocation.
                var type = getURLParameter.Where(o => o.ConfigurationKey == "Type").FirstOrDefault();
                var method = getURLParameter.Where(o => o.ConfigurationKey == "Method").FirstOrDefault();
                string serviceURL = GenrateURL(getURLParameter);
                parameters.Add("ServiceURL", serviceURL);
                parameters.Add("WorkflowDefinitionID", workflowDefinitionID);
                parameters.Add("StartedOn", startedOn);
                dynamic getAPIResponse = reflectionHelper.InvokeMethodByReflection("", Convert.ToString(type.ConfigurationValue), Convert.ToString(method.ConfigurationValue), parameters);
                eventStreamPublisher.PublishMessages(getAPIResponse);
            }
            message.Complete();
        }
        catch (Exception ex)
        {
            var getExceptionType = ex.GetType();
            // Level 1 = error; log it, then unlock the message for redelivery.
            log.InsertWorkFlowLog(workflowDefinitionID, 1, LogMessage.SubscriberError + "--" + ex.Message, getExceptionType.Name, null);
            message.Abandon();
            Console.WriteLine(ex.Message);
        }
    }, options);
    host.RunAndBlock();
    // NOTE(review): unreachable -- RunAndBlock does not return.
    Console.ReadLine();
}