/// <summary>
/// Marks the named log input as having completed its initial ingestion.
/// </summary>
/// <param name="Name">Case-insensitive name of the log input to flag.</param>
/// <returns>
/// True when the flag was persisted; false when no input matches the name or the save fails.
/// </returns>
public bool UpdateInputIngestionStatus(string Name)
{
    // Stream rows and stop at the first case-insensitive match instead of
    // materialising the entire LogInputs table with ToList() and looping manually.
    LogInput operatedInput = Context.LogInputs
        .AsEnumerable()
        .FirstOrDefault(input => input.Name.Equals(Name, System.StringComparison.InvariantCultureIgnoreCase));

    if (operatedInput == null)
    {
        return false;
    }

    operatedInput.InitialIngest = true;
    Context.LogInputs.Update(operatedInput);
    try
    {
        Context.SaveChanges();
        return true;
    }
    catch (DbUpdateException)
    {
        // Persistence failure is reported as 'false' rather than surfacing the EF exception.
        return false;
    }
}
/// <summary>
/// Deletes a log input and tears down its AWS resources: the Firehose delivery
/// stream, the Lambda invoke permission tied to its bucket, and the S3 bucket
/// itself, before removing the database record.
/// </summary>
/// <param name="InputID">Primary key of the log input to delete.</param>
/// <returns>404 when the input does not exist; otherwise a redirect to Index.</returns>
public async Task <IActionResult> Remove(int InputID)
{
    LogInput retrieved = await _logContext.LogInputs.FindAsync(InputID);
    if (retrieved == null)
    {
        return StatusCode(404);
    }

    // Tear down AWS resources in the original order: delivery stream first,
    // then the Lambda permission keyed to the bucket, then the bucket.
    // (The unused response locals from the original have been removed.)
    await _FirehoseClient.DeleteDeliveryStreamAsync(new DeleteDeliveryStreamRequest
    {
        DeliveryStreamName = retrieved.FirehoseStreamName
    });
    await _LambdaClient.RemovePermissionAsync(new RemovePermissionRequest
    {
        FunctionName = Environment.GetEnvironmentVariable("LAMBDA_FUNCTION_NAME"),
        // StatementId must match the "ID-" + bucket ID used when the permission was added.
        StatementId = "ID-" + retrieved.LinkedS3Bucket.ID
    });
    await _S3Client.DeleteBucketAsync(new DeleteBucketRequest
    {
        BucketName = retrieved.LinkedS3Bucket.Name,
        UseClientRegion = true,
    });

    _logContext.LogInputs.Remove(retrieved);
    await _logContext.SaveChangesAsync();

    TempData["Alert"] = "Success";
    TempData["Message"] = "Log Input " + retrieved.Name + " deleted successfully!";
    return RedirectToAction("Index");
}
/// <summary>
/// Reads the activity's inputs, computes the logarithm and power results via the
/// class helpers, and writes both results to the output arguments.
/// </summary>
/// <param name="context">Workflow context used to read inputs and set outputs.</param>
protected override void Execute(CodeActivityContext context)
{
    double logValue = LogInput.Get(context);
    double baseValue = RaisedToPower.Get(context);
    double exponent = Power.Get(context);

    // Compute both results before publishing either, preserving the original order.
    double logResult = CalculateLog(logValue);
    double powerResult = CalculatePower(baseValue, exponent);

    LogOutput.Set(context, logResult);
    PowerOutput.Set(context, powerResult);
}
/// <summary>
/// Returns the name of the S3 bucket linked to the given log input.
/// </summary>
/// <param name="ID">Primary key of the log input.</param>
/// <returns>The linked bucket's name, or null when the input does not exist or has no linked bucket.</returns>
public string GetInputS3BucketName(int ID)
{
    // Null-conditional on Find(ID) fixes a NullReferenceException the original threw
    // when no input had this ID; the null-bucket case already returned null.
    return Context.LogInputs.Find(ID)?.LinkedS3Bucket?.Name;
}
/// <summary>
/// Returns the Glue database table reached through the log input's consolidated Glue entity.
/// </summary>
/// <param name="ID">Primary key of the log input.</param>
/// <returns>The linked table, or null when the input does not exist or has no linked Glue entity.</returns>
public GlueDatabaseTable GetGlueDatabaseTable(int ID)
{
    // Null-conditional on Find(ID) fixes a NullReferenceException the original threw
    // when no input had this ID; the null-entity case already returned null.
    return Context.LogInputs.Find(ID)?.LinkedGlueEntity?.LinkedTable;
}
/// <summary>
/// Shows the management view for a log input. When the input has completed initial
/// ingestion, also loads the column names and row count of its RDS table into
/// ViewBag.fields and ViewData["LogInputEventCount"].
/// </summary>
/// <param name="InputID">Primary key of the log input.</param>
/// <returns>404 when the input does not exist; otherwise the Manage view.</returns>
public async Task <IActionResult> Manage(int InputID)
{
    LogInput retrieved = await _logContext.LogInputs.FindAsync(InputID);
    if (retrieved == null)
    {
        return StatusCode(404);
    }

    if (retrieved.InitialIngest == true)
    {
        // RDS table name mirrors the bucket name with '-' mapped to '_'.
        string dbTableName = "dbo." + retrieved.LinkedS3Bucket.Name.Replace("-", "_");
        ViewBag.fields = new List <string>();
        using (SqlConnection connection = new SqlConnection(GetRdsConnectionString()))
        {
            connection.Open();
            // Column names are fetched with a parameterized query against sys.columns.
            using (SqlCommand cmd = new SqlCommand(@"SELECT name FROM sys.columns WHERE object_id = OBJECT_ID(@TableName);", connection))
            {
                cmd.CommandTimeout = 0;
                cmd.Parameters.AddWithValue("@TableName", dbTableName);
                using (SqlDataReader dr = cmd.ExecuteReader())
                {
                    while (dr.Read())
                    {
                        ViewBag.fields.Add(dr.GetString(0));
                    }
                }
            }
            // SECURITY: identifiers cannot be parameterized, so the table name is
            // concatenated. It is derived from a bucket name stored in our own DB,
            // not direct request input, but verify bucket names are constrained
            // (Create() already strips non-alphanumerics) before trusting this.
            using (SqlCommand cmd = new SqlCommand("SELECT COUNT(*) FROM " + dbTableName + ";", connection))
            {
                cmd.CommandTimeout = 0;
                // ExecuteScalar replaces the original reader loop for a single-value query.
                ViewData["LogInputEventCount"] = cmd.ExecuteScalar();
            }
        }
    }
    return View(retrieved);
}
/// <summary>
/// Creates a registry entry from the posted payload, stamping the current time and
/// the authenticated user's subjectId claim, and inserts it into MongoDB.
/// </summary>
/// <param name="r">Posted payload with the entry fields and photo references.</param>
/// <returns>201 with the inserted document.</returns>
public IActionResult Registrar([FromBody] LogInput r)
{
    var registro = new Registros
    {
        Departamento = r.Departamento,
        Nome = r.Nome,
        Obs = r.Obs,
        Placa = r.Placa,
        Hora = DateTime.Now,
        // Null when the claim is absent.
        Usuario = User.FindFirst("subjectId")?.Value,
        Fotos = r.Fotos
            .Select(foto => new ArquivoLog
            {
                FileId = new ObjectId(foto.FileId),
                Descricao = foto.Descricao
            })
            .ToList()
    };

    _db.Registros.InsertOne(registro);
    return StatusCode(201, registro);
}
// Lambda entry point invoked on S3 events. For each record it derives the log-input
// name from the bucket name (drops a 14-char prefix, maps '-' to ' '), then drives
// the Glue pipeline for that input: create/start a crawler, register/update the
// catalog table, then create/start the category-specific ETL job, depending on the
// input's ingestion state (InitialIngest) and which Glue entities already exist.
// NOTE(review): the table-update + job-creation sequence is duplicated between the
// "not yet ingested" and "ingested but job missing" branches — candidate for a
// shared private helper. NOTE(review): account IDs and script paths are hard-coded.
public async Task FunctionHandler(S3Event s3event, ILambdaContext context) { if (s3event != null) { foreach (var record in s3event.Records) { context.Logger.LogLine(record.S3.Bucket.Name); string bucket = record.S3.Bucket.Name.Substring(14).Replace('-', ' '); context.Logger.LogLine(bucket); if (LogContextOperations.IfInputExist(bucket)) { LogInput retrievedLI = LogContextOperations.GetLogInputByName(bucket); GlueConsolidatedEntity retrievedGCE = LogContextOperations.GetGlueConsolidatedEntity(retrievedLI.ID); GlueDatabaseTable retrievedGDT = LogContextOperations.GetGlueDatabaseTable(retrievedLI.ID); context.Logger.LogLine(retrievedLI.ID + " | " + retrievedLI.Name); if (retrievedLI.InitialIngest == false && retrievedGCE == null) { context.Logger.LogLine("Log Input has not be crawled before and has no crawler"); CreateCrawlerResponse createCrawlerResponse = await GlueClient.CreateCrawlerAsync(new CreateCrawlerRequest { Name = retrievedLI.Name, DatabaseName = LogContextOperations.GetGlueDatabase().Name, Role = "GlueServiceRole", SchemaChangePolicy = new SchemaChangePolicy { DeleteBehavior = DeleteBehavior.DEPRECATE_IN_DATABASE, UpdateBehavior = UpdateBehavior.UPDATE_IN_DATABASE }, Tags = new Dictionary <string, string> { { "Project", "OSPJ" } }, Targets = new CrawlerTargets { S3Targets = new List <S3Target> { new S3Target { Path = "s3://" + LogContextOperations.GetInputS3BucketName(retrievedLI.ID) } } } }); if (createCrawlerResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Crawler Created"); StartCrawlerResponse startCrawlerResponse = await GlueClient.StartCrawlerAsync(new StartCrawlerRequest { Name = retrievedLI.Name }); if (startCrawlerResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Crawler Just Created Started"); LogContextOperations.AddGlueConsolidatedEntity(new GlueConsolidatedEntity { CrawlerName = retrievedLI.Name, LinkedLogInputID = retrievedLI.ID }); } } } else if (retrievedLI.InitialIngest == false && 
// Branch: not yet ingested but a crawler record exists. Start the crawler if it has
// never crawled; if it has, register the catalog table in the DB, refresh the Glue
// table definition, and create + start the ETL job for this input's category.
retrievedGCE != null) { context.Logger.LogLine("Log Input has not be crawled before but has a crawler"); GetCrawlerResponse getCrawlerResponse = await GlueClient.GetCrawlerAsync(new GetCrawlerRequest { Name = retrievedGCE.CrawlerName }); if (getCrawlerResponse.Crawler.State.Equals(CrawlerState.READY) && getCrawlerResponse.Crawler.LastCrawl == null) { StartCrawlerResponse startCrawlerResponse = await GlueClient.StartCrawlerAsync(new StartCrawlerRequest { Name = retrievedGCE.CrawlerName }); if (startCrawlerResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Crawler Started"); } } else if (getCrawlerResponse.Crawler.State.Equals(CrawlerState.READY) && getCrawlerResponse.Crawler.LastCrawl != null) { context.Logger.LogLine("Log Input has been crawled before, has a crawler but not a job"); LogContextOperations.AddGlueDatabaseTable(new GlueDatabaseTable { LinkedDatabaseID = 1, LinkedGlueConsolidatedInputEntityID = retrievedGCE.ID, Name = getCrawlerResponse.Crawler.Targets.S3Targets[0].Path.Substring(5).Replace("-", "_") }); GetTableResponse getTableResponse = await GlueClient.GetTableAsync(new GetTableRequest { DatabaseName = "master-database", Name = getCrawlerResponse.Crawler.Targets.S3Targets[0].Path.Substring(5).Replace("-", "_") }); if (getTableResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { UpdateTableResponse updateTableResponse = await GlueClient.UpdateTableAsync(new UpdateTableRequest { DatabaseName = getTableResponse.Table.DatabaseName, TableInput = new TableInput { Name = getTableResponse.Table.Name, Parameters = getTableResponse.Table.Parameters, LastAccessTime = getTableResponse.Table.LastAccessTime, LastAnalyzedTime = getTableResponse.Table.LastAnalyzedTime, Owner = getTableResponse.Table.Owner, StorageDescriptor = getTableResponse.Table.StorageDescriptor, Retention = getTableResponse.Table.Retention, TableType = getTableResponse.Table.TableType } }); if (updateTableResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { 
// Table refreshed: build the Glue ETL job request; the script location is chosen
// per log-input category below.
retrievedGDT = LogContextOperations.GetGlueDatabaseTable(retrievedLI.ID); CreateJobRequest createJobRequest = new CreateJobRequest { Name = retrievedLI.Name, DefaultArguments = new Dictionary <string, string> { { "--enable-spark-ui", "true" }, { "--spark-event-logs-path", "s3://aws-glue-spark-188363912800-ap-southeast-1" }, { "--job-bookmark-option", "job-bookmark-enable" }, { "--job-language", "python" }, { "--TempDir", "s3://aws-glue-temporary-188363912800-ap-southeast-1/root" }, { "--TABLE_NAME", retrievedGDT.Name } }, MaxCapacity = 10.0, Role = "GlueServiceRole", Connections = new ConnectionsList { Connections = new List <string> { "SmartInsights" } }, Tags = new Dictionary <string, string> { { "Project", "OSPJ" } }, MaxRetries = 0, GlueVersion = "1.0", ExecutionProperty = new ExecutionProperty { MaxConcurrentRuns = 1 }, Timeout = 2880 }; if (retrievedLI.LogInputCategory.Equals(LogInputCategory.ApacheWebServer)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Apache CLF" }; } else if (retrievedLI.LogInputCategory.Equals(LogInputCategory.SquidProxy)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Cisco Squid Proxy" }; } else if (retrievedLI.LogInputCategory.Equals(LogInputCategory.SSH)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Splunk SSH" }; } else if (retrievedLI.LogInputCategory.Equals(LogInputCategory.WindowsEventLogs)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Windows Events" }; } CreateJobResponse createJobResponse = await GlueClient.CreateJobAsync(createJobRequest); if 
// Job created: start a run, record the job name on the consolidated entity, and
// flip the input's InitialIngest flag via UpdateInputIngestionStatus(bucket).
(createJobResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Job Created"); StartJobRunResponse startJobRunResponse = await GlueClient.StartJobRunAsync(new StartJobRunRequest { JobName = createJobResponse.Name, MaxCapacity = 10.0 }); if (startJobRunResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Job Just Created Started"); retrievedGCE.JobName = createJobResponse.Name; LogContextOperations.UpdateGlueConsolidatedEntity(retrievedGCE); LogContextOperations.UpdateInputIngestionStatus(bucket); } } } } } } else if (retrievedLI.InitialIngest == true) { context.Logger.LogLine("Log Input has been crawled before"); context.Logger.LogLine(retrievedGCE.JobName); if (retrievedGCE.JobName == null && retrievedGDT == null) { context.Logger.LogLine("Log Input has not be transferred over to RDS before due to no job"); GetCrawlerResponse getCrawlerResponse = await GlueClient.GetCrawlerAsync(new GetCrawlerRequest { Name = retrievedGCE.CrawlerName }); if (getCrawlerResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { LogContextOperations.AddGlueDatabaseTable(new GlueDatabaseTable { LinkedDatabaseID = 1, LinkedGlueConsolidatedInputEntityID = retrievedGCE.ID, Name = getCrawlerResponse.Crawler.Targets.S3Targets[0].Path.Substring(5).Replace("-", "_") }); GetTableResponse getTableResponse = await GlueClient.GetTableAsync(new GetTableRequest { DatabaseName = "master-database", Name = getCrawlerResponse.Crawler.Targets.S3Targets[0].Path.Substring(5).Replace("-", "_") }); if (getTableResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { UpdateTableResponse updateTableResponse = await GlueClient.UpdateTableAsync(new UpdateTableRequest { DatabaseName = getTableResponse.Table.DatabaseName, TableInput = new TableInput { Name = getTableResponse.Table.Name, Parameters = getTableResponse.Table.Parameters, LastAccessTime = getTableResponse.Table.LastAccessTime, LastAnalyzedTime = getTableResponse.Table.LastAnalyzedTime, Owner = 
// Duplicate of the table-update + job-creation sequence above, for the case where
// ingestion already happened but the ETL job was never recorded.
getTableResponse.Table.Owner, StorageDescriptor = getTableResponse.Table.StorageDescriptor, Retention = getTableResponse.Table.Retention, TableType = getTableResponse.Table.TableType } }); if (updateTableResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { retrievedGDT = LogContextOperations.GetGlueDatabaseTable(retrievedLI.ID); CreateJobRequest createJobRequest = new CreateJobRequest { Name = retrievedLI.Name, DefaultArguments = new Dictionary <string, string> { { "--enable-spark-ui", "true" }, { "--spark-event-logs-path", "s3://aws-glue-spark-188363912800-ap-southeast-1" }, { "--job-bookmark-option", "job-bookmark-enable" }, { "--job-language", "python" }, { "--TempDir", "s3://aws-glue-temporary-188363912800-ap-southeast-1/root" }, { "--TABLE_NAME", retrievedGDT.Name } }, MaxCapacity = 10.0, Role = "GlueServiceRole", Connections = new ConnectionsList { Connections = new List <string> { "SmartInsights" } }, Tags = new Dictionary <string, string> { { "Project", "OSPJ" } }, MaxRetries = 0, GlueVersion = "1.0", ExecutionProperty = new ExecutionProperty { MaxConcurrentRuns = 1 }, Timeout = 2880 }; if (retrievedLI.LogInputCategory.Equals(LogInputCategory.ApacheWebServer)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Apache CLF" }; } else if (retrievedLI.LogInputCategory.Equals(LogInputCategory.SquidProxy)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Cisco Squid Proxy" }; } else if (retrievedLI.LogInputCategory.Equals(LogInputCategory.SSH)) { createJobRequest.Command = new JobCommand { PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Splunk SSH" }; } else if (retrievedLI.LogInputCategory.Equals(LogInputCategory.WindowsEventLogs)) { createJobRequest.Command = new JobCommand { 
PythonVersion = "3", Name = "glueetl", ScriptLocation = "s3://aws-glue-scripts-188363912800-ap-southeast-1/root/Windows Events" }; } CreateJobResponse createJobResponse = await GlueClient.CreateJobAsync(createJobRequest); if (createJobResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Job Created"); StartJobRunResponse startJobRunResponse = await GlueClient.StartJobRunAsync(new StartJobRunRequest { JobName = createJobResponse.Name, MaxCapacity = 10.0 }); if (startJobRunResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Job Just Created Started"); retrievedGCE.JobName = createJobResponse.Name; LogContextOperations.UpdateGlueConsolidatedEntity(retrievedGCE); LogContextOperations.UpdateInputIngestionStatus(bucket); } } } } } } else { GetCrawlerResponse getCrawlerResponse = await GlueClient.GetCrawlerAsync(new GetCrawlerRequest { Name = retrievedGCE.CrawlerName }); if (getCrawlerResponse.HttpStatusCode.Equals(HttpStatusCode.OK) && getCrawlerResponse.Crawler.State.Equals(CrawlerState.READY)) { if ((getCrawlerResponse.Crawler.LastCrawl.StartTime.Hour < DateTime.Now.Hour && getCrawlerResponse.Crawler.LastCrawl.StartTime.Day == DateTime.Now.Day) || getCrawlerResponse.Crawler.LastCrawl.StartTime.Day != DateTime.Now.Day) { context.Logger.LogLine("Log Input has been transferred over to RDS before but time condition not met"); StartCrawlerResponse startCrawlerResponse = await GlueClient.StartCrawlerAsync(new StartCrawlerRequest { Name = retrievedGCE.CrawlerName }); if (startCrawlerResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Crawler Started"); } } else { context.Logger.LogLine("Log Input has been transferred over to RDS before and time condition met"); GetTableResponse getTableResponse = await GlueClient.GetTableAsync(new GetTableRequest { DatabaseName = "master-database", Name = getCrawlerResponse.Crawler.Targets.S3Targets[0].Path.Substring(5).Replace("-", "_") }); if 
// Steady state: table is refreshed and the recorded job is re-run, but only when
// no run of that job is already STARTING/RUNNING/STOPPING.
// NOTE(review): the local-time hour/day comparison above re-crawls at most once per
// hour; uses DateTime.Now, so behavior depends on the Lambda's timezone — confirm.
(getTableResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { UpdateTableResponse updateTableResponse = await GlueClient.UpdateTableAsync(new UpdateTableRequest { DatabaseName = getTableResponse.Table.DatabaseName, TableInput = new TableInput { Name = getTableResponse.Table.Name, Parameters = getTableResponse.Table.Parameters, LastAccessTime = getTableResponse.Table.LastAccessTime, LastAnalyzedTime = getTableResponse.Table.LastAnalyzedTime, Owner = getTableResponse.Table.Owner, StorageDescriptor = getTableResponse.Table.StorageDescriptor, Retention = getTableResponse.Table.Retention, TableType = getTableResponse.Table.TableType } }); if (updateTableResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Table updated before running job"); context.Logger.LogLine(retrievedGCE.JobName); GetJobRunsResponse getJobRunsResponse = await GlueClient.GetJobRunsAsync(new GetJobRunsRequest { JobName = retrievedGCE.JobName }); bool jobRunning = false; context.Logger.LogLine(getJobRunsResponse.JobRuns.Count().ToString()); foreach (JobRun j in getJobRunsResponse.JobRuns) { context.Logger.LogLine(j.Id + " | " + j.JobRunState); if (j.JobRunState.Equals(JobRunState.STARTING) || j.JobRunState.Equals(JobRunState.RUNNING) || j.JobRunState.Equals(JobRunState.STOPPING)) { jobRunning = true; break; } } context.Logger.LogLine(jobRunning.ToString()); if (!jobRunning) { StartJobRunResponse startJobRunResponse = await GlueClient.StartJobRunAsync(new StartJobRunRequest { JobName = retrievedGCE.JobName, MaxCapacity = 10.0 }); if (startJobRunResponse.HttpStatusCode.Equals(HttpStatusCode.OK)) { context.Logger.LogLine("Job Started"); } } } } } } } } } } } }
/// <summary>
/// Returns the consolidated Glue entity linked to the given log input.
/// </summary>
/// <param name="ID">Primary key of the log input.</param>
/// <returns>The linked entity, or null when the input does not exist or has no linked entity.</returns>
public GlueConsolidatedEntity GetGlueConsolidatedEntity(int ID)
{
    // Null-conditional fixes a NullReferenceException the original threw when
    // Find(ID) returned null for an unknown ID.
    return Context.LogInputs.Find(ID)?.LinkedGlueEntity;
}
// Creates a log input end-to-end: builds the Kinesis-agent configuration JSON for the
// input's category, provisions a private, tagged, public-access-blocked S3 bucket and a
// Firehose delivery stream targeting it, wires S3 PUT notifications to the ingestion
// Lambda, and persists the LogInput row linked to the current user and bucket.
// NOTE(review): in the Windows-events config ("data"), "StreamName" and "Format" appear
// to lack a separating comma, and the pipe's SinkRef is Name + "KinesisFirehose" while
// the sink Id is Name + "Firehose" — both look like malformed agent config; confirm
// against the Kinesis agent that consumes this JSON.
// NOTE(review): Firehose access/secret keys from environment variables are embedded
// into the generated client config — verify these credentials are meant to leave the
// server, and that scope is limited to Firehose PutRecord.
public async Task <IActionResult> Create([Bind("FilePath", "Name", "Filter", "LogType", "LogInputCategory")] LogInput input) { ViewBag.LogPath = input.FilePath; ViewBag.LogName = input.Name; ViewBag.Filter = input.Filter; ViewBag.LogType = input.LogType; ViewBag.LogInput = input.LogInputCategory; string lowcap = input.Name.ToLower(); string pattern = @"[^A-Za-z0-9]+"; string replacement = "-"; string replace = Regex.Replace(lowcap, pattern, replacement); var BucketName2 = "smartinsights-" + replace; var data = "{ \r\n \"Sources\":[ \r\n { \r\n \"Id\":\"" + input.Name + "\",\r\n \"SourceType\":\"WindowsEventLogSource\",\r\n \"LogName\":\"" + input.LogType + "\",\r\n \"IncludeEventData\" : true\r\n }\r\n ],\r\n \"Sinks\":[ \r\n { \r\n \"Id\":\"" + input.Name + "Firehose\",\r\n \"SinkType\":\"KinesisFirehose\",\r\n \"AccessKey\":\"" + Environment.GetEnvironmentVariable("FIREHOSE_ACCESS_KEY_ID") + "\",\r\n \"SecretKey\":\"" + Environment.GetEnvironmentVariable("FIREHOSE_SECRET_ACCESS_KEY") + "\",\r\n \"Region\":\"ap-southeast-1\",\r\n \"StreamName\":\"" + BucketName2 + "\"\r\n \"Format\": \"json\"\r\n }\r\n ],\r\n \"Pipes\":[ \r\n { \r\n \"Id\":\"WinSecurityPipe\",\r\n \"SourceRef\":\"" + input.Name + "\",\r\n \"SinkRef\":\"" + input.Name + "KinesisFirehose\"\r\n }\r\n ],\r\n \"SelfUpdate\":0\r\n}"; var data2 = "{\r\n \"cloudwatch.emitMetrics\": false,\r\n \"awsSecretAccessKey\": \"" + Environment.GetEnvironmentVariable("FIREHOSE_SECRET_ACCESS_KEY") + "\",\r\n \"firehose.endpoint\": \"firehose.ap-southeast-1.amazonaws.com\",\r\n \"awsAccessKeyId\": \"" + Environment.GetEnvironmentVariable("FIREHOSE_ACCESS_KEY_ID") + "\",\r\n \"flows\": [\r\n {\r\n \"filePattern\": \"/opt/generators/CLF/*.log\",\r\n \"deliveryStream\": \"SmartInsights-Apache-Web-Logs\",\r\n \"dataProcessingOptions\": [\r\n {\r\n \"optionName\": \"LOGTOJSON\",\r\n \"logFormat\": \"COMMONAPACHELOG\"\r\n }\r\n ]\r\n },\r\n {\r\n \"filePattern\": \"/opt/generators/ELF/*.log\",\r\n \"deliveryStream\": 
\"\",\r\n \"dataProcessingOptions\": [\r\n {\r\n \"optionName\": \"LOGTOJSON\",\r\n \"logFormat\": \"COMBINEDAPACHELOG\"\r\n }\r\n ] \r\n },\r\n {\r\n \"filePattern\": \"/opt/log/www1/secure.log\",\r\n \"deliveryStream\": \"SmartInsights-SSH-Login-Logs\",\r\n \"dataProcessingOptions\": [\r\n {\r\n \"optionName\": \"LOGTOJSON\",\r\n \"logFormat\": \"SYSLOG\",\r\n \"matchPattern\": \"^([\\\\w]+) ([\\\\w]+) ([\\\\d]+) ([\\\\d]+) ([\\\\w:]+) ([\\\\w]+) ([\\\\w]+)\\\\[([\\\\d]+)\\\\]\\\\: ([\\\\w\\\\s.\\\\:=]+)$\",\r\n \"customFieldNames\": [\"weekday\", \"month\", \"day\", \"year\", \"time\", \"host\", \"process\", \"identifer\",\"message\"]\r\n }\r\n ]\r\n },\r\n {\r\n \"filePattern\": \"/opt/log/cisco_router1/cisco_ironport_web.log\",\r\n \"deliveryStream\": \"SmartInsights-Cisco-Squid-Proxy-Logs\",\r\n \"dataProcessingOptions\": [\r\n {\r\n \"optionName\": \"LOGTOJSON\",\r\n \"logFormat\": \"SYSLOG\",\r\n \"matchPattern\": \"^([\\\\w.]+) (?:[\\\\d]+) ([\\\\d.]+) ([\\\\w]+)\\\\/([\\\\d]+) ([\\\\d]+) ([\\\\w.]+) ([\\\\S]+) ([\\\\S]+) (?:[\\\\w]+)\\\\/([\\\\S]+) ([\\\\S]+) (?:[\\\\S\\\\s]+)$\",\r\n \"customFieldNames\": [\"timestamp\",\"destination_ip_address\",\"action\",\"http_status_code\",\"bytes_in\",\"http_method\",\"requested_url\",\"user\",\"requested_url_domain\",\"content_type\"]\r\n }\r\n ]\r\n }\r\n ]\r\n}"; string data3 = ""; PutBucketResponse putBucketResponse = await _S3Client.PutBucketAsync(new PutBucketRequest { BucketName = "smartinsights-" + replace, UseClientRegion = true, CannedACL = S3CannedACL.Private }); PutBucketTaggingResponse putBucketTaggingResponse = await _S3Client.PutBucketTaggingAsync(new PutBucketTaggingRequest { BucketName = "smartinsights-" + replace, TagSet = new List <Amazon.S3.Model.Tag> { new Amazon.S3.Model.Tag { Key = "Project", Value = "OSPJ" } } }); PutPublicAccessBlockResponse putPublicAccessBlockResponse = await _S3Client.PutPublicAccessBlockAsync(new PutPublicAccessBlockRequest { BucketName = "smartinsights-" + replace, 
// Block all public access on the new bucket, then create the Firehose stream that
// buffers into it (60s / 5MB), save the bucket row, and resolve the current user
// from the preferred_username claim so the LogInput can be linked to both.
PublicAccessBlockConfiguration = new PublicAccessBlockConfiguration { BlockPublicAcls = true, BlockPublicPolicy = true, IgnorePublicAcls = true, RestrictPublicBuckets = true } }); CreateDeliveryStreamResponse createDeliveryStreamResponse = await _FirehoseClient.CreateDeliveryStreamAsync(new CreateDeliveryStreamRequest { DeliveryStreamName = "smartinsights-" + replace, DeliveryStreamType = DeliveryStreamType.DirectPut, ExtendedS3DestinationConfiguration = new ExtendedS3DestinationConfiguration { BucketARN = "arn:aws:s3:::" + BucketName2, BufferingHints = new BufferingHints { IntervalInSeconds = 60, SizeInMBs = 5 }, RoleARN = Environment.GetEnvironmentVariable("FIREHOSE_EXECUTION_ROLE") }, Tags = new List <Amazon.KinesisFirehose.Model.Tag> { new Amazon.KinesisFirehose.Model.Tag { Key = "Project", Value = "OSPJ" } } }); _logContext.S3Buckets.Add(new Models.S3Bucket { Name = BucketName2 }); await _logContext.SaveChangesAsync(); ClaimsIdentity claimsIdentity = HttpContext.User.Identity as ClaimsIdentity; string currentIdentity = claimsIdentity.FindFirst("preferred_username").Value; User user = await _accountContext.Users.Where(u => u.Username == currentIdentity).FirstOrDefaultAsync(); Models.S3Bucket bucket = await _logContext.S3Buckets.Where(b => b.Name.Equals(BucketName2)).FirstOrDefaultAsync(); await _logContext.SaveChangesAsync(); await _LambdaClient.AddPermissionAsync(new AddPermissionRequest { Action = "lambda:InvokeFunction", FunctionName = Environment.GetEnvironmentVariable("LAMBDA_FUNCTION_NAME"), Principal = "s3.amazonaws.com", SourceAccount = Environment.GetEnvironmentVariable("AWS_ACCOUNT_NUMBER"), SourceArn = "arn:aws:s3:::" + bucket.Name, StatementId = "ID-" + bucket.ID }); await _S3Client.PutBucketNotificationAsync(new PutBucketNotificationRequest { BucketName = BucketName2, LambdaFunctionConfigurations = new List <LambdaFunctionConfiguration> { new LambdaFunctionConfiguration { FunctionArn = Environment.GetEnvironmentVariable("LAMBDA_FUNCTION_ARN"), 
// Subscribe the Lambda to PUT events, pick the config template (Windows events vs.
// file-based agent), persist the LogInput, and redirect to Manage on success.
Events = new List <EventType> { EventType.ObjectCreatedPut } } } }); if (!input.LogInputCategory.Equals(LogInputCategory.WindowsEventLogs)) { data3 = data2; } else { data3 = data; } _logContext.LogInputs.Add(new Models.LogInput { Name = input.Name, FirehoseStreamName = BucketName2, ConfigurationJSON = data3, LogInputCategory = input.LogInputCategory, LinkedUserID = user.ID, LinkedS3BucketID = bucket.ID, FilePath = input.FilePath, Filter = input.Filter, LogType = input.LogType, }); try { await _logContext.SaveChangesAsync(); TempData["Alert"] = "Success"; TempData["Message"] = "Log Input " + input.Name + " created successfully!"; return(RedirectToAction("Manage", new { InputID = _logContext.LogInputs.Where(LI => LI.Name.Equals(input.Name)).FirstOrDefault().ID })); } catch (DbUpdateException) { TempData["Alert"] = "Danger"; TempData["Message"] = "Error Creating log input " + input.Name + "!"; return(View(input)); } }
// Appends a message to the LogInput textbox followed by a CRLF line terminator.
// Kept as two AppendText calls: merging them would change how many TextChanged
// events the control raises.
public void Log(string str) { LogInput.AppendText(str); LogInput.AppendText("\r\n"); }
/// <summary>
/// Serializes the log input to JSON and writes it to the information log.
/// </summary>
/// <param name="input">Payload to serialize and report.</param>
public void Log(LogInput input)
{
    string payload = Newtonsoft.Json.JsonConvert.SerializeObject(input);
    logger.LogInformation($"消息上报: {payload}");
}