/// <summary>
/// Queues a log event on the pending request and adds the message's
/// worst-case UTF-16 byte count to the running batch-size total.
/// </summary>
/// <param name="ev">Event to append to the pending PutLogEvents request.</param>
public void AddMessage(InputLogEvent ev)
{
    // GetMaxByteCount gives an upper bound without actually encoding the string.
    // NOTE(review): CloudWatch counts event size as UTF-8 bytes + 26 per event;
    // this UTF-16 bound over-estimates, which is safe but loose — confirm intent.
    _totalMessageSize += Encoding.Unicode.GetMaxByteCount(ev.Message.Length);
    _request.LogEvents.Add(ev);
}
/// <summary>
/// Best-effort write of a single message to the given CloudWatch log stream,
/// fetching the stream's current upload sequence token first.
/// </summary>
/// <param name="groupName">CloudWatch log group name.</param>
/// <param name="streamName">CloudWatch log stream name.</param>
/// <param name="msg">Message text to publish (timestamped with UtcNow).</param>
/// <returns>
/// The in-flight task. FIX: this was 'async void', which makes any unhandled
/// exception (even outside the try) crash the process via the synchronization
/// context and gives callers no way to observe completion. Returning Task is
/// backward-compatible — existing fire-and-forget call sites still compile.
/// </returns>
private async Task PutLog(string groupName, string streamName, string msg)
{
    try
    {
        var describeRequest = new DescribeLogStreamsRequest(groupName)
        {
            LogStreamNamePrefix = streamName
        };
        var describeResponse = await _logsClient.DescribeLogStreamsAsync(describeRequest);

        // NOTE(review): DescribeLogStreams is paginated; if many streams share
        // this prefix the target stream could be on a later page — confirm.
        var token = describeResponse.LogStreams
            .FirstOrDefault(s => s.LogStreamName == streamName)?.UploadSequenceToken;

        var logEvent = new InputLogEvent { Message = msg, Timestamp = DateTime.UtcNow };
        var putRequest = new PutLogEventsRequest(groupName, streamName,
            new List<InputLogEvent> { logEvent });

        // A brand-new stream has no sequence token; only set one when present.
        if (!string.IsNullOrEmpty(token))
        {
            putRequest.SequenceToken = token;
        }

        await _logsClient.PutLogEventsAsync(putRequest);
    }
    catch (Exception)
    {
        // Deliberately best-effort: logging must never take the caller down.
    }
}
/// <summary>
/// Deserializes an <see cref="InputLogEvent"/> from the reader: a nullable
/// message string followed by a timestamp, in that order.
/// </summary>
/// <param name="reader">Source positioned at a serialized event.</param>
/// <returns>The reconstructed event.</returns>
public static InputLogEvent ReadInputLogEvent(this BinaryReader reader)
{
    // Initializer properties are evaluated top-to-bottom, preserving the
    // original read order (message, then timestamp).
    return new InputLogEvent
    {
        Message = reader.ReadNullableString(),
        Timestamp = reader.ReadDateTime(),
    };
}
/// <summary>
/// Test driver: ensures a CloudWatch log group/stream named "NewLogGroup"
/// exists, pushes three test events (resuming from the sequence token saved
/// by the previous run), persists the next token, and exercises
/// TestMetricFilter against an "Error" pattern.
/// </summary>
static void Main(string[] args)
{
    var logClient = new AmazonCloudWatchLogsClient();

    // Group and stream share this name; it is also the token file's base name.
    const string newLogGroupName = "NewLogGroup";

    // Look for our log group to determine whether we need to do setup.
    DescribeLogGroupsResponse dlgr = logClient.DescribeLogGroups();
    LogGroup result = dlgr.LogGroups.Find(bk => bk.LogGroupName == newLogGroupName);

    if (result != null)
    {
        Console.WriteLine(result.LogGroupName + " found");
    }
    else
    {
        // First run: create group, token file, and stream.
        logClient.CreateLogGroup(new CreateLogGroupRequest(newLogGroupName));

        // FIX: File.CreateText returns an open StreamWriter; the original never
        // disposed it, so the later File.OpenText could fail with a sharing
        // violation. Dispose immediately — we only need the empty file.
        using (File.CreateText("..\\..\\" + newLogGroupName + ".txt")) { }

        logClient.CreateLogStream(new CreateLogStreamRequest(newLogGroupName, newLogGroupName));
    }

    string tokenFile = newLogGroupName;
    try
    {
        Console.WriteLine(newLogGroupName);

        // Pick up the sequence token saved by the last run.
        string sequenceToken;
        using (StreamReader sr = File.OpenText("..\\..\\" + tokenFile + ".txt"))
        {
            sequenceToken = sr.ReadLine();
        }

        // FIX: the original reused ONE InputLogEvent instance for all three
        // list entries; InputLogEvent is a reference type, so every entry
        // ended up carrying the final message. Build a distinct event each.
        DateTime dt = new DateTime(2017, 01, 11);
        var logEvents = new List<InputLogEvent>(3)
        {
            new InputLogEvent { Message = "Test Event 1", Timestamp = dt },
            new InputLogEvent { Message = "Test Event 2", Timestamp = dt },
            new InputLogEvent { Message = "This message also contains an Error", Timestamp = dt },
        };
        // (Removed: unused DescribeLogStreamsRequest local and a RetentionInDays
        // assignment on a local LogGroup that was never sent to the service.)

        var pler = new PutLogEventsRequest(newLogGroupName, tokenFile, logEvents)
        {
            SequenceToken = sequenceToken // use last run's token
        };
        PutLogEventsResponse plerp = logClient.PutLogEvents(pler);
        Console.WriteLine("Next sequence token = " + plerp.NextSequenceToken);

        // FIX: File.OpenWrite does not truncate, so a shorter token would leave
        // stale trailing bytes behind. WriteAllText replaces the contents and
        // disposes the stream for us.
        File.WriteAllText("..\\..\\" + tokenFile + ".txt", plerp.NextSequenceToken);

        // Exercise the metric filter tester against a sample "Error" message.
        TestMetricFilterRequest tmfr = new TestMetricFilterRequest
        {
            LogEventMessages = new List<string>(1) { "Error" },
            FilterPattern = "Error"
        };
        TestMetricFilterResponse tmfrp = logClient.TestMetricFilter(tmfr);

        List<MetricFilterMatchRecord> results = tmfrp.Matches;
        Console.WriteLine("Found " + results.Count.ToString() + " match records");
        foreach (MetricFilterMatchRecord mfmr in results)
        {
            Console.WriteLine("Event Message = " + mfmr.EventMessage);
        }
        Console.WriteLine("Metric filter test done");
    }
    catch (Exception e)
    {
        Console.WriteLine(e.Message);
    }
}
/// <summary>
/// Serializes an <see cref="InputLogEvent"/> to the writer as a nullable
/// message string followed by its timestamp, in that order.
/// </summary>
/// <param name="writer">Destination writer.</param>
/// <param name="data">Event to serialize.</param>
public static void WriteInputLogEvent(this BinaryWriter writer, InputLogEvent data)
{
    var message = data.Message;
    var timestamp = data.Timestamp;

    writer.WriteNullableString(message);
    writer.WriteDateTime(timestamp);
}
/// <summary>
///
/// <para>WriteLogs:</para>
///
/// <para>Writes logs to the logging service</para>
///
/// <para>Check <seealso cref="IBLoggingServiceInterface.WriteLogs"/> for detailed documentation</para>
///
/// </summary>
public bool WriteLogs(
    List<BLoggingParametersStruct> _Messages,
    string _LogGroupName,
    string _LogStreamName,
    bool _bAsync = true,
    Action<string> _ErrorMessageAction = null)
{
    // Nothing to send.
    if (_Messages == null || _Messages.Count == 0)
    {
        return(false);
    }
    if (_bAsync)
    {
        // Fire-and-forget: re-enter this method synchronously on a worker
        // thread. The 'true' returned here only means "scheduled", not "sent".
        BTaskWrapper.Run(() =>
        {
            WriteLogs(_Messages, _LogGroupName, _LogStreamName, false, _ErrorMessageAction);
        });
        return(true);
    }
    else
    {
        // Sanitize names before using them as CloudWatch identifiers.
        _LogGroupName = BUtility.EncodeStringForTagging(_LogGroupName);
        _LogStreamName = BUtility.EncodeStringForTagging(_LogStreamName);

        string SequenceToken = null;
        bool bLogStreamAndGroupExists = false;
        try
        {
            // Look up the stream's current upload sequence token; finding the
            // stream also proves the group exists.
            // NOTE(review): sync-over-async (.Wait()) throughout this method —
            // deadlock risk on sync-context hosts; presumably acceptable since
            // the _bAsync path already offloads to a worker thread — confirm.
            // NOTE(review): DescribeLogStreams is paginated; a stream beyond
            // the first page would be missed — confirm expected stream counts.
            var DescribeStreamRequest = new DescribeLogStreamsRequest(_LogGroupName);
            using (var CreatedDescribeTask = CloudWatchLogsClient.DescribeLogStreamsAsync(DescribeStreamRequest))
            {
                CreatedDescribeTask.Wait();
                if (CreatedDescribeTask.Result != null && CreatedDescribeTask.Result.LogStreams != null && CreatedDescribeTask.Result.LogStreams.Count > 0)
                {
                    foreach (var Current in CreatedDescribeTask.Result.LogStreams)
                    {
                        if (Current != null && Current.LogStreamName == _LogStreamName)
                        {
                            SequenceToken = Current.UploadSequenceToken;
                            bLogStreamAndGroupExists = true;
                            break;
                        }
                    }
                }
            }
        }
        catch (Exception)
        {
            // Describe failed (e.g. group missing) — fall through to creation.
            bLogStreamAndGroupExists = false;
        }

        if (!bLogStreamAndGroupExists)
        {
            // Provision group then stream; a concurrent creator racing us is
            // fine (ResourceAlreadyExists), anything else aborts.
            try
            {
                var CreateGroupRequest = new CreateLogGroupRequest(_LogGroupName);
                using (var CreatedGroupTask = CloudWatchLogsClient.CreateLogGroupAsync(CreateGroupRequest))
                {
                    CreatedGroupTask.Wait();
                }
            }
            catch (Exception e)
            {
                if (!(e is ResourceAlreadyExistsException))
                {
                    _ErrorMessageAction?.Invoke("BLoggingServiceAWS->WriteLogs: " + e.Message + ", Trace: " + e.StackTrace);
                    return(false);
                }
            }
            try
            {
                var CreateStreamRequest = new CreateLogStreamRequest(_LogGroupName, _LogStreamName);
                using (var CreatedStreamTask = CloudWatchLogsClient.CreateLogStreamAsync(CreateStreamRequest))
                {
                    CreatedStreamTask.Wait();
                }
            }
            catch (Exception e)
            {
                if (!(e is ResourceAlreadyExistsException))
                {
                    _ErrorMessageAction?.Invoke("BLoggingServiceAWS->WriteLogs: " + e.Message + ", Trace: " + e.StackTrace);
                    return(false);
                }
            }
        }

        // Build one event per message, prefixing each with its severity label.
        var LogEvents = new List<InputLogEvent>();
        foreach (var Message in _Messages)
        {
            var LogEvent = new InputLogEvent()
            {
                Message = Message.Message,
                Timestamp = DateTime.UtcNow
            };
            switch (Message.LogType)
            {
                case EBLoggingServiceLogType.Debug:
                    LogEvent.Message = "Debug-> " + LogEvent.Message;
                    break;
                case EBLoggingServiceLogType.Info:
                    LogEvent.Message = "Info-> " + LogEvent.Message;
                    break;
                case EBLoggingServiceLogType.Warning:
                    LogEvent.Message = "Warning-> " + LogEvent.Message;
                    break;
                case EBLoggingServiceLogType.Error:
                    LogEvent.Message = "Error-> " + LogEvent.Message;
                    break;
                case EBLoggingServiceLogType.Critical:
                    LogEvent.Message = "Critical-> " + LogEvent.Message;
                    break;
            }
            LogEvents.Add(LogEvent);
        }
        try
        {
            // SequenceToken is null for a freshly created stream, which is the
            // correct value for the first PutLogEvents call.
            var PutRequest = new PutLogEventsRequest(_LogGroupName, _LogStreamName, LogEvents)
            {
                SequenceToken = SequenceToken
            };
            using (var CreatedPutTask = CloudWatchLogsClient.PutLogEventsAsync(PutRequest))
            {
                CreatedPutTask.Wait();
            }
        }
        catch (Exception e)
        {
            _ErrorMessageAction?.Invoke("BLoggingServiceAWS->WriteLogs: " + e.Message + ", Trace: " + e.StackTrace);
            return(false);
        }
        return(true);
    }
}
/// <summary>
/// Service loop: forever, drains queued log messages with five parallel
/// consumers and forwards each as a JSON-serialized event to CloudWatch Logs,
/// maintaining per-stream sequence tokens in LogStreamTokenMap. Recovers from
/// stale tokens and missing group/stream with up to five attempts per message.
/// Never returns.
/// </summary>
internal void ForwardLogs()
{
    do
    {
        Action pushLogsToCloudWatchAction = () =>
        {
            LogMessageDTO logMessageObj;
            while (LogService.taskQueue.TryDequeue(out logMessageObj))
            {
                try
                {
                    // NOTE(review): creating a client per message is wasteful;
                    // consider hoisting one client per drain pass. Kept as-is to
                    // preserve per-message exception accounting.
                    using (IAmazonCloudWatchLogs logsclient = Amazon.AWSClientFactory.CreateAmazonCloudWatchLogsClient(this.cloudWatchAccessKey, this.cloudWatchSecretKey, this.cloudWatchRegion))
                    {
                        // Stream per application alias; "Custom" when absent.
                        string logStreamName = string.IsNullOrWhiteSpace(logMessageObj.ApplicationAlias) ? "Custom" : logMessageObj.ApplicationAlias;

                        // Put the object into JSON format and send it to CloudWatch.
                        List<InputLogEvent> logEvents = new List<InputLogEvent>();
                        InputLogEvent logEntry = new InputLogEvent();
                        logEntry.Message = JsonConvert.SerializeObject(logMessageObj);
                        logEntry.Timestamp = logMessageObj.Timestamp;
                        logEvents.Add(logEntry);

                        PutLogEventsRequest request = new PutLogEventsRequest(this.logGroupName, logStreamName, logEvents);
                        PutLogEventsResponse response = null;

                        // Up to 5 attempts per message; the catch blocks repair the
                        // precondition (token / group / stream) before the retry.
                        for (int i = 0; i < 5; ++i)
                        {
                            try
                            {
                                lock (this.criticalSection)
                                {
                                    // If we already have a token for this stream, use it.
                                    if (LogStreamTokenMap.ContainsKey(logStreamName))
                                    {
                                        request.SequenceToken = LogStreamTokenMap[logStreamName];
                                    }
                                    // Put the logs and remember the token for the next put.
                                    response = logsclient.PutLogEvents(request);
                                    var newToken = response.NextSequenceToken;
                                    if (LogStreamTokenMap.ContainsKey(logStreamName))
                                    {
                                        LogStreamTokenMap[logStreamName] = newToken;
                                    }
                                    else
                                    {
                                        LogStreamTokenMap.Add(logStreamName, newToken);
                                    }
                                }
                                // Success — stop retrying.
                                break;
                            }
                            catch (InvalidSequenceTokenException)
                            {
                                // Our token was stale; refresh tokens for ALL streams
                                // in the group so sibling streams benefit too.
                                var logstreamsrequest = new DescribeLogStreamsRequest(this.logGroupName);
                                var logStreamResponse = logsclient.DescribeLogStreams(logstreamsrequest);
                                var logstreamsList = logStreamResponse.LogStreams;
                                lock (this.criticalSection)
                                {
                                    foreach (var logstream in logstreamsList)
                                    {
                                        var appname = logstream.LogStreamName;
                                        var token = logstream.UploadSequenceToken;
                                        if (LogStreamTokenMap.ContainsKey(appname))
                                        {
                                            LogStreamTokenMap[appname] = token;
                                        }
                                        else
                                        {
                                            LogStreamTokenMap.Add(appname, token);
                                        }
                                    }
                                }
                            }
                            catch (ResourceNotFoundException)
                            {
                                // Likely a new stream that needs provisioning.
                                // Ignore creation races — a retry will pick it up.
                                try
                                {
                                    CreateLogGroupRequest logGroup = new CreateLogGroupRequest(this.logGroupName);
                                    logsclient.CreateLogGroup(logGroup);
                                }
                                catch (Exception) { }
                                try
                                {
                                    CreateLogStreamRequest logStream = new CreateLogStreamRequest(this.logGroupName, logStreamName);
                                    logsclient.CreateLogStream(logStream);
                                    lock (this.criticalSection)
                                    {
                                        // A brand-new stream starts with no token.
                                        LogStreamTokenMap.Remove(logStreamName);
                                    }
                                }
                                catch (Exception) { }
                            }
                        }

                        // FIX: 'response' is still null when all five attempts threw
                        // (e.g. repeated ResourceNotFound). The original dereferenced
                        // it unconditionally, hitting a NullReferenceException that was
                        // miscounted as numExceptions instead of a failed forward.
                        if (response != null && response.HttpStatusCode == HttpStatusCode.OK)
                        {
                            ++LogService.successfulForwards;
                        }
                        else
                        {
                            ++LogService.failedForwards;
                        }
                    }
                }
                catch (Exception)
                {
                    // Can't log this exception without risking cyclical logging
                    // and deadlocks; just count it.
                    ++LogService.numExceptions;
                }
            }
        };

        // Five parallel consumers. Ordering to CloudWatch is best-effort by
        // design: multiple service instances may push concurrently anyway.
        Parallel.Invoke(pushLogsToCloudWatchAction, pushLogsToCloudWatchAction, pushLogsToCloudWatchAction, pushLogsToCloudWatchAction, pushLogsToCloudWatchAction);

        // Pause before polling the queue again.
        Thread.Sleep(1000);
    } while (true);
}