private static void log_direct_error(IApplication app) { //Now, we will log a synthetic error case to show other fields used for structured logging try { throw new Azos.Data.DocValidationException( //throw a fake data validation error schemaName: "medical.doctor", //Canonical schema name of data object message: "Doctor state is not valid: this provider not licensed in the requested medical procedure context" ); } catch (Exception error) { app.Log.Write( new Message { Type = MessageType.Error, // Error message Topic = "examples", // Archetype of message topic - a grouper for structured log analysis, e.g. "db", "logic", "web" From = nameof(log_direct_error), // A name of component/code point, e.g. "LogicA.Method1" Source = 100_234, //add source tracepoint - this can simplify complex logging scenarios Text = "Bad doctor: " + error.ToMessageWithType(), // Just a text with exception type name //Add Exception graph, some sinks know how to unwind all of the details and generate alerts etc.. //This is very much needed in Sky.Chronicle APM where you can browse exception details in a distributed //node cluster later Exception = error, //you can attach any string content for structured "parameter bag", you typically would use JSON //The parameters are typically just stored in the APM, and can be later indexed for later analysis Parameters = new { doc = "Smith Gregg", patient = "Ananda Sonali" }.ToJson(), //you can add archive dimensions = data keys for future log message analysis //we are adding "npi" provider id, "hcpc" medical procedure code, and financial class of the patient //Azos.Sky.Cronicle APM cloud solution automatically indexes log messages on the log archive dimensions ArchiveDimensions = ArchiveConventions.EncodeArchiveDimensions(new{ npi = "1230623234", hcpc = "h204.2", fc = "pvt" }) }.InitDefaultFields(app)
/// <summary>
/// Builds the encoded log archive-dimensions string for the supplied identity,
/// tagging messages with "un" = identity descriptor name. Returns null when no identity is given.
/// </summary>
public string GetUserLogArchiveDimensions(IIdentityDescriptor identity)
  => identity == null
      ? null
      : ArchiveConventions.EncodeArchiveDimensions(new { un = identity.IdentityDescriptorName });
public void Test02()
{
  // Round-trip: encode an anonymous object, then decode it back into a map
  var content = ArchiveConventions.EncodeArchiveDimensions(new { a = 1, b = 3 });
  content.See();

  var map = ArchiveConventions.DecodeArchiveDimensionsMap(content);

  Aver.IsNotNull(map);
  Aver.AreEqual(2, map.Count);
  Aver.AreObjectsEqual(1, map["a"]);
  Aver.AreObjectsEqual(3, map["b"]);
}
public void Test01()
{
  // Degenerate inputs all decode to null: null string, blank string,
  // null IArchiveLoggable, and content not produced by the convention
  var result = ArchiveConventions.DecodeArchiveDimensionsMap((string)null);
  Aver.IsNull(result);

  result = ArchiveConventions.DecodeArchiveDimensionsMap(" ");
  Aver.IsNull(result);

  result = ArchiveConventions.DecodeArchiveDimensionsMap((IArchiveLoggable)null);
  Aver.IsNull(result);

  result = ArchiveConventions.DecodeArchiveDimensionsMap("not a content produced by convention");
  Aver.IsNull(result);
}
public void Test03()
{
  var first  = ArchiveConventions.EncodeArchiveDimensions(new { a = 1, b = 3 });
  var second = ArchiveConventions.EncodeArchiveDimensions(new { b = 3, a = 1, c = (string)null });//notice a different sequence of keys

  first.See();
  second.See();

  //however the strings are equal, because keys are sorted and nulls are skipped
  Aver.AreEqual(first, second);

  var map = ArchiveConventions.DecodeArchiveDimensionsMap(first);
  Aver.IsNotNull(map);
  Aver.AreEqual(2, map.Count);
  Aver.AreObjectsEqual(1, map["a"]);
  Aver.AreObjectsEqual(3, map["b"]);
}
/// <summary>
/// Serializes a log Message into a BSONDocument: GDID/GUIDs go through DataDocConverter,
/// atoms as int64, blank strings as BSON null, exception data as compact JSON,
/// and archive dimensions as an embedded sub-document.
/// </summary>
public static BSONDocument ToBson(Message msg)
{
  var doc = new BSONDocument();

  // Local helper: stores a string field, writing BSON null for blank values
  void setStringOrNull(string fld, string value)
  {
    if (value.IsNullOrWhiteSpace())
      doc.Set(new BSONNullElement(fld));
    else
      doc.Set(new BSONStringElement(fld, value));
  }

  doc.Set(DataDocConverter.GDID_CLRtoBSON(FLD_GDID, msg.Gdid));
  doc.Set(DataDocConverter.GUID_CLRtoBSON(FLD_GUID, msg.Guid));
  doc.Set(DataDocConverter.GUID_CLRtoBSON(FLD_RELATED_TO, msg.RelatedTo));

  doc.Set(new BSONInt64Element(FLD_CHANNEL, (long)msg.Channel.ID));
  doc.Set(new BSONInt64Element(FLD_APP, (long)msg.App.ID));
  doc.Set(new BSONInt32Element(FLD_TYPE, (int)msg.Type));
  doc.Set(new BSONInt32Element(FLD_SOURCE, msg.Source));
  doc.Set(new BSONDateTimeElement(FLD_TIMESTAMP, msg.UTCTimeStamp));

  setStringOrNull(FLD_HOST, msg.Host);
  setStringOrNull(FLD_FROM, msg.From);
  setStringOrNull(FLD_TOPIC, msg.Topic);
  setStringOrNull(FLD_TEXT, msg.Text);
  setStringOrNull(FLD_PARAMETERS, msg.Parameters);

  // Exception graph is stored as compact JSON, not as a nested document
  if (msg.ExceptionData != null)
    doc.Set(new BSONStringElement(FLD_EXCEPTION, msg.ExceptionData.ToJson(JsonWritingOptions.CompactRowsAsMap)));
  else
    doc.Set(new BSONNullElement(FLD_EXCEPTION));

  // Archive dimensions decode into a map which becomes an embedded sub-document
  var dims = ArchiveConventions.DecodeArchiveDimensionsMap(msg);
  if (dims == null)
    doc.Set(new BSONNullElement(FLD_AD));
  else
    doc.Set(new BSONDocumentElement(FLD_AD, dims.ToBson()));

  return doc;
}
/// <summary>
/// Deserializes a log Message from a BSONDocument produced by ToBson.
/// Each field is read via a type pattern, so elements that are missing or stored
/// as BSON null simply leave the corresponding Message property at its default.
/// </summary>
public static Message FromBson(BSONDocument bson)
{
  var result = new Message();

  if (bson[FLD_GDID]       is BSONBinaryElement elmGdid)  result.Gdid      = DataDocConverter.GDID_BSONtoCLR(elmGdid);
  if (bson[FLD_GUID]       is BSONBinaryElement elmGuid)  result.Guid      = DataDocConverter.GUID_BSONtoCLR(elmGuid);
  if (bson[FLD_RELATED_TO] is BSONBinaryElement elmRel)   result.RelatedTo = DataDocConverter.GUID_BSONtoCLR(elmRel);

  // Channel/App atoms were written as int64; reinterpret the bits as ulong atom ids
  if (bson[FLD_CHANNEL] is BSONInt64Element elmChannel) result.Channel = new Atom((ulong)elmChannel.Value);
  if (bson[FLD_APP]     is BSONInt64Element elmApp)     result.App     = new Atom((ulong)elmApp.Value);

  if (bson[FLD_TYPE]      is BSONInt32Element elmType)    result.Type         = (MessageType)elmType.Value;
  if (bson[FLD_SOURCE]    is BSONInt32Element elmSource)  result.Source       = elmSource.Value;
  if (bson[FLD_TIMESTAMP] is BSONDateTimeElement elmUtc)  result.UTCTimeStamp = elmUtc.Value;

  if (bson[FLD_HOST]       is BSONStringElement elmHost)   result.Host       = elmHost.Value;
  if (bson[FLD_FROM]       is BSONStringElement elmFrom)   result.From       = elmFrom.Value;
  if (bson[FLD_TOPIC]      is BSONStringElement elmTopic)  result.Topic      = elmTopic.Value;
  if (bson[FLD_TEXT]       is BSONStringElement elmText)   result.Text       = elmText.Value;
  if (bson[FLD_PARAMETERS] is BSONStringElement elmPars)   result.Parameters = elmPars.Value;

  // Exception graph was stored as JSON text; materialize it back into a WrappedExceptionData doc
  if (bson[FLD_EXCEPTION] is BSONStringElement elmExcept)
    result.ExceptionData = JsonReader.ToDoc<WrappedExceptionData>(elmExcept.Value);

  // Archive dimensions sub-document is re-encoded into the conventional string form
  if (bson[FLD_AD] is BSONDocumentElement elmAd)
    result.ArchiveDimensions = ArchiveConventions.EncodeArchiveDimensions(elmAd.Value);

  return result;
}