public void TestRateStat_1()
{
    // Arrange: the same rate-stat figures from both record formats; only the
    // Hugeland record carries a full date.
    record = new LogRecord()
    {
        Pci = 111,
        Sinr = 12,
        PdschTbCode0 = 12345,
        PdschTbCode1 = 23456,
        Time = DateTime.Parse("11:30:04")
    };
    hRecord = new HugelandRecord()
    {
        Pci = 111,
        Sinr = 12,
        PdschTbCode0 = 12345,
        PdschTbCode1 = 23456,
        Time = DateTime.Parse("2012-11-22 11:30:04")
    };
    stat = new RateStat();

    // Importing a LogRecord copies all rate fields.
    // Fix: Assert.AreEqual takes (expected, actual) — arguments reordered so
    // failure messages report the right values.
    stat.Import(record);
    Assert.AreEqual(111, stat.Pci);
    Assert.AreEqual(12, stat.Sinr);
    Assert.AreEqual(12345, stat.PdschTbCode0);
    Assert.AreEqual(23456, stat.PdschTbCode1);
    Assert.AreEqual("11:30:04", stat.Time.ToLongTimeString());

    // Importing a HugelandRecord: PdschTbCode1 is expected to reset to 0
    // (presumably not carried by the Hugeland import — confirm in RateStat).
    stat.Import(hRecord);
    Assert.AreEqual(111, stat.Pci);
    Assert.AreEqual(12, stat.Sinr);
    Assert.AreEqual(12345, stat.PdschTbCode0);
    Assert.AreEqual(0, stat.PdschTbCode1);
    Assert.AreEqual("11:30:04", stat.Time.ToLongTimeString());
    // NOTE(review): culture-sensitive — this passes only under zh-CN date formatting.
    Assert.AreEqual("2012年11月22日", stat.Time.ToLongDateString());
}
/// <summary>
/// Folds the given record into this entry's statistics when its URL matches.
/// </summary>
/// <param name="logRecord">Record whose hit count and elapsed time to accumulate.</param>
public void Add(LogRecord logRecord)
{
    if (Url != logRecord.Url)
    {
        return; // different endpoint — not ours to count
    }

    Count++;
    TotalTime += logRecord.TimeTaken;
}
/// <summary>
/// Logs a message by queuing a new record for later processing.
/// </summary>
/// <param name="level">Log level of the message.</param>
/// <param name="sMsg">Message to log.</param>
/// <param name="args">Optional format arguments stored with the record.</param>
public void Log(LogLevels level, string sMsg, params Object[] args)
{
    var entry = new LogRecord(level, sMsg, args);
    // NOTE(review): lock(this) is discouraged (external callers can lock the
    // same object); kept here to preserve existing behavior.
    lock (this)
    {
        _records.Enqueue(entry);
    }
}
/// <summary>
/// Appends a timestamped message to the log and notifies subscribers.
/// </summary>
/// <param name="time">Timestamp of the entry.</param>
/// <param name="message">Message text.</param>
public void AddLog(DateTime time, string message)
{
    LogRecord item = new LogRecord(time, message);
    // NOTE(review): lock(this) kept for compatibility; a private gate object would be safer.
    lock (this)
    {
        this.log.Add(item);
    }
    // Fix: null-conditional invoke snapshots the delegate, closing the race
    // where the last subscriber unsubscribes between the null check and the call.
    this.OnLogAdded?.Invoke(item);
}
/// <summary>
/// Captures a log entry in the in-memory log list.
/// </summary>
/// <param name="logLevel">Severity of the entry.</param>
/// <param name="messageFunc">Lazily-evaluated message; null means "is this level enabled?" probe.</param>
/// <param name="exception">Optional exception attached to the record.</param>
/// <param name="formatParameters">Optional string.Format arguments for the message.</param>
/// <returns>Always true (all levels are considered enabled).</returns>
public bool Log(LogLevel logLevel, Func<string> messageFunc, Exception exception = null, params object[] formatParameters)
{
    // A null messageFunc is the level-support probe — report "enabled".
    if (messageFunc == null)
    {
        return true;
    }
    // Fix: only run string.Format when arguments were actually supplied.
    // Formatting an argument-less message that happens to contain '{' or '}'
    // would throw a FormatException.
    var message = messageFunc();
    if (formatParameters != null && formatParameters.Length > 0)
    {
        message = string.Format(message, formatParameters);
    }
    var record = new LogRecord(logLevel, message, exception);
    _logs.Add(record);
    return true;
}
/// <summary>
/// Initializes handover info from the record that triggered the request,
/// capturing position, time and the source-cell identity before handover.
/// </summary>
/// <param name="handoverRequestRecord">Record logged when the handover was requested.</param>
public HandoverInfo(LogRecord handoverRequestRecord)
    : this()
{
    // Where and when the handover was requested.
    RequestTime = handoverRequestRecord.Time;
    RequestLongitude = handoverRequestRecord.Longtitute;
    RequestLatitude = handoverRequestRecord.Lattitute;

    // Identity and signal level of the serving cell before the handover.
    PciBefore = handoverRequestRecord.Pci;
    EarfcnBefore = handoverRequestRecord.Earfcn;
    ENodebIdBefore = handoverRequestRecord.ENodebId;
    SectorIdBefore = handoverRequestRecord.SectorId;
    RsrpBefore = handoverRequestRecord.Rsrp;
}
public void TestCoverageStat()
{
    // Arrange: a drive-test LogRecord and a HugelandRecord for the same cell,
    // with distinct position and signal samples so overwrites are visible.
    record = new LogRecord()
    {
        Pci = 111,
        Sinr = 12,
        Rsrp = -110,
        PdschTbCode0 = 12345,
        PdschTbCode1 = 23456,
        Longtitute = 112.1,
        Lattitute = 23.2,
        Time = DateTime.Parse("11:30:04"),
        ENodebId = 1,
        SectorId = 1,
        Earfcn = 100
    };
    hRecord = new HugelandRecord()
    {
        Pci = 111,
        Sinr = 12,
        Rsrp = -95,
        Longtitute = 112.3,
        Lattitute = 23.4,
        PdschTbCode0 = 12345,
        PdschTbCode1 = 23456,
        Time = DateTime.Parse("2012-11-22 11:30:04"),
        ENodebId = 1,
        SectorId = 2,
        Earfcn = 1825
    };
    stat = new CoverageStat();

    // Importing a LogRecord copies the coverage fields.
    // Fix: Assert.AreEqual takes (expected, actual) — arguments reordered so
    // failure messages report the right values.
    stat.Import(record);
    Assert.AreEqual(-110, stat.Rsrp);
    Assert.AreEqual(12, stat.Sinr);
    Assert.AreEqual(112.1, stat.Longtitute);
    Assert.AreEqual(23.2, stat.Lattitute);
    Assert.AreEqual(1, stat.ENodebId);
    Assert.AreEqual(1, stat.SectorId);
    Assert.AreEqual(100, stat.Earfcn);

    // Importing a HugelandRecord overwrites with the newer sample.
    stat.Import(hRecord);
    Assert.AreEqual(-95, stat.Rsrp);
    Assert.AreEqual(12, stat.Sinr);
    Assert.AreEqual(112.3, stat.Longtitute);
    Assert.AreEqual(23.4, stat.Lattitute);
    Assert.AreEqual(1, stat.ENodebId);
    Assert.AreEqual(2, stat.SectorId);
    Assert.AreEqual(1825, stat.Earfcn);
}
public void TestRateStat_2()
{
    // Arrange: throughput/MCS figures; only the LogRecord carries a
    // millisecond-precision timestamp.
    record = new LogRecord()
    {
        DlThroughput = 123567,
        UlMcs = 12,
        PdschScheduledSlots = 17,
        Time = DateTime.Parse("2012-11-22 11:30:04.221")
    };
    hRecord = new HugelandRecord()
    {
        DlThroughput = 123567,
        UlMcs = 12
    };
    stat = new RateStat();

    // Importing a LogRecord copies throughput, MCS, and the full timestamp.
    // Fix: Assert.AreEqual takes (expected, actual) — arguments reordered so
    // failure messages report the right values.
    stat.Import(record);
    Assert.AreEqual(123567, stat.DlThroughput);
    Assert.AreEqual(12, stat.UlMcs);
    Assert.AreEqual(4, stat.Time.Second);
    Assert.AreEqual(221, stat.Time.Millisecond);

    // Importing a HugelandRecord keeps the same throughput/MCS values.
    stat.Import(hRecord);
    Assert.AreEqual(123567, stat.DlThroughput);
    Assert.AreEqual(12, stat.UlMcs);
}
/// <summary>
/// Builds a single delimited text line from the log header (and, when
/// tracing is enabled, the call site) plus the body, and wraps it in a
/// <see cref="LogRecord"/>.
/// </summary>
/// <param name="logHeader">Header supplying time, name, info, type and version columns.</param>
/// <param name="logBody">Body appended as the final column.</param>
/// <param name="stackfram">Frame used for class/method/line/column info when tracing is on.</param>
/// <returns>A LogRecord whose LogMessage holds the formatted line.</returns>
public override LogRecord FormatLogRecord(LogHeader logHeader, LogBody logBody, StackFrame stackfram)
{
    var seperator = TXTLogFactory.GetLogSeperator(logHeader);
    var sb = new StringBuilder();

    // Fixed header columns, e.g.:
    // 2014-12-09 20:33:36:495|AdminService|WAP.WCF.HELPER|TEXT|1.0|...
    sb.Append(FormatTime(TXTLogFactory.GetLogGenTime(logHeader))).Append(seperator)
      .Append(logHeader.LogName).Append(seperator)
      .Append(logHeader.LogInfo).Append(seperator)
      .Append(logHeader.LogType).Append(seperator)
      .Append(logHeader.LogVersion).Append(seperator);

    // Optional call-site columns (file name, method, line, column).
    if (logHeader.LogTraceEnable)
    {
        sb.Append(Path.GetFileName(LogManager.GetClassName(stackfram))).Append(seperator)
          .Append(LogManager.GetMethodName(stackfram)).Append(seperator)
          .Append(LogManager.GetLineNumber(stackfram)).Append(seperator)
          .Append(LogManager.GetColumnNumber(stackfram)).Append(seperator);
    }

    sb.Append(TXTLogFactory.GetLogBodyString(logBody));

    var lr = new LogRecord();
    lr.LogMessage = sb.ToString();
    return lr;
}
/// <summary>
/// Stores the originating log record so derived event types can expose its fields.
/// </summary>
/// <param name="logRecord">Record this event was built from.</param>
protected EventBase(LogRecord logRecord)
{
    this.LogRecord = logRecord;
}
// Builds a chunked transaction-log database from the declarative _chunkRecs
// description. Two passes: pass 1 validates transaction/stream consistency
// without writing anything; pass 2 writes the log records chunk by chunk,
// tracking per-stream uncommitted versions, and returns the written records
// plus the final stream states.
public DbResult CreateDb()
{
    // One output slot per input record, mirrored chunk-by-chunk.
    var records = new LogRecord[_chunkRecs.Count][];
    for (int i = 0; i < records.Length; ++i)
    {
        records[i] = new LogRecord[_chunkRecs[i].Length];
    }

    var transactions = new Dictionary<int, TransactionInfo>();
    var streams = new Dictionary<string, StreamInfo>();
    var streamUncommitedVersion = new Dictionary<string, long>();

    // --- Pass 1: validate the record plan and collect transaction metadata. ---
    for (int i = 0; i < _chunkRecs.Count; ++i)
    {
        for (int j = 0; j < _chunkRecs[i].Length; ++j)
        {
            var rec = _chunkRecs[i][j];
            TransactionInfo transInfo;
            bool transCreate = transactions.TryGetValue(rec.Transaction, out transInfo);
            if (!transCreate)
            {
                // First record of a transaction must not be a Commit.
                if (rec.Type == Rec.RecType.Commit)
                {
                    throw new Exception("Commit for non-existing transaction.");
                }

                transactions[rec.Transaction] = transInfo = new TransactionInfo(rec.StreamId, rec.Id, rec.Id);
                streams[rec.StreamId] = new StreamInfo(-1);
                streamUncommitedVersion[rec.StreamId] = -1; // -1 == stream has no events yet
            }
            else
            {
                // TransStart may only open a transaction, never continue one.
                if (rec.Type == Rec.RecType.TransStart)
                {
                    throw new Exception(string.Format("Unexpected record type: {0}.", rec.Type));
                }
            }

            // All records of one transaction must target the same stream.
            if (transInfo.StreamId != rec.StreamId)
            {
                throw new Exception(string.Format("Wrong stream id for transaction. Transaction StreamId: {0}, record StreamId: {1}.",
                                                  transInfo.StreamId,
                                                  rec.StreamId));
            }

            // After a Delete, only the Commit may follow.
            if (rec.Type != Rec.RecType.Commit && transInfo.IsDelete)
            {
                throw new Exception("Transaction with records after delete record.");
            }

            if (rec.Type == Rec.RecType.Delete)
            {
                transInfo.IsDelete = true;
            }

            transInfo.LastPrepareId = rec.Id;
        }
    }

    // --- Pass 2: write each chunk's records and update checkpoints/versions. ---
    for (int i = 0; i < _chunkRecs.Count; ++i)
    {
        // First chunk already exists; subsequent ones are appended.
        var chunk = i == 0 ? _db.Manager.GetChunk(0) : _db.Manager.AddNewChunk();
        _db.Config.WriterCheckpoint.Write(i * (long)_db.Config.ChunkSize);

        for (int j = 0; j < _chunkRecs[i].Length; ++j)
        {
            var rec = _chunkRecs[i][j];
            var transInfo = transactions[rec.Transaction];
            var logPos = _db.Config.WriterCheckpoint.ReadNonFlushed();

            long streamVersion = streamUncommitedVersion[rec.StreamId];
            // An empty stream can only start with a prepare-like record.
            if (streamVersion == -1
                && rec.Type != Rec.RecType.TransStart
                && rec.Type != Rec.RecType.Prepare
                && rec.Type != Rec.RecType.Delete)
            {
                throw new Exception(string.Format("Stream {0} is empty.", rec.StreamId));
            }
            // A deleted stream accepts only its Commit.
            if (streamVersion == EventNumber.DeletedStream && rec.Type != Rec.RecType.Commit)
            {
                throw new Exception(string.Format("Stream {0} was deleted, but we need to write some more prepares.", rec.StreamId));
            }

            // First prepare of the transaction anchors its position/event number.
            if (transInfo.FirstPrepareId == rec.Id)
            {
                transInfo.TransactionPosition = logPos;
                transInfo.TransactionEventNumber = streamVersion + 1;
                transInfo.TransactionOffset = 0;
            }

            LogRecord record;
            // Only the first prepare carries the concrete expected version.
            var expectedVersion = transInfo.FirstPrepareId == rec.Id ? streamVersion : ExpectedVersion.Any;
            switch (rec.Type)
            {
                case Rec.RecType.Prepare:
                {
                    record = CreateLogRecord(rec, transInfo, logPos, expectedVersion);
                    // Metastream prepares carry stream metadata applied at commit time.
                    if (SystemStreams.IsMetastream(rec.StreamId))
                    {
                        transInfo.StreamMetadata = rec.Metadata;
                    }
                    streamUncommitedVersion[rec.StreamId] += 1;
                    break;
                }
                case Rec.RecType.Delete:
                {
                    record = CreateLogRecord(rec, transInfo, logPos, expectedVersion);
                    // V0 records mark deletion with int.MaxValue; newer ones use the sentinel.
                    streamUncommitedVersion[rec.StreamId] = rec.Version == LogRecordVersion.LogRecordV0
                        ? int.MaxValue
                        : EventNumber.DeletedStream;
                    break;
                }
                case Rec.RecType.TransStart:
                case Rec.RecType.TransEnd:
                {
                    record = CreateLogRecord(rec, transInfo, logPos, expectedVersion);
                    break;
                }
                case Rec.RecType.Commit:
                {
                    record = CreateLogRecord(rec, transInfo, logPos, expectedVersion);
                    // Commit of a metastream transaction applies metadata to the original stream.
                    if (transInfo.StreamMetadata != null)
                    {
                        var streamId = SystemStreams.OriginalStreamOf(rec.StreamId);
                        if (!streams.ContainsKey(streamId))
                        {
                            streams.Add(streamId, new StreamInfo(-1));
                        }
                        streams[streamId].StreamMetadata = transInfo.StreamMetadata;
                    }

                    if (transInfo.IsDelete)
                    {
                        streams[rec.StreamId].StreamVersion = EventNumber.DeletedStream;
                    }
                    else
                    {
                        streams[rec.StreamId].StreamVersion = transInfo.TransactionEventNumber + transInfo.TransactionOffset - 1;
                    }
                    break;
                }
                default:
                    throw new ArgumentOutOfRangeException();
            }

            var writerRes = chunk.TryAppend(record);
            if (!writerRes.Success)
            {
                throw new Exception(string.Format("Could not write log record: {0}", record));
            }
            // Advance the writer checkpoint past the record just written.
            _db.Config.WriterCheckpoint.Write(i * (long)_db.Config.ChunkSize + writerRes.NewPosition);
            records[i][j] = record;
        }

        // Complete every chunk except a possibly still-open last one.
        if (i < _chunkRecs.Count - 1 || (_completeLast && i == _chunkRecs.Count - 1))
        {
            chunk.Complete();
        }
        else
        {
            chunk.Flush();
        }
    }
    return (new DbResult(_db, records, streams));
}
/// <summary>
/// Wraps a log record that has already been identified as a suspend-instance event.
/// </summary>
/// <param name="logRecord">Record to wrap; must satisfy <c>IsSuspendInstanceEvent</c>.</param>
internal SuspendInstanceEvent(LogRecord logRecord)
    : base(logRecord)
{
    // Contract check only in debug builds — callers are expected to have
    // dispatched on IsSuspendInstanceEvent already.
    Debug.Assert(IsSuspendInstanceEvent(logRecord));
}
/// <summary>
/// Hook for subclasses: extract whatever identifiers the concrete type
/// needs from the given log record and store them on the instance.
/// </summary>
/// <param name="log">Record to read identifiers from.</param>
protected abstract void SetRecordIdentifiers(LogRecord log);
// Verifies that an audit-log entry with severity ERROR (a failed
// v1.compute.instances.insert) deserializes into an InsertInstanceEvent with
// the status code/message, instance identity and severity extracted, and no
// image (the failed request body carries none).
public void WhenSeverityIsError_ThenFieldsAreExtracted()
{
    // Sample Cloud Audit Log entry for a failed instance insert (INVALID_ARGUMENT).
    var json = @" { 'protoPayload': { '@type': 'type.googleapis.com/google.cloud.audit.AuditLog', 'status': { 'code': 3, 'message': 'INVALID_ARGUMENT' }, 'authenticationInfo': { }, 'requestMetadata': { 'callerIp': '34.91.94.164', 'callerSuppliedUserAgent': 'google-cloud-sdk', 'requestAttributes': {}, 'destinationAttributes': {} }, 'serviceName': 'compute.googleapis.com', 'methodName': 'v1.compute.instances.insert', 'resourceName': 'projects/project-1/zones/us-central1-a/instances/instance-1', 'request': { '@type': 'type.googleapis.com/compute.instances.insert' } }, 'insertId': '-vwncp9d6006', 'resource': { 'type': 'gce_instance', 'labels': { 'zone': 'us-central1-a', 'instance_id': '11111111631960822', 'project_id': 'project-1' } }, 'timestamp': '2020-04-24T08:13:39.103Z', 'severity': 'ERROR', 'logName': 'projects/project-1/logs/cloudaudit.googleapis.com%2Factivity', 'operation': { 'id': 'operation-1587715943067-5a404ecca6fa4-dc7e343f-dbc3ca83', 'producer': 'compute.googleapis.com', 'last': true }, 'receiveTimestamp': '2020-04-24T08:13:40.134230447Z' }";

    var r = LogRecord.Deserialize(json);
    Assert.IsTrue(InsertInstanceEvent.IsInsertInstanceEvent(r));

    var e = (InsertInstanceEvent)r.ToEvent();

    // Instance identity comes from the resource labels / resourceName.
    Assert.AreEqual(11111111631960822, e.InstanceId);
    Assert.AreEqual("instance-1", e.InstanceReference.Name);
    Assert.AreEqual("us-central1-a", e.InstanceReference.Zone);
    Assert.AreEqual("project-1", e.InstanceReference.ProjectId);

    // Severity and error status come from the payload.
    Assert.AreEqual("ERROR", e.Severity);
    Assert.AreEqual(3, e.Status.Code);
    Assert.AreEqual("INVALID_ARGUMENT", e.Status.Message);
    Assert.AreEqual(
        new InstanceLocator("project-1", "us-central1-a", "instance-1"),
        e.InstanceReference);

    // The failed request carried no disk/image information.
    Assert.IsNull(e.Image);
}
/// <summary>
/// Builds the playback item list from a log record: player (non-scripted)
/// interactions and dialog-button presses, with each item's Delay set to the
/// time elapsed since the previously recorded item.
/// </summary>
/// <param name="log">Log record to convert into playback items.</param>
public void Save(LogRecord log)
{
    List<LogItem> logItems = log.CreateLogItems();

    Items = new List<PlaybackItem>();
    float previousTime = 0.0f; // 0 == "no item recorded yet"

    foreach (LogItem entry in logItems)
    {
        // Player-driven interactions only; scripted ones are skipped.
        var interact = entry as InteractLogItem;
        if (interact != null && interact.scripted.ToLower() == "false")
        {
            var playback = new PlaybackItem();
            playback.Type = PlaybackItem.PlaybackType.interact;
            playback.InteractName = interact.InteractName;
            playback.Character = interact.param;
            playback.RealTime = interact.time;
            playback.Args = interact.args;

            // The first recorded item anchors the clock; afterwards Delay is
            // the gap since the previous item.
            if (previousTime == 0.0f)
                previousTime = interact.time;
            playback.Delay = interact.time - previousTime;
            previousTime = interact.time;

            Items.Add(playback);
        }

        // Dialog button presses are always recorded.
        var button = entry as DialogButtonItem;
        if (button != null)
        {
            var playback = new PlaybackItem();
            playback.Type = PlaybackItem.PlaybackType.button;
            playback.Button = button.button;
            playback.Dialog = button.dialog;
            playback.RealTime = button.time;

            if (previousTime == 0.0f)
                previousTime = button.time;
            playback.Delay = button.time - previousTime;
            previousTime = button.time;

            Items.Add(playback);
        }
    }
}
/// <summary>
/// Records an informational log entry in the in-memory info list.
/// </summary>
/// <param name="message">Record to store.</param>
public void InfoMessage(LogRecord message)
{
    _info.Add(message);
}
/// <summary>
/// Called with a log record during the prepare phase; the base class takes
/// no action and reports the record as unhandled.
/// </summary>
/// <param name="rec">Record being prepared.</param>
/// <returns>false — the base implementation never handles the record.</returns>
public virtual bool PrepareRecord(LogRecord rec)
{
    // Fix: a bool-returning method must return a value (compile error
    // CS0161); return false per the base-class convention.
    return false;
}
/// <summary>
/// Called with a log record during the abort phase; the base class takes
/// no action and reports the record as unhandled.
/// </summary>
/// <param name="rec">Record being aborted.</param>
/// <returns>false — the base implementation never handles the record.</returns>
public virtual bool AbortRecord(LogRecord rec)
{
    // Fix: a bool-returning method must return a value (compile error
    // CS0161); return false per the base-class convention.
    return false;
}
/// <summary>
/// Called with a log record during the commit phase; the base class takes
/// no action and reports the record as unhandled.
/// </summary>
/// <param name="rec">Record being committed.</param>
/// <returns>false — the base implementation never handles the record.</returns>
public virtual bool CommitRecord(LogRecord rec)
{
    // Fix: a bool-returning method must return a value (compile error
    // CS0161); return false per the base-class convention.
    return false;
}
// Unity immediate-mode GUI for the interact-playback tool: Save/Load controls
// when idle, Stop/speed controls while playing, and a three-line view of the
// previous / pending / next playback items when debugging a run.
void OnGUI()
{
    if ( ShowGUI == false )
        return;

    GUILayout.BeginArea(new Rect(0,40,600,300));

    if ( state != State.Playing )
    {
        GUILayout.Box ("INTERACT PLAYBACK");
        if ( GUILayout.Button ("Save") )
        {
            // Snapshot the current log into a record...
            LogRecord record = new LogRecord();
            record.Add(LogMgr.GetInstance().GetCurrent());
            record.SetDateTime();
            // ...convert it to a playback list and persist it to XML.
            InteractPlaybackList list = new InteractPlaybackList();
            list.Save(record);
            list.SaveXML(null);
        }
        if ( GUILayout.Button ("Load") )
        {
            debug = true;
            if ( state != State.Playing )
            {
                // Load a saved playback list and restart with it.
                InteractPlaybackList list = new InteractPlaybackList();
                list.LoadXML(null);
                RestartWithList(list);
            }
            else
            {
                UnityEngine.Debug.LogError("InteractPlaybackMgr.Update() : can't start new playback while one is in progress!!");
            }
        }
    }
    else if ( state == State.Playing )
    {
        GUILayout.Box ("INTERACT PLAYBACK : " + InteractPlaybackFileMgr.GetInstance().Filename);
        if ( GUILayout.Button ("Stop") )
        {
            state = State.Complete;
            Time.timeScale = 1.0f; // restore normal game speed
        }
        // Playback speed controls.
        GUILayout.BeginHorizontal();
        if ( GUILayout.Button ("1x")) Time.timeScale = 1.0f;
        if ( GUILayout.Button ("2x")) Time.timeScale = 2.0f;
        if ( GUILayout.Button ("5x")) Time.timeScale = 5.0f;
        GUILayout.EndHorizontal();
    }

    // Nothing further to show unless a debug playback is in progress.
    if ( state != State.Playing || currentIdx == -1 || debug == false )
    {
        if ( restartTime != 0.0f && RestartOnDoneOrError == true)
            GUILayout.Button("Playback Complete, Restarting in " + ((int)(restartTime-elapsedTime)).ToString () + " seconds...");
        GUILayout.EndArea();
        return;
    }

    // Debug view: previous, current (waiting) and next playback items.
    GUILayout.Space(10);
    if ( (currentIdx-1) >= 0 )
        GUILayout.Label(" PB.curr=" + List.Items[currentIdx-1].Debug ());
    GUILayout.Label(" PB.wait=" + List.Items[currentIdx].Debug ());
    if ( (currentIdx+1) < List.Items.Count )
        GUILayout.Label(" PB.next=" + List.Items[currentIdx+1].Debug ());
    GUILayout.EndArea();
}
/// <summary>
/// Writes a framework log record to NLog, mapping its fields onto a
/// LogEventInfo with the extended properties the configured targets expect.
/// Never throws: failures are reported to Trace and NLog's internal logger.
/// </summary>
/// <param name="record">Record to write.</param>
/// <param name="messageType">Message-type tag stored in the event properties.</param>
/// <param name="args">Optional event parameters forwarded to NLog.</param>
protected override void WriteLogInternal(LogRecord record, string messageType, params object[] args)
{
    try
    {
        LogEventInfo logEvent = LogEventInfo.Create(LogLevel.FromOrdinal((int)record.Severity), _logger.Name, record.Message);
        logEvent.Exception = record.Exception;
        logEvent.Properties.Add("EventDateTime", DateTime.UtcNow);
        logEvent.Properties.Add("AppDomainName", AppDomain.CurrentDomain.FriendlyName);
        // Fix: use an explicit cast for the numeric severity instead of
        // GetHashCode(), which only incidentally yields the enum value.
        logEvent.Properties.Add("LevelID", (int)record.Severity);
        logEvent.Properties.Add("ApplicationName", _logger.Name);
        logEvent.Properties.Add("Title", record.Title);
        logEvent.Properties.Add("Category", record.Category);
        logEvent.Properties.Add("ReferenceID", record.ReferenceId);
        logEvent.Properties.Add("Identity", GetUserName());
        logEvent.Properties.Add("MessageType", messageType);
        logEvent.Properties.Add("AlertCondition", record.AlertCondition);
        logEvent.Properties.Add("AdditionalInfo1", record.AdditionalInfo1);
        logEvent.Properties.Add("AdditionalInfo2", record.AdditionalInfo2);
        if (args != null && args.Length > 0)
        {
            logEvent.Parameters = args;
        }

        InternalLogger.Debug("*** Error level is enabled: " + _logger.IsErrorEnabled);
        InternalLogger.Debug("*** Logging: " + (LogLevel.FromOrdinal((int)record.Severity).Name + " - " + record.Title));
        _logger.Log(logEvent);
    }
    catch (Exception ex)
    {
        // Logging must never take the application down — report and swallow.
        Trace.WriteLine("Caught an exception in WriteLogInternal: " + ex.Message, "EazyWizy.Framework.Logging.NLog.NLogLogWriter");
        InternalLogger.Error("*** Caught an exception in WriteLogInternal " + ex.ToString());
    }
}
/// <summary>
/// Captures the serving-cell identity (eNodeB/sector/PCI/EARFCN) from the
/// given record as the "before handover" cell.
/// </summary>
/// <param name="record">Record observed before the handover.</param>
public void UpdateCellInfoBefore(LogRecord record)
{
    ENodebIdBefore = record.ENodebId;
    SectorIdBefore = record.SectorId;
    PciBefore = record.Pci;
    EarfcnBefore = record.Earfcn;
}
/// <summary>
/// Captures the serving-cell identity (eNodeB/sector/PCI/EARFCN) from the
/// given record as the "after handover" cell.
/// </summary>
/// <param name="record">Record observed after the handover.</param>
public void UpdateCellInfoAfter(LogRecord record)
{
    ENodebIdAfter = record.ENodebId;
    SectorIdAfter = record.SectorId;
    PciAfter = record.Pci;
    EarfcnAfter = record.Earfcn;
}
/// <summary>
/// Marks the handover as successful and records where and when it
/// completed, plus the target-cell identity and signal level.
/// </summary>
/// <param name="successRecord">Record logged at handover success.</param>
public void Success(LogRecord successRecord)
{
    HandoverSuccess = true;

    // Completion time and position.
    FinishedTime = successRecord.Time;
    FinishLongtitude = successRecord.Longtitute;
    FinishLatitude = successRecord.Lattitute;

    // Target-cell identity and signal level.
    RsrpAfter = successRecord.Rsrp;
    UpdateCellInfoAfter(successRecord);
}
// Voids/updates an existing POS transaction identified by transactionIds:
// audits the raw request, copies header fields and payments from the API
// model onto the stored transaction, appends its lines, adjusts inventory
// and the receipt sequence number, and returns an APIResponse (code "1" on
// success, "0" with the exception text on any failure).
public IActionResult PostTransaction([FromBody] WebAPIModel.Transaction transactionApi, string transactionIds)
{
    // Audit trail: persist the raw incoming payload before any processing.
    LogRecord log = new LogRecord();
    log.TimeStamp = DateTime.Now;
    log.Tag = "Transaction void";
    log.Message = JsonConvert.SerializeObject(transactionApi);
    _context.LogRecord.Add(log);
    _context.SaveChanges();

    APIResponse response = new APIResponse();
    try
    {
        // Resolve the store and its type; First() throws if the code is unknown
        // (caught by the outer handler).
        Store store = _context.Store.Where(c => c.Code == transactionApi.storeCode).First();
        StoreType storetype = _context.StoreType.Where(s => s.Id == store.StoreTypeId).FirstOrDefault();

        // Load the existing transaction being voided/updated.
        Models.Transaction transaction = _context.Transaction.Where(x => x.TransactionId == transactionIds).First();
        transaction.CustomerId = 1; // NOTE(review): hard-coded default customer — confirm intent

        // Best-effort employee lookup; unknown codes fall back to employee 2.
        try
        {
            Employee emp = _context.Employee.Where(c => c.EmployeeCode == transactionApi.employeeId).First();
            transaction.EmployeeId = emp.Id;
            transaction.EmployeeCode = emp.EmployeeCode;
            transaction.EmployeeName = emp.EmployeeName;
        }
        catch
        {
            transaction.EmployeeId = 2;
        }

        // Copy header fields from the API model.
        transaction.MarginTransaction = 0;
        transaction.MethodOfPayment = transactionApi.paymentType.ToString();
        transaction.Qty = 0;
        transaction.RecieptCode = transactionApi.receiptId;
        transaction.Spgid = 0;
        transaction.Text1 = transactionApi.spgId;
        transaction.StoreCode = transactionApi.storeCode;
        transaction.StoreId = store.Id;
        transaction.Status = transactionApi.status;
        try
        {
            transaction.CustomerCode = transactionApi.customerIdStore;
        }
        catch
        {
            transaction.CustomerCode = "";
        }
        transaction.ClosingShiftId = transactionApi.openShiftId;
        transaction.ClosingStoreId = transactionApi.openStoreId;
        transaction.TransactionId = transactionApi.transactionId;
        transaction.TotalAmounTransaction = transactionApi.total;
        transaction.TotalDiscount = transactionApi.discount;
        transaction.TransactionId = transactionApi.transactionId; // NOTE(review): duplicate assignment

        // Payment breakdown.
        transaction.Cash = transactionApi.cash;
        transaction.Edc1 = transactionApi.Edc1;
        transaction.Edc2 = transactionApi.Edc2;
        transaction.Bank1 = transactionApi.Bank1;
        transaction.Bank2 = transactionApi.Bank2;
        transaction.NoRef1 = transactionApi.NoRef1;
        transaction.NoRef2 = transactionApi.NoRef2;
        transaction.Change = transactionApi.change;

        // Parse the transaction date: prefer the single timeStamp field,
        // otherwise fall back to separate date+time fields (note: both parts
        // are concatenated, matching the concatenated "yyyy-MM-ddH:mm:ss" format).
        try
        {
            transaction.TransactionDate = DateTime.ParseExact(transactionApi.timeStamp, "MMM dd, yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
        }
        catch
        {
            transaction.TransactionDate = DateTime.ParseExact(transactionApi.date + transactionApi.time, "yyyy-MM-dd" + "H:mm:ss", CultureInfo.InvariantCulture);
        }

        // Business date: use the store's last closing timestamp when present.
        // NOTE(review): `tglclosing != DateTime.Now` is effectively always
        // true (exact-instant comparison), so the else branch is dead — confirm.
        try
        {
            var tglclosing = _context.ClosingStore.OrderByDescending(x => x.ClosingTimeStamp).Where(x => x.StoreCode == transactionApi.storeCode).First().ClosingTimeStamp;
            if (tglclosing != DateTime.Now)
            {
                transaction.TransDateStore = tglclosing;
            }
            else
            {
                try
                {
                    transaction.TransDateStore = DateTime.ParseExact(transactionApi.timeStamp, "MMM dd, yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
                }
                catch
                {
                    transaction.TransDateStore = DateTime.ParseExact(transactionApi.date + transactionApi.time, "yyyy-MM-dd" + "H:mm:ss", CultureInfo.InvariantCulture);
                }
            }
        }
        catch { } // NOTE(review): silently skipped when the store has never closed

        // Classify the transaction by store type...
        if (storetype.StoreInStore.Value == true)
        {
            transaction.TransactionType = Config.RetailEnum.transactionStoreinStore;
        }
        else
        {
            transaction.TransactionType = Config.RetailEnum.transactionStore;
        }
        // ...then reclassify as an employee purchase when the customer id
        // matches an employee code.
        try
        {
            bool employeeInMaster = _context.Employee.Any(c => c.EmployeeCode == transactionApi.customerId);
            if (employeeInMaster)
            {
                transaction.Text2 = transactionApi.customerId;
                transaction.TransactionType = Config.RetailEnum.transactionEmployee;
            }
        }
        catch { }

        if (!ModelState.IsValid)
        {
            return (BadRequest(ModelState));
        }
        _context.Transaction.Update(transaction);
        _context.SaveChanges();

        // Append the transaction lines.
        // NOTE(review): SaveChanges per line is slow; batching was tried (see history).
        for (int i = 0; i < transactionApi.transactionLines.Count; i++)
        {
            Models.TransactionLines transactionLines = new Models.TransactionLines();
            transactionLines.TransactionId = transaction.Id;
            transactionLines.ArticleId = transactionApi.transactionLines[i].article.articleId;
            transactionLines.ArticleIdAlias = transactionApi.transactionLines[i].article.articleIdAlias;
            transactionLines.ArticleName = transactionApi.transactionLines[i].article.articleName;
            transactionLines.UnitPrice = transactionApi.transactionLines[i].price;
            transactionLines.Amount = transactionApi.transactionLines[i].subtotal;
            transactionLines.Discount = transactionApi.transactionLines[i].discount;
            transactionLines.DiscountCode = transactionApi.transactionLines[i].discountCode;
            transactionLines.DiscountType = transactionApi.transactionLines[i].discountType;
            transactionLines.Qty = transactionApi.transactionLines[i].quantity;
            transactionLines.Spgid = transactionApi.transactionLines[i].spgId;
            _context.TransactionLines.Add(transactionLines);
            _context.SaveChanges();
        }

        response.code = "1";
        response.message = "Sucess Add Data";

        // Post-processing: adjust inventory and advance the receipt sequence.
        // NOTE(review): .Wait() blocks the request thread on async work.
        inventory(transactionApi).Wait();
        sequenceNumber(transactionApi);

        // Downstream (Infor) forwarding is currently disabled for both branches.
        if (transaction.TransactionType == Config.RetailEnum.transactionStoreinStore)
        {
        }
        else
        {
        }
    }
    catch (Exception ex)
    {
        // Any failure is reported in-band rather than as an HTTP error.
        response.code = "0";
        response.message = ex.ToString();
    }
    return (Ok(response));
}
/// <summary>
/// Creates a playback list from a log record and resets playback state so
/// the run starts from the first item.
/// </summary>
/// <param name="log">Log record to build the playback list from.</param>
public void Load(LogRecord log)
{
    // Convert the log into playback items.
    List = new InteractPlaybackList();
    List.Save(log);

    // Rewind to the first item and enter the init state.
    currentIdx = 0;
    elapsedTime = StartTime;
    state = State.Init;
}
/// <summary>
/// Lazily reads log records from the given file. Lines matching
/// LogRecordPattern start a new record; non-matching lines are appended to
/// the current record's message. Yields nothing if the file cannot be opened.
/// </summary>
/// <param name="fileNode">File to read; must not be null.</param>
/// <returns>Records in file order.</returns>
public IEnumerable<LogRecord> LoadRecordsFromFile(FileNode fileNode)
{
    Argument.IsNotNull(() => fileNode);

    FileStream stream;
    Log.Debug("Loading records file file '{0}'", fileNode);
    // Open outside the iterator's using-block: yield statements cannot sit
    // inside a try that has a catch, so the failure path is handled here.
    try
    {
        stream = new FileStream(fileNode.FileInfo.FullName, FileMode.Open, FileAccess.Read);
    }
    catch (IOException ex)
    {
        Log.Warning(ex, "Failed to load records from file '{0}'", fileNode);
        yield break; // unreadable file yields no records
    }

    int counter = 0;
    using (stream)
    {
        using (var reader = new StreamReader(stream))
        {
            string line;
            LogRecord record = null;
            while ((line = reader.ReadLine()) != null)
            {
                if (LogRecordPattern.IsMatch(line))
                {
                    // A new record starts — emit the one accumulated so far.
                    if (record != null)
                    {
                        yield return record;
                    }
                    // The Extract* helpers consume their prefix from `line`
                    // by ref; whatever remains becomes the message.
                    record = new LogRecord
                    {
                        Position = counter++,
                        FileNode = fileNode,
                        DateTime = ExtractDateTime(ref line)
                    };
                    // Date-less timestamps in unify-named files borrow the
                    // file's date, keeping only the time of day.
                    if (fileNode.IsUnifyNamed && record.DateTime.Date == DateTime.MinValue.Date)
                    {
                        record.DateTime = fileNode.DateTime.Date + record.DateTime.TimeOfDay;
                    }
                    record.LogEvent = ExtractLogEventType(ref line);
                    record.TargetTypeName = ExtractTargetTypeName(ref line);
                    record.ThreadId = ExtractThreadId(ref line);
                    record.Message = line;
                }
                else
                {
                    // Continuation line of a multi-line message.
                    AppendMessageLine(record, line);
                }
            }
            // Emit the trailing record.
            if (record != null)
            {
                yield return record;
            }
        }
    }
    Log.Info("Read '{0}' records from file '{1}'", counter, fileNode);
}
/// <summary>
/// Hook invoked with each log record during the prepare phase.
/// </summary>
/// <param name="rec">Record offered to this participant.</param>
/// <returns>false — the base class takes no action.</returns>
public virtual bool PrepareRecord(LogRecord rec)
{
    // Nothing to do in the base class.
    return false;
}
///<exclude/>
/// <summary>
/// Value equality: two records are equal when their time, logger name,
/// level and message all match.
/// </summary>
/// <param name="other">Record to compare against; null is never equal.</param>
public bool Equals(LogRecord other)
{
    if (ReferenceEquals(null, other))
    {
        return false;
    }
    if (ReferenceEquals(this, other))
    {
        return true; // same instance — trivially equal
    }

    // Compare the four identifying fields.
    return other._Time.Equals(_Time)
           && other._Loggername == (_Loggername)
           && other._Level.Equals(_Level)
           && other._Message == (_Message);
}
/// <summary>
/// Drains the KCP receive buffer: peeks for data, copies it into the packet
/// parser, then dispatches each parsed packet (RPC callback, general receive
/// handler, or heartbeat). Disconnects on any dispatch error.
/// (Original note, translated: "This method is not actually used.")
/// </summary>
public override void StartRecv()
{
    this.LastRecvTime = TimeUitls.Now();
    SetKcpSendTime();
    while (true)
    {
        // Nothing pending in KCP — done for now.
        int n = kcp.PeekSize();
        if (n == 0)
        {
            return;
        }
        int count = this.kcp.Recv(cacheBytes, 0, cacheBytes.Length);
        if (count <= 0)
        {
            return;
        }
        // Feed the raw bytes to the parser, then drain complete packets.
        RecvParser.WriteBuffer(cacheBytes, 0, count);
        while (true)
        {
            try
            {
                var packet = RecvParser.ReadBuffer();
                if (!packet.IsSuccess)
                {
                    break; // incomplete packet — wait for more bytes
                }
                if (!packet.IsHeartbeat)
                {
                    if (packet.IsRpc)
                    {
                        // A pending RPC gets its registered callback;
                        // otherwise fall through to the general handler.
                        if (RpcDictionarys.TryRemove(packet.RpcId, out Action <Packet> action))
                        {
                            action(packet);
                        }
                        else
                        {
                            OnReceive?.Invoke(packet);
                        }
                    }
                    else
                    {
                        OnReceive?.Invoke(packet);
                    }
                }
                else
                {
#if DEBUG
                    // Heartbeat packets are only logged in debug builds.
                    LogRecord.Log(LogLevel.Warn, "HandleRecv", $"接收到客户端:{this.RemoteEndPoint}心跳包.");
#endif
                }
            }
            catch (Exception e)
            {
                // Any dispatch failure tears down the connection.
                DisConnect();
#if DEBUG
                LogRecord.Log(LogLevel.Warn, "StartRecv", e);
#endif
                return;
            }
        }
    }
}
// Verifies that an audit-log entry with severity NOTICE (a successful
// v1.compute.instances.insert) deserializes into an InsertInstanceEvent with
// the instance identity, severity and source image extracted, and no error
// status.
public void WhenSeverityIsNotice_ThenFieldsAreExtracted()
{
    // Sample Cloud Audit Log entry for a successful instance insert whose
    // boot disk is initialized from projects/project-1/global/images/image-1.
    var json = @" { 'protoPayload': { '@type': 'type.googleapis.com/google.cloud.audit.AuditLog', 'authenticationInfo': { }, 'requestMetadata': { }, 'serviceName': 'compute.googleapis.com', 'methodName': 'v1.compute.instances.insert', 'authorizationInfo': [ ], 'resourceName': 'projects/111/zones/us-central1-a/instances/instance-1', 'request': { 'name': 'instance-group-1-xbtt', 'machineType': 'projects/111/zones/us-central1-a/machineTypes/n1-standard-4', 'canIpForward': false, 'networkInterfaces': [ { 'network': 'projects/111/global/networks/default', 'accessConfigs': [ { 'type': 'ONE_TO_ONE_NAT', 'name': 'External NAT', 'networkTier': 'PREMIUM' } ], 'subnetwork': 'projects/111/regions/us-central1/subnetworks/default' } ], 'disks': [ { 'type': 'PERSISTENT', 'mode': 'READ_WRITE', 'deviceName': 'instance-1', 'boot': true, 'initializeParams': { 'sourceImage': 'projects/project-1/global/images/image-1', 'diskSizeGb': '127', 'diskType': 'projects/111/zones/us-central1-a/diskTypes/pd-standard' }, 'autoDelete': true } ], 'serviceAccounts': [ { 'email': '*****@*****.**', 'scopes': [ 'https://www.googleapis.com/auth/devstorage.read_only', 'https://www.googleapis.com/auth/logging.write', 'https://www.googleapis.com/auth/monitoring.write', 'https://www.googleapis.com/auth/servicecontrol', 'https://www.googleapis.com/auth/service.management.readonly', 'https://www.googleapis.com/auth/trace.append' ] } ], 'scheduling': { 'onHostMaintenance': 'TERMINATE', 'automaticRestart': false, 'preemptible': false, 'nodeAffinitys': [ { 'key': 'license', 'operator': 'IN', 'values': [ 'byol' ] } ] }, 'displayDevice': { 'enableDisplay': false }, 'links': [ { 'target': 'projects/111/locations/us-central1-a/instances/instance-group-1-xbtt', 'type': 'MEMBER_OF', 'source': 'projects/111/locations/us-central1-a/instanceGroupManagers/instance-group-1@3579973466633327805' } ], 'requestId': '4a68f20d-9f80-32f3-adc4-acf842d7ae0b', '@type': 'type.googleapis.com/compute.instances.insert' }, 'response': { 'id': '5042353291971988238', 'name': 'operation-1588508129141-5a4bd5ec2a16d-418ba83e-11fc353d', 'zone': 'https://www.googleapis.com/compute/v1/projects/project-1/zones/us-central1-a', 'clientOperationId': '4a68f20d-9f80-32f3-adc4-acf842d7ae0b', 'operationType': 'insert', 'targetLink': 'https://www.googleapis.com/compute/v1/projects/project-1/zones/us-central1-a/instances/instance-group-1-xbtt', 'targetId': '518436304627895054', 'status': 'RUNNING', 'user': '******', 'progress': '0', 'insertTime': '2020-05-03T05:15:29.813-07:00', 'startTime': '2020-05-03T05:15:29.817-07:00', 'selfLink': 'https://www.googleapis.com/compute/v1/projects/project-1/zones/us-central1-a/operations/operation-1588508129141-5a4bd5ec2a16d-418ba83e-11fc353d', 'selfLinkWithId': 'https://www.googleapis.com/compute/v1/projects/project-1/zones/us-central1-a/operations/5042353291971988238', '@type': 'type.googleapis.com/operation' }, 'resourceLocation': { 'currentLocations': [ 'us-central1-a' ] } }, 'insertId': '3vuqdhe1iqbu', 'resource': { 'type': 'gce_instance', 'labels': { 'zone': 'us-central1-a', 'instance_id': '11111111631960822', 'project_id': 'project-1' } }, 'timestamp': '2020-05-03T12:15:29.009Z', 'severity': 'NOTICE', 'logName': 'projects/project-1/logs/cloudaudit.googleapis.com%2Factivity', 'operation': { 'id': 'operation-1588508129141-5a4bd5ec2a16d-418ba83e-11fc353d', 'producer': 'compute.googleapis.com', 'first': true }, 'receiveTimestamp': '2020-05-03T12:15:30.903794912Z' } ";

    var r = LogRecord.Deserialize(json);
    Assert.IsTrue(InsertInstanceEvent.IsInsertInstanceEvent(r));

    var e = (InsertInstanceEvent)r.ToEvent();

    // Instance identity comes from the resource labels / resourceName.
    Assert.AreEqual(11111111631960822, e.InstanceId);
    Assert.AreEqual("instance-1", e.InstanceReference.Name);
    Assert.AreEqual("us-central1-a", e.InstanceReference.Zone);
    Assert.AreEqual("project-1", e.InstanceReference.ProjectId);

    // Successful inserts are NOTICE and carry no error status.
    Assert.AreEqual("NOTICE", e.Severity);
    Assert.IsNull(e.Status);
    Assert.AreEqual(
        new InstanceLocator("project-1", "us-central1-a", "instance-1"),
        e.InstanceReference);

    // The boot disk's sourceImage is surfaced as the event's image.
    Assert.AreEqual(
        new ImageLocator("project-1", "image-1"),
        e.Image);
}
/// <summary>
/// Reads the log record that ENDS at <paramref name="actualPosition"/> by
/// walking backwards: reads the 4-byte length suffix just before that
/// position, validates it, seeks back over the record and its length prefix,
/// cross-checks prefix == suffix, then parses the record. Record layout on
/// disk is [int32 length][record bytes][int32 length].
/// </summary>
/// <param name="workItem">Reader whose stream/reader are positioned by this method.</param>
/// <param name="actualPosition">Logical position immediately after the record.</param>
/// <param name="length">Receives the record's byte length (-1 on the too-small-file early exit).</param>
/// <param name="record">Receives the parsed record, or null on the early exit.</param>
/// <returns>false when there is no room for even a length prefix+suffix; true on success. Corruption throws.</returns>
protected bool TryReadBackwardInternal(ReaderWorkItem workItem, long actualPosition, out int length, out LogRecord record)
{
    length = -1;
    record = null;

    // No space even for the length prefix and suffix — nothing to read.
    if (actualPosition < 2 * sizeof(int))
    {
        return (false);
    }

    var realPos = GetRawPosition(actualPosition);
    // The 4 bytes immediately before actualPosition hold the length suffix.
    workItem.Stream.Position = realPos - sizeof(int);

    length = workItem.Reader.ReadInt32();
    if (length <= 0)
    {
        throw new InvalidReadException(
            string.Format("Log record that ends at actual pos {0} has non-positive length: {1}. " + "In chunk {2}.",
                          actualPosition, length, Chunk));
    }
    if (length > TFConsts.MaxLogRecordSize)
    {
        throw new ArgumentException(
            string.Format("Log record that ends at actual pos {0} has too large length: {1} bytes, " + "while limit is {2} bytes. In chunk {3}.",
                          actualPosition, length, TFConsts.MaxLogRecordSize, Chunk));
    }

    // The claimed length must fit between the start of the chunk data and
    // actualPosition, including both 4-byte length fields.
    if (actualPosition < length + 2 * sizeof(int)) // no space for record + length prefix and suffix
    {
        throw new UnableToReadPastEndOfStreamException(
            string.Format("There is not enough space to read full record (length suffix: {0}). " + "Actual post-position: {1}. Something is seriously wrong in chunk {2}.",
                          length, actualPosition, Chunk));
    }

    // Seek to the length prefix at the start of the record.
    workItem.Stream.Position = realPos - length - 2 * sizeof(int);

    // verify suffix length == prefix length
    int prefixLength = workItem.Reader.ReadInt32();
    if (prefixLength != length)
    {
        throw new Exception(
            string.Format("Prefix/suffix length inconsistency: prefix length({0}) != suffix length ({1})" + "Actual post-position: {2}. Something is seriously wrong in chunk {3}.",
                          prefixLength, length, actualPosition, Chunk));
    }

    // Stream is now positioned at the record body — parse it.
    record = LogRecord.ReadFrom(workItem.Reader);
    return (true);
}
/// <summary>
/// Fixture setup: builds a small chunk database containing one stream with three
/// prepare/commit pairs, completes the chunk, advances the writer/chaser checkpoints,
/// and runs a scavenge so tests can inspect the scavenged chunk.
/// </summary>
public override async Task TestFixtureSetUp()
{
    await base.TestFixtureSetUp();
    _logFormat = LogFormatHelper<TLogFormat, TStreamId>.LogFormatFactory.Create(new() {
        IndexDirectory = GetFilePathFor("index"),
    });
    _db = new TFChunkDb(TFChunkHelper.CreateSizedDbConfig(PathName, 0, chunkSize: 16 * 1024));
    _db.Open();
    var chunk = _db.Manager.GetChunkFor(0);
    var streamName = "es-to-scavenge";
    var pos = 0L;
    // Reserve the stream name; some log formats emit an explicit stream record that must
    // be appended first (and shifts the position of the first prepare).
    _logFormat.StreamNameIndex.GetOrReserve(_logFormat.RecordFactory, streamName, 0, out var streamId, out var streamRecord);
    if (streamRecord is not null)
    {
        var res = chunk.TryAppend(streamRecord);
        pos = res.NewPosition;
    }
    var expectedVersion = ExpectedVersion.NoStream;
    // Three events, each written as a prepare immediately followed by its commit.
    _p1 = LogRecord.SingleWrite(_logFormat.RecordFactory, pos, Guid.NewGuid(), Guid.NewGuid(), streamId,
        expectedVersion++, "et1", new byte[2048], new byte[] { 5, 7 });
    _res1 = chunk.TryAppend(_p1);
    _c1 = LogRecord.Commit(_res1.NewPosition, Guid.NewGuid(), _p1.LogPosition, 0);
    _cres1 = chunk.TryAppend(_c1);
    _p2 = LogRecord.SingleWrite(_logFormat.RecordFactory, _cres1.NewPosition, Guid.NewGuid(), Guid.NewGuid(), streamId,
        expectedVersion++, "et1", new byte[2048], new byte[] { 5, 7 });
    _res2 = chunk.TryAppend(_p2);
    _c2 = LogRecord.Commit(_res2.NewPosition, Guid.NewGuid(), _p2.LogPosition, 1);
    _cres2 = chunk.TryAppend(_c2);
    _p3 = LogRecord.SingleWrite(_logFormat.RecordFactory, _cres2.NewPosition, Guid.NewGuid(), Guid.NewGuid(), streamId,
        expectedVersion++, "et1", new byte[2048], new byte[] { 5, 7 });
    _res3 = chunk.TryAppend(_p3);
    _c3 = LogRecord.Commit(_res3.NewPosition, Guid.NewGuid(), _p3.LogPosition, 2);
    _cres3 = chunk.TryAppend(_c3);
    chunk.Complete();
    _originalFileSize = chunk.FileSize;
    // Move both checkpoints to the chunk end so the scavenger sees the chunk as fully replicated.
    _db.Config.WriterCheckpoint.Write(chunk.ChunkHeader.ChunkEndPosition);
    _db.Config.WriterCheckpoint.Flush();
    _db.Config.ChaserCheckpoint.Write(chunk.ChunkHeader.ChunkEndPosition);
    _db.Config.ChaserCheckpoint.Flush();
    // FakeReadIndex treats only our stream as existing; metastream handling comes from the log format.
    var scavenger = new TFChunkScavenger<TStreamId>(_db, new FakeTFScavengerLog(), new FakeTableIndex<TStreamId>(),
        new FakeReadIndex<TLogFormat, TStreamId>(x => EqualityComparer<TStreamId>.Default.Equals(x, streamId),
            _logFormat.Metastreams), _logFormat.Metastreams);
    await scavenger.Scavenge(alwaysKeepScavenged: true, mergeChunks: false);
    _scavengedChunk = _db.Manager.GetChunk(0);
}
/// <summary>
/// Wraps a log record as a terminate-on-host-maintenance event. Callers must pre-check the
/// record with <see cref="IsTerminateOnHostMaintenanceEvent"/>; the match is only asserted
/// in debug builds.
/// </summary>
internal TerminateOnHostMaintenanceEvent(LogRecord logRecord) : base(logRecord)
{
    Debug.Assert(IsTerminateOnHostMaintenanceEvent(logRecord));
}
/// <summary>
/// Builds a concrete log record from a test <c>Rec</c> description, honoring the record
/// version (V0 vs current) and synthesizing transaction-begin/end flags from the
/// transaction's first/last prepare ids.
/// </summary>
/// <param name="rec">Test description of the record to create.</param>
/// <param name="transInfo">Mutable transaction bookkeeping; its TransactionOffset is advanced for prepares/deletes.</param>
/// <param name="logPos">Log position to stamp on the created record.</param>
/// <param name="expectedVersion">Expected stream version for prepare-like records.</param>
/// <exception cref="ArgumentOutOfRangeException">Unknown rec.Type.</exception>
private LogRecord CreateLogRecord(Rec rec, TransactionInfo transInfo, long logPos, long expectedVersion)
{
    switch (rec.Type)
    {
        case Rec.RecType.Prepare:
        {
            // Prepares consume a transaction offset slot.
            int transOffset = transInfo.TransactionOffset;
            transInfo.TransactionOffset += 1;
            if (rec.Version == LogRecordVersion.LogRecordV0)
            {
                return(CreateLogRecordV0(rec, transInfo, transOffset, logPos, expectedVersion,
                    rec.Metadata == null ? rec.Id.ToByteArray() : FormatRecordMetadata(rec),
                    PrepareFlags.Data
                    | (transInfo.FirstPrepareId == rec.Id ? PrepareFlags.TransactionBegin : PrepareFlags.None)
                    | (transInfo.LastPrepareId == rec.Id ? PrepareFlags.TransactionEnd : PrepareFlags.None)
                    | (rec.Metadata == null ? PrepareFlags.None : PrepareFlags.IsJson)));
            }

            // When no metadata is supplied the record id bytes serve as placeholder data.
            return(LogRecord.Prepare(logPos, Guid.NewGuid(), rec.Id, transInfo.TransactionPosition, transOffset,
                rec.StreamId, expectedVersion,
                rec.PrepareFlags
                | (transInfo.FirstPrepareId == rec.Id ? PrepareFlags.TransactionBegin : PrepareFlags.None)
                | (transInfo.LastPrepareId == rec.Id ? PrepareFlags.TransactionEnd : PrepareFlags.None)
                | (rec.Metadata == null ? PrepareFlags.None : PrepareFlags.IsJson),
                rec.EventType,
                rec.Metadata == null ? rec.Id.ToByteArray() : FormatRecordMetadata(rec),
                null, rec.TimeStamp));
        }
        case Rec.RecType.Delete:
        {
            // Deletes are prepares carrying StreamDelete and no data.
            int transOffset = transInfo.TransactionOffset;
            transInfo.TransactionOffset += 1;
            if (rec.Version == LogRecordVersion.LogRecordV0)
            {
                return(CreateLogRecordV0(rec, transInfo, transOffset, logPos, expectedVersion,
                    LogRecord.NoData,
                    PrepareFlags.StreamDelete
                    | (transInfo.FirstPrepareId == rec.Id ? PrepareFlags.TransactionBegin : PrepareFlags.None)
                    | (transInfo.LastPrepareId == rec.Id ? PrepareFlags.TransactionEnd : PrepareFlags.None)));
            }

            return(LogRecord.Prepare(logPos, Guid.NewGuid(), rec.Id, transInfo.TransactionPosition, transOffset,
                rec.StreamId, expectedVersion,
                PrepareFlags.StreamDelete
                | (transInfo.FirstPrepareId == rec.Id ? PrepareFlags.TransactionBegin : PrepareFlags.None)
                | (transInfo.LastPrepareId == rec.Id ? PrepareFlags.TransactionEnd : PrepareFlags.None),
                rec.EventType, LogRecord.NoData, null, rec.TimeStamp));
        }
        case Rec.RecType.TransStart:
        case Rec.RecType.TransEnd:
        {
            // Transaction markers have no payload and use offset -1.
            if (rec.Version == LogRecordVersion.LogRecordV0)
            {
                return(CreateLogRecordV0(rec, transInfo, -1, logPos, expectedVersion,
                    LogRecord.NoData,
                    (transInfo.FirstPrepareId == rec.Id ? PrepareFlags.TransactionBegin : PrepareFlags.None)
                    | (transInfo.LastPrepareId == rec.Id ? PrepareFlags.TransactionEnd : PrepareFlags.None)));
            }

            return(LogRecord.Prepare(logPos, Guid.NewGuid(), rec.Id, transInfo.TransactionPosition, -1,
                rec.StreamId, expectedVersion,
                (transInfo.FirstPrepareId == rec.Id ? PrepareFlags.TransactionBegin : PrepareFlags.None)
                | (transInfo.LastPrepareId == rec.Id ? PrepareFlags.TransactionEnd : PrepareFlags.None),
                rec.EventType, LogRecord.NoData, null, rec.TimeStamp));
        }
        case Rec.RecType.Commit:
        {
            if (rec.Version == LogRecordVersion.LogRecordV0)
            {
                return(new CommitLogRecord(logPos, Guid.NewGuid(), transInfo.TransactionPosition, DateTime.UtcNow,
                    transInfo.TransactionEventNumber, LogRecordVersion.LogRecordV0));
            }

            return(LogRecord.Commit(logPos, Guid.NewGuid(), transInfo.TransactionPosition,
                transInfo.TransactionEventNumber));
        }
        default:
            throw new ArgumentOutOfRangeException();
    }
}
/// <summary>
/// Integration test: stores a fully-populated log record in Elasticsearch, waits for
/// indexing, then filters by time window / application / level / server / free text and
/// verifies the single hit round-trips every field. Requires a live Elasticsearch instance.
/// </summary>
public async Task LogFilteringTest()
{
    // add a test log record
    var utcnow = DateTime.UtcNow.Date;
    var elasticLogStore = new ElasticSearchLogStore(() => utcnow);
    // Deliberately non-existent path so the test never collides with real application logs.
    const string appPath = "c:\\###rather_not_existing_application_path2###";
    var logrec = new LogRecord {
        LoggerName = "TestLogger",
        ApplicationPath = appPath,
        LogLevel = LogRecord.ELogLevel.Error,
        TimeUtc = DateTime.UtcNow,
        ProcessId = -1, // sentinel pid, also used as a unique search key in sibling tests
        ThreadId = 456,
        Server = "TestServer",
        Identity = "TestIdentity",
        CorrelationId = Guid.NewGuid().ToString(),
        Message = "Test log message to store in the log",
        ExceptionMessage = "Test exception log message",
        ExceptionType = "TestException",
        ExceptionAdditionalInfo = "Additinal info for the test exception",
        AdditionalFields = new Dictionary<string, Object> {
            { "Host", "testhost.com" },
            { "LoggedUser", "testloggeduser" },
            { "HttpStatusCode", "200.1" },
            { "Url", "http://testhost.com" },
            { "Referer", "http://prevtesthost.com" },
            { "ClientIP", null },
            { "RequestData", "test test test" },
            { "ResponseData", null },
            { "ServiceName", "TestService" },
            { "ServiceDisplayName", "Test service generating logs" },
            { "NotExisting", null }
        },
        PerformanceData = new Dictionary<string, float> { { "CPU", 2.0f }, { "Memory", 20000000f } }
    };
    // add log
    await elasticLogStore.AddLogRecordAsync(logrec);
    // give it 2s to index
    await Task.Delay(2000);
    // check content
    var searchResults = await elasticLogStore.FilterLogsAsync(new LogSearchCriteria {
        FromUtc = DateTime.UtcNow.AddMinutes(-10),
        ToUtc = DateTime.UtcNow.AddMinutes(10),
        ApplicationPath = appPath,
        Levels = new[] { LogRecord.ELogLevel.Error, LogRecord.ELogLevel.Info },
        Limit = 10,
        Offset = 0,
        Server = "TestServer",
        Keywords = new KeywordsParsed { FreeText = "test exception" }
    });
    Assert.NotNull(searchResults.FoundItems);
    var foundItems = searchResults.FoundItems.ToArray();
    Assert.True(foundItems.Length == 1);
    var logrec2 = foundItems[0];
    // Every stored field must survive the round trip (time compared at date precision only).
    Assert.Equal(logrec.LoggerName, logrec2.LoggerName);
    Assert.Equal(logrec.ApplicationPath, logrec2.ApplicationPath);
    Assert.Equal(logrec.LogLevel, logrec2.LogLevel);
    Assert.Equal(logrec.TimeUtc.ToShortDateString(), logrec2.TimeUtc.ToShortDateString());
    Assert.Equal(logrec.ProcessId, logrec2.ProcessId);
    Assert.Equal(logrec.ThreadId, logrec2.ThreadId);
    Assert.Equal(logrec.Server, logrec2.Server);
    Assert.Equal(logrec.Identity, logrec2.Identity);
    Assert.Equal(logrec.CorrelationId, logrec2.CorrelationId);
    Assert.Equal(logrec.Message, logrec2.Message);
    Assert.Equal(logrec.ExceptionMessage, logrec2.ExceptionMessage);
    Assert.Equal(logrec.ExceptionType, logrec2.ExceptionType);
    Assert.Equal(logrec.ExceptionAdditionalInfo, logrec2.ExceptionAdditionalInfo);
    Assert.Equal(logrec.AdditionalFields["Host"], logrec2.AdditionalFields["Host"]);
    Assert.Equal(logrec.AdditionalFields["LoggedUser"], logrec2.AdditionalFields["LoggedUser"]);
    Assert.Equal(logrec.AdditionalFields["HttpStatusCode"], logrec2.AdditionalFields["HttpStatusCode"]);
    Assert.Equal(logrec.AdditionalFields["Url"], logrec2.AdditionalFields["Url"]);
    Assert.Equal(logrec.AdditionalFields["Referer"], logrec2.AdditionalFields["Referer"]);
    Assert.Equal(logrec.AdditionalFields["RequestData"], logrec2.AdditionalFields["RequestData"]);
    Assert.Equal(logrec.AdditionalFields["ServiceName"], logrec2.AdditionalFields["ServiceName"]);
    Assert.Equal(logrec.AdditionalFields["ServiceDisplayName"], logrec2.AdditionalFields["ServiceDisplayName"]);
}
/// <summary>
/// True when a record behaves as a commit: either an explicit commit record, or a
/// prepare already flagged as committed.
/// </summary>
private static bool IsCommitAlike(LogRecord rec)
{
    switch (rec.RecordType)
    {
        case LogRecordType.Commit:
            return true;
        case LogRecordType.Prepare:
            return ((PrepareLogRecord)rec).Flags.HasAnyOf(PrepareFlags.IsCommitted);
        default:
            return false;
    }
}
/// <summary>
/// Unity coroutine that snapshots the current log into a <see cref="LogRecord"/> and saves
/// it to <c>FullPath</c> on a background thread, yielding until the save completes.
/// The <c>savingFile</c> flag is a simple coroutine-level gate so only one save runs at a time.
/// </summary>
public IEnumerator SaveBackground()
{
    yield return null;

    // Snapshot the current log before handing it to the worker thread.
    LogRecord record = new LogRecord();
    record.Add(LogMgr.GetInstance().GetCurrent());
    record.SetDateTime();

    // Wait for any in-flight save to finish before starting ours.
    while (savingFile)
        yield return new WaitForSeconds(0.5f);
    savingFile = true;

    ThreadedSavePlayback saveThread = new ThreadedSavePlayback();
    saveThread.filePath = FullPath;
    saveThread.record = record;

    Thread saveCallThread = new Thread(saveThread.StartSaveCall);
    saveCallThread.Start();

    // Poll without blocking the main thread until the worker signals completion.
    while (!saveThread.ThreadComplete)
        yield return new WaitForSeconds(.1f);

    // Worker has finished: Join is sufficient to reclaim the thread. The previous
    // Thread.Abort() call was unnecessary on a completed thread and is obsolete
    // (it throws PlatformNotSupportedException on modern .NET runtimes).
    saveCallThread.Join();
    savingFile = false;

    yield return null;
}
/// <summary>Checks whether the record's level meets or exceeds the configured minimum.</summary>
public bool Verify(LogRecord record) => record.Level >= MinimumLevel;
/// <summary>
/// Marks this handover as failed and captures when/where the attempt ended.
/// </summary>
/// <param name="failRecord">The record observed at the moment the handover failed.</param>
public void Fail(LogRecord failRecord)
{
    HandoverSuccess = false;
    FinishedTime = failRecord.Time;
    // NOTE: the LogRecord properties are spelled "Longtitute"/"Lattitute" (sic) project-wide.
    FinishLongtitude = failRecord.Longtitute;
    FinishLatitude = failRecord.Lattitute;
    // Refresh the after-handover serving-cell fields from the same record.
    UpdateCellInfoAfter(failRecord);
}
/// <summary>
/// Writes the record's message to the console and returns an already-completed task
/// (the write itself is synchronous).
/// </summary>
/// <param name="record">Record whose Message is written.</param>
public Task WriteEvent(LogRecord record)
{
    Console.WriteLine(record.Message);
    // Task.CompletedTask avoids allocating a fresh Task<object> on every call,
    // unlike Task.FromResult(default(object)).
    return Task.CompletedTask;
}
/// <summary>
/// Starts an asynchronous make-stable operation for the record using the classic
/// APM (Begin/End) pattern over the <c>_makeStable</c> delegate.
/// </summary>
public IAsyncResult BeginMakeStable(LogRecord record, AsyncCallback callback, object asyncState)
{
    var stableCall = new myDelegate(_makeStable);
    return stableCall.BeginInvoke(record, callback, asyncState);
}
/// <summary>
/// Verifies writer roll-over: a record too large for the remaining chunk space is rejected,
/// a smaller one then lands at the start of the next chunk, and both the checkpoint and the
/// on-disk bytes of the second chunk reflect that record.
/// </summary>
public void a_record_is_not_written_at_first_but_written_on_second_try()
{
    var filename1 = GetFilePathFor("chunk-000000.000000");
    var filename2 = GetFilePathFor("chunk-000001.000000");
    // Pre-create a 10000-byte chunk file so the writer starts with a bounded first chunk.
    var chunkHeader = new ChunkHeader(TFChunk.CurrentChunkVersion, 10000, 0, 0, false, Guid.NewGuid());
    var chunkBytes = chunkHeader.AsByteArray();
    var bytes = new byte[ChunkHeader.Size + 10000 + ChunkFooter.Size];
    Buffer.BlockCopy(chunkBytes, 0, bytes, 0, chunkBytes.Length);
    File.WriteAllBytes(filename1, bytes);
    _checkpoint = new InMemoryCheckpoint(0);
    var db = new TFChunkDb(TFChunkHelper.CreateDbConfig(PathName, _checkpoint, new InMemoryCheckpoint()));
    db.Open();
    var tf = new TFChunkWriter(db);
    long pos;
    var record1 = new PrepareLogRecord(logPosition: 0,
        correlationId: _correlationId,
        eventId: _eventId,
        expectedVersion: 1234,
        transactionPosition: 0,
        transactionOffset: 0,
        eventStreamId: "WorldEnding",
        timeStamp: new DateTime(2012, 12, 21),
        flags: PrepareFlags.None,
        eventType: "type",
        data: new byte[] { 1, 2, 3, 4, 5 },
        metadata: new byte[8000]);
    Assert.IsTrue(tf.Write(record1, out pos)); // almost fill up first chunk
    var record2 = new PrepareLogRecord(logPosition: pos,
        correlationId: _correlationId,
        eventId: _eventId,
        expectedVersion: 1234,
        transactionPosition: pos,
        transactionOffset: 0,
        eventStreamId: "WorldEnding",
        timeStamp: new DateTime(2012, 12, 21),
        flags: PrepareFlags.None,
        eventType: "type",
        data: new byte[] { 1, 2, 3, 4, 5 },
        metadata: new byte[8000]);
    Assert.IsFalse(tf.Write(record2, out pos)); // chunk has too small space
    var record3 = new PrepareLogRecord(logPosition: pos,
        correlationId: _correlationId,
        eventId: _eventId,
        expectedVersion: 1234,
        transactionPosition: pos,
        transactionOffset: 0,
        eventStreamId: "WorldEnding",
        timeStamp: new DateTime(2012, 12, 21),
        flags: PrepareFlags.None,
        eventType: "type",
        data: new byte[] { 1, 2, 3, 4, 5 },
        metadata: new byte[2000]);
    Assert.IsTrue(tf.Write(record3, out pos));
    tf.Close();
    db.Dispose();
    // Checkpoint = full first chunk (10000) + record3's framed size in the second chunk.
    Assert.AreEqual(record3.GetSizeWithLengthPrefixAndSuffix() + 10000, _checkpoint.Read());
    using (var filestream = File.Open(filename2, FileMode.Open, FileAccess.Read))
    {
        // Skip the chunk header and the record's 4-byte length prefix to land on the payload.
        filestream.Seek(ChunkHeader.Size + sizeof(int), SeekOrigin.Begin);
        var reader = new BinaryReader(filestream);
        var read = LogRecord.ReadFrom(reader);
        Assert.AreEqual(record3, read);
    }
}
/// <summary>Synchronously writes the record to the console, prefixed by its timestamp.</summary>
public void LogSync(LogRecord logRecord)
{
    Console.WriteLine($"{logRecord.TimeStamp:HH:mm:ss.ffff} | {logRecord.Message}");
}
/// <summary>
/// Determines whether the record is a terminate-on-host-maintenance system event
/// (system event with the matching method name).
/// </summary>
public static bool IsTerminateOnHostMaintenanceEvent(LogRecord record)
{
    if (!record.IsSystemEvent)
    {
        return false;
    }

    return record.ProtoPayload.MethodName == Method;
}
/// <summary>Forwards a client-library log record's message to the wrapped logger.</summary>
public void OnLog(LogRecord logRecord) => _logger.LogClientLib(logRecord.message);
/// <summary>
/// Integration test: stores a fully-populated log record through the Elastic store, checks
/// the daily index was created, then queries the raw document back and verifies every field
/// (including flattened additional fields and performance counters) round-trips.
/// Requires a live Elasticsearch instance.
/// </summary>
public async Task TestAddLogRecord()
{
    var utcnow = DateTime.UtcNow.Date;
    var elasticLogStore = new ElasticSearchLogStore(() => utcnow);
    // Deliberately non-existent path so the test never collides with real application logs.
    const string appPath = "c:\\###rather_not_existing_application_path###";
    // (Removed an unused MD5-hash local that was computed from appPath but never read.)
    var logrec = new LogRecord {
        LoggerName = "TestLogger",
        ApplicationPath = appPath,
        LogLevel = LogRecord.ELogLevel.Error,
        TimeUtc = DateTime.UtcNow,
        ProcessId = -1, // sentinel pid used below as a unique search key
        ThreadId = 456,
        Server = "TestServer",
        Identity = "TestIdentity",
        CorrelationId = Guid.NewGuid().ToString(),
        Message = "Test log message to store in the log",
        ExceptionMessage = "Test exception log message",
        ExceptionType = "TestException",
        ExceptionAdditionalInfo = "Additinal info for the test exception",
        AdditionalFields = new Dictionary<string, Object> {
            { "Host", "testhost.com" },
            { "LoggedUser", "testloggeduser" },
            { "HttpStatusCode", "200.1" },
            { "Url", "http://testhost.com" },
            { "Referer", "http://prevtesthost.com" },
            { "ClientIP", null },
            { "RequestData", "test test test" },
            { "ResponseData", null },
            { "ServiceName", "TestService" },
            { "ServiceDisplayName", "Test service generating logs" },
            { "NotExisting", null }
        },
        PerformanceData = new Dictionary<string, float> { { "CPU", 2.0f }, { "Memory", 20000000f } }
    };

    // add log
    await elasticLogStore.AddLogRecordAsync(logrec);

    var lim = new LogIndexManager(client, () => utcnow);
    // check if index was created
    var ir = await client.IndexExistsAsync(lim.GetCurrentIndexName());
    Assert.True(ir.Exists);

    // give it 2s to index
    await Task.Delay(2000);

    var res = await client.SearchAsync<ElasticLogRecord>(s => s.Query(f => f.Term(lr => lr.ProcessId, -1)));
    Assert.Equal(1L, res.Total);
    var dbLogRec = res.Hits.First().Source;

    // check logs content
    Assert.Equal(logrec.LoggerName, dbLogRec.LoggerName);
    Assert.Equal(logrec.ApplicationPath, dbLogRec.ApplicationPath);
    Assert.Equal(Enum.GetName(typeof(LogRecord.ELogLevel), logrec.LogLevel), dbLogRec.LogLevel);
    Assert.Equal(logrec.TimeUtc.ToShortDateString(), dbLogRec.TimeUtc.ToShortDateString());
    Assert.Equal(logrec.ProcessId, dbLogRec.ProcessId);
    Assert.Equal(logrec.ThreadId, dbLogRec.ThreadId);
    Assert.Equal(logrec.Server, dbLogRec.Server);
    Assert.Equal(logrec.Identity, dbLogRec.Identity);
    Assert.Equal(logrec.CorrelationId, dbLogRec.CorrelationId);
    Assert.Equal(logrec.Message, dbLogRec.Message);
    Assert.Equal(logrec.ExceptionMessage, dbLogRec.ExceptionMessage);
    Assert.Equal(logrec.ExceptionType, dbLogRec.ExceptionType);
    Assert.Equal(logrec.ExceptionAdditionalInfo, dbLogRec.ExceptionAdditionalInfo);
    Assert.Equal((string)logrec.AdditionalFields["Host"], dbLogRec.Host);
    Assert.Equal((string)logrec.AdditionalFields["LoggedUser"], dbLogRec.LoggedUser);
    Assert.Equal((string)logrec.AdditionalFields["HttpStatusCode"], dbLogRec.HttpStatusCode);
    Assert.Equal((string)logrec.AdditionalFields["Url"], dbLogRec.Url);
    Assert.Equal((string)logrec.AdditionalFields["Referer"], dbLogRec.Referer);
    Assert.Equal((string)logrec.AdditionalFields["ClientIP"], dbLogRec.ClientIP);
    Assert.Equal((string)logrec.AdditionalFields["RequestData"], dbLogRec.RequestData);
    Assert.Equal((string)logrec.AdditionalFields["ResponseData"], dbLogRec.ResponseData);
    Assert.Equal((string)logrec.AdditionalFields["ServiceName"], dbLogRec.ServiceName);
    Assert.Equal((string)logrec.AdditionalFields["ServiceDisplayName"], dbLogRec.ServiceDisplayName);

    // Performance counters must round-trip exactly (expected value first for xUnit).
    var dbPerfLogs = dbLogRec.PerfData;
    Assert.True(dbPerfLogs.Count == 2);
    float r;
    Assert.True(dbPerfLogs.TryGetValue("CPU", out r));
    Assert.Equal(logrec.PerformanceData["CPU"], r);
    Assert.True(dbPerfLogs.TryGetValue("Memory", out r));
    Assert.Equal(logrec.PerformanceData["Memory"], r);

    // Free-text analysis should also match the exception type on a partial term.
    res = await client.SearchAsync<ElasticLogRecord>(s => s.Query(f => f.Term(lr => lr.ExceptionType, "test")));
    Assert.Equal(1L, res.Total);
    dbLogRec = res.Hits.First().Source;
    Assert.Equal("TestException", dbLogRec.ExceptionType);
}
/// <summary>Checks that the record's body text contains the expected stack-trace fragment.</summary>
private static bool ContainStackTraceForClassHierarchy(LogRecord logRecord, string expectedStackTrace)
{
    var bodyText = logRecord.Body.StringValue;
    return bodyText.Contains(expectedStackTrace);
}
/// <summary>
/// Wraps a log record as a delete-instance event. Callers must pre-check the record with
/// <see cref="IsDeleteInstanceEvent"/>; the match is only asserted in debug builds.
/// </summary>
internal DeleteInstanceEvent(LogRecord logRecord) : base(logRecord)
{
    Debug.Assert(IsDeleteInstanceEvent(logRecord));
}
/// <summary>
/// Determines whether the record is a delete-instance activity event
/// (activity event with the matching method name).
/// </summary>
public static bool IsDeleteInstanceEvent(LogRecord record)
    => record.IsActivityEvent && record.ProtoPayload.MethodName == Method;
/// <summary>Queues a record onto the in-memory debug message list.</summary>
public void DebugMessage(LogRecord message) => _debug.Add(message);
/// <summary>
/// Verifies that FileLogger with AutoFlush writes each serialized record to disk immediately,
/// that toggling IsEnabled lets the log file be deleted and recreated between writes, and that
/// disposing the logger leaves no file behind.
/// </summary>
public void FileLogger_LogWithAutoFlush()
{
    #region Arrange
    var msg1 = new LogRecord { Timestamp = DateTime.Now, Message = "str", Level = LogLevel.Error, Exception = new Exception("Ex") };
    var msg2 = new LogRecord { Timestamp = DateTime.Now, Message = "str2", Level = LogLevel.Error, Exception = new Exception("Ex") };
    var tempFile = Path.GetTempFileName();
    var serializer = StringLogSerializerBuilder.DefaultSerializer;
    var logger = new FileLogger(tempFile) { AutoFlush = true, Serializer = serializer };
    // Expected file content is exactly the serialized record plus one newline per Log call.
    var expected1 = serializer.Serialize(msg1) + Environment.NewLine;
    var expected2 = serializer.Serialize(msg2) + Environment.NewLine;
    var target1 = string.Empty;
    var target2 = string.Empty;
    var error = default(Exception);
    #endregion Arrange

    #region Act
    try
    {
        logger.Log(msg1);
        // NonBlockRead reads the file while the logger still holds it open (AutoFlush must
        // have pushed the bytes to disk already).
        target1 = NonBlockRead(tempFile);
        // Disabling the logger releases the file so it can be deleted; re-enabling must
        // recreate it on the next write.
        logger.IsEnabled = false;
        File.Delete(tempFile);
        logger.IsEnabled = true;
        logger.Log(msg2);
        target2 = NonBlockRead(tempFile);
        logger.IsEnabled = false;
        File.Delete(tempFile);
        logger.Dispose();
    }
    catch (Exception ex)
    {
        error = ex;
        // Best-effort cleanup so a failing run does not leak temp files.
        File.Exists(tempFile)
            .If(true, x => File.Delete(tempFile));
    }
    #endregion Act

    #region Assert
    Assert.IsNull(error);
    Assert.AreEqual(expected1, target1);
    Assert.AreEqual(expected2, target2);
    Assert.AreEqual(false, File.Exists(tempFile));
    #endregion Assert
}
/// <summary>
/// Appends a continuation line to the record's message, separated by a newline.
/// A null record is ignored; a null line is rejected.
/// </summary>
private void AppendMessageLine(LogRecord logRecord, string line)
{
    // Validate the line even when the record is null, matching the original contract.
    Argument.IsNotNull(() => line);

    if (logRecord == null)
    {
        return;
    }

    logRecord.Message = logRecord.Message + Environment.NewLine + line;
}
/// <summary>
/// Wraps a log record as a start-with-encryption-key event. Callers must pre-check the record
/// with <see cref="IsStartWithEncryptionKeyEvent"/>; the match is only asserted in debug builds.
/// </summary>
internal StartWithEncryptionKeyEvent(LogRecord logRecord) : base(logRecord)
{
    Debug.Assert(IsStartWithEncryptionKeyEvent(logRecord));
}
/// <summary>
/// Reads one recovery-log record from the current position of <c>_fsLog</c>, validating the
/// 32-byte header, the CRC32, and (for write records) the embedded filename against
/// <paramref name="expectedFilename"/>. Returns null on any validation failure (treated as
/// end-of-valid-log during recovery).
/// Header layout (little-endian): lsn @0 (8B), recordLength @8 (4B), prev_lsn @12 (8B),
/// transactionID @20 (4B), operation @24 (4B), then either filenameLength @28 (write) or
/// CRC32 @28 (other operations).
/// </summary>
/// <param name="expectedFilename">Filename a write record must reference to be accepted.</param>
/// <returns>A WriteLogRecord, a plain LogRecord, or null when the bytes do not form a valid record.</returns>
private LogRecord ParseLogRecord(string expectedFilename)
{
    long rowPosition = _fsLog.Position;
    if (_fsLog.Read(_recoveryBuffer, 0, 32) != 32) // read first 32 bytes
        return null;
    // A record's LSN equals its byte offset in the log; a mismatch means corruption.
    long lsn = BitConverter.ToInt64(_recoveryBuffer, 0);
    if (lsn != rowPosition)
        return null;
    int recordLength = BitConverter.ToInt32(_recoveryBuffer, 8);
    if (recordLength < 32 || recordLength > 8496)
        return null;
    // Previous LSN must point backward.
    long prev_lsn = BitConverter.ToInt64(_recoveryBuffer, 12);
    if (prev_lsn > lsn)
        return null;
    int transactionID = BitConverter.ToInt32(_recoveryBuffer, 20);
    int operation = BitConverter.ToInt32(_recoveryBuffer, 24);
    if (operation < 1 || operation > 4)
        return null;
    if (operation == WRITE_OPERATION)
    {
        // Write records carry: filenameLength @28, UTF-8 filename, blockNo (8B),
        // count (4B), AFIM payload of `count` bytes, then trailing CRC32.
        int filenameLength = BitConverter.ToInt32(_recoveryBuffer, 28);
        if ((filenameLength > 255) || (32 + filenameLength > recordLength))
            return null;
        if (_fsLog.Read(_recoveryBuffer, 32, recordLength - 32) != recordLength - 32)
            return null;
        string filename = System.Text.Encoding.UTF8.GetString(_recoveryBuffer, 32, filenameLength);
        int idx = 32 + filenameLength;
        if (filename != expectedFilename)
            return null;
        if (idx + 12 > recordLength)
            return null;
        long blockNo = BitConverter.ToInt64(_recoveryBuffer, idx);
        int count = BitConverter.ToInt32(_recoveryBuffer, idx + 8);
        idx += 12;
        if (idx + count + 4 > recordLength)
            return null;
        // AFIM = after-image of the written block.
        byte[] AFIM = new byte[count];
        Array.Copy(_recoveryBuffer, idx, AFIM, 0, count);
        idx += count;
        // CRC covers everything before the CRC field itself.
        UInt32 read_crc32 = BitConverter.ToUInt32(_recoveryBuffer, idx);
        UInt32 computed_crc32 = CRC32.Compute(_recoveryBuffer, 0, idx);
        if (read_crc32 != computed_crc32)
        {
            return null;
        }
        WriteLogRecord writeLogRecord = new WriteLogRecord();
        writeLogRecord.lsn = lsn;
        writeLogRecord.prev_lsn = prev_lsn;
        writeLogRecord.transactionID = transactionID;
        writeLogRecord.operation = operation;
        writeLogRecord.filename = filename;
        writeLogRecord.blockNo = blockNo;
        writeLogRecord.count = count;
        writeLogRecord.AFIM = AFIM;
        return writeLogRecord;
    }
    else
    {
        // Non-write records are exactly 32 bytes: header plus CRC32 @28 over the first 28 bytes.
        UInt32 read_crc32 = BitConverter.ToUInt32(_recoveryBuffer, 28);
        UInt32 computed_crc32 = CRC32.Compute(_recoveryBuffer, 0, 28);
        if (read_crc32 != computed_crc32)
        {
            return null;
        }
        LogRecord logRecord = new LogRecord();
        logRecord.lsn = lsn;
        logRecord.prev_lsn = prev_lsn;
        logRecord.transactionID = transactionID;
        logRecord.operation = operation;
        return logRecord;
    }
}
/// <summary>
/// Determines whether the record is a start-with-encryption-key activity event
/// (activity event with the beta method name).
/// </summary>
public static bool IsStartWithEncryptionKeyEvent(LogRecord record)
{
    if (!record.IsActivityEvent)
    {
        return false;
    }

    return record.ProtoPayload.MethodName == BetaMethod;
}
/// <summary>Formats a log record into display text. Not yet implemented in this edge shim.</summary>
public string FormatMessage(LogRecord par1LogRecord)
    => Utils.Edge.Diagnostic.Internal.NotImplemented<string>();
/// <summary>
/// Appends a record to the shared in-memory log buffer.
/// NOTE(review): locks on the collection itself; every other accessor of
/// <c>log_strings</c> must take the same lock — a dedicated private lock object
/// would be the safer convention. Confirm other call sites before changing.
/// </summary>
public static void putRecord(LogRecord lr)
{
    lock (log_strings)
    {
        log_strings.Add (lr);
    }
}