// Note that the integration must be initialized AFTER the MongoDB handler module.
protected override void Initialize()
{
    Interlocked.Exchange(ref initializedTimestamp, MC2DateTimeValue.Now().Ticks);

    axToTroPollingInterval = (int)Message["axtotropollinginterval"].GetValueOrDefault(10000);
    troToAxPollingInterval = (int)Message["trotoaxpollinginterval"].GetValueOrDefault(12000);

    string integrationFolderAxToTro = remoteConnection.GetDataPath() + Path.DirectorySeparatorChar + IntegrationAxToTroFolder;
    string integrationFolderTroToAx = remoteConnection.GetDataPath() + Path.DirectorySeparatorChar + IntegrationTroToAxFolder;
    string integrationFolderTroToAxCopy = remoteConnection.GetDataPath() + Path.DirectorySeparatorChar + IntegrationTroToAxFolderCopy;

    new DirectoryInfo(integrationFolderAxToTro).Create();
    new DirectoryInfo(integrationFolderTroToAx).Create();
    new DirectoryInfo(integrationFolderTroToAxCopy).Create();

    var mongoDBHandler = (MongoDBHandlerServer)handlerContainer.GetHandler("mongodbhandler");

    axToTroImport = new AXToTroImport(logger, integrationFolderAxToTro, mongoDBHandler);
    axToTroPollingThread = new Thread(RunAXToTroImport);
    axToTroPollingThread.Start();

    troToAxExport = new TroToAXExport(
        logger,
        integrationFolderTroToAx,
        integrationFolderTroToAxCopy,
        mongoDBHandler.Database,
        (DataTree)Message.Clone());
    troToAXPollingThread = new Thread(RunTroToAXExport);
    troToAXPollingThread.Start();
}
private void StartExport()
{
    logger.LogTrace("Looking for documents to export to Visma.");

    const string FilePrefix = "VismaExport";
    const string FilePrefixAnnotated = "VismaExportAnnotated";
    const string FilePrefixAnnotatedTotals = "VismaExportAnnotatedTotals";

    DateTime now = MC2DateTimeValue.Now().ToLocalTime();

    // Build one timestamp string for all three file names. Note: the original code passed
    // now.Second in the last slot; now.Minute matches the year-month-day-hour-minute pattern.
    string timestamp = string.Format(
        "{0:0000}-{1:00}-{2:00}-{3:00}-{4:00}",
        now.Year, now.Month, now.Day, now.Hour, now.Minute);

    exportFileName = Path.Combine(filePath, FilePrefix + "_" + timestamp + ".csv");
    exportFileNameAnnotated = Path.Combine(filePath, FilePrefixAnnotated + "_" + timestamp + ".txt");
    exportFileNameAnnotatedTotals = Path.Combine(filePath, FilePrefixAnnotatedTotals + "_" + timestamp + ".txt");

    ExportFile = File.OpenWrite(exportFileName);
    ExportFileAnnotated = File.OpenWrite(exportFileNameAnnotated);
    ExportFileAnnotatedTotals = File.OpenWrite(exportFileNameAnnotatedTotals);

    rawCsvLines.Clear();
    processedCsvLines.Clear();
}
private void MarkEntriesAsExported(IEnumerable<BsonDocument> cursor, string collectionName)
{
    MongoCollection collection = database.GetCollection(collectionName);
    DateTime now = MC2DateTimeValue.Now().ToUniversalTime();

    logger.LogInfo("Marking " + cursor.Count() + " items as completed in collection " + collectionName);

    foreach (BsonDocument document in cursor)
    {
        if (failedExports.Contains((ObjectId)document[DBQuery.Id]))
        {
            logger.LogInfo("Marking item as failed because it was found in failed exports list");
            document["exportfailurecount_ax"] = (int)document.GetValue("exportfailurecount_ax", 0) + 1;
            WorkerTimesheetEntryExportsFailed++;
        }
        else if (!succeededExports.Contains((ObjectId)document[DBQuery.Id]))
        {
            logger.LogInfo(
                "Item was not found in succeeded list. It's possible it was accepted during the accept procedure and will be exported during the next export.",
                document[DBQuery.Id].ToString());
            continue;
        }
        else
        {
            document["exported_ax"] = true;
            document["exporttimestamp_ax"] = now;
        }

        collection.Save(document, WriteConcern.Acknowledged);
    }
}
public MC2Value getdatetimecontrolyears(int startYear = -1, int endYear = -1)
{
    var result = new DataTree();
    var now = MC2DateTimeValue.Now();
    int currentYear = now.Year;

    // Also show the previous year during January and February.
    if (startYear == -1)
    {
        startYear = (now.Month > 2) ? currentYear : currentYear - 1;
    }

    // Default to also showing the next year.
    if (endYear == -1)
    {
        endYear = currentYear + 1;
    }

    for (int i = startYear; i <= endYear; i++)
    {
        var yearNode = result.AddNodeWithIndex();
        yearNode.Value = i.ToString();

        if (i == currentYear)
        {
            yearNode["selected"] = true;
        }
    }

    return result;
}
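// Usage sketch (hypothetical call sites; only the method defined above is assumed). With the
// defaults, a call made in March 2024 yields year nodes "2024" (flagged "selected") and "2025",
// while a call made in January 2024 also includes "2023" because of the two-month grace period.
var defaultYears = getdatetimecontrolyears();            // range derived from the current date
var explicitYears = getdatetimecontrolyears(2020, 2025); // explicit range; current year still flagged "selected"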
static void Main(string[] args)
{
    var sw = new System.Diagnostics.Stopwatch();
    sw.Start();

    var settings = new MongoDB.Driver.MongoServerSettings();
    var address = new MongoDB.Driver.MongoServerAddress(Properties.Settings.Default.Server, Properties.Settings.Default.Port);
    settings.Server = address;

    MongoDB.Driver.MongoServer server = new MongoDB.Driver.MongoServer(settings);
    var client = new MongoDB.Driver.MongoClient();
    MongoDatabase database = server.GetDatabase("mc2db");

    Console.WriteLine("Aloitetaan..."); // "Starting..."

    var now = MC2DateTimeValue.Now().ToLocalTime();
    var nowStr = string.Format(
        "{0:0000}-{1:00}-{2:00}-{3:00}-{4:00}-{5:00}",
        now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second);
    var logFile = Path.Combine(string.Format("{0}_{1}.txt", Properties.Settings.Default.ExcelPath + "\\Log", nowStr));

    if (!Directory.Exists(Path.GetDirectoryName(logFile)))
    {
        Directory.CreateDirectory(Path.GetDirectoryName(logFile));
    }

    var x = new PayrollExport(logFile, database);
    x.ExportDocuments();

    Console.WriteLine("Valmis...aikaa kului {0} minuuttia", sw.Elapsed.TotalMinutes); // "Done... took {0} minutes"
}
private bool SQLDocument(
    BsonDocument sourceDocument,
    MongoCollection<BsonDocument> sourceCollection,
    MongoCollection<BsonDocument> targetCollection,
    DataTree schemaCollection)
{
    bool documentSQLd = false;

    try
    {
        logger.LogDebug("caching document", sourceCollection.Name, sourceDocument[DBDocument.Id]);

        // Save the document to the SQL collection.
        sourceDocument["__SQLd"] = MC2DateTimeValue.Now();
        targetCollection.Save(sourceDocument);

        // Purge the original document.
        sourceCollection.Remove(Query.EQ(DBDocument.Id, sourceDocument[DBDocument.Id]));

        documentSQLd = true;
    }
    catch (Exception ex)
    {
        logger.LogError("Failed to SQL document", sourceCollection.Name, sourceDocument[DBDocument.Id], ex);
    }

    return documentSQLd;
}
private void StartBatch(XmlDocument exportDocument, string parentTag)
{
    if (batchStarted)
    {
        return;
    }

    logger.LogDebug("Starting new export batch.");

    batchStarted = true;
    batchStartTimestamp = MC2DateTimeValue.Now().ToUniversalTime();

    if (!database.CollectionExists("axintegration"))
    {
        database.CreateCollection("axintegration");
    }

    MongoCollection<BsonDocument> axIntegrationCollection = database.GetCollection("axintegration");
    BsonDocument transactionInfoDocument = axIntegrationCollection.FindOne(Query.EQ("identifier", TransactionInfoId));

    if (transactionInfoDocument == null)
    {
        transactionInfoDocument = new BsonDocument();
        transactionInfoDocument["identifier"] = TransactionInfoId;
    }

    if (!transactionInfoDocument.Contains("batchid"))
    {
        transactionInfoDocument.Set("batchid", 0);
    }

    // Note that AX export is single threaded and not thread safe.
    BatchId = (int)transactionInfoDocument["batchid"] + 1;
    transactionInfoDocument.Set("batchid", BatchId);
    axIntegrationCollection.Save(transactionInfoDocument, WriteConcern.Acknowledged);

    XmlElement batchElement = exportDocument.CreateElement("EFIOriginalBatchID");
    batchElement.InnerText = BatchId.ToString();

    XmlNode parentNode = exportDocument.GetElementsByTagName(parentTag)[0];

    if (parentNode.ChildNodes.Count == 0)
    {
        parentNode.AppendChild(batchElement);
    }
    else
    {
        parentNode.InsertBefore(batchElement, parentNode.FirstChild);
    }
}
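// The read-increment-save sequence above is safe only because, as the comment notes, the AX export
// is single threaded. If that assumption ever changed, the batch id could instead be reserved
// atomically on the server. A minimal sketch using the same legacy 1.x driver API as the
// surrounding code; the collection and identifier names simply mirror the ones above:
FindAndModifyResult reserved = axIntegrationCollection.FindAndModify(
    Query.EQ("identifier", TransactionInfoId), // match the transaction info document
    SortBy.Null,                               // no sort needed, the identifier is unique
    Update.Inc("batchid", 1),                  // atomic server-side increment
    true,                                      // return the document as it looks after the update
    true);                                     // upsert: create the counter document if missing
int reservedBatchId = reserved.ModifiedDocument["batchid"].AsInt32;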
public ActionResult addhistoryentry(string historyaddress, string redirectaddress, string token)
{
    var historyEntry = new DataTree();
    historyEntry["ispost"] = false;
    historyEntry["address"] = historyaddress;
    historyEntry["timestamp"] = MC2DateTimeValue.Now();
    historyEntry["addedprogrammatically"] = true;

    Runtime.HistoryManager.AddHistoryEntry(historyEntry, token);

    return Redirect(redirectaddress);
}
private void MarkAutomaticWorkAcceptanceDone()
{
    var collection = database.GetCollection(TroHelpersHandlerServerInfo.HandlerName);
    BsonDocument lastAccepted = collection.FindOne(Query.EQ("identifier", "automaticworklastaccepted"));

    if (lastAccepted == null)
    {
        lastAccepted = new BsonDocument();
        lastAccepted.Set("identifier", "automaticworklastaccepted");
    }

    lastAccepted.Set("timestamp", MC2DateTimeValue.Now());
    collection.Save(lastAccepted, WriteConcern.Acknowledged);
}
/// <summary>
/// Defaults to the current time on the selected day, or to the time given in the start parameter.
/// </summary>
/// <param name="itemSchema"></param>
/// <returns></returns>
public MC2Value selectedstart(DataTree itemSchema)
{
    DateTime resultDate;

    if (this.Runtime.CurrentActionCall.Parameters.Contains("start"))
    {
        resultDate = (MC2DateTimeValue)MC2DateTimeValue.TryConvertValueFromString(Runtime.CurrentActionCall.Parameters["start"]);
    }
    else
    {
        // Get the current time for the selected day.
        DateTime now = MC2DateTimeValue.Now().ToUniversalTime();
        resultDate = new DateTime(now.Year, now.Month, now.Day, now.Hour, now.Minute, 0, DateTimeKind.Utc);
    }

    resultDate = ApplyTimeAccuracy(resultDate, itemSchema);

    return resultDate;
}
/// <summary>
/// Defaults to the current day, or to the day given in the selecteddatekey parameter.
/// </summary>
/// <param name="itemSchema"></param>
/// <returns></returns>
public MC2Value selecteddate(DataTree itemSchema)
{
    string resultDateKey = Runtime.CurrentActionCall.Parameters["selecteddatekey"];
    DateTime resultDate;

    if (string.IsNullOrEmpty(resultDateKey))
    {
        DateTime now = MC2DateTimeValue.Now();
        resultDate = new DateTime(now.Year, now.Month, now.Day, 0, 0, 0, DateTimeKind.Utc);
    }
    else
    {
        resultDate = DateTime.ParseExact(resultDateKey, "yyyyMMdd", CultureInfo.InvariantCulture);
        resultDate = new DateTime(resultDate.Year, resultDate.Month, resultDate.Day, 0, 0, 0, DateTimeKind.Utc);
    }

    return resultDate;
}
private bool IsAutomaticWorkAcceptanceDue()
{
    var collection = database.GetCollection(TroHelpersHandlerServerInfo.HandlerName);
    BsonDocument lastAccepted = collection.FindOne(Query.EQ("identifier", "automaticworklastaccepted"));

    if (lastAccepted == null)
    {
        return true;
    }

    logger.LogInfo("Accepting all unaccepted work for previous week.");

    DateTime timeStamp = (DateTime)lastAccepted["timestamp"];
    DateTime beginningOfWeek = MC2DateTimeValue.Now().StartOfWeek();

    // Return true if work hasn't been accepted this week.
    return timeStamp.CompareTo(beginningOfWeek) < 0;
}
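// StartOfWeek() is a helper that is not shown in this section. A minimal sketch of the kind of
// extension this code appears to rely on, assuming weeks start on Monday and the time-of-day is
// zeroed; the project's actual helper may target MC2DateTimeValue rather than DateTime:
public static class DateTimeExtensions
{
    public static DateTime StartOfWeek(this DateTime value, DayOfWeek firstDay = DayOfWeek.Monday)
    {
        // Number of days to step back to reach the first day of the week.
        int diff = (7 + (value.DayOfWeek - firstDay)) % 7;
        return value.Date.AddDays(-diff);
    }
}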
private void MarkEntriesAsExported(IEnumerable<BsonDocument> cursor, string entryType)
{
    if (cursor == null)
    {
        return;
    }

    MongoCollection collection = database.GetCollection(entryType);
    DateTime now = MC2DateTimeValue.Now().ToUniversalTime();

    logger.LogInfo("Marking " + cursor.Count() + " items as completed.");

    foreach (BsonDocument document in cursor)
    {
        document["exported_visma"] = true;
        document["exporttimestamp_visma"] = now;
        document["__readonly"] = true;
        collection.Save(document);

        MarkTimesheetEntryDetailsAsReadonly(document, true);
    }
}
/// <summary>
/// Starts the actual export while also updating statuses and logging progress.
/// </summary>
private void StartExport()
{
    try
    {
        // Cache often used collections.
        PopulateCollectionsToCache();

        UpdateExportStatus(PayrollConstants.ProcessingData);

        EntriesToPayroll<Absence> absences = GetAbsencesToExport();
        logWriter.Flush();
        EntriesToPayroll<Timesheet> workHours = GetTimesheetToExport();
        //EntriesToPayroll<Timesheet> workHours = new EntriesToPayroll<Timesheet>();

        var now = MC2DateTimeValue.Now().ToLocalTime();
        var nowStr = string.Format(
            "{0:0000}-{1:00}-{2:00}-{3:00}-{4:00}-{5:00}",
            now.Year, now.Month, now.Day, now.Hour, now.Minute, now.Second);

        UpdateExportStatus(PayrollConstants.GeneratingExcel);
        ExportExcelDocument(
            Path.Combine(Properties.Settings.Default.ExcelPath, string.Format("{0}_{1}", nowStr, Properties.Settings.Default.ExcelFile)),
            absences,
            workHours,
            Properties.Settings.Default.Language);
    }
    catch (Exception)
    {
        UpdateExportStatus(PayrollConstants.Failed);
        throw; // rethrow without resetting the stack trace
    }

    // Some of the entries were possibly not exported.
    UpdateExportStatus(PayrollConstants.Completed);
    UpdateExportStatus(string.Format("All done. Time elapsed = {0} minutes", sw.Elapsed.TotalMinutes));
    sw.Stop();

    if (logWriter != null)
    {
        logWriter.Dispose();
    }
}
private void ApproveWork(string collectionName)
{
    DateTime beginningOfWeek = MC2DateTimeValue.Now().StartOfWeek();

    MongoCollection<BsonDocument> collection = database.GetCollection(collectionName);

    IMongoQuery query = Query.And(
        Query.LT("starttimestamp", beginningOfWeek),
        Query.NE("approvedbyworker", true));

    MongoCursor<BsonDocument> cursor = collection.Find(query);

    foreach (var document in cursor)
    {
        document["approvedbyworker"] = true;
        invalidatedCacheItems.Add(new DataTree(document[DBQuery.Id].ToString()));
        collection.Save(document, WriteConcern.Acknowledged);
    }
}
/// <summary>
/// Returns the overridden date set for testing as a string.
/// </summary>
/// <returns></returns>
public MC2Value overriddendate()
{
    return MC2DateTimeValue.Now();
}
private DBQuery QueryGetProjects(
    string[] splitTerms,
    string orderby,
    bool ascending,
    int documentsperpage,
    int page,
    string projectmanager)
{
    const int MinTermLength = 2;

    var resultsQuery = new DBQuery();
    var searchFields = Schema.GetSearchFields(Runtime.Schema["tro"]["project"]);
    var andQueries = new List<IMongoQuery>();

    // If there are search terms:
    // - Get elements that match both the search term and the project manager (if present).
    // - Repeat for each search term.
    //
    // If there are no search terms:
    // - Get elements that match the project manager. If no relation is present, get all items.
    if (searchFields.Count > 0 && splitTerms != null && splitTerms.Length > 0)
    {
        foreach (string term in splitTerms)
        {
            if (term.Length < MinTermLength)
            {
                continue;
            }

            var orQueries = new List<IMongoQuery>();

            foreach (string searchField in searchFields)
            {
                var relationAndFilterQueries = new List<IMongoQuery>();
                relationAndFilterQueries.Add(Query.Matches(searchField, new BsonRegularExpression(term, "i")));

                if (!string.IsNullOrEmpty(projectmanager))
                {
                    relationAndFilterQueries.Add(Query.EQ("projectmanager", new ObjectId(projectmanager)));
                }

                orQueries.Add(Query.And(relationAndFilterQueries));
            }

            andQueries.Add(Query.Or(orQueries));
        }
    }

    // Add the project manager to the query.
    if (!string.IsNullOrEmpty(projectmanager))
    {
        andQueries.Add(Query.EQ("projectmanager", new ObjectId(projectmanager)));
    }

    // Add the project end date to the query.
    andQueries.Add(Query.GT("projectend", new BsonDateTime(MC2DateTimeValue.Now())));

    resultsQuery["project"][DBQuery.Condition] = Query.And(andQueries).ToString();

    // Apply sorting and paging.
    if (!resultsQuery["project"][DBQuery.Condition].Empty)
    {
        resultsQuery["project"][DBQuery.OrderBy] = orderby;
        resultsQuery["project"][DBQuery.Ascending] = ascending;
        resultsQuery["project"][DBQuery.DocumentsPerPage] = documentsperpage;
        resultsQuery["project"][DBQuery.Page] = page;
        resultsQuery["project"][DBQuery.IncludeTotals] = true;

        return resultsQuery;
    }
    else
    {
        return null;
    }
}
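// In QueryGetProjects the raw search term goes straight into a BsonRegularExpression, so regex
// metacharacters typed by the user ("(", "+", ".", etc.) are interpreted as pattern syntax. If a
// literal substring match is what is intended, the term could be escaped first. Minimal sketch
// using the standard library, as a drop-in for the Query.Matches line above:
string escapedTerm = System.Text.RegularExpressions.Regex.Escape(term);
relationAndFilterQueries.Add(Query.Matches(searchField, new BsonRegularExpression(escapedTerm, "i")));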
private int ExportExpenseEntriesToXml(List<BsonDocument> expensesToExport, XmlDocument xmlDocument)
{
    XmlNode exportParentNode = xmlDocument.GetElementsByTagName("JournalTable")[0];
    int expensesExported = 0;

    foreach (BsonDocument expenseDocument in expensesToExport)
    {
        try
        {
            string categoryIdString = GetProjectCategoryForExpense(expenseDocument);

            // No categoryId means the expense is not exported to AX.
            if (categoryIdString == null)
            {
                continue;
            }

            if (!expenseDocument.Contains("user"))
            {
                throw new Exception("User is missing from expense");
            }

            MongoCollection<BsonDocument> usersCollection = database.GetCollection("user");
            BsonDocument workerDocument = usersCollection.FindOne(Query.EQ(DBQuery.Id, expenseDocument["user"][0]));

            if (!workerDocument.Contains("identifier"))
            {
                throw new Exception("Worker document is missing an identifier");
            }

            XmlElement expenseElement = xmlDocument.CreateElement("JournalTrans");
            expenseElement.SetAttribute("class", "entity");

            XmlElement expenseInnerElement = xmlDocument.CreateElement("ProjTrans");
            expenseInnerElement.SetAttribute("class", "entity");

            XmlElement categoryId = xmlDocument.CreateElement("CategoryId");
            XmlElement projId = xmlDocument.CreateElement("ProjId");
            XmlElement quantity = xmlDocument.CreateElement("Qty");
            XmlElement worker = xmlDocument.CreateElement("Worker");
            XmlElement transDate = xmlDocument.CreateElement("TransDate");
            XmlElement transactionId = xmlDocument.CreateElement("EFIOriginalTransactionID");

            transactionId.InnerText = GetNextTransactionId().ToString();
            categoryId.InnerText = categoryIdString;
            projId.InnerText = GetProjectIdForArticleEntry(expenseDocument);
            quantity.InnerText = Convert.ToString(expenseDocument.GetValue("amount", 0));
            worker.InnerText = (string)workerDocument["identifier"];

            DateTime timeStamp = (DateTime)expenseDocument.GetValue("date", MC2DateTimeValue.Now());
            transDate.InnerText = string.Format("{0:0000}-{1:00}-{2:00}", timeStamp.Year, timeStamp.Month, timeStamp.Day);

            expenseElement.AppendChild(transactionId);
            expenseElement.AppendChild(transDate);
            expenseElement.AppendChild(expenseInnerElement);

            expenseInnerElement.AppendChild(categoryId);
            expenseInnerElement.AppendChild(projId);
            expenseInnerElement.AppendChild(quantity);
            expenseInnerElement.AppendChild(worker);

            exportParentNode.AppendChild(expenseElement);

            expensesExported++;
            succeededExports.Add((ObjectId)expenseDocument[DBQuery.Id]);
        }
        catch (Exception ex)
        {
            logger.LogError("Failed to handle expense entry. Skipping this entry", ex, expenseDocument[DBQuery.Id]);
            IncreaseExportFailureCount(expenseDocument, "dayentry");
        }
    }

    return expensesExported;
}
private int ExportArticleEntriesToXml(List<BsonDocument> articlesToExport, XmlDocument xmlDocument)
{
    XmlNode exportParentNode = xmlDocument.GetElementsByTagName("InventJournalTable")[0];
    int articlesExported = 0;

    foreach (BsonDocument articleEntry in articlesToExport)
    {
        try
        {
            BsonDocument article = GetArticleForArticleEntry(articleEntry);

            if (!article.Contains("identifier"))
            {
                throw new Exception("article is missing an identifier");
            }

            XmlElement itemEntry = xmlDocument.CreateElement("InventJournalTrans");
            itemEntry.SetAttribute("class", "entity");

            XmlElement itemId = xmlDocument.CreateElement("ItemId");
            XmlElement projId = xmlDocument.CreateElement("ProjId");
            XmlElement quantity = xmlDocument.CreateElement("Qty");
            XmlElement transDate = xmlDocument.CreateElement("TransDate");
            XmlElement transactionId = xmlDocument.CreateElement("EFIOriginalTransactionID");
            XmlElement txt = xmlDocument.CreateElement("Txt");

            itemId.InnerText = (string)article["identifier"];
            projId.InnerText = GetProjectIdForArticleEntry(articleEntry);
            quantity.InnerText = Convert.ToString(articleEntry.GetValue("amount", 0));

            DateTime timeStamp = (DateTime)articleEntry.GetValue("timestamp", MC2DateTimeValue.Now());
            transDate.InnerText = string.Format("{0:0000}-{1:00}-{2:00}", timeStamp.Year, timeStamp.Month, timeStamp.Day);

            transactionId.InnerText = GetNextTransactionId().ToString();
            txt.InnerText = (string)articleEntry.GetValue("note", string.Empty);

            itemEntry.AppendChild(transactionId);
            itemEntry.AppendChild(itemId);
            itemEntry.AppendChild(projId);
            itemEntry.AppendChild(quantity);
            itemEntry.AppendChild(transDate);
            // Note: the Txt element is not appended as of yet.

            exportParentNode.AppendChild(itemEntry);

            articlesExported++;
            succeededExports.Add((ObjectId)articleEntry[DBQuery.Id]);
        }
        catch (Exception ex)
        {
            logger.LogError("Failed to handle article entry. Skipping this entry", ex, articleEntry[DBQuery.Id]);
            IncreaseExportFailureCount(articleEntry, "articleentry");
        }
    }

    return articlesExported;
}
internal DataTree GetSuggestedAllocationsForUser(string userId = null, int days = 1)
{
    DateTime now = MC2DateTimeValue.Now();

    return GetSuggestedAllocationsForUser(userId, now, days);
}