/// <summary>
/// Calculates tax for a point-of-sale basket via the CertiTAX service.
/// </summary>
/// <param name="basket">Basket whose contents are taxed</param>
/// <param name="existingTransactions">Previously recorded CertiTAX transactions keyed by identifier</param>
/// <returns>Total tax computed for the basket</returns>
private LSDecimal Calculate_PointOfSale(Basket basket, Dictionary<string, string> existingTransactions)
{
    WebTrace.Write("CertiTAX: Begin Calculate POS");

    // Build the tax order against the default warehouse address and flag it
    // as a point-of-sale transaction.
    CertiTAX.Order taxOrder = new CertiTAX.Order();
    BuildTaxOrderAddress(taxOrder, StoreDataSource.Load().DefaultWarehouse);
    BuildTaxOrder(taxOrder, basket, 0, existingTransactions);
    taxOrder.Nexus = "POS";

    // Execute the calculation. When IgnoreFailedConfirm is set, a failure is
    // tolerated and taxTransaction remains null; otherwise it is rethrown.
    CertiTAX.TaxTransaction taxTransaction = null;
    try
    {
        taxTransaction = (new CertiTAX.CertiCalc()).Calculate(taxOrder);
    }
    catch (Exception ex)
    {
        WebTrace.Write("CertiTax could not calculate tax. The error was: " + ex.Message);
        if (!this.IgnoreFailedConfirm)
        {
            throw;
        }
    }

    // NOTE(review): on a tolerated failure taxTransaction is null here —
    // assumes ParseTaxTransaction handles a null transaction; confirm.
    LSDecimal totalTax = ParseTaxTransaction(taxTransaction, basket, 0);
    WebTrace.Write("CertiTAX: End Calculate POS");
    return totalTax;
}
/// <summary>
/// Shows the panel matching the configured search provider and, for Lucene,
/// refreshes the cached store before displaying progress.
/// </summary>
protected void Page_Load(object sender, EventArgs e)
{
    string provider = ApplicationSettings.Instance.SearchProvider;

    if (provider == "LuceneSearchProvider")
    {
        // Evict the store from the session cache and the 2nd-level cache and
        // reload it: the async index process does not update 2nd-level caching.
        Store store = AbleContext.Current.Store;
        AbleContext.Current.Database.GetSession().Evict(store);
        AbleContext.Current.DatabaseFactory.SessionFactory.Evict(store.GetType(), store);
        AbleContext.Current.Store = StoreDataSource.Load(store.Id);

        ToggleProgress(false);
        NoFTSPanel.Visible = false;
        FTSPanel.Visible = true;
    }
    else if (provider == "SqlFtsSearchProvider")
    {
        ToggleProgressSQLFts(false);
        SQLFtsPanel.Visible = true;
        NoFTSPanel.Visible = false;
        FTSPanel.Visible = false;
    }
    else
    {
        // No full-text search provider configured
        NoFTSPanel.Visible = true;
        FTSPanel.Visible = false;
    }
}
/// <summary>
/// Implementation of data recryption: re-encrypts stored records from an old
/// key to a new key, processing in batches.
/// </summary>
/// <param name="storeId">ID of the store context</param>
/// <param name="oldKey">Existing key used to encrypt data</param>
/// <param name="newKey">New key to be applied to encrypted data</param>
private static void InternalRecryptDatabase(int storeId, byte[] oldKey, byte[] newKey)
{
    // Runs on a background thread with no HttpContext, so the store context
    // must be initialized explicitly before any data access.
    Store store = StoreDataSource.Load(storeId);
    if (store == null) return;

    Token.Instance.InitStoreContext(store);

    // Process records in batches of 100. If the remaining workload stops
    // shrinking between iterations, a recrypt operation is failing; exit the
    // loop rather than spin forever.
    int previousWorkload = 0;
    int workload = RecryptionHelper.GetRecryptionWorkload();
    while (workload > 0 && workload != previousWorkload)
    {
        foreach (RecryptRecord record in RecryptionHelper.LoadForRecrypt(100))
        {
            record.DoRecrypt(oldKey, newKey);
        }
        previousWorkload = workload;
        workload = RecryptionHelper.GetRecryptionWorkload();
    }

    // All done (or stalled) — clear the recrypt-in-progress flag.
    RecryptionHelper.SetRecryptionFlag(false);
}
/// <summary>
/// Sends the given messages from a background thread, first restoring the
/// saved store/user context on the token.
/// </summary>
/// <param name="storeId">ID of the store context to restore</param>
/// <param name="userId">ID of the user context to restore</param>
/// <param name="messages">Messages to send</param>
// NOTE(review): method name misspells "Async"; left unchanged to avoid breaking callers.
private void AsycnSendEmails(int storeId, int userId, MailMessage[] messages)
{
    Store store = StoreDataSource.Load(storeId);
    if (store == null) return;

    // Reinitialize the token with the saved store/user context, then dispatch.
    Token.Instance.InitStoreContext(store);
    User user = UserDataSource.Load(userId);
    Token.Instance.InitUserContext(user);
    SendEmails(messages);
}
/// <summary>
/// Builds the list of shippable packages for the basket contents, converting
/// units to those expected by the shipping provider and applying carrier
/// weight limits.
/// </summary>
/// <param name="origin">Warehouse the shipment originates from</param>
/// <param name="contents">Basket items to package</param>
/// <returns>The prepared package list, or null when there is nothing to ship</returns>
private PackageList PreparePackages(Warehouse origin, BasketItemCollection contents)
{
    PackageList packages = PackageManager.GetPackageList(contents);
    if (packages == null || packages.Count == 0) return null;

    // Determine which unit conversions are needed: provider units for the
    // origin country versus the units the store records packages in.
    ProviderUnits providerUnits = GetProviderUnits(origin.Country);
    Store store = StoreDataSource.Load(Token.Instance.StoreId);
    bool convertDimensions = store.MeasurementUnit != providerUnits.MeasurementUnit;
    bool convertWeight = store.WeightUnit != providerUnits.WeightUnit;

    if (convertDimensions && convertWeight)
    {
        packages.ConvertBoth(providerUnits.WeightUnit, providerUnits.MeasurementUnit);
    }
    else if (convertWeight)
    {
        packages.ConvertWeight(providerUnits.WeightUnit);
    }
    else if (convertDimensions)
    {
        packages.ConvertDimensions(providerUnits.MeasurementUnit);
    }

    // Carrier limits are configured in pounds; express them in provider units.
    LSDecimal maxWeight = LocaleHelper.ConvertWeight(WeightUnit.Pounds, MaxPackageWeight, providerUnits.WeightUnit);
    LSDecimal minWeight = LocaleHelper.ConvertWeight(WeightUnit.Pounds, MinPackageWeight, providerUnits.WeightUnit);

    if (EnablePackageBreakup && maxWeight > 0)
    {
        // Compose packages (splits items larger than the maximum carrier weight).
        packages.Compose(maxWeight, minWeight);
    }
    else
    {
        packages.EnsureMinimumWeight(minWeight);
    }

    // Carriers expect whole-number weights.
    packages.ConvertWeightToWholeNumber();
    return packages;
}
/// <summary>
/// Upload compressed feed
/// </summary>
/// <param name="options">Feed configuration options</param>
/// <returns>true if feed upload is successful, false otherwise</returns>
public bool UploadCompressedFeed(FeedOptions options)
{
    // May be invoked asynchronously with no ambient store context; initialize
    // it here and reset the token afterwards so the context does not leak.
    bool needTokenReset = false;
    if (Token.Instance.Store == null)
    {
        Store store = StoreDataSource.Load(options.StoreId);
        Token.Instance.InitStoreContext(store);
        needTokenReset = true;
    }

    try
    {
        string compressedFile = Path.Combine(options.FeedDataPath, options.CompressedFeedFileName);
        if (!File.Exists(compressedFile))
        {
            UpdateStatus(100, "Can not upload. Compressed feed file does not exist. Please create the compressed feed file first.", false);
            return false;
        }

        try
        {
            // Honor the configured remote file name, defaulting to the local name.
            // BUGFIX: remoteFileName was previously computed but never used — the
            // upload always targeted options.CompressedFeedFileName, unlike
            // UploadUncompressedFeed which respects options.RemoteFileName.
            string remoteFileName = options.RemoteFileName;
            if (string.IsNullOrEmpty(remoteFileName))
            {
                remoteFileName = options.CompressedFeedFileName;
            }
            UploadFile(options, compressedFile, remoteFileName);
        }
        catch (Exception e)
        {
            UpdateStatus(100, "An error occured while uploading: " + e.Message, false);
            Utility.Logger.Warn("FTP upload of " + options.CompressedFeedFileName + " could not be completed.", e);
            return false;
        }

        UpdateStatus(100, "Compressed Feed File Uploaded.", true);
        return true;
    }
    finally
    {
        // FIX: the token is now reset on every exit path (previously the
        // missing-file early return skipped the reset).
        if (needTokenReset)
        {
            Token.ResetInstance();
        }
    }
}
/// <summary>
/// Upload un-compressed feed
/// </summary>
/// <param name="options">Feed configuration options</param>
/// <returns>true if feed upload is successful, false otherwise</returns>
public bool UploadUncompressedFeed(FeedOptions options)
{
    // NEED TO INITIALIZE STORE CONTEXT (AS THIS CAN BE CALLED IN AN ASYNC WAY
    // AS WELL), IN CASE AN ERROR OCCURS THEN WE NEED TO LOG FOR THE STORE.
    bool needTokenReset = false;
    if (Token.Instance.Store == null)
    {
        Store store = StoreDataSource.Load(options.StoreId);
        Token.Instance.InitStoreContext(store);
        needTokenReset = true;
    }

    try
    {
        string feedFile = Path.Combine(options.FeedDataPath, options.FeedFileName);
        if (!File.Exists(feedFile))
        {
            UpdateStatus(100, "Can not upload. Feed file does not exist. Please Create the Feed file first.", false);
            return false;
        }

        try
        {
            // Honor the configured remote file name, defaulting to the local name.
            string remoteFileName = options.RemoteFileName;
            if (string.IsNullOrEmpty(remoteFileName))
            {
                remoteFileName = options.FeedFileName;
            }
            UploadFile(options, feedFile, remoteFileName);
        }
        catch (Exception e)
        {
            UpdateStatus(100, "An error occured while uploading: " + e.Message, false);
            Utility.Logger.Warn("FTP upload of " + options.FeedFileName + " could not be completed.", e);
            return false;
        }

        UpdateStatus(100, "Uncompressed Feed File Uploaded.", true);
        return true;
    }
    finally
    {
        // FIX: the token is now reset on every exit path (previously the
        // missing-file early return skipped the reset).
        if (needTokenReset)
        {
            Token.ResetInstance();
        }
    }
}
/// <summary>
/// Create a feed
/// </summary>
/// <param name="options">Feed configuration options</param>
/// <returns>true if feed creation is successful, false otherwise</returns>
public bool CreateFeed(FeedOptions options)
{
    string feedFile = Path.Combine(options.FeedDataPath, options.FeedFileName);
    if (File.Exists(feedFile) && !options.OverwriteFeedFile)
    {
        UpdateStatus(100, "Feed File Already Exists. You should either chose to overwrite the feed file or provide a different name.", false);
        return false;
    }

    // Exclude private products; optionally also exclude products flagged
    // out of feeds.
    string criteria = "VisibilityId<>" + (short)CatalogVisibility.Private;
    if (!options.IncludeAllProducts)
    {
        criteria = criteria + " AND ExcludeFromFeed=0";
    }

    string headerRow = GetHeaderRow();
    if (!headerRow.EndsWith("\r\n"))
    {
        headerRow += "\r\n";
    }

    // Create the file with the header row first so access problems surface
    // before any product processing begins.
    try
    {
        using (StreamWriter feedWriter = File.CreateText(feedFile))
        {
            feedWriter.Write(headerRow);
        }
    }
    catch (System.UnauthorizedAccessException accessException)
    {
        UpdateStatus(100, "Access restricted on feed data folder. In order to create feeds, the current user (" + Misc.GetProcessIdentity() + ") needs write access to feeds data folder.", false);
        UpdateStatus(100, "Access exception : " + accessException.Message, false);
        return false;
    }

    // May run on a background thread with no ambient store context.
    bool needTokenReset = false;
    if (Token.Instance.Store == null)
    {
        Store store = StoreDataSource.Load(options.StoreId);
        Token.Instance.InitStoreContext(store);
        needTokenReset = true;
    }

    const int batchSize = 1000; // only process 1000 rows at a time
    try
    {
        using (StreamWriter feedWriter = File.AppendText(feedFile))
        {
            int count = ProductDataSource.CountForCriteria(criteria);
            int startIndex = 0;
            while (startIndex < count)
            {
                // Determine how many rows are left in this batch.
                int rowsRemaining = count - startIndex;
                int maxRows = (rowsRemaining > batchSize) ? batchSize : rowsRemaining;

                // BUGFIX: "startIndex / count * 100" was integer division and
                // always reported 0%; multiply before dividing (in long to
                // avoid any overflow) so progress is meaningful.
                int percentDone = (int)((long)startIndex * 100 / count);
                UpdateStatus(percentDone, "Generating feed for products from " + startIndex + " to " + (startIndex + maxRows) + " out of " + count + " products.");

                // Load this batch and append its feed data.
                ProductCollection products = ProductDataSource.LoadForCriteria(criteria, maxRows, startIndex);
                string feedData = GetFeedData(products);
                if (!feedData.EndsWith("\r\n"))
                {
                    feedData += "\r\n";
                }
                feedWriter.Write(feedData);
                feedWriter.Flush();

                startIndex += batchSize;
            }
        }
    }
    catch (Exception e)
    {
        Logger.Warn("Error Feed Creator Thread : " + e.Message, e);
        UpdateStatus(100, "Error while creating feed file." + e.Message, false);
        return false;
    }
    finally
    {
        if (needTokenReset)
        {
            Token.ResetInstance();
        }
    }

    UpdateStatus(100, string.Format("Feed file generated at {0}.", options.FeedDataPath), true);
    return true;
}