/// <summary>
/// Handles a login form POST: validates the model, checks the submitted
/// credentials against the Users table, issues a forms-authentication
/// cookie on success and redirects to the originally requested URL.
/// </summary>
/// <param name="model">Posted login form data (email, password, remember-me flag).</param>
/// <returns>The login view with validation errors, or a redirect on success.</returns>
public ActionResult Login(LoginViewModel model)
{
    if (!ModelState.IsValid)
    {
        return View(model);
    }

    using (GainBargainContext db = new GainBargainContext())
    {
        // Single round-trip: the original issued Any() to validate and then
        // FirstOrDefault() to re-fetch the same row (and never used the result).
        // SECURITY(review): passwords are stored and compared in plain text —
        // they should be salted hashes (e.g. PBKDF2); flagged here, not changed,
        // since fixing it requires a data migration outside this method.
        User user = db.Users.FirstOrDefault(x =>
            x.Email == model.Email && x.Password == model.Password);

        if (user == null)
        {
            ModelState.AddModelError("", "Credentials are wrong");
            return View(model);
        }

        FormsAuthentication.SetAuthCookie(model.Email, model.RememberMe);
        return Redirect(FormsAuthentication.GetRedirectUrl(model.Email, model.RememberMe));
    }
}
/// <summary>
/// Creates a product repository that performs all data access through the
/// supplied EF context, forwarded to the shared base repository.
/// </summary>
/// <param name="context">Database context the repository operates on.</param>
public ProductRepository(GainBargainContext context) : base(context) { }
/// <summary>
/// Creates a market repository backed by the supplied EF context,
/// forwarded to the shared base repository.
/// </summary>
/// <param name="context">Database context the repository operates on.</param>
public MarketRepository(GainBargainContext context) : base(context) { }
/// <summary>
/// Creates a parser-source repository backed by the supplied EF context,
/// forwarded to the shared base repository.
/// </summary>
/// <param name="context">Database context the repository operates on.</param>
public ParserSourceRepository(GainBargainContext context) : base(context) { }
/// <summary>
/// Creates a super-category repository backed by the supplied EF context,
/// forwarded to the shared base repository.
/// </summary>
/// <param name="context">Database context the repository operates on.</param>
public SuperCategoryRepository(GainBargainContext context) : base(context) { }
/// <summary>
/// Creates a database-log repository backed by the supplied EF context,
/// forwarded to the shared base repository.
/// </summary>
/// <param name="context">Database context the repository operates on.</param>
public DbLogsRepository(GainBargainContext context) : base(context) { }
/// <summary>
/// Creates a favorite-product repository that owns its own EF context.
/// Unlike the context-injected repositories in this project, this one
/// instantiates GainBargainContext itself.
/// NOTE(review): the repository owns this context — confirm it is disposed
/// (e.g. via a Dispose override) elsewhere in the class.
/// </summary>
public FavoriteProductRepository() { db = new GainBargainContext(); }
/// <summary>
/// Runs a full parsing pass: parses every registered source concurrently
/// (bounded by MAX_PROCESSING_SOURCES), inserts the parsed products, then
/// runs the de-duplication / cache-update procedures and records a summary
/// of the run. Re-entrant calls while a run is active are ignored.
/// </summary>
private void InitParsing()
{
    // Somebody wants to start parsing while a run is already active — ignore.
    if (ParsingProgress.IsParsing)
    {
        return;
    }

    // Sources to parse, with their markets eagerly loaded.
    var sources = db.ParserSources
        .Include(s => s.Market)
        .ToList();

    int addedCount = 0;

    // Persists the run summary; shared by the success and failure paths
    // (the original duplicated this call sequence verbatim in both).
    void SaveResult()
    {
        db.Database.Connection.Open();
        SaveParsingResult(
            db: db.Database.Connection,
            time: DateTime.Now.ToString("HH:mm dd.MM.yyyy"),
            added: addedCount,
            deleted: (int)(addedCount * 0.1),
            used: sources.Count,
            couldNot: 0);
        db.Database.Connection.Close();
    }

    try
    {
        // Tell the system that parsing has started.
        ParsingProgress.ParsingStarted(sources.Count);
        dbLogsRepository.Log(DbLog.LogCode.Info, $"Started parsing of {sources.Count} sources.");

        using (SemaphoreSlim concurrencySemaphore = new SemaphoreSlim(MAX_PROCESSING_SOURCES))
        {
            List<Task> parsings = new List<Task>();
            object parsedIncrLock = new object();

            foreach (ParserSource source in sources)
            {
                // Block until a worker slot is free.
                concurrencySemaphore.Wait();

                parsings.Add(Task.Run(async () =>
                {
                    int added = 0;
                    try
                    {
                        // Dedicated context per task for batched product inserts;
                        // disposed when the task completes (the original leaked it).
                        using (var ctxt = new GainBargainContext())
                        using (var productInsert = new ProductInsertCommand(ctxt))
                        {
                            // Insert every parsed product.
                            foreach (Product p in await Models.Parser.ParseAsync(source))
                            {
                                productInsert.ExecuteOn(p);
                                ++added;
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        dbLogsRepository.Log(DbLog.LogCode.Error, ex.Message);
                    }
                    finally
                    {
                        // Track progress under a lock; addedCount is shared
                        // across all parsing tasks.
                        lock (parsedIncrLock)
                        {
                            ParsingProgress.IncrementDoneSources();
                            addedCount += added;
                        }
                        // Release the slot whether the task succeeded or failed.
                        concurrencySemaphore.Release();
                    }
                }));
            }

            // Wait for all the tasks to complete.
            Task.WaitAll(parsings.ToArray());
        }

        // NOTE(review): the original literal contained a raw line break
        // (a compile error in a non-verbatim string) and the typo
        // "omptimization"; both fixed here.
        dbLogsRepository.Log(DbLog.LogCode.Info, "Finished parsing. Starting optimization.");

        // Raise the command timeout to 60 minutes for the long-running
        // maintenance procedures, restoring it even if one of them throws
        // (the original left the 60-minute timeout in place on failure).
        int? defTimeout = db.Database.CommandTimeout;
        try
        {
            db.Database.CommandTimeout = 60 * 60;
            // Remove already existing entries.
            db.Database.ExecuteSqlCommand("RemoveDuplicates");
            // Update the products cache.
            db.Database.ExecuteSqlCommand("UpdateProductsCache");
        }
        finally
        {
            db.Database.CommandTimeout = defTimeout;
        }

        SaveResult();

        dbLogsRepository.Log(DbLog.LogCode.Info, "Optimization is over. Parsing is done.");

        // In any case parsing must finish here.
        ParsingProgress.ParsingFinished();
    }
    catch (Exception ex)
    {
        dbLogsRepository.Log(DbLog.LogCode.Error, $"Non-parsing error: {ex.Message}.");
        SaveResult();

        // In any case parsing must finish here.
        ParsingProgress.ParsingFinished();
    }
}
/// <summary>
/// Creates a favorite-categories repository that owns its own EF context.
/// NOTE(review): the repository owns this context — confirm it is disposed
/// elsewhere in the class.
/// </summary>
public FavoriteCategoriesRepository() { db = new GainBargainContext(); }
/// <summary>
/// Creates an association repository that owns its own EF context.
/// NOTE(review): the repository owns this context — confirm it is disposed
/// elsewhere in the class.
/// </summary>
public AssociationRepository() { db = new GainBargainContext(); }