/// <summary>
/// Create a new instance of this command
/// </summary>
/// <param name="settingManager">The settings manager to use</param>
/// <param name="parentWindow">The parent window</param>
/// <param name="showIfLocalIsNewer">Show if the local version is newer</param>
public UpdateApplicationCommand(SettingManager settingManager, Window parentWindow, bool showIfLocalIsNewer)
{
    worker = new QueueWorker(settingManager);
    this.settingManager = settingManager;
    this.parentWindow = parentWindow;
    this.showIfLocalIsNewer = showIfLocalIsNewer;
}
public async Task CloseAsync()
{
    this._queueWorker.IsEnabled = false;

    try
    {
        await this.StopReadingAsync().ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        Debug.WriteLine("MediaReader.CloseAsync(): stop reading failed: " + ex.Message);
    }

    QueueWorker<WorkBuffer> queue = this._queueWorker;

    if (null != queue)
    {
        try
        {
            await queue.CloseAsync().ConfigureAwait(false);
        }
        catch (Exception ex)
        {
            Debug.WriteLine("MediaReader.CloseAsync(): queue close failed: " + ex.Message);
        }
    }

    this.FlushBuffers();

    if (null != this._mediaParser)
    {
        this._mediaParser.ProcessEndOfData();
    }

    if (this._bufferingManager != null && null != queue)
    {
        this._bufferingManager.Shutdown((IQueueThrottling)queue);
    }
}
/// <summary>
/// Create a new instance of this command
/// </summary>
/// <param name="settingManager">The setting manager to use</param>
public DownloadAllPatchesCommand(SettingManager settingManager)
{
    worker = new QueueWorker(settingManager);
    worker.ProgressChanged += (sender, data) => { TriggerProgressChanged(data); };
}
public MultiFileLoader(bool synchronousMode)
{
    _worker = new QueueWorker<LoadInfo>(null, LoadFile);
    _loadingPaths = new Dictionary<MsDataFileUri, int>();
    _synchronousMode = synchronousMode;
    _statusLock = new object();
    ResetStatus();
}
public async Task InitializeAsync(ISegmentManagerReaders segmentManagerReaders, Action checkConfiguration, Action checkForSamples,
    CancellationToken cancellationToken, Action<IProgramStreams> programStreamsHandler)
{
    this._checkConfiguration = checkConfiguration;

    Task startReaderTask = this._segmentReaders.Manager.StartAsync();

    MediaReader localReader = this;

    QueueWorker<WorkBuffer> queueWorker = new QueueWorker<WorkBuffer>(
        (Action<WorkBuffer>)(wi =>
        {
            IMediaParser mediaParser = localReader._mediaParser;

            if (null == wi)
            {
                mediaParser.ProcessEndOfData();
            }
            else
            {
                if (null != wi.Metadata)
                {
                    mediaParser.StartSegment(wi.Metadata);
                    wi.Metadata = (ISegmentMetadata)null;
                }

                mediaParser.ProcessData(wi.Buffer, 0, wi.Length);
            }
        }),
        (Action<WorkBuffer>)(buffer => this._blockingPool.Free(buffer)));

    this._queueWorker = queueWorker;
    this._callbackReader = new CallbackReader(segmentManagerReaders.Readers, new Action<WorkBuffer>(queueWorker.Enqueue), this._blockingPool);
    this._bufferingManager.Initialize((IQueueThrottling)queueWorker, checkForSamples);

    try
    {
        await startReaderTask.ConfigureAwait(false);

        ContentType contentType = this._segmentReaders.Manager.ContentType;

        if ((ContentType)null == contentType)
        {
            Debug.WriteLine("MediaReader.CreateReaderPipeline() unable to determine content type, defaulting to transport stream");
            contentType = ContentTypes.TransportStream;
        }
        else if (ContentTypes.Binary == contentType)
        {
            Debug.WriteLine("MediaReader.CreateReaderPipeline() detected binary content, defaulting to transport stream");
            contentType = ContentTypes.TransportStream;
        }

        MediaParserParameters mediaParserParameters = new MediaParserParameters();

        this._mediaParser = await this._mediaParserFactory.CreateAsync((IMediaParserParameters)mediaParserParameters, contentType, cancellationToken).ConfigureAwait(false);

        if (null == this._mediaParser)
        {
            throw new NotSupportedException("Unsupported content type: " + (object)contentType);
        }

        this._mediaParser.ConfigurationComplete += new EventHandler(this.ConfigurationComplete);
        this._mediaParser.Initialize(this._bufferingManager, programStreamsHandler);
        this._mediaParser.InitializeStream(this._segmentReaders.Manager.StreamMetadata);
    }
    catch (Exception)
    {
        this._bufferingManager.Shutdown((IQueueThrottling)queueWorker);
        throw;
    }
}
public void EnqueuedWorkIsExecuted()
{
    var value = new Synced<int>();
    var queueWorker = new QueueWorker();

    queueWorker.Do(() => value.Value = 10);

    value.WaitFor(i => i == 10);
}
private QueueWorker GetQueueWorker(string sessionId)
{
    if (workers.TryGetValue(sessionId, out var worker))
    {
        logging.Log($"Reusing worker {worker}");
        return worker;
    }

    worker = new QueueWorker();
    workers[sessionId] = worker; // cache the worker so later calls for this session reuse it
    logging.Log($"Creating new worker {worker}");
    return worker;
}
public ReportGenerationQueueManager(IMatchManager matchManager)
{
    _matchManager = matchManager;

    Debug.WriteLine($"ReportGenerationQueueManager: creating ReportGenerationNotifyIcon (Thread '{Thread.CurrentThread.Name}')");
    _notifyIcon = new ReportGenerationNotifyIcon();

    // AsyncOperation being created in the constructor gives us a way of letting code run on the UI (main) thread,
    // which is needed for NotifyIcon (& its BalloonTip) to generate click events
    _asyncOp = AsyncOperationManager.CreateOperation(null);

    _queueWorker = new QueueWorker(this);
    Start();
}
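The comment above relies on AsyncOperationManager capturing the SynchronizationContext of the thread that creates the AsyncOperation. A minimal sketch of that pattern (the UiMarshalExample class and the NotifyReportReady/ShowBalloonTip names are illustrative, not taken from the snippet):

using System.ComponentModel;

class UiMarshalExample
{
    private readonly AsyncOperation _asyncOp;

    public UiMarshalExample()
    {
        // Must be constructed on the UI thread so its SynchronizationContext is captured.
        _asyncOp = AsyncOperationManager.CreateOperation(null);
    }

    // Called from a worker thread; the callback runs on the thread that created _asyncOp.
    public void NotifyReportReady(string reportPath)
    {
        _asyncOp.Post(state => ShowBalloonTip((string)state), reportPath);
    }

    private void ShowBalloonTip(string reportPath)
    {
        // Safe to touch NotifyIcon / other UI state here.
    }
}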
private void CreateSendingQueue()
{
    if (sendingQueue != null)
    {
        return;
    }

    sendingQueue = new QueueWorker<Wrapper<XInputReport>>(1, (report) =>
    {
        if (report != null)
        {
            DeviceReportEvent threadSafeEvent = DeviceReport;
            threadSafeEvent?.Invoke(report.Value);
        }
    }, $"ReportThread {DevicePath}");
}
static void Main(string[] args)
{
    _cmdArgs = new CommandArgs();
    if (!_cmdArgs.ParseArgs(args))
        return;

    // Remove all SKYD files
    foreach (var skydFile in Directory.EnumerateFiles(Path.GetDirectoryName(_cmdArgs.FilePath) ?? string.Empty, "*.skyd"))
    {
        try
        {
            File.Delete(skydFile);
        }
        catch (Exception)
        {
            Console.Error.WriteLine("Error: Failed to remove existing file {0}", skydFile);
            return;
        }
    }

    _stopwatch = new Stopwatch();
    _stopwatch.Start();

    _queue = new QueueWorker<List<string>>(Run);
    _queue.RunAsync(_cmdArgs.Processes, "Start Skyline");

    string dataFilter = "*.*";
    if (!string.IsNullOrEmpty(_cmdArgs.DataFilter))
        dataFilter = _cmdArgs.DataFilter;

    string dataDir = ".";
    if (!string.IsNullOrEmpty(_cmdArgs.DataDir))
        dataDir = _cmdArgs.DataDir;

    var fileGroupCurrent = new List<string>();
    var fileGroups = new List<List<string>> { fileGroupCurrent };
    var dataFiles = Directory.EnumerateFiles(dataDir, dataFilter).ToArray();
    int filesPerProcess = (int)Math.Ceiling(((double)dataFiles.Length) / _cmdArgs.Processes);
    foreach (var dataFile in dataFiles)
    {
        if (fileGroupCurrent.Count >= filesPerProcess)
        {
            fileGroupCurrent = new List<string>();
            fileGroups.Add(fileGroupCurrent);
        }
        fileGroupCurrent.Add(dataFile);
    }
    _queue.Add(fileGroups, true);

    Console.WriteLine("Elapsed time: " + _stopwatch.Elapsed.ToString(@"mm\:ss"));
}
public FileLoadCompletionAccumulator(Action<IList<Completion>> complete, int threadCount, int loadingCount)
{
    _complete = complete;
    _threadCount = threadCount;
    _loadingCount = loadingCount;

    // If there are multiple threads or even multiple files make committing
    // completed caches asynchronous to the loading process. With multiple
    // threads this will also commit only every time all threads complete a
    // file.
    if (_threadCount > 1 && _loadingCount > 1)
    {
        _completionWorker = new QueueWorker<Completion>(null, ConsumeCompletion);
        _completionWorker.RunAsync(1, @"Commit loaded files");
        _accumulatedCompletions = new List<Completion>();
    }
}
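The comment describes decoupling commits from loading by pushing them through a single consumer thread. A rough BCL-only equivalent of that idea (BlockingCollection standing in for the project's QueueWorker; the CompletionCommitter class and CommitCompletion name are placeholders) could look like:

using System.Collections.Concurrent;
using System.Threading.Tasks;

class CompletionCommitter
{
    private readonly BlockingCollection<Completion> _pending = new BlockingCollection<Completion>();
    private readonly Task _consumer;

    public CompletionCommitter()
    {
        // Single consumer: commits happen off the loading threads, one at a time.
        _consumer = Task.Run(() =>
        {
            foreach (var completion in _pending.GetConsumingEnumerable())
                CommitCompletion(completion);
        });
    }

    public void Add(Completion completion) => _pending.Add(completion);

    public void Finish()
    {
        _pending.CompleteAdding();   // no more items will arrive
        _consumer.Wait();            // drain what is left
    }

    private void CommitCompletion(Completion completion) { /* write the completed cache */ }
}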
public static bool CancelTransaction(Transaction transaction)
{
    bool success = CreateForbiddenMatch(transaction.BuyerPostId, transaction.SellerPostId) > 0 &&
                   CreateForbiddenMatch(transaction.SellerPostId, transaction.BuyerPostId) > 0;

    if (success)
    {
        Dictionary<string, object> updateDictionary = new Dictionary<string, object>();
        updateDictionary.Add("IsDeleted", 1);
        UpdateTransaction(transaction.TransactionId, updateDictionary);

        PostHandler.updatePostState(transaction.SellerPostId, 0);
        PostHandler.updatePostState(transaction.BuyerPostId, 0);

        Profile buyer = ProfileHandler.GetProfile(transaction.BuyerId);
        Profile seller = ProfileHandler.GetProfile(transaction.SellerId);
        Textbook book = TextbookHandler.getTextbook(transaction.TextbookId);

        EmailUtility.SendEmail(
            Convert.ToString(buyer.Email),
            Convert.ToString(buyer.Name),
            "Your transaction has been cancelled",
            String.Format("Item: {0}<br/>{1} has cancelled the transaction with you!<br/>" +
                          "We'll try to match you with someone else for this item.",
                          book.BookTitle, seller.Name)
        );

        EmailUtility.SendEmail(
            Convert.ToString(seller.Email),
            Convert.ToString(seller.Name),
            "Your transaction has been cancelled",
            String.Format("Item: {0}<br/>{1} has cancelled the transaction with you!<br/>" +
                          "We'll try to match you with someone else for this item.",
                          book.BookTitle, buyer.Name)
        );

        Post sellerPost = PostHandler.getPost(transaction.SellerPostId);
        Post buyerPost = PostHandler.getPost(transaction.BuyerPostId);

        Task.Run(() => QueueWorker.AddPost(sellerPost));
        Task.Run(() => QueueWorker.AddPost(buyerPost));
    }

    return success;
}
protected void Application_Start()
{
    AreaRegistration.RegisterAllAreas();

    WebApiConfig.Register(GlobalConfiguration.Configuration);
    FilterConfig.RegisterGlobalFilters(GlobalFilters.Filters);
    RouteConfig.RegisterRoutes(RouteTable.Routes);
    BundleConfig.RegisterBundles(BundleTable.Bundles);
    AuthConfig.RegisterAuth();

    ViewEngines.Engines.Add(new RazorEngineCustomized());

    // Queue the Processor thread for BookQueue. Put this call
    // before Parallel.Invoke to begin processing as soon as
    // new posts are added to BookQueue.
    Task.Factory.StartNew(() => QueueWorker.ProcessPosts());

    // Register Cache Entry for 24 hour email reminder
    RegisterCacheEntry();
}
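QueueWorker.AddPost and QueueWorker.ProcessPosts (used here and in the CancelTransaction and CreatePost examples) suggest a static queue that a long-lived background task drains. The real implementation is not shown in this listing; a minimal sketch of that pattern, with BlockingCollection standing in for the actual BookQueue, might be:

using System.Collections.Concurrent;

// Hypothetical stand-in for the QueueWorker referenced above.
public static class QueueWorkerSketch
{
    private static readonly BlockingCollection<Post> BookQueue = new BlockingCollection<Post>();

    // Called from controllers and handlers whenever a new post is created or re-queued.
    public static void AddPost(Post post) => BookQueue.Add(post);

    // Runs for the lifetime of the application on the task started in Application_Start.
    public static void ProcessPosts()
    {
        foreach (var post in BookQueue.GetConsumingEnumerable())
        {
            // Try to match the post against existing buy/sell posts here.
        }
    }
}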
private QueueWorker Map(SqlDataReader reader)
{
    var result = new QueueWorker
    {
        Id = reader.GetInt64(0),
        Name = reader.GetString(1),
        Endpoint = reader.GetString(2),
        Method = reader.GetString(3),
        Priority = reader.GetInt16(4),
        Retries = reader.GetInt16(5),
        MaxProcessingTime = reader.GetInt16(6),
        BatchSize = reader.GetInt16(7),
        Enabled = reader.GetBoolean(8),
        RetryDelay = reader.GetInt16(9),
        RetryDelayMultiplier = reader.GetInt16(10),
        ApiKey = reader.IsDBNull(11) ? null : reader.GetString(11)
    };

    return result;
}
public void Dispose()
{
    if (0 != Interlocked.Exchange(ref this._isDisposed, 1))
    {
        return;
    }

    using (this._callbackReader)
    { }
    using (this._queueWorker)
    { }
    using (this._blockingPool)
    { }
    using (this._mediaParser)
    { }

    this._callbackReader = (CallbackReader)null;
    this._queueWorker = (QueueWorker<WorkBuffer>)null;
    this._blockingPool = (IBlockingPool<WorkBuffer>)null;
    this._mediaParser = (IMediaParser)null;
    this._bufferingManager = (IBufferingManager)null;
    this._segmentReaders = (ISegmentManagerReaders)null;
}
public async Task StopAsync()
{
    this._queueWorker.IsEnabled = false;

    ConfiguredTaskAwaitable configuredTaskAwaitable = this.StopReadingAsync().ConfigureAwait(false);
    await configuredTaskAwaitable;

    QueueWorker<WorkBuffer> queue = this._queueWorker;

    if (null != queue)
    {
        try
        {
            configuredTaskAwaitable = queue.ClearAsync().ConfigureAwait(false);
            await configuredTaskAwaitable;
        }
        catch (Exception ex)
        {
            Debug.WriteLine("MediaReader.StopAsync(): queue clear failed: " + ex.Message);
        }
    }

    this.FlushBuffers();
}
// This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
public void Configure(IApplicationBuilder app, IHostingEnvironment env, IApplicationLifetime lifetime)
{
    if (env.IsDevelopment())
    {
        app.UseDeveloperExceptionPage();
    }
    else
    {
        app.UseExceptionHandler("/Home/Error");
    }

    app.UseStaticFiles();
    app.UseAuthentication();

    app.UseMvc(routes =>
    {
        routes.MapRoute(
            name: "default",
            template: "{controller=Home}/{action=Index}/{id?}");
    });

    var services = app.ApplicationServices;
    var jobService = services.GetRequiredService<IJobService>();
    var jobRegistry = services.GetRequiredService<IJobRegistry>();
    var lockFactory = services.GetRequiredService<ILockFactory>();
    var aciService = services.GetRequiredService<IACIService>();
    var loggerFactory = services.GetRequiredService<ILoggerFactory>();

    var cronWorker = new CronWorker(jobService, jobRegistry, lockFactory, aciService, loggerFactory.CreateLogger("CronWorker"));
    var queueWorker = new QueueWorker(jobService, jobRegistry, lockFactory, aciService, loggerFactory.CreateLogger("QueueWorker"));

    lifetime.ApplicationStopping.Register(() =>
    {
        cronWorker.Stop();
        queueWorker.Stop();
    });

    cronWorker.Start();
    queueWorker.Start();
}
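The Start/Stop pair registered with ApplicationStopping implies a worker that owns its own loop and can be shut down cooperatively. The internals of this QueueWorker are not shown here; one plausible shape, sketched only as an assumption, is a cancellation-token-driven polling loop:

using System;
using System.Threading;
using System.Threading.Tasks;

// Hypothetical shape of a stoppable worker; names and the polling interval are assumptions.
public sealed class PollingQueueWorker
{
    private readonly CancellationTokenSource _cts = new CancellationTokenSource();
    private Task _loop;

    public void Start()
    {
        _loop = Task.Run(async () =>
        {
            while (!_cts.Token.IsCancellationRequested)
            {
                // Dequeue and run pending jobs here.
                await Task.Delay(TimeSpan.FromSeconds(5), _cts.Token).ConfigureAwait(false);
            }
        });
    }

    public void Stop()
    {
        _cts.Cancel();
        try { _loop?.Wait(); } catch (AggregateException) { /* expected on cancellation */ }
    }
}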
public void WorkQueueIsSwappedCorrectlyOnLongRunningWorkAsync()
{
    var value1 = new Synced<bool>();
    var value2 = new Synced<bool>();
    var queueWorker = new QueueWorker();

    queueWorker.Do(async () =>
    {
        await Task.Delay(500);
        value1.Value = true;
    });

    queueWorker.Do(() =>
    {
        value2.Value = true;
        return Task.CompletedTask;
    });

    value2.WaitFor(b => b);
    value1.Value.ShouldBe(true);
}
/// <inheritdoc/>
public override void Execute(object parameter)
{
    if (parameter is LaunchGameData gameData)
    {
        ITaskFactory taskFactory = new LaunchGameFactory(
            manager.GetValue<string>("VersionInformation"),
            gameData.Patch,
            gameData.Speed
        );

        QueueWorker worker = new QueueWorker(settingManager);
        worker.ProgressChanged += (sender, data) => { TriggerProgressChanged(data); };

        manager.SaveSettings();

        Task<bool> startTask = worker.AsyncExecuteTasks(taskFactory);
        startTask.ContinueWith((result) => { ExecutionDone(); });
    }
}
private void btnStart_Click(object sender, EventArgs e)
{
    btnStart.Enabled = false;
    _timeColor = lblTime.ForeColor;
    lblTime.ForeColor = Color.Red;
    _stopwatch = new Stopwatch();
    _stopwatch.Start();
    _timer = new Timer { Interval = 500 };
    _timer.Tick += (s, e1) => lblTime.Text = _stopwatch.Elapsed.ToString(@"mm\:ss"); // Not L10N
    _timer.Start();

    if (File.Exists(SkydFile))
        File.Delete(SkydFile);

    _log = Log.Replace("#", DateTime.Now.ToString("yyyy-MM-dd_HH-mm-ss"));
    _dataDir = Path.Combine(RootDir, comboModel.Text);
    _uiIndex = comboUI.SelectedIndex;

    _queue = new QueueWorker<string>(Run) { CompleteAction = RunFinished };
    _queue.RunAsync((int)numericMaxProcesses.Value, "Start Skyline");
    _queue.Add(Directory.EnumerateFiles(_dataDir, comboModel.Text == "wiff" ? "*.wiff" : "*.mz5"));
}
public void WorkQueueIsSwappedCorrectlyOnLongRunningWork()
{
    var value1 = new Synced<bool>();
    var value2 = new Synced<bool>();
    var queueWorker = new QueueWorker();

    queueWorker.Do(() =>
    {
        Thread.Sleep(200);
        value1.Value = true;
    });

    Thread.Sleep(100);

    queueWorker.Do(() =>
    {
        Thread.Sleep(200);
        value2.Value = true;
    });

    value2.WaitFor(b => b);
    value1.Value.ShouldBe(true);
}
public void StartReading()
{
    lock (readingLock)
    {
        if (DeviceReport == null)
        {
            reading = false;
        }

        if (reading)
        {
            return;
        }

        reading = true;

        sendingQueue = new QueueWorker<Wrapper<HidReport>>(5, (report) =>
        {
            if (report != null)
            {
                DeviceReportEvent threadSafeEvent = DeviceReport;
                threadSafeEvent?.Invoke(report.Value);
            }
        }, $"ReportThread {GetUniqueKey(DevicePath)}");

        readingThread = new Thread(() =>
        {
            while (reading)
            {
                if (DeviceReport == null)
                {
                    break;
                }

                try
                {
                    HidSharp.HidStream _stream = GetStream();
                    lock (_stream)
                    {
                        byte[] data = _stream.Read();

                        // Earlier approaches (a dedicated Thread or ThreadPool work item per report)
                        // were replaced by handing the report to the sending queue.
                        sendingQueue.EnqueueTask(new HidReport()
                        {
                            ReportId = data[0],
                            ReportType = HidReportType.Input,
                            ReportBytes = data.Skip(1).ToArray()
                        });
                    }

                    if (PollingRate > 0)
                    {
                        // PollingRate is treated as a delay in milliseconds: sleep in
                        // one-second chunks, then the remainder.
                        int SleepTime = 0;
                        while (SleepTime < (PollingRate / 1000))
                        {
                            Thread.Sleep(1000);
                            SleepTime++;
                        }
                        Thread.Sleep(PollingRate % 1000);
                    }
                }
                catch (System.TimeoutException)
                {
                }
                catch (Exception) // for example System.IO.IOException: 'Operation failed after some time.'
                {
                    reading = false;
                }
            }

            reading = false;
        });
        readingThread.Start();
    }
}
/// <summary>
/// Collects statistics on how much space savings minimizing will achieve, and (if outStream
/// is not null) writes out the minimized cache file.
/// </summary>
public void Minimize(Settings settings, ProgressCallback progressCallback, Stream outStream,
    FileStream outStreamScans = null, FileStream outStreamPeaks = null, FileStream outStreamScores = null)
{
    var writer = outStream == null
        ? null
        : new Writer(ChromatogramCache, settings.CacheFormat, outStream, outStreamScans, outStreamPeaks, outStreamScores);
    var statisticsCollector = new MinStatisticsCollector(this);
    bool readChromatograms = settings.NoiseTimeRange.HasValue || writer != null;

    var chromGroupHeaderToIndex = new Dictionary<long, int>(ChromGroupHeaderInfos.Count);
    for (int i = 0; i < ChromGroupHeaderInfos.Count; i++)
    {
        var cghi = ChromGroupHeaderInfos[i];
        chromGroupHeaderToIndex.Add(cghi.LocationPoints, i);
    }

    var chromGroups = new ChromatogramGroupInfo[ChromGroupHeaderInfos.Count];
    var transitionGroups = new List<TransitionGroupDocNode>[ChromGroupHeaderInfos.Count];

    foreach (var nodePep in Document.Molecules)
    {
        foreach (var nodeGroup in nodePep.TransitionGroups)
        {
            foreach (var chromGroupInfo in ChromatogramCache.LoadChromatogramInfos(nodePep, nodeGroup, _tolerance, null))
            {
                int headerIndex = chromGroupHeaderToIndex[chromGroupInfo.Header.LocationPoints];
                if (chromGroups[headerIndex] == null)
                {
                    chromGroups[headerIndex] = chromGroupInfo;
                    transitionGroups[headerIndex] = new List<TransitionGroupDocNode>();
                }
                transitionGroups[headerIndex].Add(nodeGroup);
            }
        }
    }

    var minimizer = new QueueWorker<MinimizeParams>(null, MinimizeAndWrite);
    minimizer.RunAsync(MINIMIZING_THREADS, @"Minimizing/Writing", MAX_GROUP_READ_AHEAD);

    for (int iHeader = 0; iHeader < ChromGroupHeaderInfos.Count; iHeader++)
    {
        var chromGroupInfo = chromGroups[iHeader];
        IList<TransitionGroupDocNode> transitionGroupDocNodes;
        if (chromGroupInfo == null)
        {
            chromGroupInfo = ChromatogramCache.LoadChromatogramInfo(ChromGroupHeaderInfos[iHeader]);
            transitionGroupDocNodes = new TransitionGroupDocNode[0];
        }
        else
        {
            transitionGroupDocNodes = transitionGroups[iHeader];
        }

        if (readChromatograms)
        {
            try
            {
                chromGroupInfo.ReadChromatogram(ChromatogramCache, true);
            }
            catch (Exception exception)
            {
                Trace.TraceWarning(@"Unable to read chromatogram {0}", exception);
            }
        }

        if (minimizer.Exception != null)
        {
            break;
        }

        minimizer.Add(new MinimizeParams(writer, settings, chromGroupInfo, transitionGroupDocNodes, progressCallback, statisticsCollector));

        // Null out the ChromGroup in our array so it can be garbage collected.
        chromGroups[iHeader] = null;
    }

    minimizer.DoneAdding(true);
    if (minimizer.Exception != null)
    {
        throw minimizer.Exception;
    }

    statisticsCollector.ReportProgress(progressCallback, true);
    if (writer != null)
    {
        writer.WriteEndOfFile();
    }
}
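The MAX_GROUP_READ_AHEAD argument passed to RunAsync bounds how many chromatogram groups can be queued ahead of the minimizing threads, so reading never races far ahead of writing. The same back-pressure idea can be illustrated with a bounded BlockingCollection (a sketch only; WorkItem and Process are placeholder names, not the Skyline types):

using System.Collections.Concurrent;
using System.Threading.Tasks;

public sealed class WorkItem { }

static class BoundedReadAheadExample
{
    public static void Run(WorkItem[] items, int workerCount, int readAhead)
    {
        using (var queue = new BlockingCollection<WorkItem>(boundedCapacity: readAhead))
        {
            var workers = new Task[workerCount];
            for (int i = 0; i < workerCount; i++)
            {
                workers[i] = Task.Run(() =>
                {
                    foreach (var item in queue.GetConsumingEnumerable())
                        Process(item);
                });
            }

            foreach (var item in items)
                queue.Add(item);   // blocks once readAhead items are pending

            queue.CompleteAdding();
            Task.WaitAll(workers);
        }
    }

    static void Process(WorkItem item) { /* minimize and write one group */ }
}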
private void RunFinished()
{
    _queue = null;
    _stopwatch = null;

    if (_timer != null)
    {
        _timer.Stop();
        _timer = null;
        Invoke(new Action(() => btnStart.Enabled = true));
    }

    lblTime.ForeColor = _timeColor;
}
public void Dispose()
{
    if (0 != Interlocked.Exchange(ref _isDisposed, 1))
        return;

    using (_callbackReader)
    { }
    using (_queueWorker)
    { }
    using (_blockingPool)
    { }
    using (_mediaParser)
    { }

    _callbackReader = null;
    _queueWorker = null;
    _blockingPool = null;
    _mediaParser = null;
    _bufferingManager = null;
    _segmentReaders = null;
}
public async Task InitializeAsync(ISegmentManagerReaders segmentManagerReaders, Action checkConfiguration, Action checkForSamples,
    CancellationToken cancellationToken, Action<IProgramStreams> programStreamsHandler)
{
    _checkConfiguration = checkConfiguration;

    var startReaderTask = _segmentReaders.Manager.StartAsync();

    var localReader = this;

    var queueWorker = new QueueWorker<WorkBuffer>(
        wi =>
        {
            //Debug.WriteLine("MediaReader dequeued " + wi);

            var mediaParser = localReader._mediaParser;

            if (null == wi)
                mediaParser.ProcessEndOfData();
            else
            {
                if (null != wi.Metadata)
                {
                    mediaParser.StartSegment(wi.Metadata);
                    wi.Metadata = null;
                }

                mediaParser.ProcessData(wi.Buffer, 0, wi.Length);
            }
        },
        buffer => _blockingPool.Free(buffer));

    _queueWorker = queueWorker;
    _callbackReader = new CallbackReader(segmentManagerReaders.Readers, queueWorker.Enqueue, _blockingPool);

    _bufferingManager.Initialize(queueWorker, checkForSamples);

    try
    {
        await startReaderTask.ConfigureAwait(false);

        var streamContentType = _segmentReaders.Manager.StreamContentType;

        if (null == streamContentType)
        {
            Debug.WriteLine("MediaReader.CreateReaderPipeline() unable to determine content type, defaulting to transport stream");
            streamContentType = ContentTypes.TransportStream;
        }
        else if (ContentTypes.Binary == streamContentType)
        {
            Debug.WriteLine("MediaReader.CreateReaderPipeline() detected binary content, defaulting to transport stream");
            streamContentType = ContentTypes.TransportStream;
        }

        var mediaParserParameters = new MediaParserParameters();

        _mediaParser = await _mediaParserFactory.CreateAsync(mediaParserParameters, streamContentType, cancellationToken).ConfigureAwait(false);

        if (null == _mediaParser)
            throw new NotSupportedException("Unsupported stream content type: " + streamContentType);

        _mediaParser.ConfigurationComplete += ConfigurationComplete;

        _mediaParser.Initialize(_bufferingManager, programStreamsHandler);

        _mediaParser.InitializeStream(_segmentReaders.Manager.StreamMetadata);
    }
    catch (Exception)
    {
        _bufferingManager.Shutdown(queueWorker);

        throw;
    }
}
public ActionResult CreatePost(CreatePostModel model)
{
    if (ModelState.IsValid)
    {
        int textbookId = model.TextBookId;

        // if we have a new textbook, store it
        if (model.IsNewBook)
        {
            // proceed if course id exists; otherwise create the course first
            Course course = CourseHandler.getCourseByName(model.CourseName);
            if (course == null)
            {
                model.CourseId = CourseHandler.CreateCourse(model.CourseName);
            }

            var newTextbook = new Textbook(
                -1, // id doesn't matter here
                model.BookTitle,
                model.ISBN,
                model.Author,
                model.CourseId,
                model.CourseName,
                model.BookImageUrl,
                null,
                1,
                0,
                DateTime.Now,
                DateTime.Now
            );

            textbookId = TextbookHandler.createTextBook(newTextbook);
        }

        int profileId = ProfileHandler.GetProfileId(User.Identity.Name);

        int price = model.Price;
        ActionBy actionBy = model.ActionBy;
        if (model.IsNegotiable)
        {
            if (actionBy == ActionBy.Buyer)
            {
                price = int.MaxValue;
            }
            else
            {
                price = 0;
            }
        }

        var newPost = new Post(
            -1, // id doesn't matter here
            profileId,
            textbookId,
            actionBy,
            price,
            model.BookCondition,
            0,
            1,
            0,
            DateTime.Now,
            DateTime.Now
        );

        int postId = PostHandler.createPost(newPost);
        newPost.PostId = postId;

        Task.Run(() => QueueWorker.AddPost(newPost));

        // TODO: redirect to special "you've successfully created post" page
        // with links to create another buy/sell post
        return RedirectToAction("Index", "Home");
    }

    // If we got this far, something failed, redisplay form
    IEnumerable<Textbook> textBookCollection = TextbookHandler.getAllTextbooks();

    model.PostTypes = SelectListUtility.getPostTypes();
    model.BookConditions = SelectListUtility.getBookConditions();
    model.Textbooks = textBookCollection;

    return View("CreatePost", model);
}