// Stub implementation: ignores the supplied page entirely and hands back
// a fresh PageDetail whose only populated field is a newly generated Id.
public PageDetail Add(PageDetail page)
{
    var created = new PageDetail { Id = Guid.NewGuid() };
    return created;
}
/// <summary>
/// Squeezes every entry already on <paramref name="curr"/> (both the
/// header+first-horse pairs and the plain horses) so that the incoming
/// header/first-horse pair <paramref name="hf"/> also fits exactly, then
/// adds it to the page. The caller is responsible for marking the last
/// entry on the page.
/// </summary>
/// <param name="curr">Page being filled.</param>
/// <param name="hf">Header plus first horse to squeeze onto the page.</param>
/// <param name="entryHeight">Combined height of the incoming entry.</param>
/// <param name="depthNotYetUsed">Vertical space still free on the page.</param>
private static void squeezeHeaderFirsthorseThenAdd(PageDetail curr, HeaderAndFirstHorse hf, double entryHeight, double depthNotYetUsed)
{
    // Ratio by which all entries (existing and new) must shrink so the
    // page content plus the incoming entry exactly fills PageHeight.
    double shrinkFactor = Constants.PageHeight / (Constants.PageHeight - depthNotYetUsed + entryHeight);

    // 1. Shrink the plain horses already on the page.
    curr.secondAndNextHorses.ForEach(h => h.newHeight = shrinkFactor * h.height);

    // 2. Shrink the header + first-horse pairs already on the page.
    if (curr.seeHeaderAndFirstHorseList != null)
    {
        curr.seeHeaderAndFirstHorseList.ForEach(entry =>
        {
            entry.firstHorse.newHeight = shrinkFactor * entry.firstHorse.height;
            entry.header.newHeight = shrinkFactor * entry.header.height;
        });
    }

    // 3. Shrink the incoming pair before placing it.
    hf.firstHorse.newHeight = shrinkFactor * hf.firstHorse.height;
    // BUG FIX: the header's new height was previously computed from
    // hf.firstHorse.height; scale it from its own height, consistent with
    // shrinkHorsesOnPagetoAddaHeader1stHorse.
    hf.header.newHeight = shrinkFactor * hf.header.height;

    // Add the pair to the page.
    curr.addHeaderAndFirstHorse(hf);

    // Update the page statistics: the page is now exactly full.
    curr.runningDepth = Constants.PageHeight;
    curr.depthNotYetUsed = 0;
    hf.firstHorse.positionOnPage.leftspaceatEnd = curr.depthNotYetUsed;
    curr.isthereAheader = true;
    curr.entryCount = curr.entryCount + 2; // header + first horse

    // Calling method is responsible for flagging the last entry on the page.
}
// Loads the page and its detail record for the edit view; the page itself
// travels via ViewBag.PAGE while the detail becomes the view model.
public ActionResult EditContent(int id)
{
    ViewBag.PAGE = _pageService.GetByPageID(id);
    PageDetail pageDetail = _detailService.GetByPageID(id);
    return View(pageDetail);
}
/// <summary>
/// Copies the editable fields from <paramref name="detail"/> onto
/// <paramref name="page"/> and persists the page.
/// </summary>
/// <param name="page">Core page entity to update.</param>
/// <param name="detail">Incoming values from the client.</param>
/// <returns>Always true; persistence problems surface from page.Save().</returns>
static bool Save(Page page, PageDetail detail)
{
    page.Title = detail.Title;
    page.DateCreated = DateTime.ParseExact(detail.DateCreated, "yyyy-MM-dd HH:mm", CultureInfo.InvariantCulture);
    page.IsPublished = detail.IsPublished;
    page.ShowInList = detail.ShowInList;
    page.IsDeleted = detail.IsDeleted;
    page.Content = detail.Content;
    page.Description = GetDescription(detail.Description, detail.Content);
    page.Keywords = detail.Keywords;
    page.IsFrontPage = detail.IsFrontPage;

    // If the slug changed it must stay unique across pages.
    if (page.Slug != detail.Slug)
    {
        page.Slug = GetUniqueSlug(detail.Slug);
    }

    if (detail.Parent != null && detail.Parent.OptionValue != null)
    {
        // TryParse instead of Parse + catch(Exception): a malformed parent
        // ID is an expected input condition, not an exceptional one.
        if (Guid.TryParse(detail.Parent.OptionValue, out var parentId))
        {
            page.Parent = parentId;
        }
        else
        {
            Utils.Log("Error parsing parent ID while saving page");
        }
    }

    page.Save();
    return true;
}
/// <summary>
/// Initialize a new wrapper and return the current page.
/// </summary>
/// <param name="page">The current Page (null when the section is empty)</param>
/// <param name="ns">The namespace of the current page</param>
/// <param name="detail">The desired verbosity of the XML</param>
public OneNote(out Page page, out XNamespace ns, PageDetail detail = PageDetail.Selection)
    : this()
{
    page = GetPage(detail);

    // GetPage may yield null for an empty section, so guard the
    // namespace lookup.
    ns = page == null ? null : page.Namespace;
}
/// <summary>
/// Flags the final horse on <paramref name="curr"/> as the last entry on
/// the page, if the page holds any horse at all.
/// </summary>
private static void markLastHorseOnPage(PageDetail curr)
{
    // Cache the lookup instead of calling getLastHorseOnPage() twice.
    var lastHorse = curr.getLastHorseOnPage();
    if (lastHorse != null)
    {
        lastHorse.positionOnPage.where = EntryLocationOnPage.LastEntryOnPage;
    }
}
/// <summary>
/// Builds one DynamicPlaceSections entry per category group. For pure
/// category-level pages of event types that are neither Online nor
/// InRealLife, places are sampled per country (up to 4 countries x 4
/// random places each); otherwise up to 16 random places are taken from
/// the whole group.
/// </summary>
public List <DynamicPlaceSections> GetAllDynamicSections(IOrderedEnumerable <IGrouping <string, PlaceDetail> > categoryGroup, FIL.Contracts.Enums.MasterEventType masterEventType, PageDetail pageDetail, bool IsMainCategory = false)
{
    var sections = new List <DynamicPlaceSections>();

    foreach (var catGroup in categoryGroup)
    {
        var places = new List <PlaceDetail>();

        // `== true` kept as-is in case IsCategoryLevel is a nullable bool.
        bool categoryOnlyLevel = pageDetail.IsCategoryLevel == true && !pageDetail.IsCityLevel && !pageDetail.IsStateLevel && !pageDetail.IsCountryLevel;
        bool physicalEventType = masterEventType != Contracts.Enums.MasterEventType.Online && masterEventType != Contracts.Enums.MasterEventType.InRealLife;

        if (categoryOnlyLevel && pageDetail.PageType == Contracts.Enums.PageType.Category && physicalEventType)
        {
            // Sample up to 4 random places from each of the first 4 countries.
            foreach (var countryGroup in catGroup.GroupBy(s => s.CountryName).Take(4))
            {
                places.AddRange(countryGroup.OrderBy(a => Guid.NewGuid()).Take(4).ToList());
            }
        }
        else
        {
            // OrderBy(Guid.NewGuid()) acts as a cheap random shuffle.
            places.AddRange(catGroup.OrderBy(a => Guid.NewGuid()).Take(16).ToList());
        }

        sections.Add(GetDynamicPlaceSection(places, masterEventType, catGroup.Key, pageDetail, IsMainCategory));
    }

    return sections;
}
/// <summary>
/// Shrinks everything already placed on the page plus the incoming
/// header/first-horse pair so the page is exactly filled, then records the
/// pair as the page's last entry.
/// </summary>
/// <param name="curr">Page being filled.</param>
/// <param name="hf">Header plus first horse to add.</param>
/// <param name="entryHeight">Combined height of the incoming entry.</param>
/// <param name="depthNotYetUsed">Vertical space still free on the page.</param>
private static void shrinkHorsesOnPagetoAddaHeader1stHorse(PageDetail curr, HeaderAndFirstHorse hf, double entryHeight, double depthNotYetUsed)
{
    // Factor that scales current content + new entry down to one page height.
    double factor = Constants.PageHeight / (Constants.PageHeight - depthNotYetUsed + entryHeight);

    // Shrink the plain horses already placed.
    foreach (var horse in curr.secondAndNextHorses)
    {
        horse.newHeight = factor * horse.height;
    }

    // Shrink the header + first-horse pairs already placed.
    if (curr.seeHeaderAndFirstHorseList != null)
    {
        foreach (var pair in curr.seeHeaderAndFirstHorseList)
        {
            pair.header.newHeight = factor * pair.header.height;
            pair.firstHorse.newHeight = factor * pair.firstHorse.height;
        }
    }

    // Shrink the incoming pair, mark it as the last entry, and update the
    // page-level statistics (the page is now exactly full).
    hf.header.newHeight = factor * hf.header.height;
    hf.firstHorse.newHeight = factor * hf.firstHorse.height;
    curr.isthereAheader = true;
    curr.runningDepth = Constants.PageHeight;
    curr.depthNotYetUsed = 0;
    hf.firstHorse.positionOnPage.leftspaceatEnd = curr.depthNotYetUsed;
    hf.firstHorse.positionOnPage.where = EntryLocationOnPage.LastEntryOnPage;
    hf.firstHorse.pgno = curr.pgNum;
    curr.addHeaderAndFirstHorse(hf);
    curr.entryCount = curr.entryCount + 2;
}
/// <summary>
/// Update page
/// </summary>
/// <param name="page">Page to update</param>
/// <param name="action">Action to execute ("publish", "unpublish", or anything else for a full save)</param>
/// <returns>True on success; false when the page does not exist or saving fails</returns>
public bool Update(PageDetail page, string action)
{
    if (!Security.IsAuthorizedTo(BlogEngine.Core.Rights.CreateNewPages))
    {
        throw new System.UnauthorizedAccessException();
    }

    var corePage = (from p in Page.Pages.ToList()
                    where p.Id == page.Id
                    select p).FirstOrDefault();

    // BUG FIX: previously only the full-save path checked for null, so an
    // unknown ID combined with "publish"/"unpublish" threw
    // NullReferenceException on corePage.IsPublished.
    if (corePage == null)
    {
        return false;
    }

    if (action == "publish")
    {
        corePage.IsPublished = true;
        corePage.Save();
        return true;
    }

    if (action == "unpublish")
    {
        corePage.IsPublished = false;
        corePage.Save();
        return true;
    }

    return Save(corePage, page);
}
// Saves edits to an existing page: validates the model, derives the slug,
// enforces title/slug uniqueness, and persists the changes.
public ActionResult EditPage(PageVM model)
{
    // Reject invalid input up front.
    if (!ModelState.IsValid)
    {
        return View(model);
    }

    using (DefaultConnection db = new DefaultConnection())
    {
        int id = model.Id;

        // Default slug; only overridden for non-home pages.
        string slug = "home";

        // Load the page being edited.
        PageDetail dto = db.Pages.Find(id);

        dto.Title = model.Title;

        // Derive the slug from the explicit value or, failing that, the title.
        if (model.Slug != "home")
        {
            if (string.IsNullOrWhiteSpace(model.Slug))
            {
            slug = model.Title.Replace(" ", "-").ToLower();
            }
            else
            {
                slug = model.Slug.Replace(" ", "-").ToLower();
            }
        }

        // Uniqueness check for title and slug in a single query
        // (previously two separate round-trips to the database).
        if (db.Pages.Where(x => x.Id != id).Any(x => x.Title == model.Title || x.Slug == slug))
        {
            ModelState.AddModelError("", "That title or slug already exists.");
            return View(model);
        }

        // Copy the remaining fields and save.
        dto.Slug = slug;
        dto.Body = model.Body;
        dto.HasSidebar = model.HasSidebar;

        db.SaveChanges();
    }

    // Confirmation message for the next request.
    TempData["SM"] = "You have edited the page!";

    return RedirectToAction("EditPage");
}
/// <summary>
/// Maps an entity row onto the view model, field for field.
/// </summary>
/// <param name="row">Source entity to copy from.</param>
public PageVM(PageDetail row)
{
    Id = row.Id;
    Title = row.Title;
    Slug = row.Slug;
    Body = row.Body;
    Sorting = row.Sorting;
    HasSidebar = row.HasSidebar;
}
/// <summary>
/// Gets the raw XML of the specified page.
/// </summary>
/// <param name="pageId">The unique ID of the page</param>
/// <param name="detail">The desired verbosity of the XML</param>
/// <returns>A string specifying the root XML of the page, or null when no ID is given</returns>
public string GetPageXml(string pageId, PageDetail detail = PageDetail.Basic)
{
    if (string.IsNullOrEmpty(pageId))
    {
        return null;
    }

    // Cast assumes PageDetail values map 1:1 onto the interop PageInfo
    // enum — TODO(review): confirm against the PageInfo declaration.
    onenote.GetPageContent(pageId, out var xml, (PageInfo)detail, XMLSchema.xs2013);
    return xml;
}
// Deletes both the page and its detail record, refreshes the page list,
// and returns to the AddPage view.
public ActionResult DeletePage(int id)
{
    Page page = _pageService.GetByPageID(id);
    PageDetail detail = _detailService.GetByPageID(id);

    // Detail is removed first, then the page itself.
    _detailService.Delete(detail);
    _pageService.Delete(page);

    ViewBag.Pages = _pageService.GetList();
    return RedirectToAction("AddPage");
}
// POST endpoint: stores a new page and reports 201 Created with the saved
// entity, or 304 NotModified when the repository rejects it.
public HttpResponseMessage Post([FromBody] PageDetail item)
{
    var added = repository.Add(item);
    return added == null
        ? Request.CreateResponse(HttpStatusCode.NotModified)
        : Request.CreateResponse(HttpStatusCode.Created, added);
}
// Places a horse whose height exceeds a full page: the remainder of the
// current page is consumed first, then zero or more completely full
// "middle" pages are created, and finally a partially-filled end page if
// the horse does not land exactly on a page boundary.
// NOTE(review): statement order matters throughout — depthNotYetUsed is
// read before being reset, and pgNumAtLastline is mutated while creating
// pages. Do not reorder.
private void fitA_Large_horse(PageDetail curr, Horse ahorse, List <PageDetail> pages) //horse bigger than full pageHeight
{
    // A fully unused current page means the horse opens that page.
    if (curr.depthNotYetUsed == Config.Constants.PageHeight) { ahorse.positionOnPage.where = EntryLocationOnPage.FirstEntryOnPage; }
    //curr.secondAndNextHorses.Add(ahorse); // ???? here or at the ending page bring down
    // part of stats go in end page
    // endpageCount is 1 only when the horse overshoots its last full page,
    // leaving a partially-used final page.
    int endpageCount = (((ahorse.height - curr.depthNotYetUsed) % Config.Constants.PageHeight) > 0) ? 1 : 0; //0 or 1
    // Number of completely full pages the horse spans after the begin page.
    int middlePageCount = Convert.ToInt16(Math.Floor((ahorse.height - curr.depthNotYetUsed) / Config.Constants.PageHeight)); // on or more
    int totalpageSpanned = 1 + middlePageCount + endpageCount; //begin page + middle pages + end page
    // for debugging
    Debug.Print("totalpageSpanned: " + totalpageSpanned);
    // Remember how much of the horse lands on the begin page — needed
    // below after depthNotYetUsed has been reset.
    double contentGoesOnBeginPage = curr.depthNotYetUsed;
    //beginning page
    //no horse statistics
    curr.runningDepth = Config.Constants.PageHeight;
    curr.depthNotYetUsed = 0;
    curr.doesVeryLargeHorseBegin = true;
    //full middle pages
    for (int i = 1; i <= middlePageCount; i++)
    {
        PageDetail midPg = new PageDetail(++pgNumAtLastline);
        pages.Add(midPg);
        midPg.runningDepth = Config.Constants.PageHeight;
        midPg.depthNotYetUsed = 0;
        midPg.doesVeryLargeHorseMiddle = true;
        if ((endpageCount == 0) && (i == middlePageCount))
        {
            //horse fit at the endline of this page : use case 1
            // The horse ends exactly on this page's boundary, so this
            // middle page doubles as the end page and owns the horse.
            midPg.secondAndNextHorses.Add(ahorse);
            ahorse.positionOnPage.leftspaceatEnd = 0;
            midPg.doesVeryLargeHorseEnd = true;
            midPg.entryCount = midPg.entryCount + 1;
            ahorse.pgno = midPg.pgNum;
        }
    }//for
    //ending page (partly empty)
    if (endpageCount == 1) // has an ending page: use case horse fit on a last space but has space at the end
    {
        PageDetail lastPg = new PageDetail(++pgNumAtLastline);
        pages.Add(lastPg);
        lastPg.secondAndNextHorses.Add(ahorse);
        // Whatever height was not consumed by the begin page and the full
        // middle pages lands on this final page.
        lastPg.runningDepth = lastPg.runningDepth + (ahorse.height - contentGoesOnBeginPage - (middlePageCount * Constants.PageHeight));
        lastPg.depthNotYetUsed = lastPg.bottom - lastPg.runningDepth;
        lastPg.doesVeryLargeHorseEnd = true;
        ahorse.positionOnPage.leftspaceatEnd = lastPg.depthNotYetUsed;
        lastPg.entryCount = lastPg.entryCount + 1;
        ahorse.pgno = lastPg.pgNum;
    }
}
// Creates a new page from the posted form: validates the model, derives
// the slug, enforces title/slug uniqueness, and persists a new record.
public ActionResult AddPage(PageVM model)
{
    // Reject invalid input up front.
    if (!ModelState.IsValid)
    {
        return View(model);
    }

    using (DefaultConnection db = new DefaultConnection())
    {
        string slug;

        PageDetail dto = new PageDetail();

        dto.Title = model.Title;

        // Slug defaults to a hyphenated, lower-cased title when not given.
        if (string.IsNullOrWhiteSpace(model.Slug))
        {
            slug = model.Title.Replace(" ", "-").ToLower();
        }
        else
        {
            slug = model.Slug.Replace(" ", "-").ToLower();
        }

        // Uniqueness check for title and slug in a single query
        // (previously two separate Any round-trips to the database).
        if (db.Pages.Any(x => x.Title == model.Title || x.Slug == slug))
        {
            ModelState.AddModelError("", "That title or slug already exists.");
            return View(model);
        }

        // Copy the remaining fields and save.
        dto.Slug = slug;
        dto.Body = model.Body;
        dto.HasSidebar = model.HasSidebar;
        dto.Sorting = 100; // new pages sort after existing ones by default

        db.Pages.Add(dto);
        db.SaveChanges();
    }

    // Confirmation message for the next request.
    TempData["SM"] = "You have added a new page!";

    return RedirectToAction("AddPage");
}
/// <summary>
/// Marks the page with the given ID as active and persists the change.
/// </summary>
/// <param name="id">Identifier of the page to activate.</param>
/// <returns>True when the update succeeded.</returns>
public Boolean ActivePage(string id)
{
    // The previous version allocated a throwaway `new PageDetail()` that
    // was immediately overwritten by DeGetPage(id); removed.
    PageDetail page = this.DeGetPage(id);
    page.IsActive = true;

    // UpdatePage already returns the success flag — no if/return needed.
    return UpdatePage(page);
}
/// <summary>
/// Persists changes to an existing page. Failures are reported as a
/// false return value rather than an exception (best-effort contract).
/// </summary>
/// <param name="page">Entity with the modified values.</param>
/// <returns>True when the save succeeded.</returns>
public Boolean UpdatePage(PageDetail page)
{
    try
    {
        _context.Entry(page).State = Microsoft.EntityFrameworkCore.EntityState.Modified;
        _context.SaveChanges();
        return true;
    }
    catch
    {
        // Swallowed intentionally: callers only care about success/failure.
        return false;
    }
}
/// <summary>
/// Places a horse that fits in the remaining space of the current page
/// and updates the page/horse bookkeeping.
/// </summary>
private static void fitAHorse(PageDetail curr, Horse ahorse)
{
    // A completely unused page means this horse opens the page.
    bool pageIsEmpty = curr.depthNotYetUsed == Config.Constants.PageHeight;
    if (pageIsEmpty)
    {
        ahorse.positionOnPage.where = EntryLocationOnPage.FirstEntryOnPage;
    }

    curr.secondAndNextHorses.Add(ahorse);

    // Consume the horse's height from the page.
    curr.runningDepth += ahorse.height;
    curr.depthNotYetUsed = curr.bottom - curr.runningDepth;

    ahorse.positionOnPage.leftspaceatEnd = curr.depthNotYetUsed;
    curr.entryCount += 1;
    ahorse.pgno = curr.pgNum;
}
/// <summary>
/// Places a header together with its first horse on the current page and
/// updates the bookkeeping; the pair counts as two entries.
/// </summary>
private static void fitHeaderWithFirstHorse(PageDetail curr, HeaderAndFirstHorse hf)
{
    // On a fresh page the first horse opens the page.
    if (curr.depthNotYetUsed == Config.Constants.PageHeight)
    {
        hf.firstHorse.positionOnPage.where = EntryLocationOnPage.FirstEntryOnPage;
    }

    curr.isthereAheader = true;
    curr.addHeaderAndFirstHorse(hf);

    // Header and first horse consume page space together.
    curr.runningDepth += hf.header.height + hf.firstHorse.height;
    curr.depthNotYetUsed = curr.bottom - curr.runningDepth;

    hf.firstHorse.positionOnPage.leftspaceatEnd = curr.depthNotYetUsed;
    curr.entryCount += 2;
    hf.firstHorse.pgno = curr.pgNum;
}
/// <summary>
/// Tries to eliminate the final page when it holds only the card's last
/// horse: if that horse can be squeezed onto the previous page, it is
/// moved there and the final page is dropped.
/// </summary>
private static void saveAPagebyshrinkLastHorseOfCard(List <PageDetail> pages)
{
    Horse lastHorseOfCard = pages.Last().secondAndNextHorses.Last();
    PageDetail previousPage = pages[pages.Count - 2];

    // Bail out when the previous page cannot absorb the horse.
    if (!isSqueezable(previousPage.depthNotYetUsed, lastHorseOfCard.height))
    {
        return;
    }

    Debug.WriteLine("saving last page by shrinking in saveAPagebyshrinkLastHorseOfCard");

    // The horse that used to close the previous page is no longer last.
    previousPage.secondAndNextHorses.Last().positionOnPage.where = EntryLocationOnPage.MiddleEntryOnPage;

    shrinkHorsesOnPagetoAddaHorse(previousPage, lastHorseOfCard, lastHorseOfCard.height, previousPage.depthNotYetUsed);
    //markLastHorseOnPage(previousPage); //override firstEntryOnPage to lastEntryonPage

    // Drop the now-empty final page.
    pages.RemoveAt(pages.Count - 1);
}
/// <summary>
/// Squeezes a horse into the space left at the bottom of the page by
/// clipping its rendered height to exactly the remaining depth, then
/// marks it as the page's last entry.
/// </summary>
private void shrinkFitAHorseAtTheBottom(PageDetail curr, Horse ahorse) // if needed only <= 10 dots
{
    // The original computed `height - (height - depthNotYetUsed)`, which
    // is just the remaining depth — state the intent directly.
    // (Must happen before depthNotYetUsed is reset below.)
    ahorse.newHeight = curr.depthNotYetUsed;

    curr.secondAndNextHorses.Add(ahorse);

    // The page is now exactly full.
    curr.runningDepth = Constants.PageHeight;
    curr.depthNotYetUsed = curr.bottom - curr.runningDepth;
    ahorse.positionOnPage.leftspaceatEnd = curr.depthNotYetUsed;
    curr.entryCount = curr.entryCount + 1;
    ahorse.pgno = curr.pgNum;

    // The horse always closes the page. The original also assigned
    // FirstEntryOnPage for an empty page, but that value was immediately
    // overwritten by this line, so the dead branch was removed.
    ahorse.positionOnPage.where = EntryLocationOnPage.LastEntryOnPage;
}
/// <summary>
/// Gets the specified page.
/// </summary>
/// <param name="pageId">The unique ID of the page</param>
/// <param name="info">The desired verbosity of the XML</param>
/// <returns>A Page containing the root XML of the page, or null when the
/// ID is empty or no content is returned</returns>
public Page GetPage(string pageId, PageDetail info = PageDetail.All)
{
    if (string.IsNullOrEmpty(pageId))
    {
        return null;
    }

    onenote.GetPageContent(pageId, out var xml, (PageInfo)info, XMLSchema.xs2013);

    // An empty payload means no page content was available.
    return string.IsNullOrEmpty(xml) ? null : new Page(XElement.Parse(xml));
}
// Update endpoint: delegates to the repository and maps the outcome onto
// an HTTP status — OK on success, Unauthorized for permission failures,
// InternalServerError for anything else.
public HttpResponseMessage Update([FromBody] PageDetail item)
{
    try
    {
        repository.Update(item, "update");
        return Request.CreateResponse(HttpStatusCode.OK);
    }
    catch (UnauthorizedAccessException)
    {
        return Request.CreateResponse(HttpStatusCode.Unauthorized);
    }
    catch (Exception)
    {
        return Request.CreateResponse(HttpStatusCode.InternalServerError);
    }
}
/// <summary>
/// Add new page
/// </summary>
/// <param name="detail">Page</param>
/// <returns>Saved page with new ID, or null when saving failed</returns>
public PageDetail Add(PageDetail detail)
{
    if (!Security.IsAuthorizedTo(BlogEngine.Core.Rights.CreateNewPages))
    {
        throw new System.UnauthorizedAccessException();
    }

    var page = new Page();

    // Only a successful save yields a result to return.
    return Save(page, detail) ? ToJsonDetail(page) : null;
}
/// <summary>
/// Add new page
/// </summary>
/// <param name="detail">Page</param>
/// <returns>Saved page with new ID, or null when saving failed</returns>
public PageDetail Add(PageDetail detail)
{
    if (!Security.IsAuthorizedTo(Rights.CreateNewPages))
    {
        throw new UnauthorizedAccessException();
    }

    var page = new Page();

    // Only a successful save yields a result to return.
    return Save(page, detail) ? Json.GetPageDetail(page) : null;
}
/// <summary>
/// Looks up a page detail by file ID and page index.
/// NOTE(review): the data-access body is commented out, so this method
/// currently always returns null — restore the query or retire the method.
/// </summary>
/// <param name="FileID">File the page belongs to.</param>
/// <param name="PageID">Zero-based page index within the file.</param>
/// <returns>Always null until the query below is re-enabled.</returns>
public PageDetail GetPageDetailsByIndex(int FileID, int PageID)
{
    PageDetail list = null;

    // Disabled original implementation, kept for reference:
    //using (var db = new Cubicle_EntityEntities())
    //{
    //    list = db.PageDetails.Where(p => p.FileID == FileID && p.PageIndex == PageID).FirstOrDefault();
    //}

    // The empty try/catch that wrapped only commented-out code (and whose
    // handler was itself commented out) was removed as dead scaffolding.
    return list;
}
// GET: Admin/Pages/DeletePage/id
// Removes the page with the given ID and returns to the index view.
public ActionResult DeletePage(int id)
{
    using (DefaultConnection db = new DefaultConnection())
    {
        // Locate, remove and persist in one unit of work.
        PageDetail dto = db.Pages.Find(id);
        db.Pages.Remove(dto);
        db.SaveChanges();
    }

    return RedirectToAction("Index");
}
/// <summary>
/// Creates a new active page starting today and persists it. Failures are
/// reported as a false return value rather than an exception.
/// </summary>
/// <param name="page">Page to initialize and insert.</param>
/// <returns>True when the insert succeeded.</returns>
public Boolean CreatePage(PageDetail page)
{
    try
    {
        page.StartDate = DateTime.Today;
        page.IsActive = true;

        // NOTE(review): "yymmssfff" is year + MINUTE + second + millisecond
        // ("mm" means minutes, not months in .NET format strings) — confirm
        // this is the intended ID scheme; it looks collision-prone.
        page.Id = DateTime.Now.ToString("yymmssfff");

        _context.PageDetails.Add(page);
        _context.SaveChanges();
        return true;
    }
    catch
    {
        // Swallowed intentionally: callers only see success/failure.
        return false;
    }
}
// Updates a page from the posted form: stores the optional default image
// and gallery images, maps the form fields onto a PageDetail entity, and
// persists it via the service layer.
public IActionResult UpdatePage(GetPageDetails form)
{
    // Store the default image first, if one was posted.
    string defImageName = "";
    if (form.DefImage != null)
    {
        defImageName = AddFile.AddImage(form.DefImage, form.Id);
    }

    // Store any gallery images and collect their saved names.
    var imageNames = new List <String>();
    if (form.Image != null)
    {
        imageNames = AddFile.AddImages(form.Image, form.Id);
    }

    // Map the incoming form onto the entity.
    PageDetail det = new PageDetail();
    det.Id = form.Id;
    det.Topic = form.Topic;
    det.SubTopic = form.SubTopic;
    det.Type = form.Type;
    det.Dis1 = form.Dis1;
    det.Dis2 = form.Dis2;
    det.Dis3 = form.Dis3;
    det.IsActive = true;
    det.District = form.District;
    det.Town = form.Town;
    det.Location = form.location;
    if (form.DefImage != null)
    {
        det.DefImage = defImageName;
    }

    try
    {
        var imagesSaved = _service.AddImageName(imageNames, form.Id);
        var pageSaved = _service.UpdatePage(det);

        if (pageSaved && imagesSaved)
        {
            return Ok();
        }
        return BadRequest();
    }
    catch
    {
        return BadRequest();
    }
}