// Writes one DataColumn as a single Parquet data page: repetition levels,
// then definition levels, then values — optionally compressed. Returns the
// PageTag(s) describing what was written (currently always exactly one).
private List<PageTag> WriteColumn(DataColumn column, Thrift.SchemaElement tse, IDataTypeHandler dataTypeHandler, int maxRepetitionLevel, int maxDefinitionLevel)
{
    var pages = new List<PageTag>();

    /*
     * Page header must precede actual data (compressed or not) however it contains both
     * the uncompressed and compressed data size which we don't know! This somehow limits
     * the write efficiency.
     */
    using (var ms = new MemoryStream())
    {
        Thrift.PageHeader dataPageHeader = _footer.CreateDataPage(column.TotalCount);

        //chain streams together so we have real streaming instead of wasting undefraggable LOH memory
        using (GapStream pageStream = DataStreamFactory.CreateWriter(ms, _compressionMethod, true))
        {
            using (var writer = new BinaryWriter(pageStream, Encoding.UTF8, true))
            {
                // repetition levels are only present for repeated (nested) fields
                if (maxRepetitionLevel > 0)
                {
                    WriteLevels(writer, column.RepetitionLevels, maxRepetitionLevel);
                }

                // definition levels are only present when values can be undefined/null
                if (maxDefinitionLevel > 0)
                {
                    WriteLevels(writer, column.DefinitionLevels, maxDefinitionLevel);
                }

                dataTypeHandler.Write(tse, writer, column.DefinedData);
                writer.Flush();
            }

            pageStream.Flush(); //extremely important to flush the stream as some compression algorithms don't finish writing
            // position of the wrapping stream = byte count BEFORE compression
            dataPageHeader.Uncompressed_page_size = (int)pageStream.Position;
        }

        // position of the backing MemoryStream = byte count AFTER compression
        dataPageHeader.Compressed_page_size = (int)ms.Position;

        //write the header in (header must go to the target stream before the page body)
        int headerSize = _thriftStream.Write(dataPageHeader);
        ms.Position = 0;
        ms.CopyTo(_stream);

        var dataTag = new PageTag
        {
            HeaderMeta = dataPageHeader,
            HeaderSize = headerSize
        };

        pages.Add(dataTag);
    }

    return (pages);
}
// Switches the UI to the page identified by the given tag: hides the current
// page, lazily constructs the target on first use (CreateNewPageA), records it
// in the navigation history (ln/point) and re-creates its visuals full-screen.
internal static void CreateNewPage(PageTag tag)
{
    // advance the navigation-history pointer before recording the new page
    point++;
    if (current != null)
    {
        current.Hide();
    }
    // the PageTag enum value doubles as the index into the page cache
    int index = (int)tag;
    if (nav_buff[index] == null)
    {
        CreateNewPageA(tag);
    }
    current = nav_buff[index];
    ln[point] = current;
#if phone
    // Phone builds offset the page by orientation: landscape shifts right 45px,
    // portrait shifts down 23px — presumably to clear a system bar; TODO confirm.
    float x = 0;
    float y = 0;
    if (Component.screenX > Component.screenY)
    {
        x += 45;
    }
    else
    {
        y += 23;
    }
    current.Create(parent, new RawRectangleF(x, y, (float)Component.screenX, (float)Component.screenY));
#else
    current.Create(parent, new RawRectangleF(0, 0, (float)Component.screenX, (float)Component.screenY));
#endif
    // request a UI refresh after the page swap
    ThreadManage.UpdateUI = true;
}
// Writes one DataColumn as a single Parquet data page (definition levels then
// values, optionally compressed). Repeated fields are not supported by this
// variant and throw NotImplementedException. Returns the written page tag(s).
private List<PageTag> WriteColumn(DataColumn column, Thrift.SchemaElement tse, IDataTypeHandler dataTypeHandler, int maxRepetitionLevel, int maxDefinitionLevel)
{
    var pages = new List<PageTag>();

    /*
     * Page header must precede actual data (compressed or not) however it contains both
     * the uncompressed and compressed data size which we don't know! This somehow limits
     * the write efficiency.
     */
    using (var ms = new MemoryStream())
    {
        Thrift.PageHeader dataPageHeader = _footer.CreateDataPage(column.TotalCount);

        //chain streams together so we have real streaming instead of wasting undefraggable LOH memory
        using (PositionTrackingStream pps = DataStreamFactory.CreateWriter(ms, _compressionMethod))
        {
            using (var writer = new BinaryWriter(pps))
            {
                // repeated (nested) columns are not implemented in this writer
                if (column.HasRepetitions)
                {
                    throw new NotImplementedException();
                }

                if (column.HasDefinitions)
                {
                    WriteLevels(writer, column.DefinitionLevels, maxDefinitionLevel);
                }

                dataTypeHandler.Write(tse, writer, column.DefinedData);
            }

            // position of the tracking stream = byte count BEFORE compression
            dataPageHeader.Uncompressed_page_size = (int)pps.Position;
        }

        // position of the backing MemoryStream = byte count AFTER compression
        dataPageHeader.Compressed_page_size = (int)ms.Position;

        //write the header in (header must go to the target stream before the page body)
        int headerSize = _thriftStream.Write(dataPageHeader);
        ms.Position = 0;
        ms.CopyTo(_stream);

        var dataTag = new PageTag
        {
            HeaderMeta = dataPageHeader,
            HeaderSize = headerSize
        };

        pages.Add(dataTag);
    }

    return (pages);
}
// Links an existing page to an existing tag by inserting a PageTag join row
// and committing immediately.
private static void CreatePageTagRef(int pId, int tId, WikiContext context)
{
    var link = new PageTag
    {
        PageId = pId,
        TagId = tId
    };

    context.PageTags.Add(link);
    context.SaveChanges();
}
/// <summary>
/// Clones the page tags.
/// </summary>
/// <param name="pageTag">The page tag.</param>
/// <param name="newPage">The new page.</param>
private void ClonePageTags(PageTag pageTag, PageProperties newPage)
{
    // copy the tag reference onto the new page; the Tag entity itself is shared
    var clonedPageTag = new PageTag
    {
        Page = newPage,
        Tag = pageTag.Tag
    };

    repository.Save(clonedPageTag);
}
// Builds a PageTag entity with base fields populated; the page and tag
// arguments default to freshly created entities when not supplied.
public PageTag CreateNewPageTag(PageProperties page = null, Tag tag = null)
{
    var pageTag = new PageTag();

    // base fields first — the associations are assigned afterwards
    PopulateBaseFields(pageTag);

    pageTag.Page = page ?? CreateNewPageProperties();
    pageTag.Tag = tag ?? CreateNewTag();

    return pageTag;
}
// Navigates to the page identified by the tag: hides the current page, lazily
// constructs the target if needed, records it in the navigation history and
// shows it at full screen size.
public static void CreateNewPage(PageTag tag)
{
    // advance the history pointer before recording the new page
    point++;

    current?.Hide();

    // the enum value doubles as the cache index
    int slot = (int)tag;
    if (nav_buff[slot] == null)
    {
        CreateNewPageA(tag);
    }

    current = nav_buff[slot];
    ln[point] = current;

    current.Create(parent, new Thickness(0, 0, Component.screenX, Component.screenY));
}
/// <summary>
/// Creates a new page tag with the requested title and returns its DTO projection.
/// </summary>
/// <param name="request">Command carrying the title of the tag to create.</param>
/// <param name="cancellationToken">Token used to cancel the save operation.</param>
/// <returns>The created tag mapped to <c>PageTagDto</c>.</returns>
/// <exception cref="InvalidOperationException">Thrown when no rows were persisted.</exception>
public async Task<PageTagDto> Handle(Command request, CancellationToken cancellationToken)
{
    var pageTag = new PageTag
    {
        Title = request.Title
    };

    _context.PageTags.Add(pageTag);

    // Flow the cancellation token into the save (the original ignored it).
    // SaveChangesAsync returns the number of affected rows; zero means nothing was persisted.
    var success = await _context.SaveChangesAsync(cancellationToken) > 0;
    if (!success)
    {
        // Throw a specific exception type instead of the reserved base Exception
        // (CA2201); existing `catch (Exception)` callers still catch this.
        throw new InvalidOperationException("Problem saving changes");
    }

    return _mapper.Map<PageTag, PageTagDto>(pageTag);
}
// Creates a page for the given profile, optionally storing an uploaded image
// and linking the distinct tag names supplied in the input model.
// Returns the id of the newly created page.
public async Task<int> CreateAsync(int profileId, CreatePageInputModel input, string path)
{
    var page = new Page
    {
        Name = input.Name,
        Description = input.Description,
        Email = input.Email,
        Phone = input.Phone,
        ProfileId = profileId,
    };

    // only persist an image when one was actually uploaded
    if (input.Image?.Length > 0)
    {
        page.ImageId = await this.imagesService.CreateAsync(input.Image, path);
    }

    // the page must be saved first so its generated Id is available for the tag links
    await this.pagesRepository.AddAsync(page);
    await this.pagesRepository.SaveChangesAsync();

    var hasTags = input.Tags != null && input.Tags.Any();
    if (hasTags)
    {
        foreach (var tagName in input.Tags.Distinct())
        {
            var tagId = await this.tagsService.GetIdAsync(tagName);

            var link = new PageTag
            {
                TagId = tagId,
                PageId = page.Id,
            };

            await this.pageTagsRepository.AddAsync(link);
        }

        await this.pageTagsRepository.SaveChangesAsync();
    }

    return page.Id;
}
// Seeds the wiki with its initial content: a "WikiCore" tag, a "Start" page
// with a welcome text, and the link between them, committed in one save.
private static void CreateOverviewPage(WikiContext db)
{
    var wikiCoreTag = new Tag
    {
        Name = "WikiCore",
        Color = 1
    };
    db.Tags.Add(wikiCoreTag);

    var startPage = new Page
    {
        Title = "Start",
        Content = "Welcome to __WikiCore__.\r\n\r\nWikiCore is a modest, small and fast Wiki featuring [MarkDown](https://daringfireball.net/projects/markdown/) editing.\r\n\r\nUnlike regular Wikis pages are organized with tags.\r\n\r\nPlease report Bugs in a [GitHub-Issue](https://github.com/philphilphil/WikiCore/issues)."
    };
    db.Pages.Add(startPage);

    db.PageTags.Add(new PageTag
    {
        Tag = wikiCoreTag,
        Page = startPage
    });

    db.SaveChanges();
}
// Lazily constructs the page instance backing the given navigation tag and
// stores it in the nav_buff cache. The nav and search tags intentionally have
// no backing page and leave their slot untouched.
static void CreateNewPageA(PageTag tag)
{
    if (tag == PageTag.main)
    {
        nav_buff[(int)PageTag.main] = new MainEx();
    }
    else if (tag == PageTag.partial)
    {
        nav_buff[(int)PageTag.partial] = new PartialNav();
    }
    else if (tag == PageTag.page_m)
    {
        nav_buff[(int)PageTag.page_m] = new PageNav_m();
    }
    else if (tag == PageTag.videopage)
    {
        nav_buff[(int)PageTag.videopage] = new VideoPage();
    }
    else if (tag == PageTag.player)
    {
        nav_buff[(int)PageTag.player] = new Player();
    }
    else if (tag == PageTag.playerEx)
    {
        nav_buff[(int)PageTag.playerEx] = new PlayerEx();
    }
    // PageTag.nav and PageTag.search: nothing to construct
}
/// <summary>
/// Synchronises the tag list of a page: removes page-tag links whose names are
/// no longer requested, links existing <see cref="Tag"/> entities by
/// case-insensitive name, and creates brand new tags for unknown names.
/// </summary>
/// <param name="page">The page whose tags are saved.</param>
/// <param name="tags">The desired tag names; each entry is trimmed. Null means no tags.</param>
/// <param name="newCreatedTags">Receives the tags created by this call because they did not exist before.</param>
public void SavePageTags(PageProperties page, IList<string> tags, out IList<Tag> newCreatedTags)
{
    // normalize input: trim every tag name
    var trimmedTags = new List<string>();
    if (tags != null)
    {
        foreach (var tag in tags)
        {
            trimmedTags.Add(tag.Trim());
        }
    }

    newCreatedTags = new List<Tag>();
    Tag tagAlias = null; // NHibernate QueryOver alias

    // Tags merge: load the page's current (non-deleted) tag links
    IList<PageTag> pageTags = unitOfWork.Session
        .QueryOver<PageTag>()
        .Where(t => !t.IsDeleted && t.Page.Id == page.Id)
        .JoinQueryOver<Tag>(t => t.Tag, JoinType.InnerJoin)
        .Where(t => !t.IsDeleted)
        .List<PageTag>();

    // Remove deleted tags: any existing link whose tag name (case-insensitive)
    // is no longer in the requested list
    for (int i = pageTags.Count - 1; i >= 0; i--)
    {
        string tag = null;
        tag = trimmedTags.FirstOrDefault(s => s.ToLower() == pageTags[i].Tag.Name.ToLower());
        if (tag == null)
        {
            unitOfWork.Session.Delete(pageTags[i]);
        }
    }

    // Add new tags: requested names that have no existing link on this page
    List<string> tagsInsert = new List<string>();
    foreach (string tag in trimmedTags)
    {
        PageTag existPageTag = pageTags.FirstOrDefault(pageTag => pageTag.Tag.Name.ToLower() == tag.ToLower());
        if (existPageTag == null)
        {
            tagsInsert.Add(tag);
        }
    }

    if (tagsInsert.Count > 0)
    {
        // Get existing tags: reuse Tag entities that already exist globally.
        // NOTE(review): Restrictions.In matches Name by the database's collation
        // while the in-memory matching below is case-insensitive — confirm these agree.
        IList<Tag> existingTags = unitOfWork.Session.QueryOver(() => tagAlias)
            .Where(t => !t.IsDeleted)
            .Where(Restrictions.In(Projections.Property(() => tagAlias.Name), tagsInsert))
            .List<Tag>();

        foreach (string tag in tagsInsert)
        {
            PageTag pageTag = new PageTag();
            pageTag.Page = page;

            Tag existTag = existingTags.FirstOrDefault(t => t.Name.ToLower() == tag.ToLower());
            if (existTag != null)
            {
                pageTag.Tag = existTag;
            }
            else
            {
                // tag does not exist anywhere yet — create it and report it to the caller
                Tag newTag = new Tag();
                newTag.Name = tag;
                unitOfWork.Session.SaveOrUpdate(newTag);
                newCreatedTags.Add(newTag);
                pageTag.Tag = newTag;
            }

            unitOfWork.Session.SaveOrUpdate(pageTag);
        }
    }
}
/// <summary>
/// Synchronises the tag list of a page: trims and de-duplicates the requested
/// names (case-insensitively), removes links whose names are no longer
/// requested, links existing <see cref="Tag"/> entities by name, creates new
/// tags for unknown names, and updates modification tracking for every link touched.
/// </summary>
/// <param name="page">The page whose tags are saved.</param>
/// <param name="tags">The desired tag names; each entry is trimmed. Null means no tags.</param>
/// <param name="newCreatedTags">Receives the tags created by this call because they did not exist before.</param>
public void SavePageTags(PageProperties page, IList<string> tags, out IList<Tag> newCreatedTags)
{
    // normalize input: trim every tag name
    var trimmedTags = new List<string>();
    if (tags != null)
    {
        foreach (var tag in tags)
        {
            trimmedTags.Add(tag.Trim());
        }
    }

    // remove tags who are equal (tags are case insensitive)
    for (int i = 0; i < trimmedTags.Count; i++)
    {
        for (int j = i + 1; j < trimmedTags.Count; j++)
        {
            if (i != j && trimmedTags[i].ToLowerInvariant() == trimmedTags[j].ToLowerInvariant())
            {
                trimmedTags.RemoveAt(j);
                --j; // re-check the element that shifted into slot j
            }
        }
    }

    newCreatedTags = new List<Tag>();
    Tag tagAlias = null; // NHibernate QueryOver alias

    // Tags merge: load the page's current (non-deleted) tag links
    IList<PageTag> pageTags = unitOfWork.Session
        .QueryOver<PageTag>()
        .Where(t => !t.IsDeleted && t.Page.Id == page.Id)
        .JoinQueryOver<Tag>(t => t.Tag, JoinType.InnerJoin)
        .Where(t => !t.IsDeleted)
        .List<PageTag>();

    // All page tag list
    var finalTagList = new List<PageTag>();

    // Remove deleted tags: any existing link whose tag name (case-insensitive)
    // is no longer in the requested list
    for (int i = pageTags.Count - 1; i >= 0; i--)
    {
        string tag = null;
        tag = trimmedTags.FirstOrDefault(s => s.ToLower() == pageTags[i].Tag.Name.ToLower());
        if (tag == null)
        {
            var tagToRemove = pageTags[i];
            UpdateModifiedInformation(tagToRemove);
            unitOfWork.Session.Delete(tagToRemove);
            // NOTE(review): the deleted link is also appended to finalTagList,
            // which is later assigned to page.PageTags — confirm this is intended.
            finalTagList.Add(tagToRemove);
        }
    }

    // Add new tags: requested names that have no existing link on this page
    List<string> tagsInsert = new List<string>();
    foreach (string tag in trimmedTags)
    {
        PageTag existPageTag = pageTags.FirstOrDefault(pageTag => pageTag.Tag.Name.ToLower() == tag.ToLower());
        if (existPageTag == null)
        {
            tagsInsert.Add(tag);
        }
    }

    if (tagsInsert.Count > 0)
    {
        // Get existing tags: reuse Tag entities that already exist globally.
        // NOTE(review): Restrictions.In matches Name by the database's collation
        // while the in-memory matching below is case-insensitive — confirm these agree.
        IList<Tag> existingTags = unitOfWork.Session.QueryOver(() => tagAlias)
            .Where(t => !t.IsDeleted)
            .Where(Restrictions.In(Projections.Property(() => tagAlias.Name), tagsInsert))
            .List<Tag>();

        foreach (string tag in tagsInsert)
        {
            PageTag pageTag = new PageTag();
            pageTag.Page = page;

            Tag existTag = existingTags.FirstOrDefault(t => t.Name.ToLower() == tag.ToLower());
            if (existTag != null)
            {
                pageTag.Tag = existTag;
            }
            else
            {
                // tag does not exist anywhere yet — create it and report it to the caller
                Tag newTag = new Tag();
                newTag.Name = tag;
                unitOfWork.Session.SaveOrUpdate(newTag);
                newCreatedTags.Add(newTag);
                pageTag.Tag = newTag;
            }

            UpdateModifiedInformation(pageTag);
            unitOfWork.Session.SaveOrUpdate(pageTag);
            finalTagList.Add(pageTag);
        }
    }

    page.PageTags = finalTagList;
}
/// <summary>
/// Synchronises the tag list of a page: trims and de-duplicates the requested
/// names (case-insensitively), removes links whose names are no longer
/// requested, links existing <see cref="Tag"/> entities by name, creates new
/// tags for unknown names, and updates modification tracking for every link touched.
/// </summary>
/// <param name="page">The page whose tags are saved.</param>
/// <param name="tags">The desired tag names; each entry is trimmed. Null means no tags.</param>
/// <param name="newCreatedTags">Receives the tags created by this call because they did not exist before.</param>
public void SavePageTags(PageProperties page, IList<string> tags, out IList<Tag> newCreatedTags)
{
    // normalize input: trim every tag name
    var trimmedTags = new List<string>();
    if (tags != null)
    {
        foreach (var tag in tags)
        {
            trimmedTags.Add(tag.Trim());
        }
    }

    // remove tags who are equal (tags are case insensitive)
    for (int i = 0; i < trimmedTags.Count; i++)
    {
        for (int j = i + 1; j < trimmedTags.Count; j++)
        {
            if (i != j && trimmedTags[i].ToLowerInvariant() == trimmedTags[j].ToLowerInvariant())
            {
                trimmedTags.RemoveAt(j);
                --j; // re-check the element that shifted into slot j
            }
        }
    }

    newCreatedTags = new List<Tag>();
    Tag tagAlias = null; // NHibernate QueryOver alias

    // Tags merge: load the page's current (non-deleted) tag links
    IList<PageTag> pageTags = unitOfWork.Session
        .QueryOver<PageTag>()
        .Where(t => !t.IsDeleted && t.Page.Id == page.Id)
        .JoinQueryOver<Tag>(t => t.Tag, JoinType.InnerJoin)
        .Where(t => !t.IsDeleted)
        .List<PageTag>();

    // All page tag list
    var finalTagList = new List<PageTag>();

    // Remove deleted tags: any existing link whose tag name (case-insensitive)
    // is no longer in the requested list
    for (int i = pageTags.Count - 1; i >= 0; i--)
    {
        string tag = null;
        tag = trimmedTags.FirstOrDefault(s => s.ToLower() == pageTags[i].Tag.Name.ToLower());
        if (tag == null)
        {
            var tagToRemove = pageTags[i];
            UpdateModifiedInformation(tagToRemove);
            unitOfWork.Session.Delete(tagToRemove);
            // NOTE(review): the deleted link is also appended to finalTagList,
            // which is later assigned to page.PageTags — confirm this is intended.
            finalTagList.Add(tagToRemove);
        }
    }

    // Add new tags: requested names that have no existing link on this page
    List<string> tagsInsert = new List<string>();
    foreach (string tag in trimmedTags)
    {
        PageTag existPageTag = pageTags.FirstOrDefault(pageTag => pageTag.Tag.Name.ToLower() == tag.ToLower());
        if (existPageTag == null)
        {
            tagsInsert.Add(tag);
        }
    }

    if (tagsInsert.Count > 0)
    {
        // Get existing tags: reuse Tag entities that already exist globally.
        // NOTE(review): Restrictions.In matches Name by the database's collation
        // while the in-memory matching below is case-insensitive — confirm these agree.
        IList<Tag> existingTags = unitOfWork.Session.QueryOver(() => tagAlias)
            .Where(t => !t.IsDeleted)
            .Where(Restrictions.In(Projections.Property(() => tagAlias.Name), tagsInsert))
            .List<Tag>();

        foreach (string tag in tagsInsert)
        {
            PageTag pageTag = new PageTag();
            pageTag.Page = page;

            Tag existTag = existingTags.FirstOrDefault(t => t.Name.ToLower() == tag.ToLower());
            if (existTag != null)
            {
                pageTag.Tag = existTag;
            }
            else
            {
                // tag does not exist anywhere yet — create it and report it to the caller
                Tag newTag = new Tag();
                newTag.Name = tag;
                unitOfWork.Session.SaveOrUpdate(newTag);
                newCreatedTags.Add(newTag);
                pageTag.Tag = newTag;
            }

            UpdateModifiedInformation(pageTag);
            unitOfWork.Session.SaveOrUpdate(pageTag);
            finalTagList.Add(pageTag);
        }
    }

    page.PageTags = finalTagList;
}
/// <summary>
/// Propagates modification tracking from a page tag link to its owning page.
/// </summary>
/// <param name="mediaTag">The page tag whose page should be marked as modified.</param>
private void UpdateModifiedInformation(PageTag mediaTag) => UpdateModifiedInformation(mediaTag.Page);
/// <summary>
/// Synchronises the tag list of a page: removes page-tag links whose names are
/// no longer requested, links existing <see cref="Tag"/> entities by
/// case-insensitive name, creates brand new tags for unknown names, and
/// updates modification tracking for every link touched.
/// </summary>
/// <param name="page">The page whose tags are saved.</param>
/// <param name="tags">The desired tag names; each entry is trimmed. Null means no tags.</param>
/// <param name="newCreatedTags">Receives the tags created by this call because they did not exist before.</param>
public void SavePageTags(PageProperties page, IList<string> tags, out IList<Tag> newCreatedTags)
{
    // normalize input: trim every tag name
    var trimmedTags = new List<string>();
    if (tags != null)
    {
        foreach (var tag in tags)
        {
            trimmedTags.Add(tag.Trim());
        }
    }

    newCreatedTags = new List<Tag>();
    Tag tagAlias = null; // NHibernate QueryOver alias

    // Tags merge: load the page's current (non-deleted) tag links
    IList<PageTag> pageTags = unitOfWork.Session
        .QueryOver<PageTag>()
        .Where(t => !t.IsDeleted && t.Page.Id == page.Id)
        .JoinQueryOver<Tag>(t => t.Tag, JoinType.InnerJoin)
        .Where(t => !t.IsDeleted)
        .List<PageTag>();

    // Remove deleted tags: any existing link whose tag name (case-insensitive)
    // is no longer in the requested list
    for (int i = pageTags.Count - 1; i >= 0; i--)
    {
        string tag = null;
        tag = trimmedTags.FirstOrDefault(s => s.ToLower() == pageTags[i].Tag.Name.ToLower());
        if (tag == null)
        {
            UpdateModifiedInformation(pageTags[i]);
            unitOfWork.Session.Delete(pageTags[i]);
        }
    }

    // Add new tags: requested names that have no existing link on this page
    List<string> tagsInsert = new List<string>();
    foreach (string tag in trimmedTags)
    {
        PageTag existPageTag = pageTags.FirstOrDefault(pageTag => pageTag.Tag.Name.ToLower() == tag.ToLower());
        if (existPageTag == null)
        {
            tagsInsert.Add(tag);
        }
    }

    if (tagsInsert.Count > 0)
    {
        // Get existing tags: reuse Tag entities that already exist globally.
        // NOTE(review): Restrictions.In matches Name by the database's collation
        // while the in-memory matching below is case-insensitive — confirm these agree.
        IList<Tag> existingTags = unitOfWork.Session.QueryOver(() => tagAlias)
            .Where(t => !t.IsDeleted)
            .Where(Restrictions.In(Projections.Property(() => tagAlias.Name), tagsInsert))
            .List<Tag>();

        foreach (string tag in tagsInsert)
        {
            PageTag pageTag = new PageTag();
            pageTag.Page = page;

            Tag existTag = existingTags.FirstOrDefault(t => t.Name.ToLower() == tag.ToLower());
            if (existTag != null)
            {
                pageTag.Tag = existTag;
            }
            else
            {
                // tag does not exist anywhere yet — create it and report it to the caller
                Tag newTag = new Tag();
                newTag.Name = tag;
                unitOfWork.Session.SaveOrUpdate(newTag);
                newCreatedTags.Add(newTag);
                pageTag.Tag = newTag;
            }

            UpdateModifiedInformation(pageTag);
            unitOfWork.Session.SaveOrUpdate(pageTag);
        }
    }
}
// Writes one DataColumn as a single Parquet data page: repetition levels,
// definition levels (packing out nulls, capturing the null count into the
// column statistics), then values — optionally compressed. Definition-level
// buffers come from ArrayPool and are returned even if writing throws.
private List<PageTag> WriteColumn(DataColumn column, Thrift.SchemaElement tse, IDataTypeHandler dataTypeHandler, int maxRepetitionLevel, int maxDefinitionLevel)
{
    var pages = new List<PageTag>();

    /*
     * Page header must precede actual data (compressed or not) however it contains both
     * the uncompressed and compressed data size which we don't know! This somehow limits
     * the write efficiency.
     */
    using (var ms = new MemoryStream())
    {
        Thrift.PageHeader dataPageHeader = _footer.CreateDataPage(column.Data.Length);

        //chain streams together so we have real streaming instead of wasting undefraggable LOH memory
        using (GapStream pageStream = DataStreamFactory.CreateWriter(ms, _compressionMethod, _compressionLevel, true))
        {
            using (var writer = new BinaryWriter(pageStream, Encoding.UTF8, true))
            {
                // repetition levels are only present for repeated (nested) fields
                if (column.RepetitionLevels != null)
                {
                    WriteLevels(writer, column.RepetitionLevels, column.RepetitionLevels.Length, maxRepetitionLevel);
                }

                Array data = column.Data;
                if (maxDefinitionLevel > 0)
                {
                    // strip nulls from the data and obtain the matching definition levels
                    data = column.PackDefinitions(maxDefinitionLevel, out int[] definitionLevels, out int definitionLevelsLength, out int nullCount);

                    //last chance to capture null count as null data is compressed now
                    column.Statistics.NullCount = nullCount;

                    try
                    {
                        WriteLevels(writer, definitionLevels, definitionLevelsLength, maxDefinitionLevel);
                    }
                    finally
                    {
                        // definitionLevels is rented from the shared pool — always return it
                        if (definitionLevels != null)
                        {
                            ArrayPool<int>.Shared.Return(definitionLevels);
                        }
                    }
                }
                else
                {
                    //no definitions means no nulls
                    column.Statistics.NullCount = 0;
                }

                dataTypeHandler.Write(tse, writer, data, column.Statistics);
                writer.Flush();
            }

            pageStream.Flush(); //extremely important to flush the stream as some compression algorithms don't finish writing
            pageStream.MarkWriteFinished();
            // position of the wrapping stream = byte count BEFORE compression
            dataPageHeader.Uncompressed_page_size = (int)pageStream.Position;
        }

        // position of the backing MemoryStream = byte count AFTER compression
        dataPageHeader.Compressed_page_size = (int)ms.Position;

        //write the header in (statistics must be attached before the header is serialized)
        dataPageHeader.Data_page_header.Statistics = column.Statistics.ToThriftStatistics(dataTypeHandler, _schemaElement);
        int headerSize = _thriftStream.Write(dataPageHeader);
        ms.Position = 0;
        ms.CopyTo(_stream);

        var dataTag = new PageTag
        {
            HeaderMeta = dataPageHeader,
            HeaderSize = headerSize
        };

        pages.Add(dataTag);
    }

    return (pages);
}
/// <summary>
/// Save page manage model: updates the page when <paramref name="model"/> carries
/// an existing id (tags, content/caption by draft status, friendly URL, record
/// order relative to a sibling page, hierarchy), otherwise creates a new page.
/// </summary>
/// <param name="model">The page data posted from the admin UI.</param>
/// <returns>A response carrying success/failure and a localized message.</returns>
public ResponseModel SavePageManageModel(PageManageModel model)
{
    Page relativePage;
    ResponseModel response;
    var page = GetById(model.Id);
    #region Edit Page
    if (page != null)
    {
        // snapshot the page before mutating it so the change can be logged
        var pageLog = new PageLogManageModel(page);

        page.Title = model.Title;
        page.PageTemplateId = model.PageTemplateId;
        page.FileTemplateId = model.FileTemplateId;
        page.Status = model.Status;

        //Set content & caption base on status: drafts keep the published
        //content untouched and write to the *Working fields instead
        if (model.Status == (int)PageEnums.PageStatusEnums.Draft)
        {
            page.ContentWorking = model.Content;
            page.CaptionWorking = model.Caption;
        }
        else
        {
            page.Content = model.Content;
            page.Caption = model.Caption;
        }

        // remove tag links that are no longer selected in the model
        var currentTags = page.PageTags.Select(t => t.TagId).ToList();
        foreach (var id in currentTags.Where(id => model.Tags == null || !model.Tags.Contains(id)))
        {
            _pageTagRepository.Delete(page.Id, id);
        }

        // insert links for newly selected tags
        if (model.Tags != null && model.Tags.Any())
        {
            foreach (var tagId in model.Tags)
            {
                if (currentTags.All(n => n != tagId))
                {
                    var pageTag = new PageTag
                    {
                        PageId = page.Id,
                        TagId = tagId
                    };
                    _pageTagRepository.Insert(pageTag);
                }
            }
        }

        page.StartPublishingDate = model.StartPublishingDate;
        page.EndPublishingDate = model.EndPublishingDate;

        //Parse friendly url (fall back to a slug built from the title)
        page.FriendlyUrl = string.IsNullOrWhiteSpace(model.FriendlyUrl) ? model.Title.ToUrlString() : model.FriendlyUrl.ToUrlString();

        //Get page record order
        relativePage = GetById(model.RelativePageId);
        if (relativePage != null)
        {
            /*
             * If position is not changed, donot need to update order of relative pages
             * If position is changed, check if position is before or after and update the record other of all relative pages
             */
            // NOTE(review): `&&` binds tighter than `?:`, so this predicate reads as
            // `(p.Id != page.Id && relativePage.ParentId.HasValue) ? … : …` — the
            // page itself is not excluded on the null-parent branch; confirm intended.
            var relativePages = Fetch(p => p.Id != page.Id && relativePage.ParentId.HasValue ? p.ParentId == relativePage.ParentId : p.ParentId == null)
                .OrderBy(p => p.RecordOrder);

            if (model.Position == (int)PageEnums.PositionEnums.Before)
            {
                // shift siblings down only when the page actually has to move
                if (page.RecordOrder > relativePage.RecordOrder || relativePages.Any(p => p.RecordOrder > page.RecordOrder && p.RecordOrder < relativePage.RecordOrder))
                {
                    page.RecordOrder = relativePage.RecordOrder;
                    // NOTE(review): SQL built via string.Format — values are ints
                    // from entities, but parameterized SQL would still be safer.
                    var query = string.Format(
                        "Update Pages set RecordOrder = RecordOrder + 1 Where {0} And RecordOrder >= {1}",
                        relativePage.ParentId.HasValue ? string.Format(" ParentId = {0}", relativePage.ParentId) : "ParentId Is NULL",
                        relativePage.RecordOrder);
                    _pageRepository.ExcuteSql(query);
                }
            }
            else
            {
                if (page.RecordOrder < relativePage.RecordOrder || relativePages.Any(p => p.RecordOrder < page.RecordOrder && p.RecordOrder > relativePage.RecordOrder))
                {
                    page.RecordOrder = relativePage.RecordOrder + 1;
                    var query = string.Format(
                        "Update Pages set RecordOrder = RecordOrder + 1 Where {0} And RecordOrder > {1}",
                        relativePage.ParentId.HasValue ? string.Format(" ParentId = {0}", relativePage.ParentId) : "ParentId Is NULL",
                        relativePage.RecordOrder);
                    _pageRepository.ExcuteSql(query);
                }
            }
        }

        page.ParentId = model.ParentId;
        response = HierarchyUpdate(page);
        if (response.Success)
        {
            _clientMenuServices.SavePageToClientMenu(page.Id);
            _pageLogServices.SavePageLog(pageLog);
        }
        return response.SetMessage(response.Success ?
            _localizedResourceServices.T("AdminModule:::Pages:::Messages:::UpdateSuccessfully:::Update page successfully.") :
            _localizedResourceServices.T("AdminModule:::Pages:::Messages:::UpdateFailure:::Update page failed. Please try again later."));
    }
    #endregion

    // Create a brand-new page when no existing page matched model.Id
    page = new Page
    {
        Title = model.Title,
        Status = model.Status,
        Content = model.Content,
        Caption = model.Caption,
        ParentId = model.ParentId,
        RecordOrder = 0,
        PageTemplateId = model.PageTemplateId,
        FileTemplateId = model.FileTemplateId,
        FriendlyUrl = string.IsNullOrWhiteSpace(model.FriendlyUrl) ? model.Title.ToUrlString() : model.FriendlyUrl.ToUrlString()
    };

    //Set content & caption base on status (drafts also fill the *Working fields)
    if (model.Status == (int)PageEnums.PageStatusEnums.Draft)
    {
        page.ContentWorking = model.Content;
        page.CaptionWorking = model.Caption;
    }

    //Get page record order: place the new page before/after the chosen sibling
    relativePage = GetById(model.RelativePageId);
    if (relativePage != null)
    {
        if (model.Position == (int)PageEnums.PositionEnums.Before)
        {
            page.RecordOrder = relativePage.RecordOrder;
            var query = string.Format(
                "Update Pages set RecordOrder = RecordOrder + 1 Where {0} And RecordOrder >= {1}",
                relativePage.ParentId.HasValue ? string.Format(" ParentId = {0}", relativePage.ParentId) : "ParentId Is NULL",
                relativePage.RecordOrder);
            _pageRepository.ExcuteSql(query);
        }
        else
        {
            page.RecordOrder = relativePage.RecordOrder + 1;
            var query = string.Format(
                "Update Pages set RecordOrder = RecordOrder + 1 Where {0} And RecordOrder > {1}",
                relativePage.ParentId.HasValue ? string.Format(" ParentId = {0}", relativePage.ParentId) : "ParentId Is NULL",
                relativePage.RecordOrder);
            _pageRepository.ExcuteSql(query);
        }
    }

    response = HierarchyInsert(page);
    if (response.Success)
    {
        _clientMenuServices.SavePageToClientMenu(response.Data.ToInt());
    }
    return response.SetMessage(response.Success ?
        _localizedResourceServices.T("AdminModule:::Pages:::Messages:::CreateSuccessfully:::Create page successfully.") :
        _localizedResourceServices.T("AdminModule:::Pages:::Messages:::CreateFailure:::Create page failed. Please try again later."));
}