/// <summary>
/// Synchronises the schema's parent ("inherits from") relations with the selection
/// made in the editor model: removes deselected parent relations, and adds relations
/// for newly selected parents. When nothing is selected, the schema is re-attached
/// to the root schema so it is never left orphaned.
/// </summary>
/// <param name="model">The editor model carrying the user's inheritance selection.</param>
/// <param name="entity">The schema whose parent relations are being updated.</param>
/// <param name="uow">The open unit of work used to read and write relations.</param>
protected virtual void EnsureInheritanceRelations(TEditorModel model, EntitySchema entity, IGroupUnit<IContentStore> uow)
{
    var currentRelations = uow.Repositories.GetParentRelations(entity.Id, FixedRelationTypes.DefaultRelationType).ToArray();

    // Keep only selected parents that are not already implied by another selected parent
    // (i.e. drop a selection whose value appears in some other selected item's ParentValues).
    var selectedInheritFrom = model.InheritFrom.Where(
        x => x.Selected && !model.InheritFrom.Any(y => y.Selected && y.ParentValues.Contains(x.Value))).Select(
            x => HiveId.Parse(x.Value)).ToArray();

    // Remove relations provided we're not going to remove the only relation to the root
    if (!selectedInheritFrom.Any())
    {
        foreach (var relation in currentRelations.Where(x => x.SourceId != RootSchemaObj.Id))
        {
            uow.Repositories.RemoveRelation(relation);
        }

        // Ensure we have a relation to the root schema
        uow.Repositories.AddRelation(new Relation(FixedRelationTypes.DefaultRelationType, RootSchemaObj.Id, entity.Id));
    }
    else
    {
        // Remove any existing parent relation that is no longer part of the selection.
        foreach (var relation in currentRelations.Where(x => !selectedInheritFrom.Any(hiveId => hiveId == x.SourceId)))
        {
            uow.Repositories.RemoveRelation(relation);
        }
    }

    // Go through the selected inheritance and add a relation for any parent
    // that isn't already related.
    foreach (var id in selectedInheritFrom.Where(id => !currentRelations.Any(y => y.SourceId == id)))
    {
        uow.Repositories.AddRelation(new Relation(FixedRelationTypes.DefaultRelationType, id, entity.Id));
    }
}
/// <summary>
/// Round-trips a HiveId through both of its string formats (AsUri and UriSafe),
/// via both the string constructor and Parse, asserting each round-tripped value
/// compares equal to the original. Invalid inputs must raise FormatException.
/// </summary>
public void Ctor_FromString_ToString_ThenTryParse_AreEqual(string inputUri, bool shouldThrow)
{
    var original = HiveId.Empty;
    if (shouldThrow)
    {
        Assert.Throws<FormatException>(() => original = new HiveId(inputUri));
    }
    else
    {
        original = new HiveId(inputUri);
    }

    var asUriText = original.ToString(HiveIdFormatStyle.AsUri);
    var uriSafeText = original.ToString(HiveIdFormatStyle.UriSafe);

    // Constructor round trips.
    AssertCompareHiveIds(original, new HiveId(asUriText));
    AssertCompareHiveIds(original, new HiveId(uriSafeText));

    // Parse round trips.
    AssertCompareHiveIds(original, HiveId.Parse(asUriText));
    AssertCompareHiveIds(original, HiveId.Parse(uriSafeText));
}
/// <summary>
/// Executes a query against the Examine (Lucene) index and maps the hits to
/// <typeparamref name="T"/>, keeping only the most recently status-changed
/// revision per entity (Lucene itself has no MAX support).
/// </summary>
/// <param name="query">The query description (criteria, revision status, result filter).</param>
/// <param name="objectBinder">Binder supplied by the query engine (not used here).</param>
public override IEnumerable<T> PerformExecuteMany<T>(QueryDescription query, ObjectBinder objectBinder)
{
    var criteria = new ExamineQueryVisitor(Helper.ExamineManager).Visit(query.Criteria);

    // Include revision status in query
    var revisionCriteria = Helper.ExamineManager.CreateSearchCriteria();
    revisionCriteria.Must().Field(FixedRevisionIndexFields.RevisionStatusAlias, query.From.RevisionStatus);

    // Only include the revision status if it was included in the query
    IQuery finalQuery;
    if (query.From.RevisionStatus != FromClause.RevisionStatusNotSpecified)
    {
        finalQuery = (criteria.ToString() == "")
            ? revisionCriteria.Compile()
            : ((LuceneSearchCriteria)criteria).Join((LuceneSearchCriteria)revisionCriteria.Compile(), BooleanClause.Occur.MUST);
    }
    else
    {
        finalQuery = criteria;
    }

    IEnumerable<SearchResult> results = Helper.ExamineManager.Search(finalQuery);

    // Now apply an in-memory filter to account for some of the shortcomings of
    // Lucene (e.g. no MAX support).
    var detailedResults = results.Select(x => new
    {
        EntityId = HiveId.Parse(x.Fields.GetValue(FixedIndexedFields.EntityId, HiveId.Empty.ToString())),
        RevisionId = HiveId.Parse(x.Fields.GetValue(FixedRevisionIndexFields.RevisionId, HiveId.Empty.ToString())),
        UtcStatusChanged = ExamineHelper.FromExamineDateTime(x.Fields, FixedIndexedFields.UtcStatusChanged),
        Result = x
    }).ToArray();

    // For each entity, keep only the revision with the latest status-change date.
    IEnumerable<SearchResult> maxByDate = detailedResults
        .GroupBy(x => x.EntityId, (key, matches) => matches.OrderByDescending(x => x.UtcStatusChanged).FirstOrDefault())
        .Select(x => x.Result)
        .ToArray();

    switch (query.ResultFilter.ResultFilterType)
    {
        case ResultFilterType.Any:
            return new[] { maxByDate.Any() }.Cast<T>();
        case ResultFilterType.Count:
            // This is weird but returns an integer
            return new[] { maxByDate.Count() }.Cast<T>();
        case ResultFilterType.Take:
            maxByDate = maxByDate.Take(query.ResultFilter.SelectorArgument);
            break;
    }

    if (typeof(T).IsAssignableFrom(query.ResultFilter.ResultType))
    {
        return maxByDate.Distinct().Select(node => FrameworkContext.TypeMappers.Map<T>(node));
    }
    return Enumerable.Empty<T>();
}
/// <summary>
/// Recursive method for processing paths from a relations collection.
/// Builds every ancestor path from <paramref name="destinationId"/> upwards,
/// forking a cloned path whenever a node has multiple parents, and reversing
/// each completed path so it reads root-to-destination.
/// </summary>
/// <param name="relations">The relations.</param>
/// <param name="destinationId">The destination id.</param>
/// <param name="pathIndex">Index of the path.</param>
/// <param name="paths">The paths.</param>
private static void ProcessPaths(IEnumerable<IRelationById> relations, HiveId destinationId, int pathIndex, ref List<List<HiveId>> paths)
{
    // Add the current destination
    paths[pathIndex].Add(destinationId);

    // Process parents. BUGFIX: materialize once — the original deferred query was
    // re-executed by Count() and again by every ElementAt() call.
    var parentIds = relations.Where(x => x.DestinationId == destinationId).Select(x => x.SourceId).ToArray();
    var parentIdsLength = parentIds.Length;
    if (parentIdsLength >= 1)
    {
        // If there is a fork, then clone the current path and add to the newly cloned path
        if (parentIdsLength > 1)
        {
            for (var i = 1; i < parentIdsLength; i++)
            {
                // Deep-clone each id via a string round trip so the forked path is independent.
                paths.Add(paths[pathIndex].Select(x => HiveId.Parse(x.ToString())).ToList());
                ProcessPaths(relations, parentIds[i], paths.Count - 1, ref paths);
            }
        }

        // For the first index, just keep appending to the current path
        ProcessPaths(relations, parentIds[0], pathIndex, ref paths);
    }
    else
    {
        // Reached a root: the path was built child-to-parent, so flip it.
        paths[pathIndex].Reverse();
    }
}
/// <summary>
/// Restores the template id from the serialized value bag, when present.
/// </summary>
public override void SetModelValues(IDictionary<string, object> serializedVal)
{
    object raw;
    if (serializedVal.TryGetValue("TemplateId", out raw))
    {
        TemplateId = HiveId.Parse((string)raw);
    }
}
/// <summary>
/// Configures ServiceStack's global JSON settings and registers a custom
/// deserializer for HiveId (which has its own string format).
/// </summary>
public ServiceStackSerialiser()
{
    JsConfig.DateHandler = JsonDateHandler.ISO8601;
    JsConfig.ExcludeTypeInfo = false;
    JsConfig.IncludeNullValues = true;
    JsConfig.ThrowOnDeserializationError = true;

    // HiveId round-trips via its own parser rather than default deserialization.
    JsConfig<HiveId>.DeSerializeFn = HiveId.Parse;
}
/// <summary>
/// A HiveId built from a null string must equal HiveId.Empty, must survive a
/// ToString/Parse round trip as Empty, and must report itself null-or-empty.
/// </summary>
public void NullConversion_EqualTo_Empty()
{
    var fromNull = new HiveId((string)null);
    var roundTripped = HiveId.Parse(fromNull.ToString());

    Assert.AreEqual(fromNull, HiveId.Empty);
    Assert.AreEqual(roundTripped, HiveId.Empty);
    Assert.IsTrue(fromNull.IsNullValueOrEmpty());
}
/// <summary>
/// Restores the media id and value from the serialized value bag, when present.
/// </summary>
public override void SetModelValues(IDictionary<string, object> serializedVal)
{
    object raw;

    if (serializedVal.TryGetValue("MediaId", out raw))
    {
        MediaId = Guid.Parse(raw.ToString());
    }

    if (serializedVal.TryGetValue("Value", out raw))
    {
        Value = HiveId.Parse((string)raw);
    }
}
/// <summary>
/// Fetches a single <see cref="File"/> from the file store, treating the given
/// filename as a parseable HiveId.
/// </summary>
/// <param name="fileStoreStep">The file store step to read from.</param>
/// <param name="filename">The file's id in string form; must not be null or empty.</param>
public static File GetFile<TProviderFilter>(
    this IFileStoreStep<File, TProviderFilter> fileStoreStep, string filename)
    where TProviderFilter : class, IProviderTypeFilter
{
    Mandate.ParameterNotNullOrEmpty(filename, "filename");

    var fileId = HiveId.Parse(filename);
    using (var reader = fileStoreStep.HiveManager.OpenReader<TProviderFilter>())
    {
        return reader.Repositories.Get<File>(fileId);
    }
}
/// <summary>
/// Parsing the bare root character "/" must yield a string-typed HiveId
/// whose value is "/".
/// </summary>
public void FromString_RootCharacterOnly_ParsesAsRoot()
{
    // Arrange & Act
    var parsed = HiveId.Parse("/");

    // Assert
    Assert.AreEqual("/", (string)parsed.Value);
    Assert.AreEqual(HiveIdValueTypes.String, parsed.Value.Type);
}
/// <summary>
/// Publishes the node identified by the "id" task parameter and, when the
/// "includeChildren" parameter equals "true", all of its descendants as well.
/// Each publish creates a new revision with the Published status.
/// </summary>
/// <param name="context">The task execution context.</param>
/// <exception cref="ArgumentException">
/// Thrown when the id parameter is missing or invalid, or no entity exists for it.
/// </exception>
public override void Execute(TaskExecutionContext context)
{
    if (!ConfigurationTaskContext.Parameters.ContainsKey("id"))
    {
        throw new ArgumentException("No id parameter supplied");
    }

    var nodeId = HiveId.Parse(ConfigurationTaskContext.Parameters["id"]);
    if (nodeId.IsNullValueOrEmpty())
    {
        throw new ArgumentException("The id parameter is not a valid HiveId");
    }

    var includeChildren = ConfigurationTaskContext.Parameters.ContainsKey("includeChildren")
        && ConfigurationTaskContext.Parameters["includeChildren"].InvariantEquals("true");

    using (var uow = ApplicationContext.Hive.OpenWriter<IContentStore>())
    {
        var contentEntity = uow.Repositories.Revisions.GetLatestRevision<TypedEntity>(nodeId);
        if (contentEntity == null)
        {
            throw new ArgumentException(string.Format("No entity found for id: {0}", nodeId));
        }

        // Publish its children recursively if requested
        if (includeChildren)
        {
            // Get all descendents
            var descendents = uow.Repositories.GetDescendentRelations(nodeId, FixedRelationTypes.DefaultRelationType);
            foreach (var descendent in descendents)
            {
                // Get the latest revision of the descendant
                var revisionEntity = uow.Repositories.Revisions.GetLatestRevision<TypedEntity>(descendent.DestinationId);

                // BUGFIX: a relation may point at an entity with no revision (e.g. removed
                // concurrently); the original dereferenced revisionEntity unconditionally
                // and would throw NullReferenceException here. Skip such relations.
                if (revisionEntity == null)
                {
                    continue;
                }

                // Publish it
                var publishRevision = revisionEntity.CopyToNewRevision(FixedStatusTypes.Published);
                uow.Repositories.Revisions.AddOrUpdate(publishRevision);
            }
        }

        // Publish this node
        var toPublish = contentEntity.CopyToNewRevision(FixedStatusTypes.Published);
        uow.Repositories.Revisions.AddOrUpdate(toPublish);

        // Save
        uow.Complete();
    }
}
/// <summary>
/// Verifies that a parsed HiveId exposes the expected provider-group root URI,
/// provider id and value, and that ToUri() round-trips back to the input string.
/// </summary>
public void ToUri(string parseFrom, string uriPart, string providerPart, object valuePart)
{
    // Arrange
    var parsed = HiveId.Parse(parseFrom);
    Uri expectedRoot = null;
    if (uriPart != null)
    {
        expectedRoot = new Uri(uriPart, UriKind.Absolute);
    }

    // Act
    var asUri = parsed.ToUri();

    // Assert
    Assert.AreEqual(parsed.ProviderGroupRoot, expectedRoot);
    Assert.AreEqual(providerPart, parsed.ProviderId);
    Assert.AreEqual(valuePart.ToString(), parsed.Value.ToString());
    Assert.AreEqual(parseFrom, asUri.ToString());
}
/// <summary>
/// HiveId.Parse must cope with a dynamic member access whose underlying value is a
/// lazily produced string representation of an existing id.
/// </summary>
public void ParsingDynamicLazyStringWorks()
{
    // Arrange: a real id whose string form is produced lazily.
    var expected = new HiveId(Guid.NewGuid());
    var lazyIdString = new Lazy<object>(() => expected.ToString());

    // Force the lazy value, as the original test did.
    var forced = lazyIdString.Value;

    var bendy = new BendyObject();
    bendy["Item"] = new BendyObject();
    bendy["Item"].Value = new BendyObject(lazyIdString);

    // Act: parse through the dynamic accessor.
    dynamic asDynamic = bendy;
    var parsed = HiveId.Parse(asDynamic.Item.Value);

    // Assert
    Assert.That(parsed, Is.EqualTo(expected));
}
/// <summary>
/// Renders the public access dialog for the given content node: lists the available
/// member groups and loads any existing settings (group ids, login page, error page)
/// from the node's public-access relation metadata.
/// </summary>
/// <param name="id">The id of the content node being secured.</param>
public virtual ActionResult PublicAccess(HiveId id)
{
    var model = new PublicAccessModel { Id = id };

    using (var uow = BackOfficeRequestContext.Application.Hive.OpenReader<IContentStore>())
    using (var securityUow = BackOfficeRequestContext.Application.Hive.OpenReader<ISecurityStore>())
    {
        // All member groups, ordered by name, as pickable list items.
        var userGroups = securityUow.Repositories
            .GetChildren<UserGroup>(FixedRelationTypes.DefaultRelationType, Framework.Security.Model.FixedHiveIds.MemberGroupVirtualRoot)
            .OrderBy(x => x.Name);

        model.AvailableUserGroups = userGroups.Select(x => new SelectListItem { Text = x.Name, Value = x.Id.ToString() });

        // Existing settings live in the metadata of a single public-access relation.
        var publicAccessRelation = uow.Repositories
            .GetParentRelations(id, FixedRelationTypes.PublicAccessRelationType)
            .SingleOrDefault();

        if (publicAccessRelation != null && publicAccessRelation.MetaData != null)
        {
            var metaData = publicAccessRelation.MetaData;

            if (metaData.Any(x => x.Key == "UserGroupIds"))
            {
                model.UserGroupIds = metaData.SingleOrDefault(x => x.Key == "UserGroupIds").Value.DeserializeJson<IEnumerable<HiveId>>();
            }

            if (metaData.Any(x => x.Key == "LoginPageId"))
            {
                model.LoginPageId = HiveId.Parse(metaData.SingleOrDefault(x => x.Key == "LoginPageId").Value);
            }

            if (metaData.Any(x => x.Key == "ErrorPageId"))
            {
                model.ErrorPageId = HiveId.Parse(metaData.SingleOrDefault(x => x.Key == "ErrorPageId").Value);
            }
        }
    }

    return View(model);
}
/// <summary>
/// Processing deleting a property if the delete property button is pressed.
/// </summary>
/// <param name="model">The editor model whose property collection is modified.</param>
/// <returns>The deleted property, or null when no delete was requested.</returns>
private DocumentTypeProperty ProcessDeletingProperty(TEditorModel model)
{
    var deleteValue = ValueProvider.GetValue("submit.DeletePanel");
    if (deleteValue == null)
    {
        return null;
    }

    // Parse the posted id once instead of re-parsing inside the predicate
    // for every property (the original also queried the value provider twice).
    var propertyId = HiveId.Parse(deleteValue.AttemptedValue);
    var toDelete = model.Properties.Single(x => x.Id == propertyId);
    model.Properties.Remove(toDelete);

    Notifications.Add(new NotificationMessage(
        "DocumentType.PropertyDeleted.Message".Localize(this),
        "DocumentType.PropertyDeleted.Title".Localize(this),
        NotificationType.Success));

    return toDelete;
}
/// <summary>
/// Builds a RevisionData from the indexed revision fields of a search result.
/// </summary>
/// <param name="source">The Examine search result carrying the revision fields.</param>
/// <exception cref="NotSupportedException">When the indexed revision status id is unknown.</exception>
public override RevisionData GetValue(SearchResult source)
{
    var revisionId = HiveId.Parse(source.Fields[FixedRevisionIndexFields.RevisionId]);
    var statusId = Guid.Parse(source.Fields[FixedRevisionIndexFields.RevisionStatusId]);

    var status = _helper.GetRevisionStatusType(statusId);
    if (status == null)
    {
        throw new NotSupportedException("Could not find a revision status with status id: " + statusId.ToString("N"));
    }

    //NOTE: all dates on a revision will be the same correct? since they only exist one time. SD.
    return new RevisionData(revisionId, status)
    {
        UtcCreated = ExamineHelper.FromExamineDateTime(source.Fields, FixedIndexedFields.UtcModified).Value,
        UtcModified = ExamineHelper.FromExamineDateTime(source.Fields, FixedIndexedFields.UtcModified).Value,
        UtcStatusChanged = ExamineHelper.FromExamineDateTime(source.Fields, FixedIndexedFields.UtcModified).Value
    };
}
/// <summary>
/// Handles processing of deleting a tab if the delete tab button was clicked.
/// Properties on the deleted tab are moved to the generic properties tab, which
/// is created on the fly if it doesn't already exist.
/// </summary>
/// <param name="model">The editor model whose tabs and properties are modified.</param>
/// <returns>
/// Returns the attribute group def to delete, or null when no delete was requested.
/// </returns>
private Tab ProcessDeletingTab(TEditorModel model)
{
    var deleteValue = ValueProvider.GetValue("submit.DeleteTab");
    if (deleteValue == null)
    {
        return null;
    }

    // Parse the posted tab id once instead of re-parsing per tab inside the predicate.
    var tabIdToDelete = HiveId.Parse(deleteValue.AttemptedValue);

    // Need to see if we have the generic properties tab available to move the properties on this tab to.
    // Generally there should always be a generic properties tab, but we'll double check to make sure.
    var genericTab = model.DefinedTabs.SingleOrDefault(x => x.Alias == FixedGroupDefinitions.GeneralGroupAlias);
    if (genericTab == null)
    {
        // We need to create the generic tab
        genericTab = BackOfficeRequestContext.Application.FrameworkContext.TypeMappers.Map<AttributeGroup, Tab>(FixedGroupDefinitions.GeneralGroup);
        model.DefinedTabs.Add(genericTab);
    }

    // Now, we need to move all properties that were on the tab that we want to delete to the generic tab
    var tabToDelete = model.DefinedTabs.Single(x => x.Id == tabIdToDelete);
    foreach (var prop in model.Properties.Where(x => x.TabId == tabToDelete.Id))
    {
        prop.TabId = genericTab.Id;
    }

    // Now we can finally delete the tab
    model.DefinedTabs.Remove(tabToDelete);

    Notifications.Add(new NotificationMessage(
        "DocumentType.TabDeleted.Message".Localize(this),
        "DocumentType.TabDeleted.Title".Localize(this),
        NotificationType.Success));

    return tabToDelete;
}
/// <summary>
/// Deserializes a relation (source/destination ids, type, ordinal, and any
/// metaDatum child elements) from its XML string representation.
/// </summary>
/// <param name="xmlString">The relation XML produced by the matching serializer.</param>
public static IRelationById FromXml(string xmlString)
{
    var root = XDocument.Parse(xmlString).Root;

    var relation = new RelationById(
        HiveId.Parse(root.Attribute("sourceId").Value),
        HiveId.Parse(root.Attribute("destinationId").Value),
        // TODO: Might be something other than RelationType
        new RelationType(root.Attribute("type").Value),
        Int32.Parse(root.Attribute("ordinal").Value));

    if (root.HasElements)
    {
        foreach (var metaDatum in root.Elements("metaDatum"))
        {
            relation.MetaData.Add(new RelationMetaDatum(
                metaDatum.Attribute("key").Value,
                metaDatum.Attribute("value").Value));
        }
    }

    return relation;
}
/// <summary>
/// Gets the non-container files that are children (or, when
/// <paramref name="includeDescendants"/> is true, all descendants) of the
/// container with the given name.
/// </summary>
/// <param name="fileStoreStep">The file store step to read from.</param>
/// <param name="containerName">The container's id in string form; must not be null or empty.</param>
/// <param name="includeDescendants">True to include files at any depth, not just direct children.</param>
/// <exception cref="ArgumentOutOfRangeException">When no container exists with the given name.</exception>
public static IEnumerable<File> GetFiles<TProviderFilter>(
    this IFileStoreStep<File, TProviderFilter> fileStoreStep, string containerName, bool includeDescendants = false)
    where TProviderFilter : class, IProviderTypeFilter
{
    Mandate.ParameterNotNullOrEmpty(containerName, "containerName");

    using (var unit = fileStoreStep.HiveManager.OpenReader<TProviderFilter>())
    {
        var container = unit.Repositories.Get<File>(HiveId.Parse(containerName));
        if (container == null)
        {
            // BUGFIX: the first argument is the parameter *name*; the original passed
            // the value for both, producing a misleading exception message.
            throw new ArgumentOutOfRangeException("containerName", containerName, "Directory does not exist");
        }

        if (includeDescendants)
        {
            var descendantIds = unit.Repositories.GetDescendantIds(container.Id, FixedRelationTypes.DefaultRelationType);
            // Materialize before the reader unit is disposed; the original returned a
            // deferred query that could be enumerated after the using block closed.
            return unit.Repositories.Get<File>(false, descendantIds).Where(x => !x.IsContainer).ToArray();
        }

        var childIds = unit.Repositories.GetChildRelations(container.Id, FixedRelationTypes.DefaultRelationType)
            .Select(x => x.DestinationId)
            .ToArray();
        return unit.Repositories.Get<File>(false, childIds).Where(x => !x.IsContainer).ToArray();
    }
}
/// <summary>
/// Translates a query description into the matching membership-provider lookup
/// (by username, id, email, a custom filter, or none) and maps the hits to
/// <typeparamref name="T"/>.
/// </summary>
/// <param name="query">The query to translate and execute.</param>
/// <param name="objectBinder">Binder supplied by the query engine (not used here).</param>
/// <exception cref="ArgumentOutOfRangeException">When the criteria type is not recognised.</exception>
public IEnumerable<T> PerformGetByQuery<T>(QueryDescription query, ObjectBinder objectBinder)
{
    IEnumerable<T> results;
    var criteria = new MembershipQueryVisitor().Visit(query.Criteria);
    switch (criteria.Type)
    {
        case MembershipQueryType.ByUsername:
            results = PerformGetByUsername<T>((string)criteria.SearchValue, criteria.SearchTermPredicateType);
            break;
        case MembershipQueryType.ById:
            results = PerformGet<T>(true, new[] { HiveId.Parse(criteria.SearchValue.ToString()) });
            break;
        case MembershipQueryType.ByEmail:
            results = PerformGetByEmail<T>((string)criteria.SearchValue, criteria.SearchTermPredicateType);
            break;
        case MembershipQueryType.Custom:
            // Custom filters must run over every user: page through the provider
            // 1000 users at a time, apply the filter, then map each hit.
            return (criteria.QueryFilter(GetAllPagedData((m, i) =>
            {
                int totalUsers;
                var result = m.GetAllUsers(i, 1000, out totalUsers);
                return new Tuple<MembershipUserCollection, int>(result, totalUsers);
            }))
                .Select(x => _frameworkContext.TypeMappers.Map<T>(x)));
        case MembershipQueryType.None:
            results = Enumerable.Empty<T>();
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }
    return (results);
}
/// <summary>
/// Gets the URL of the file in the upload field with the given property alias on the
/// TypedEntity with the given id, at the specified size.
/// </summary>
/// <param name="url">The URL.</param>
/// <param name="id">The id, in string form (parsed to a HiveId).</param>
/// <param name="propertyAlias">The property alias.</param>
/// <param name="size">The size (must be a prevalue on the upload property editor).</param>
/// <returns>The media URL.</returns>
public static string GetMediaUrl(this UrlHelper url, string id, string propertyAlias, int size)
{
    var entityId = HiveId.Parse(id);
    return url.GetMediaUrl(entityId, propertyAlias, size);
}
/// <summary>
/// Gets the URL of the file in the first upload field found on the TypedEntity
/// with the given id.
/// </summary>
/// <param name="url">The URL.</param>
/// <param name="id">The id, in string form (parsed to a HiveId).</param>
/// <returns>The media URL.</returns>
public static string GetMediaUrl(this UrlHelper url, string id)
{
    var entityId = HiveId.Parse(id);
    return url.GetMediaUrl(entityId);
}
/// <summary>
/// Looks up a user by profile id given as a string (parsed to a HiveId).
/// </summary>
/// <param name="profileId">The profile id in string form.</param>
/// <param name="userIsOnline">Whether the lookup should mark the user as online.</param>
public TUserType GetByProfileId(string profileId, bool userIsOnline = false)
{
    var parsedId = HiveId.Parse(profileId);
    return GetByProfileId(parsedId, userIsOnline);
}
/// <summary>
/// Looks up a user by id given as a string (parsed to a HiveId).
/// </summary>
/// <param name="id">The user id in string form.</param>
/// <param name="userIsOnline">Whether the lookup should mark the user as online.</param>
public TUserType GetById(string id, bool userIsOnline = false)
{
    var parsedId = HiveId.Parse(id);
    return GetById(parsedId, userIsOnline);
}
/// <summary>
/// Builds a NuGet package (package.nupkg) for the given package definition: serializes
/// the selected content, media, dictionary items, document/media/data types to JSON,
/// copies referenced uploaded files, templates, partials, stylesheets, scripts, macros,
/// languages and any additional files into the package, then writes it to
/// ~/App_Data/Rebel/CreatedPackages/{id}/. All failures are caught and reported via the
/// returned result rather than thrown.
/// </summary>
/// <param name="id">Folder name for the created package output.</param>
/// <param name="packageDef">The package definition describing what to include.</param>
/// <param name="appContext">The application context providing Hive and settings.</param>
/// <param name="httpContext">Used to map virtual paths to physical paths.</param>
/// <returns>A result with Success set, and ErrorMessage populated on failure.</returns>
public static CreatePackageResult CreatePackage(string id, PackageDefinition packageDef, IRebelApplicationContext appContext, HttpContextWrapper httpContext)
{
    var result = new CreatePackageResult();
    try
    {
        var packagePath = string.Format("~/App_Data/Rebel/CreatedPackages/{0}", id);
        var packageDir = new DirectoryInfo(httpContext.Server.MapPath(packagePath));
        var packageFile = new FileInfo(Path.Combine(packageDir.FullName, "package.nupkg"));

        // Build package
        var builder = new PackageBuilder
        {
            Id = packageDef.Alias,
            Title = packageDef.Name,
            Version = new Version(packageDef.Version),
            Description = packageDef.Description,
            ProjectUrl = new Uri(packageDef.ProjectUrl)
        };
        builder.Authors.Add(packageDef.Author);
        if (!string.IsNullOrWhiteSpace(packageDef.Tags))
        {
            builder.Tags.AddRange(packageDef.Tags.Split(' '));
        }
        if (!string.IsNullOrWhiteSpace(packageDef.LicenseUrl))
        {
            builder.LicenseUrl = new Uri(packageDef.LicenseUrl);
        }

        using (var uow = appContext.Hive.OpenReader<IContentStore>())
        {
            // Ids of files referenced by upload-editor attributes; collected from both
            // the content and media sections and packaged together afterwards.
            var uploadedFilesToPackage = new List<HiveId>();

            // Content
            if (!packageDef.ContentNodeId.IsNullValueOrEmpty())
            {
                var relationsToSerialize = new List<IRelationById>();
                var nodesToSerialize = new List<TypedEntity>();
                var contentNode = uow.Repositories.Get<TypedEntity>(packageDef.ContentNodeId);
                nodesToSerialize.Add(contentNode);
                var parentRelations = uow.Repositories.GetParentRelations(contentNode.Id, FixedRelationTypes.DefaultRelationType);
                relationsToSerialize.AddRange(parentRelations);
                if (packageDef.IncludeChildContentNodes)
                {
                    var childrenRelations = uow.Repositories.GetDescendentRelations(contentNode.Id, FixedRelationTypes.DefaultRelationType);
                    nodesToSerialize.AddRange(uow.Repositories.Get<TypedEntity>(true, childrenRelations.Select(x => x.DestinationId).ToArray()));
                    relationsToSerialize.AddRange(childrenRelations);
                }

                // Remember any files referenced via upload-editor attributes so the
                // physical files can be included in the package too.
                foreach (var node in nodesToSerialize.Where(x => x.Attributes.Any(y => y.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId))))
                {
                    var attributes = node.Attributes.Where(x => x.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId));
                    uploadedFilesToPackage.AddRange(attributes.Where(x => x.Values["Value"] != null).Select(attribute => HiveId.Parse(attribute.Values["Value"].ToString())));
                }

                CopySerializedObjectsToPackage(builder, appContext, nodesToSerialize, node => "Data/Content/" + node.Id.ToString().ToMd5() + ".json");

                // Relations
                CopySerializedObjectsToPackage(builder, appContext, relationsToSerialize, relation => "Data/Content/Relations/" + relation.SourceId.ToString().ToMd5() + "-" + relation.DestinationId.ToString().ToMd5() + ".json");
            }

            // Media
            if (!packageDef.MediaNodeId.IsNullValueOrEmpty())
            {
                var relationsToSerialize = new List<IRelationById>();
                var nodesToSerialize = new List<TypedEntity>();
                var mediaNode = uow.Repositories.Get<TypedEntity>(packageDef.MediaNodeId);
                if (mediaNode != null)
                {
                    nodesToSerialize.Add(mediaNode);
                    var parentRelations = uow.Repositories.GetParentRelations(mediaNode.Id, FixedRelationTypes.DefaultRelationType);
                    relationsToSerialize.AddRange(parentRelations);
                    if (packageDef.IncludeChildContentNodes)
                    {
                        var childrenRelations = uow.Repositories.GetDescendentRelations(mediaNode.Id, FixedRelationTypes.DefaultRelationType);
                        nodesToSerialize.AddRange(uow.Repositories.Get<TypedEntity>(true, childrenRelations.Select(x => x.DestinationId).ToArray()));
                        relationsToSerialize.AddRange(childrenRelations);
                    }
                }

                // Same upload-file collection as for the content section above.
                foreach (var node in nodesToSerialize.Where(x => x.Attributes.Any(y => y.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId))))
                {
                    var attributes = node.Attributes.Where(x => x.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId));
                    uploadedFilesToPackage.AddRange(attributes.Where(x => x.Values["Value"] != null).Select(attribute => HiveId.Parse(attribute.Values["Value"].ToString())));
                }

                CopySerializedObjectsToPackage(builder, appContext, nodesToSerialize, node => "Data/Media/" + node.Id.ToString().ToMd5() + ".json");

                // Relations
                CopySerializedObjectsToPackage(builder, appContext, relationsToSerialize, relation => "Data/Media/Relations/" + relation.SourceId.ToString().ToMd5() + "-" + relation.DestinationId.ToString().ToMd5() + ".json");
            }

            // Files
            CopyFilesToPackage(builder, appContext, "storage://file-uploader", uploadedFilesToPackage, "Content/Content/Media/");

            // Dictionary Items
            var dictionaryRelationsToSerialize = new List<IRelationById>();
            var dictionaryItems = uow.Repositories.Get<TypedEntity>(true, packageDef.DictionaryItemIds.ToArray());
            CopySerializedObjectsToPackage(builder, appContext, dictionaryItems, dictionaryItem => "Data/DictionaryItems/" + dictionaryItem.Id.ToString().ToMd5() + ".json");
            foreach (var parentRelations in dictionaryItems.Select(dictionaryItem => uow.Repositories.GetParentRelations(dictionaryItem.Id)))
            {
                dictionaryRelationsToSerialize.AddRange(parentRelations);
            }
            CopySerializedObjectsToPackage(builder, appContext, dictionaryRelationsToSerialize, relation => "Data/DictionaryItems/Relations/" + relation.SourceId.ToString().ToMd5() + "-" + relation.DestinationId.ToString().ToMd5() + ".json");

            // Doc Types
            var docTypeRelationsToSerialize = new List<IRelationById>();
            var docTypes = uow.Repositories.Schemas.Get<EntitySchema>(true, packageDef.DocumentTypeIds.ToArray());
            CopySerializedObjectsToPackage(builder, appContext, docTypes, docType => "Data/DocumentTypes/" + docType.Alias + ".json");
            foreach (var parentRelations in docTypes.Select(docType => uow.Repositories.Schemas.GetParentRelations(docType.Id)))
            {
                docTypeRelationsToSerialize.AddRange(parentRelations);
            }
            CopySerializedObjectsToPackage(builder, appContext, docTypeRelationsToSerialize, relation => "Data/DocumentTypes/Relations/" + relation.SourceId.ToString().ToMd5() + "-" + relation.DestinationId.ToString().ToMd5() + ".json");

            // Media Types
            var mediaTypeRelationsToSerialize = new List<IRelationById>();
            var mediaTypes = uow.Repositories.Schemas.Get<EntitySchema>(true, packageDef.MediaTypeIds.ToArray());
            CopySerializedObjectsToPackage(builder, appContext, mediaTypes, mediaType => "Data/MediaTypes/" + mediaType.Alias + ".json");
            foreach (var parentRelations in mediaTypes.Select(mediaType => uow.Repositories.Schemas.GetParentRelations(mediaType.Id)))
            {
                mediaTypeRelationsToSerialize.AddRange(parentRelations);
            }
            CopySerializedObjectsToPackage(builder, appContext, mediaTypeRelationsToSerialize, relation => "Data/MediaTypes/Relations/" + relation.SourceId.ToString().ToMd5() + "-" + relation.DestinationId.ToString().ToMd5() + ".json");

            // Data Types
            var dataTypes = uow.Repositories.Schemas.Get<AttributeType>(true, packageDef.DataTypeIds.ToArray());
            CopySerializedObjectsToPackage(builder, appContext, dataTypes, dataType => "Data/DataTypes/" + dataType.Alias + ".json");
        }

        // Templates
        CopyFilesToPackage(builder, appContext, "storage://templates", packageDef.TemplateIds, "Content/Views/");

        // Partials
        CopyFilesToPackage(builder, appContext, "storage://partials", packageDef.PartialIds, "Content/Views/Partials/");

        // Stylesheets
        CopyFilesToPackage(builder, appContext, "storage://stylesheets", packageDef.StylesheetIds, "Content/Content/Styles/");

        // Scripts
        CopyFilesToPackage(builder, appContext, "storage://scripts", packageDef.ScriptIds, "Content/Scripts/");

        // Macros — partial-view macros additionally copy their .cshtml view into the package.
        CopyFilesToPackage(builder, appContext, "storage://macros", packageDef.MacroIds, "Content/App_Data/Rebel/Macros/", (file, packageBuilder) =>
        {
            var macro = MacroSerializer.FromFile(file);
            if (macro.MacroType == "PartialView")
            {
                // SelectedItem is "{areaName}-{macroName}" for package-area macros,
                // or just the macro name for site-level ones.
                var macroParts = macro.SelectedItem.Split('-');
                var areaName = macroParts.Length > 1 ? macroParts[0] : "";
                var macroName = (areaName.IsNullOrWhiteSpace()) ? string.Join("", macroParts) : macroParts[1];
                var relativePath = (areaName.IsNullOrWhiteSpace())
                    ? "~/Views/MacroPartials/" + macroName + ".cshtml"
                    : "~/App_Plugins/Packages/" + areaName + "/Views/MacroPartials/" + macroName + ".cshtml";
                var path = httpContext.Server.MapPath(relativePath);
                packageBuilder.Files.Add(new PhysicalPackageFile { SourcePath = path, TargetPath = "Content/Views/MacroPartials/" + macroName + ".cshtml" });
            }
        });

        // Languages
        var languages = appContext.Settings.Languages.Where(x => packageDef.LanguageIds.Contains(x.IsoCode));
        CopySerializedObjectsToPackage(builder, appContext, languages, lang => "Data/Languages/" + lang.IsoCode + ".json");

        // Misc files — routed into the package by where they live on disk.
        foreach (var file in packageDef.AdditionalFiles)
        {
            var cleanFile = "~/" + file.Replace('\\', '/').TrimStart('~', '/');
            var cleanFilePath = httpContext.Server.MapPath(cleanFile);
            if (!File.Exists(cleanFilePath) && !Directory.Exists(cleanFilePath))
            {
                continue;
            }
            var fileInfo = File.GetAttributes(cleanFilePath);
            var isDirectory = (fileInfo & FileAttributes.Directory) == FileAttributes.Directory;
            if (cleanFile.StartsWith("~/App_Plugins/Packages/" + packageDef.Alias + "/", true, CultureInfo.InvariantCulture))
            {
                // Files already under this package's plugin folder go to the package root.
                if (isDirectory)
                {
                    CopyFolderToPackage(builder, appContext, httpContext.Server.MapPath("~/App_Plugins/Packages/" + packageDef.Alias + "/"), cleanFilePath, (rootPath, path) => path.TrimStart(rootPath).Replace('\\', '/'));
                }
                else
                {
                    builder.Files.Add(new PhysicalPackageFile { SourcePath = httpContext.Server.MapPath(cleanFile), TargetPath = Regex.Replace(cleanFile, "^~/App_Plugins/Packages/" + packageDef.Alias + "/", "", RegexOptions.IgnoreCase) });
                }
            }
            else if (cleanFile.StartsWith("~/Bin/", true, CultureInfo.InvariantCulture))
            {
                // Assemblies go into the NuGet lib/ folder.
                if (isDirectory)
                {
                    CopyFolderToPackage(builder, appContext, httpContext.Server.MapPath("~/Bin/"), cleanFilePath, (rootPath, path) => "lib/" + path.TrimStart(rootPath).Replace('\\', '/'));
                }
                else
                {
                    builder.Files.Add(new PhysicalPackageFile { SourcePath = httpContext.Server.MapPath(cleanFile), TargetPath = "lib/" + Regex.Replace(cleanFile, "^~/Bin/", "", RegexOptions.IgnoreCase) });
                }
            }
            else
            {
                // Everything else goes under Content/ mirroring its site-relative path.
                if (isDirectory)
                {
                    CopyFolderToPackage(builder, appContext, httpContext.Server.MapPath("~/"), cleanFilePath, (rootPath, path) => "Content/" + path.TrimStart(rootPath).Replace('\\', '/'));
                }
                else
                {
                    builder.Files.Add(new PhysicalPackageFile { SourcePath = httpContext.Server.MapPath(cleanFile), TargetPath = "Content/" + Regex.Replace(cleanFile, "^~/", "", RegexOptions.IgnoreCase) });
                }
            }
        }

        // Web.config
        if (!string.IsNullOrWhiteSpace(packageDef.Config))
        {
            builder.Files.Add(new ByteArrayPackageFile { Contents = Encoding.UTF8.GetBytes(packageDef.Config), TargetPath = "Web.config" });
        }

        // Write package to disc
        using (Stream stream = File.Create(packageFile.FullName))
        {
            builder.Save(stream);
        }

        // If we've gotten this far, everything must have gone ok
        result.Success = true;
    }
    catch (global::System.Exception ex)
    {
        // Deliberate catch-all: packaging is best-effort and failures are surfaced
        // through the result object rather than thrown to the caller.
        result.Success = false;
        result.ErrorMessage = ex.Message;
    }
    return (result);
}
/// <summary>
/// Configures the type-mapper registrations between the ASP.NET MembershipUser type
/// and the Hive member representations (TypedEntity and Member), in both directions.
/// </summary>
public override void ConfigureMappings()
{
    #region MembershipUser -> TypedEntity

    this.CreateMap<MembershipUser, TypedEntity>(true)
        .CreateUsing(x => new TypedEntity())
        // The entity id is the provider's user key parsed as a HiveId.
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.ProviderUserKey.ToString())))
        .MapMemberFrom(x => x.UtcCreated, x => x.CreationDate)
        .AfterMap((x, t) =>
        {
            var textType = _attributeTypeRegistry.GetAttributeType(StringAttributeType.AliasValue);
            var readOnlyType = _attributeTypeRegistry.GetAttributeType(ReadOnlyAttributeType.AliasValue);
            var boolType = _attributeTypeRegistry.GetAttributeType(BoolAttributeType.AliasValue);
            var group = FixedGroupDefinitions.MemberDetails;

            // Need to line up the attribute name fields to match that of Member.
            // TODO: We need to store our own version of the name as seperate data than what the underlying membership provider can store
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(NodeNameAttributeDefinition.AliasValue, "Name") { AttributeGroup = group, AttributeType = textType }, x.UserName));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.UsernameAlias, "Username") { AttributeGroup = group, AttributeType = textType }, x.UserName));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.EmailAlias, "Email") { AttributeGroup = group, AttributeType = textType }, x.Email));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.PasswordQuestionAlias, "Password Question") { AttributeGroup = group, AttributeType = textType }, x.PasswordQuestion));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.CommentsAlias, "Comment") { AttributeGroup = group, AttributeType = textType }, x.Comment));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.IsApprovedAlias, "Is Approved") { AttributeGroup = group, AttributeType = boolType }, x.IsApproved));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.IsLockedOutAlias, "Is Locked Out") { AttributeGroup = group, AttributeType = boolType }, x.IsLockedOut));
            // Provider dates are plain DateTime; wrap in DateTimeOffset for storage.
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.LastLockoutDateAlias, "Last Lockout Date") { AttributeGroup = group, AttributeType = readOnlyType }, new DateTimeOffset(x.LastLockoutDate)));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.LastLoginDateAlias, "Last Login Date") { AttributeGroup = group, AttributeType = readOnlyType }, new DateTimeOffset(x.LastLoginDate)));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.LastActivityDateAlias, "Last Activity Date") { AttributeGroup = group, AttributeType = readOnlyType }, new DateTimeOffset(x.LastActivityDate)));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.LastPasswordChangeDateAlias, "Last Password Changed Date") { AttributeGroup = group, AttributeType = readOnlyType }, new DateTimeOffset(x.LastPasswordChangedDate)));
            t.Attributes.SetValueOrAdd(new TypedAttribute(new AttributeDefinition(MemberSchema.IsOnlineAlias, "Is Online") { AttributeGroup = group, AttributeType = readOnlyType }, x.IsOnline));
        });

    #endregion

    #region MembershipUser -> Member

    this.CreateMap<MembershipUser, Member>(true)
        .CreateUsing(x => new Member())
        // TODO: We need to store our own version of the name as seperate data than what the underlying membership provider can store
        .MapMemberFrom(x => x.Name, x => x.UserName)
        .MapMemberFrom(x => x.Username, x => x.UserName)
        .MapMemberFrom(x => x.Comments, x => x.Comment)
        .MapMemberFrom(x => x.UtcCreated, x => x.CreationDate)
        .MapMemberFrom(x => x.LastPasswordChangeDate, x => x.LastPasswordChangedDate)
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.ProviderUserKey.ToString())));

    #endregion

    #region TypedEntity -> MembershipUser

    this.CreateMap<TypedEntity, MembershipUser>(true)
        // NOTE: This will NOT work for new entities, only existing ones since we'll
        // never know the provider name (hence the "" first constructor argument).
        .CreateUsing(x => new MembershipUser("",
            x.Attribute<string>(NodeNameAttributeDefinition.AliasValue),
            x.Id.Value.Value,
            x.Attribute<string>(MemberSchema.EmailAlias),
            x.Attribute<string>(MemberSchema.PasswordQuestionAlias),
            x.Attribute<string>(MemberSchema.CommentsAlias),
            x.Attribute<bool>(MemberSchema.IsApprovedAlias),
            x.Attribute<bool>(MemberSchema.IsLockedOutAlias),
            x.UtcCreated.UtcDateTime,
            x.Attribute<DateTimeOffset>(MemberSchema.LastLoginDateAlias).UtcDateTime,
            x.Attribute<DateTimeOffset>(MemberSchema.LastActivityDateAlias).UtcDateTime,
            x.Attribute<DateTimeOffset>(MemberSchema.LastPasswordChangeDateAlias).UtcDateTime,
            x.Attribute<DateTimeOffset>(MemberSchema.LastLockoutDateAlias).UtcDateTime))
        // These are the only writable properties for MembershipUser.
        .MapMemberFrom(x => x.Email, x => x.Attribute<string>(MemberSchema.EmailAlias))
        .MapMemberFrom(x => x.Comment, x => x.Attribute<string>(MemberSchema.CommentsAlias))
        .MapMemberFrom(x => x.IsApproved, x => x.Attribute<bool>(MemberSchema.IsApprovedAlias))
        .MapMemberFrom(x => x.LastLoginDate, x => x.Attribute<DateTimeOffset>(MemberSchema.LastLoginDateAlias).UtcDateTime)
        .MapMemberFrom(x => x.LastActivityDate, x => x.Attribute<DateTimeOffset>(MemberSchema.LastActivityDateAlias).UtcDateTime);

    #endregion
}
/// <summary>
/// Imports serialized data from a package by checking if it's valid and doesn't already exist.
/// Deserializes the package's "Data" folder contents (content, data types, dictionary items,
/// document types, media, media types, languages and their relations), persists the importable
/// items to the content store, regenerates thumbnails for imported upload media, and registers
/// any imported languages in the language config file.
/// </summary>
/// <returns>
/// A <see cref="SerializedDataImport"/> describing everything that was found in the package and
/// whether each item was importable. Returns an empty result when the package has no "Data" folder.
/// </returns>
public SerializedDataImport ImportData()
{
    // Check if data folder exists; nothing to import otherwise
    var absoluteDataFolderPath = Path.Combine(_absolutePackagePath, "Data");
    if (!Directory.Exists(absoluteDataFolderPath))
    {
        return new SerializedDataImport();
    }

    // Load serialized data and deserialize to objects for import
    var serialization = _context.Application.FrameworkContext.Serialization;
    var dataImport = new SerializedDataImport();

    // Deserializes a single package file into the requested type.
    // FIX: the file stream returned by OpenRead() was previously never disposed,
    // leaking a FileStream (and a file handle) per imported file.
    Func<FileInfo, Type, DeseriazliedDataResult> deserialize = (fi, type) =>
    {
        using (var stream = fi.OpenRead())
        {
            return new DeseriazliedDataResult(serialization.FromStream(stream, type), type);
        }
    };

    var directories = new DirectoryInfo(absoluteDataFolderPath).GetDirectories("*", SearchOption.TopDirectoryOnly);
    foreach (var directory in directories)
    {
        var fileInfos = directory.GetFiles("*", SearchOption.AllDirectories);
        foreach (var fileInfo in fileInfos)
        {
            if (fileInfo.DirectoryName == null)
            {
                continue;
            }

            // Route each file into the correct import bucket based on the folder it lives in.
            // NOTE: each "<x>\relations" check sits in the else-branch of its "<x>" check;
            // a relations path never ends with the parent folder name so the pairs are disjoint.

            //Deserialize content
            if (fileInfo.DirectoryName.EndsWith("data\\content", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.Entities.Add(deserialize(fileInfo, typeof(TypedEntity)));
            }
            else if (fileInfo.DirectoryName.EndsWith("data\\content\\relations", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.EntityRelations.Add(deserialize(fileInfo, typeof(Relation)));
            }

            //Deserialize DataTypes
            if (fileInfo.DirectoryName.EndsWith("data\\datatypes", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.AttributeTypes.Add(deserialize(fileInfo, typeof(AttributeType)));
            }

            //Deserialize Dictionary Items (stored as TypedEntity like content)
            if (fileInfo.DirectoryName.EndsWith("data\\dictionaryitems", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.Entities.Add(deserialize(fileInfo, typeof(TypedEntity)));
            }
            else if (fileInfo.DirectoryName.EndsWith("data\\dictionaryitems\\relations", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.EntityRelations.Add(deserialize(fileInfo, typeof(Relation)));
            }

            //Deserialize DocumentTypes
            if (fileInfo.DirectoryName.EndsWith("data\\documenttypes", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.Schemas.Add(deserialize(fileInfo, typeof(EntitySchema)));
            }
            else if (fileInfo.DirectoryName.EndsWith("data\\documenttypes\\relations", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.SchemaRelations.Add(deserialize(fileInfo, typeof(Relation)));
            }

            //Deserialize Media
            if (fileInfo.DirectoryName.EndsWith("data\\media", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.Entities.Add(deserialize(fileInfo, typeof(TypedEntity)));
            }
            else if (fileInfo.DirectoryName.EndsWith("data\\media\\relations", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.EntityRelations.Add(deserialize(fileInfo, typeof(Relation)));
            }

            //Deserialize MediaTypes
            if (fileInfo.DirectoryName.EndsWith("data\\mediatypes", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.Schemas.Add(deserialize(fileInfo, typeof(EntitySchema)));
            }
            else if (fileInfo.DirectoryName.EndsWith("data\\mediatypes\\relations", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.SchemaRelations.Add(deserialize(fileInfo, typeof(Relation)));
            }

            //Deserialize Languages
            if (fileInfo.DirectoryName.EndsWith("data\\languages", StringComparison.InvariantCultureIgnoreCase))
            {
                dataImport.Languages.Add(deserialize(fileInfo, typeof(LanguageElement)));
            }
        }
    }

    // Flag which deserialized items are actually importable (e.g. don't already exist)
    CheckImportableEntities(dataImport);
    CheckImportableRelations(dataImport);

    // Open writer and save deserialized attribute types, schemas, entities and relations.
    // Order matters: attribute types before schemas before relations/entities so that
    // dependent items resolve against already-persisted ones.
    using (var uow = _context.Application.Hive.OpenWriter<IContentStore>())
    {
        foreach (var attributeType in dataImport.AttributeTypes.Where(attributeType => attributeType.IsImportable))
        {
            uow.Repositories.Schemas.AddOrUpdate(attributeType.DeserializedObject as AttributeType);
        }
        foreach (var schema in dataImport.Schemas.Where(schema => schema.IsImportable))
        {
            uow.Repositories.Schemas.AddOrUpdate(schema.DeserializedObject as EntitySchema);
        }
        foreach (var schemaRelation in dataImport.SchemaRelations.Where(schema => schema.IsImportable))
        {
            var relation = schemaRelation.DeserializedObject as IRelationById;
            uow.Repositories.Schemas.AddRelation(relation.SourceId, relation.DestinationId, relation.Type, relation.Ordinal, relation.MetaData.ToArray());
        }
        foreach (var entity in dataImport.Entities.Where(entity => entity.IsImportable))
        {
            uow.Repositories.AddOrUpdate(entity.DeserializedObject as TypedEntity);
        }
        foreach (var entityRelation in dataImport.EntityRelations.Where(schema => schema.IsImportable))
        {
            var relation = entityRelation.DeserializedObject as IRelationById;
            uow.Repositories.AddRelation(relation.SourceId, relation.DestinationId, relation.Type, relation.Ordinal, relation.MetaData.ToArray());
        }
        uow.Complete();
    }

    // Regenerate any image thumbnails for imported entities that carry file-upload attributes.
    // FIX: materialize once — this was a deferred query enumerated three times below.
    var entitiesWithFiles = dataImport.Entities.Where(x => x.IsImportable)
        .Select(x => x.DeserializedObject as TypedEntity)
        .Where(x => x.Attributes
            .Any(y => y.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId)))
        .ToArray();

    if (entitiesWithFiles.Any())
    {
        // Get a list of attribute type ids that use the file uploader property editor
        var attributeTypeIds = entitiesWithFiles
            .SelectMany(x => x.Attributes
                .Where(y => y.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId))
                .Select(y => y.AttributeDefinition.AttributeType.Id))
            .Distinct().ToArray();

        using (var contentUow = _context.Application.Hive.OpenWriter<IContentStore>())
        using (var fileUow = _context.Application.Hive.OpenWriter<IFileStore>(new Uri("storage://file-uploader/")))
        {
            // Load attribute types from hive in case prevalues are different to those serialized (hive takes precedence)
            var attributeTypes = contentUow.Repositories.Schemas.Get<AttributeType>(true, attributeTypeIds);

            foreach (var entity in entitiesWithFiles)
            {
                var uploadAttributes = entity.Attributes
                    .Where(x => x.AttributeDefinition.AttributeType.RenderTypeProvider.InvariantEquals(CorePluginConstants.FileUploadPropertyEditorId))
                    .ToList();

                foreach (var uploadAttribute in uploadAttributes)
                {
                    var mediaId = uploadAttribute.Values["MediaId"].ToString();
                    var fileId = HiveId.Parse(uploadAttribute.Values["Value"].ToString());
                    if (!string.IsNullOrWhiteSpace(mediaId) && !fileId.IsNullValueOrEmpty())
                    {
                        // Refetch the attribute type so prevalue (thumbnail size) config comes from hive
                        var attributeType = attributeTypes.SingleOrDefault(x => x.Id == uploadAttribute.AttributeDefinition.AttributeType.Id);
                        var dataType = _context.Application.FrameworkContext.TypeMappers.Map<DataType>(attributeType);
                        var preValue = dataType.GetPreValueModel() as dynamic;
                        var sizes = preValue.Sizes;

                        var file = fileUow.Repositories.Get<Rebel.Framework.Persistence.Model.IO.File>(fileId);
                        // FIX: guard against a missing file in the store (previously an NRE)
                        if (file != null && file.IsImage())
                        {
                            ContentExtensions.CreateThumbnails(fileUow, file, mediaId, sizes);
                        }
                    }
                }
            }
            fileUow.Complete();
        }
    }

    if (dataImport.Languages.Any())
    {
        // Persist the language entries into the languages config file, replacing
        // any existing entry with the same ISO code.
        var configFile = Path.Combine(_httpContext.Server.MapPath("~/App_Data/Rebel/Config"), "rebel.cms.languages.config");
        var configXml = XDocument.Load(configFile);
        foreach (var language in dataImport.Languages)
        {
            var lang = language.DeserializedObject as LanguageElement;
            if (lang == null)
            {
                continue;
            }

            // Remove previous entry
            configXml.Descendants("language").Where(x => x.Attribute("isoCode").Value == lang.IsoCode).Remove();

            // Add new entry
            configXml.Element("languages").Add(XElement.Parse(lang.ToXmlString()));

            language.IsImportable = true;
            language.ObjectId = new HiveId(lang.IsoCode.EncodeAsGuid());
        }
        configXml.Save(configFile);
    }

    return dataImport;
}
/// <summary>
/// Registers all type-mapper configurations between Examine/Lucene index representations
/// (SearchResult, index operations) and the Hive persistence model (TypedEntity, EntitySchema,
/// AttributeDefinition/Group/Type, Relation, Revision). Each #region below declares one map;
/// maps created with the 'true' flag also apply to derived types (inheritance support).
/// NOTE(review): registration appears order-independent here, but several maps call Map&lt;,&gt;
/// recursively at execution time, so all of them must be registered before any is executed.
/// </summary>
public override void ConfigureMappings()
{
    #region LinearHiveIndexOperation -> IndexOperation
    this.CreateMap<LinearHiveIndexOperation, IndexOperation>(true)
        .CreateUsing(x => new IndexOperation())
        // Wrap the fields/id/category into the IndexItem payload expected by Examine
        .MapMemberFrom(x => x.Item, x => new IndexItem { Fields = x.Fields, Id = x.Id.Value, ItemCategory = x.ItemCategory })
        .MapMemberFrom(x => x.Operation, x => x.OperationType);
    #endregion

    #region SearchResult -> Relation

    // Safely parse the ordinal stored in the index, defaulting to 0 on bad data
    Func<string, int> getOrdinal = (s) =>
    {
        //need to safe parse the ordinal
        int ordinal;
        return (int.TryParse(s, out ordinal) ? ordinal : 0);
    };

    this.CreateMap<SearchResult, IRelationById>()
        .CreateUsing(x => new RelationById(
            new HiveId(x.Fields[FixedRelationIndexFields.SourceId]),
            new HiveId(x.Fields[FixedRelationIndexFields.DestinationId]),
            new RelationType(x.Fields[FixedRelationIndexFields.RelationType]),
            getOrdinal(x.Fields[FixedIndexedFields.Ordinal])))
        .AfterMap((s, t) =>
        {
            //need to setup the metadata
            // Metadata is stored as prefixed index fields ("<prefix>.<name>"); the name is
            // everything after the first '.' separator.
            foreach (var m in s.Fields.Where(x => x.Key.StartsWith(FixedRelationIndexFields.MetadatumPrefix)))
            {
                t.MetaData.Add(new RelationMetaDatum(m.Key.Split('.')[1], m.Value));
            }
        });

    #endregion

    #region SearchResult -> Revision<TypedEntity>

    this.CreateMap<SearchResult, Revision<TypedEntity>>(true)
        .CreateUsing(x => new Revision<TypedEntity>())
        .ForMember(x => x.MetaData, x => x.MapUsing(new SearchResultToRevisionData(_helper)))
        // The revision's item is produced by the SearchResult -> TypedEntity map below
        .MapMemberFrom(x => x.Item, Map<SearchResult, TypedEntity>)
        .AfterMap((s, t) => { }); // intentionally empty

    #endregion

    #region SearchResult -> TypedEntity

    this.CreateMap(new SearchResultToTypedEntity(_helper, this), true)
        .CreateUsing(x => new TypedEntity())
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.Fields[FixedIndexedFields.EntityId])))
        .AfterMap((s, t) => { ExamineHelper.SetEntityDatesFromSearchResult(t, s); });

    #endregion

    #region SearchResult -> EntitySchema

    this.CreateMap<SearchResult, EntitySchema>()
        .CreateUsing(x => new EntitySchema())
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.Id)))
        .ForMember(x => x.Alias, opt => opt.MapFrom(y => y.Fields.GetValueAsString("Alias")))
        .ForMember(x => x.Name, opt => opt.MapFrom(y => new LocalizedString(y.Fields.GetValueAsString("Name"))))
        .ForMember(x => x.SchemaType, opt => opt.MapFrom(y => y.Fields.GetValueAsString("SchemaType")))
        // An empty/whitespace XmlConfiguration field maps to an empty document rather than a parse error
        .MapMemberFrom(x => x.XmlConfiguration,
                       y => y.Fields.GetValueAsString("XmlConfiguration").IsNullOrWhiteSpace()
                           ? new XDocument()
                           : XDocument.Parse(y.Fields.GetValueAsString("XmlConfiguration")))
        .AfterMap((s, t) =>
        {
            // Attach the schema's groups first so attribute definitions can share the same instances
            var groups = _helper.GetMappedGroupsForSchema(t.Id);
            foreach (var g in groups)
            {
                t.AttributeGroups.Add(g.Item1);
            }

            //find all attribute definitions with this schema id
            var attDefs = _helper.GetAttributeDefinitionsForSchema(t.Id);

            //declare a local cache for found attribute types, see notes below.
            var attributeTypeCache = new List<AttributeType>();

            foreach (var a in attDefs)
            {
                //ok, we've already looked up the groups and its very IMPORTANT that the same group object is applied
                //to the AttributeDefinition otherwise problems will occur.
                //So instead of re-coding the mapping operation for SearchResult -> AttributeDefinition, we'll re-use our
                //current Map, but we'll ensure that it does not go re-lookup the AttributeGroup. We can do this by removing
                //the FixedIndexFieldsGroupId item from the fields so it won't think it has a gruop, then we will add the group back.
                //NOTE: this procedure can be avoided when the ScopedCache is turned on in the ExamineHelper, however
                //  i don't feel that relying on that mechanism is robust. Once we implement an ExamineDataContext to do
                //  the lookup caching instead, then this could be removed.
                var groupId = a.Fields.ContainsKey(FixedIndexedFields.GroupId) ? a.Fields[FixedIndexedFields.GroupId] : null;
                a.Fields.Remove(FixedIndexedFields.GroupId);

                //similar to the above, it is very IMPORTANT that the same AttributeType object is applied to each
                //of the AttributeDefinitions if they reference the same alias/id. In order to acheive this we will
                //remove the FixedIndexedFields.AttributeTypeId from the fields so the mapping operation thinks that it
                //doesn't have one assigned and won't go look it up. We will store the AttributeTypeId locally and look
                //it up manually and create a local cache to re-assign to the AttributeDefinitions.
                //NOTE: this procedure can be avoided when the ScopedCache is turned on in the ExamineHelper, however
                //  i don't feel that relying on that mechanism is robust. Once we implement an ExamineDataContext to do
                //  the lookup caching instead, then this could be removed.
                var attributeTypeId = a.Fields.ContainsKey(FixedIndexedFields.AttributeTypeId) ? a.Fields[FixedIndexedFields.AttributeTypeId] : null;
                a.Fields.Remove(FixedIndexedFields.AttributeTypeId);

                //now do the mapping
                var mappedAttributeDefinition = Map<SearchResult, AttributeDefinition>(a);

                //now see if we can find the already found group by id
                if (groupId != null)
                {
                    var group = t.AttributeGroups.SingleOrDefault(x => x.Id.Value.ToString() == groupId);
                    mappedAttributeDefinition.AttributeGroup = group;
                }

                //now see if we can find an attribute type from our cache or from the helper
                if (attributeTypeId != null)
                {
                    var attType = attributeTypeCache.SingleOrDefault(x => x.Id.Value.ToString() == attributeTypeId);
                    if (attType == null)
                    {
                        //its not in our cache so look it up and add to cache
                        attType = _helper.PerformGet<AttributeType>(true, LuceneIndexer.IndexNodeIdFieldName, new HiveId(attributeTypeId))
                            .SingleOrDefault();
                        if (attType != null)
                        {
                            attributeTypeCache.Add(attType);
                        }
                    }
                    mappedAttributeDefinition.AttributeType = attType;
                }

                //add the attribute definition
                t.AttributeDefinitions.Add(mappedAttributeDefinition);
            }

            ExamineHelper.SetEntityDatesFromSearchResult(t, s);
        });

    #endregion

    #region SearchResult -> AttributeDefinition

    this.CreateMap<SearchResult, AttributeDefinition>()
        .CreateUsing(x => new AttributeDefinition())
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.Id)))
        .ForMember(x => x.Alias, opt => opt.MapFrom(y => y.Fields.GetValueAsString("Alias")))
        .ForMember(x => x.Name, opt => opt.MapFrom(y => new LocalizedString(y.Fields.GetValueAsString("Name"))))
        .ForMember(x => x.Description, opt => opt.MapFrom(y => new LocalizedString(y.Fields.GetValueAsString("Description"))))
        .ForMember(x => x.RenderTypeProviderConfigOverride, opt => opt.MapFrom(y => y.Fields.GetValueAsString("RenderTypeProviderConfigOverride")))
        .AfterMap((s, t) =>
        {
            //need to do Ordinal safely
            int ordinal;
            if (int.TryParse(s.Fields.GetValueAsString(FixedIndexedFields.Ordinal), out ordinal))
            {
                t.Ordinal = ordinal;
            }

            //lookup the attribute def & group from hive for the attribute def
            // (skipped when the EntitySchema map above has stripped these keys — see its comments)
            if (s.Fields.ContainsKey(FixedIndexedFields.GroupId))
            {
                var group = _helper.PerformGet<AttributeGroup>(true, LuceneIndexer.IndexNodeIdFieldName, new HiveId(s.Fields[FixedIndexedFields.GroupId]));
                t.AttributeGroup = group.SingleOrDefault();
            }
            if (s.Fields.ContainsKey(FixedIndexedFields.AttributeTypeId))
            {
                var attType = _helper.PerformGet<AttributeType>(true, LuceneIndexer.IndexNodeIdFieldName, new HiveId(s.Fields[FixedIndexedFields.AttributeTypeId]));
                t.AttributeType = attType.SingleOrDefault();
            }

            ExamineHelper.SetEntityDatesFromSearchResult(t, s);
        });

    #endregion

    #region SearchResult -> AttributeGroup

    this.CreateMap<SearchResult, AttributeGroup>()
        .CreateUsing(x => new AttributeGroup())
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.Id)))
        .ForMember(x => x.Alias, opt => opt.MapFrom(y => y.Fields.GetValueAsString("Alias")))
        .ForMember(x => x.Name, opt => opt.MapFrom(y => new LocalizedString(y.Fields.GetValueAsString("Name"))))
        .AfterMap((s, t) =>
        {
            //need to do Ordinal safely
            int ordinal;
            if (int.TryParse(s.Fields.GetValueAsString(FixedIndexedFields.Ordinal), out ordinal))
            {
                t.Ordinal = ordinal;
            }
            ExamineHelper.SetEntityDatesFromSearchResult(t, s);
        });

    #endregion

    #region SearchResult -> AttributeType

    this.CreateMap<SearchResult, AttributeType>()
        .CreateUsing(x => new AttributeType())
        .ForMember(x => x.Id, opt => opt.MapFrom(y => HiveId.Parse(y.Id)))
        .ForMember(x => x.Alias, opt => opt.MapFrom(y => y.Fields.GetValueAsString("Alias")))
        .ForMember(x => x.Name, opt => opt.MapFrom(y => new LocalizedString(y.Fields.GetValueAsString("Name"))))
        .ForMember(x => x.Description, opt => opt.MapFrom(y => new LocalizedString(y.Fields.GetValueAsString("Description"))))
        .ForMember(x => x.RenderTypeProvider, opt => opt.MapFrom(y => y.Fields.GetValueAsString("RenderTypeProvider")))
        .ForMember(x => x.RenderTypeProviderConfig, opt => opt.MapFrom(y => y.Fields.GetValueAsString("RenderTypeProviderConfig")))
        .AfterMap((s, t) =>
        {
            //need to do Ordinal safely
            int ordinal;
            if (int.TryParse(s.Fields.GetValueAsString(FixedIndexedFields.Ordinal), out ordinal))
            {
                t.Ordinal = ordinal;
            }

            //create the serialization type based on the FQN stored in the index
            var serializationType = Type.GetType(s.Fields[FixedIndexedFields.SerializationType]);
            if (serializationType == null)
            {
                //this shouldn't happen but in case something has changed in the index, then we'll default to string
                t.SerializationType = new StringSerializationType();
            }
            else
            {
                t.SerializationType = (IAttributeSerializationDefinition)Activator.CreateInstance(serializationType);
            }

            ExamineHelper.SetEntityDatesFromSearchResult(t, s);
        });

    #endregion

    #region EntitySchema -> NestedHiveIndexOperation

    //create a map that supports inheritance as we don't want to create a map for all EntitySchemas
    this.CreateMap<EntitySchema, NestedHiveIndexOperation>(true)
        .CreateUsing(x => new NestedHiveIndexOperation())
        .ForMember(x => x.Entity, opt => opt.MapFrom(y => y))
        .ForMember(x => x.OperationType, opt => opt.MapFrom(y => IndexOperationType.Add))
        .ForMember(x => x.Id, opt => opt.MapFrom(y => new Lazy<string>(() => y.Id.Value.ToString()))) //need to lazy load as it might not be set
        .ForMember(x => x.ItemCategory, opt => opt.MapFrom(y => typeof(EntitySchema).Name))
        .ForMember(x => x.Fields, opt => opt.MapUsing<EntitySchemaToIndexFields>())
        .AfterMap((s, t) =>
        {
            //Create sub operations for each of its children (both attribute definitions and groups)
            foreach (var op in s.AttributeDefinitions.Select(Map<AttributeDefinition, NestedHiveIndexOperation>)
                .Concat(s.AttributeGroups.Select(Map<AttributeGroup, NestedHiveIndexOperation>)))
            {
                //NOTE: we need to add this schema id to the fields otherwise we would just add this in the mapping operation for AttributeDefinition if it exposed the schema it belonged to
                op.Fields.Add(FixedIndexedFields.SchemaId, new Lazy<ItemField>(() => new ItemField(s.Id.Value.ToString()))); //need to add it as lazy since the id might not exist yet
                t.SubIndexOperations.Add(op);
            }

            //get the relations
            s.MapRelations(t, this);
        });

    #endregion

    #region TypedEntity -> NestedHiveIndexOperation

    this.CreateMap(new TypedEntityToIndexOperation(this), true);

    #endregion

    #region Relation -> NestedHiveIndexOperation

    this.CreateMap<IReadonlyRelation<IRelatableEntity, IRelatableEntity>, NestedHiveIndexOperation>(true)
        .CreateUsing(x => new NestedHiveIndexOperation())
        .ForMember(x => x.Entity, opt => opt.MapFrom(y => y))
        .ForMember(x => x.OperationType, opt => opt.MapFrom(y => IndexOperationType.Add))
        //need to lazy load as ids might not be set yet, this is also a 'composite' id of Source,Dest,Type
        .ForMember(x => x.Id, opt => opt.MapFrom(y => new Lazy<string>(y.GetCompositeId)))
        .ForMember(x => x.ItemCategory, opt => opt.MapFrom(y => "Relation"))
        .ForMember(x => x.Fields, opt => opt.MapUsing<RelationToIndexFields>());

    #endregion

    #region Revision<TypedEntity> -> NestedHiveIndexOperation

    this.CreateMap<Revision<TypedEntity>, NestedHiveIndexOperation>(true)
        .CreateUsing(x => new NestedHiveIndexOperation())
        .AfterMap((s, t) =>
        {
            //first, map the underlying TypedEntity
            var op = Map<TypedEntity, NestedHiveIndexOperation>(s.Item);
            //map all of the properties... we don't have to explicitly declare a map for this, the engine will automatically just create one
            Map(op, t);
            //ensure the rest of the revision data
            _helper.EnsureRevisionDataForIndexOperation(s, t);
        });

    #endregion

    #region AttributeType -> NestedHiveIndexOperation

    //create a map that supports inheritance as we don't want to create a map for all EntitySchemas... this would also be impossible
    this.CreateMap<AttributeType, NestedHiveIndexOperation>(true)
        .CreateUsing(x => new NestedHiveIndexOperation())
        .ForMember(x => x.Entity, opt => opt.MapFrom(y => y))
        .ForMember(x => x.OperationType, opt => opt.MapFrom(y => IndexOperationType.Add))
        .ForMember(x => x.Id, opt => opt.MapFrom(y => new Lazy<string>(() => y.Id.Value.ToString()))) //need to lazy load as it might not be set
        .ForMember(x => x.ItemCategory, opt => opt.MapFrom(y => typeof(AttributeType).Name))
        .ForMember(x => x.Fields, opt => opt.MapUsing(new AttributeTypeToIndexFields(_helper)));

    #endregion

    #region AttributeGroup -> NestedHiveIndexOperation

    //create a map that supports inheritance as we don't want to create a map for all EntitySchemas... this would also be impossible
    this.CreateMap<AttributeGroup, NestedHiveIndexOperation>(true)
        .CreateUsing(x => new NestedHiveIndexOperation())
        .ForMember(x => x.Entity, opt => opt.MapFrom(y => y))
        .ForMember(x => x.OperationType, opt => opt.MapFrom(y => IndexOperationType.Add))
        .ForMember(x => x.Id, opt => opt.MapFrom(y => new Lazy<string>(() => y.Id.Value.ToString()))) //need to lazy load as it might not be set
        .ForMember(x => x.ItemCategory, opt => opt.MapFrom(y => typeof(AttributeGroup).Name))
        .ForMember(x => x.Fields, opt => opt.MapUsing<AttributeGroupToIndexFields>());

    #endregion

    #region AttributeDefinition -> NestedHiveIndexOperation

    //create a map that supports inheritance as we don't want to create a map for all EntitySchemas... this would also be impossible
    this.CreateMap<AttributeDefinition, NestedHiveIndexOperation>(true)
        .CreateUsing(x => new NestedHiveIndexOperation())
        .ForMember(x => x.Entity, opt => opt.MapFrom(y => y))
        .ForMember(x => x.OperationType, opt => opt.MapFrom(y => IndexOperationType.Add))
        .ForMember(x => x.Id, opt => opt.MapFrom(y => new Lazy<string>(() => y.Id.Value.ToString()))) //need to lazy load as it might not be set
        .ForMember(x => x.ItemCategory, opt => opt.MapFrom(y => typeof(AttributeDefinition).Name))
        .ForMember(x => x.Fields, opt => opt.MapUsing<AttributeDefinitionToIndexFields>())
        .AfterMap((s, t) =>
        {
            //Add sub operation for it's AttributeType
            t.SubIndexOperations.Add(Map<AttributeType, NestedHiveIndexOperation>(s.AttributeType));
        });

    #endregion
}
/// <summary>
/// Maps a Lucene <see cref="SearchResult"/> onto a <see cref="TypedEntity"/>: resolves the
/// entity's schema (including ancestor schemas, composed into a CompositeEntitySchema), then
/// rebuilds each typed attribute from the prefixed attribute fields stored in the index.
/// </summary>
/// <param name="source">The search result whose Fields dictionary holds the indexed data.</param>
/// <param name="target">The entity being populated.</param>
/// <param name="scope">The current mapping execution scope (passed through to the base map).</param>
/// <exception cref="DataException">Thrown when the schema id stored in the index cannot be found.</exception>
protected override void PerformMap(SearchResult source, TypedEntity target, MappingExecutionScope scope)
{
    base.PerformMap(source, target, scope);

    //lookup the document type from examine
    var entitySchema = _helper.PerformGet<EntitySchema>(true, LuceneIndexer.IndexNodeIdFieldName,
                                                        new HiveId(source.Fields[FixedIndexedFields.SchemaId])).ToArray();
    if (!entitySchema.Any())
    {
        throw new DataException("Could not find an item in the index with id " + source.Fields[FixedIndexedFields.SchemaId]);
    }
    target.EntitySchema = entitySchema.SingleOrDefault();

    // Walk the schema's parent relations recursively to collect every ancestor schema id.
    // NOTE(review): "PeformGetParentRelations" (missing 'r') is the helper's actual name.
    var ancestorSchemaIds = _helper.PeformGetParentRelations(target.EntitySchema.Id, FixedRelationTypes.DefaultRelationType).
        SelectRecursive(
            x => _helper.PeformGetParentRelations(x.SourceId, FixedRelationTypes.DefaultRelationType)).
        ToArray();
    if (ancestorSchemaIds.Any())
    {
        // Compose the schema with its ancestors so inherited attribute definitions are visible
        var ancestorSchemas = _helper.PerformGet<EntitySchema>(true, LuceneIndexer.IndexNodeIdFieldName,
                                                               ancestorSchemaIds.Select(x => x.SourceId).ToArray()).ToArray();
        target.EntitySchema = new CompositeEntitySchema(target.EntitySchema, ancestorSchemas);
    }

    // We'll check this later if an attribute definition doesn't exist on the current schema so we can check parents
    var compositeSchema = target.EntitySchema as CompositeEntitySchema;

    //now we need to build up the attributes, get all attribute aliases and go from there
    // Attribute data is spread across fields keyed "<prefix><alias>[.<suffix>]"; the ".Alias"
    // suffixed field carries the alias itself and anchors each attribute's reconstruction.
    foreach (var f in source.Fields.Where(x => x.Key.StartsWith(FixedAttributeIndexFields.AttributePrefix)
                                            && x.Key.EndsWith(FixedAttributeIndexFields.AttributeAlias)))
    {
        //get the alias for the attribute
        var alias = f.Value;

        //now we can use this alias to find the rest of the attributes values
        //var nameKey = FixedAttributeIndexFields.AttributePrefix + alias + "." + FixedAttributeIndexFields.AttributeName;
        var valueKey = FixedAttributeIndexFields.AttributePrefix + alias;
        var idKey = FixedAttributeIndexFields.AttributePrefix + alias + "." + FixedAttributeIndexFields.AttributeId;

        //find the associated definition in the schema and set it
        var def = target.EntitySchema.AttributeDefinitions.SingleOrDefault(x => x.Alias == alias);

        // Check if the definition is "inherited" because it exists on a parent schema
        if (def == null)
        {
            if (compositeSchema != null)
            {
                def = compositeSchema.AllAttributeDefinitions.SingleOrDefault(x => x.Alias == alias);
            }
        }

        // Unknown aliases (no definition on this schema or any ancestor) are silently skipped
        if (def != null)
        {
            //get all values for the current value (as some attributes can store multiple named values, not just one)
            var values = source.Fields
                .Where(k => k.Key.StartsWith(valueKey)
                            //&& !k.Key.EndsWith(FixedAttributeIndexFields.AttributeName)
                            && !k.Key.EndsWith(FixedAttributeIndexFields.AttributeAlias)
                            && !k.Key.EndsWith(FixedAttributeIndexFields.AttributeId));

            var attribute = new TypedAttribute(def) { Id = HiveId.Parse(source.Fields[idKey]) };

            foreach (var v in values)
            {
                //get the value name, it could be blank if this attribute is only storing one value
                var valueName = v.Key.Substring(valueKey.Length, v.Key.Length - valueKey.Length);
                if (valueName.IsNullOrWhiteSpace())
                {
                    //if its a null value name, then set the dynamic value
                    attribute.DynamicValue = GetRealValueFromField(def.AttributeType.SerializationType, v.Value);
                }
                else
                {
                    //if its a named value, then set it by name (strip the leading '.' key separator)
                    attribute.Values.Add(valueName.TrimStart('.'), GetRealValueFromField(def.AttributeType.SerializationType, v.Value));
                }
            }

            target.Attributes.SetValueOrAdd(attribute);
        }
    }
}
/// <summary>
/// TEMPORARY method to install all data required for the dev data set, excluding the core data.
/// Runs three passes against the content store: (1) create bare schemas with their tab groups and
/// inheritance relations, (2) re-map and re-save the schemas so all properties are persisted,
/// (3) map and persist the content entities as published revisions.
/// </summary>
/// <param name="manager">The hive manager used to open content-store writers.</param>
/// <param name="framework">The framework context supplying the type mappers.</param>
internal void InstallDevDataset(IHiveManager manager, IFrameworkContext framework)
{
    // Keep track of every schema created in the first pass
    var createdSchemas = new List<EntitySchema>();

    // Pass 1: create each document type's schema (with its tab groups) and its
    // inheritance relations before any full property mapping takes place.
    using (var uow = manager.OpenWriter<IContentStore>())
    {
        foreach (var docType in DocTypes)
        {
            var entitySchema = new EntitySchema(docType.Alias, docType.Name) { Id = docType.Id };
            var mappedGroups = framework.TypeMappers.Map<IEnumerable<Tab>, IEnumerable<AttributeGroup>>(docType.DefinedTabs);
            entitySchema.AttributeGroups.AddRange(mappedGroups);

            uow.Repositories.Schemas.AddOrUpdate(entitySchema);
            createdSchemas.Add(entitySchema);

            // Wire up a default relation to every selected parent document type
            foreach (var selected in docType.InheritFrom.Where(x => x.Selected))
            {
                var parentId = HiveId.Parse(selected.Value);
                uow.Repositories.AddRelation(new Relation(FixedRelationTypes.DefaultRelationType, parentId, docType.Id));
            }
        }
        uow.Complete();
    }

    // Pass 2: now that the schemas exist, map each document type fully and re-save
    // so that all remaining schema properties are persisted.
    using (var uow = manager.OpenWriter<IContentStore>())
    {
        foreach (var docType in DocTypes)
        {
            uow.Repositories.Schemas.AddOrUpdate(framework.TypeMappers.Map<DocumentTypeEditorModel, EntitySchema>(docType));
        }
        uow.Complete();
    }

    // Pass 3: map the content entities and persist them as published revisions; the
    // attribute definitions/groups created above should line up with these entities.
    using (var uow = manager.OpenWriter<IContentStore>())
    {
        var revisions = framework
            .TypeMappers.Map<IEnumerable<ContentEditorModel>, IEnumerable<Revision<TypedEntity>>>(ContentData)
            .ToArray();
        revisions.ForEach(x => x.MetaData.StatusType = FixedStatusTypes.Published);

        //var allAttribTypes = AllAttribTypes(revisions);
        uow.Repositories.Revisions.AddOrUpdate(revisions);
        uow.Complete();
    }

    ////now that the data is in there, we need to setup some structure... probably a nicer way to do this but whatevs... its just for testing
    //using (var writer = mappingGroup.CreateReadWriteUnitOfWork())
    //{
    //    var homeSchema = writer.ReadWriteRepository.GetEntity<EntitySchema>(HiveId.ConvertIntToGuid(1045));
    //    var contentSchema = writer.ReadWriteRepository.GetEntity<EntitySchema>(HiveId.ConvertIntToGuid(1045));
    //    var faqContainerSchema = writer.ReadWriteRepository.GetEntity<EntitySchema>(HiveId.ConvertIntToGuid(1055));
    //    var faqCatSchema = writer.ReadWriteRepository.GetEntity<EntitySchema>(HiveId.ConvertIntToGuid(1056));
    //    var faqSchema = writer.ReadWriteRepository.GetEntity<EntitySchema>(HiveId.ConvertIntToGuid(1057));
    //}
}