public override ICollection GetSnapshot(ICollectionPersister persister)
		{
			EntityMode entityMode = Session.EntityMode;

			List<object> clonedList = new List<object>(list.Count);
			foreach (object current in list)
			{
				object deepCopy = persister.ElementType.DeepCopy(current, entityMode, persister.Factory);
				clonedList.Add(deepCopy);
			}
			return clonedList;
		}
		// For a one-to-many, a <bag> is not really a bag;
		// it is *really* a set, since it can't contain the
		// same element twice. It could be considered a bug
		// in the mapping dtd that <bag> allows <one-to-many>.

		// Anyway, here we implement <set> semantics for a
		// <one-to-many> <bag>!

		public override IEnumerable GetDeletes(ICollectionPersister persister, bool indexIsFormula)
		{
			IType elementType = persister.ElementType;
			EntityMode entityMode = Session.EntityMode;
			List<object> deletes = new List<object>();
			IList sn = (IList) GetSnapshot();
			int i = 0;
			foreach (object old in sn)
			{
				bool found = false;
				if (bag.Count > i && elementType.IsSame(old, bag[i++], entityMode))
				{
					//a shortcut if its location didn't change!
					found = true;
				}
				else
				{
					foreach (object newObject in bag)
					{
						if (elementType.IsSame(old, newObject, entityMode))
						{
							found = true;
							break;
						}
					}
				}
				if (!found)
				{
					deletes.Add(old);
				}
			}
			return deletes;
		}
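
// A minimal, stand-alone sketch (not NHibernate API) of the snapshot-based delete detection
// used above: for each element of the old snapshot, first try the positional shortcut, then
// fall back to a linear scan of the current bag. ReferenceEquals stands in for IType.IsSame,
// and all names below (SnapshotDeleteSketch, FindDeletes) are hypothetical.
using System.Collections.Generic;

static class SnapshotDeleteSketch
{
	public static List<object> FindDeletes(IList<object> snapshot, IList<object> current)
	{
		var deletes = new List<object>();
		int i = 0;
		foreach (object old in snapshot)
		{
			// shortcut: the element kept its position since the snapshot was taken
			bool found = current.Count > i && ReferenceEquals(old, current[i++]);
			if (!found)
			{
				foreach (object candidate in current)
				{
					if (ReferenceEquals(old, candidate)) { found = true; break; }
				}
			}
			if (!found)
			{
				deletes.Add(old); // element was removed from the bag since the snapshot
			}
		}
		return deletes;
	}
}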
		protected internal virtual string RenderLoggableString(object value, ISessionFactoryImplementor factory)
		{
			IList list = new List<object>();
			IType elemType = GetElementType(factory);
			IEnumerable iter = GetElementsIterator(value);
			foreach (object o in iter)
				list.Add(elemType.ToLoggableString(o, factory));

			return CollectionPrinter.ToString(list);
		}
		private ICollectionInitializer GetSubselectInitializer(object key, ISessionImplementor session)
		{
			if (!IsSubselectLoadable)
			{
				return null;
			}

			IPersistenceContext persistenceContext = session.PersistenceContext;

			SubselectFetch subselect =
				persistenceContext.BatchFetchQueue.GetSubselect(new EntityKey(key, OwnerEntityPersister, session.EntityMode));

			if (subselect == null)
			{
				return null;
			}
			else
			{
				// Take care of any entities that might have
				// been evicted!
				List<EntityKey> keysToRemove = new List<EntityKey>(subselect.Result.Count);
				foreach (EntityKey entityKey in subselect.Result)
				{
					if (!persistenceContext.ContainsEntity(entityKey))
					{
						keysToRemove.Add(entityKey);
					}
				}
				subselect.Result.RemoveAll(keysToRemove);

				// Run a subquery loader
				return CreateSubselectInitializer(subselect, session);
			}
		}
		public override string ToLoggableString(object value, ISessionFactoryImplementor factory)
		{
			if (value == null)
			{
				return "null";
			}
			Array array = (Array) value;
			int length = array.Length;
			IList list = new List<object>(length);
			IType elemType = GetElementType(factory);
			for (int i = 0; i < length; i++)
			{
				list.Add(elemType.ToLoggableString(array.GetValue(i), factory));
			}
			return CollectionPrinter.ToString(list);
		}
Example #6
		/// <summary>
		/// Adds an association and extracts the aliases the association's 'with clause' is dependent on
		/// </summary>
		private void AddAssociation(string subalias, OuterJoinableAssociation association)
		{
			subalias = subalias.ToLower();

			var dependencies = new List<string>();
			var on = association.On.ToString();
			if (!String.IsNullOrEmpty(on))
			{
				foreach (Match match in aliasRegex.Matches(on))
				{
					string alias = match.Groups[1].Value;
					if (alias == subalias) continue;
					dependencies.Add(alias.ToLower());
				}
			}

			_dependentAliases.Add(new DependentAlias
			{
				Alias = subalias,
				DependsOn = dependencies.ToArray()
			});

			associations.Add(association);
		}
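
// A hedged sketch of what the alias extraction above might look like in isolation. The regex
// pattern below is an assumption (the real aliasRegex is defined elsewhere in the class); it
// captures identifiers followed by a dot, e.g. "this0" in "other1.ParentId = this0.Id". The
// class and method names are hypothetical.
using System.Collections.Generic;
using System.Text.RegularExpressions;

static class AliasDependencySketch
{
	// assumed pattern; the real aliasRegex may differ
	static readonly Regex aliasRegex = new Regex(@"([\w]+)\.", RegexOptions.Compiled);

	public static string[] GetDependencies(string subalias, string onClause)
	{
		subalias = subalias.ToLowerInvariant();
		var dependencies = new List<string>();
		foreach (Match match in aliasRegex.Matches(onClause ?? string.Empty))
		{
			string alias = match.Groups[1].Value.ToLowerInvariant();
			if (alias != subalias && !dependencies.Contains(alias))
				dependencies.Add(alias);
		}
		return dependencies.ToArray();
	}
}

// e.g. GetDependencies("other1", "other1.ParentId = this0.Id") yields ["this0"]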
		/// <summary> 
		/// Finish the process of collection-loading for this bound result set.  Mainly this
		/// involves cleaning up resources and notifying the collections that loading is
		/// complete. 
		/// </summary>
		/// <param name="persister">The persister for which to complete loading. </param>
		public void EndLoadingCollections(ICollectionPersister persister)
		{
			if (!loadContexts.HasLoadingCollectionEntries && (localLoadingCollectionKeys.Count == 0))
			{
				return;
			}

			// in an effort to avoid concurrent-modification-exceptions (from
			// potential recursive calls back through here as a result of the
			// eventual call to PersistentCollection#endRead), we scan the
			// internal loadingCollections map for matches and store those matches
			// in a temp collection.  the temp collection is then used to "drive"
			// the #endRead processing.
			List<CollectionKey> toRemove = new List<CollectionKey>();
			List<LoadingCollectionEntry> matches = new List<LoadingCollectionEntry>();
			foreach (CollectionKey collectionKey in localLoadingCollectionKeys)
			{
				ISessionImplementor session = LoadContext.PersistenceContext.Session;

				LoadingCollectionEntry lce = loadContexts.LocateLoadingCollectionEntry(collectionKey);
				if (lce == null)
				{
					log.Warn("In CollectionLoadContext#endLoadingCollections, localLoadingCollectionKeys contained [" + collectionKey + "], but no LoadingCollectionEntry was found in loadContexts");
				}
				else if (lce.ResultSet == resultSet && lce.Persister == persister)
				{
					matches.Add(lce);
					if (lce.Collection.Owner == null)
					{
						session.PersistenceContext.AddUnownedCollection(new CollectionKey(persister, lce.Key, session.EntityMode),
						                                                lce.Collection);
					}
					if (log.IsDebugEnabled)
					{
						log.Debug("removing collection load entry [" + lce + "]");
					}

					// todo : i'd much rather have this done from #endLoadingCollection(CollectionPersister,LoadingCollectionEntry)...
					loadContexts.UnregisterLoadingCollectionXRef(collectionKey);
					toRemove.Add(collectionKey);
				}
			}
			localLoadingCollectionKeys.ExceptWith(toRemove);

			EndLoadingCollections(persister, matches);
			if ((localLoadingCollectionKeys.Count == 0))
			{
				// todo : hack!!!
				// NOTE : here we cleanup the load context when we have no more local
				// LCE entries.  This "works" for the time being because really
				// only the collection load contexts are implemented.  Long term,
				// this cleanup should become part of the "close result set"
				// processing from the (sandbox/jdbc) jdbc-container code.
				loadContexts.Cleanup(resultSet);
			}
		}
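
// The "collect matches first, then mutate" idiom from the comment above, in isolation:
// removing from a collection while enumerating it throws InvalidOperationException in .NET,
// so matches are gathered into a temporary list and removed afterwards. A minimal sketch
// with placeholder names, not NHibernate API.
using System;
using System.Collections.Generic;

static class CollectThenRemoveSketch
{
	public static void RemoveWhere<T>(ISet<T> source, Predicate<T> shouldRemove)
	{
		var toRemove = new List<T>();
		foreach (T item in source)          // safe: we only read during enumeration
		{
			if (shouldRemove(item))
				toRemove.Add(item);
		}
		source.ExceptWith(toRemove);        // mutate only after enumeration has finished
	}
}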
		/// <summary> 
		/// Remove an entity from the session cache and clear up any other state
		/// associated with the entity, except for the <tt>EntityEntry</tt>.
		/// </summary>
		public object RemoveEntity(EntityKey key)
		{
			object tempObject = entitiesByKey[key];
			entitiesByKey.Remove(key);
			object entity = tempObject;
			List<EntityUniqueKey> toRemove = new List<EntityUniqueKey>();
			foreach (KeyValuePair<EntityUniqueKey, object> pair in entitiesByUniqueKey)
			{
				if (pair.Value == entity) toRemove.Add(pair.Key);
			}
			foreach (EntityUniqueKey uniqueKey in toRemove)
			{
				entitiesByUniqueKey.Remove(uniqueKey);
			}

			entitySnapshotsByKey.Remove(key);
			nullifiableEntityKeys.Remove(key);
			BatchFetchQueue.RemoveBatchLoadableEntityKey(key);
			BatchFetchQueue.RemoveSubselect(key);
			return entity;
		}
        public override IAuditWorkUnit Dispatch(IWorkUnitMergeVisitor first)
        {
            if (first is PersistentCollectionChangeWorkUnit) {
                PersistentCollectionChangeWorkUnit original = (PersistentCollectionChangeWorkUnit) first;

                // Merging the collection changes in both work units.

                // First building a map from the ids of the collection-entry-entities from the "second" collection changes,
                // to the PCCD objects. That way, we will later be able to check if an "original" collection change
                // should be added, or if it is overshadowed by a new one.
                IDictionary<Object, PersistentCollectionChangeData> newChangesIdMap = new Dictionary<Object, PersistentCollectionChangeData>();
                foreach (PersistentCollectionChangeData persistentCollectionChangeData in getCollectionChanges()) {
                    newChangesIdMap.Add(
                            getOriginalId(persistentCollectionChangeData),
                            persistentCollectionChangeData);
                }

                // This will be the list with the resulting (merged) changes.
                List<PersistentCollectionChangeData> mergedChanges = new List<PersistentCollectionChangeData>();

                // Including only those original changes, which are not overshadowed by new ones.
                foreach (PersistentCollectionChangeData originalCollectionChangeData in original.getCollectionChanges()) {
                    if (!newChangesIdMap.ContainsKey(getOriginalId(originalCollectionChangeData))) {
                        mergedChanges.Add(originalCollectionChangeData);
                    }
                }

                // Finally adding all of the new changes to the end of the list
                mergedChanges = mergedChanges.Concat(getCollectionChanges()).ToList();

                return new PersistentCollectionChangeWorkUnit(sessionImplementor, EntityName, verCfg, EntityId, mergedChanges,
                        ReferencingPropertyName);
            } else {
                throw new Exception("Trying to merge a " + first + " with a PersistentCollectionChangeWorkUnit. " +
                        "This is not really possible.");
            }
        }
		/// <summary> 
		/// 1. Recreate the collection key -> collection map
		/// 2. Rebuild the collection entries
		/// 3. Call Interceptor.PostFlush()
		/// </summary>
		protected virtual void PostFlush(ISessionImplementor session)
		{
			if (log.IsDebugEnabled)
			{
				log.Debug("post flush");
			}

			IPersistenceContext persistenceContext = session.PersistenceContext;
			persistenceContext.CollectionsByKey.Clear();
			persistenceContext.BatchFetchQueue.ClearSubselects();
			//the database has changed now, so the subselect results need to be invalidated

			// NH Different implementation: in .NET we cannot remove entries from a collection
			// while enumerating it, so we need something to hold the persistent collections to
			// remove, keeping the change as unintrusive as possible
			IDictionary cEntries = persistenceContext.CollectionEntries;
			List<IPersistentCollection> keysToRemove = new List<IPersistentCollection>(cEntries.Count);
			foreach (DictionaryEntry me in cEntries)
			{
				CollectionEntry collectionEntry = (CollectionEntry) me.Value;
				IPersistentCollection persistentCollection = (IPersistentCollection) me.Key;
				collectionEntry.PostFlush(persistentCollection);
				if (collectionEntry.LoadedPersister == null)
				{
					keysToRemove.Add(persistentCollection);
				}
				else
				{
					//otherwise recreate the mapping between the collection and its key
					CollectionKey collectionKey =
						new CollectionKey(collectionEntry.LoadedPersister, collectionEntry.LoadedKey, session.EntityMode);
					persistenceContext.CollectionsByKey[collectionKey] = persistentCollection;
				}
			}
			foreach (IPersistentCollection key in keysToRemove)
			{
				persistenceContext.CollectionEntries.Remove(key);
			}
			session.Interceptor.PostFlush((ICollection) persistenceContext.EntitiesByKey.Values);
		}
		public override IEnumerable GetDeletes(ICollectionPersister persister, bool indexIsFormula)
		{
			IList deletes = new List<object>();
			IList sn = (IList) GetSnapshot();
			int end;
			if (sn.Count > list.Count)
			{
				for (int i = list.Count; i < sn.Count; i++)
				{
					deletes.Add(indexIsFormula ? sn[i] : i);
				}
				end = list.Count;
			}
			else
			{
				end = sn.Count;
			}
			for (int i = 0; i < end; i++)
			{
				if (list[i] == null && sn[i] != null)
				{
					deletes.Add(indexIsFormula ? sn[i] : i);
				}
			}
			return deletes;
		}
		public override IEnumerable GetDeletes(ICollectionPersister persister, bool indexIsFormula)
		{
			IList deletes = new List<object>();
			IDictionary sn = (IDictionary) GetSnapshot();
			foreach (DictionaryEntry e in sn)
			{
				object key = e.Key;
				if (!map.Contains(key))
				{
					deletes.Add(indexIsFormula ? e.Value : key);
				}
			}
			return deletes;
		}
Example #13
        /// <summary>
        /// Initialize the <see cref="ClassValidator"/> type.
        /// </summary>
        /// <param name="clazz">The class whose constraints and members this validator will validate.</param>
        /// <param name="nestedClassValidators">Already-created validators for nested classes, keyed by type; this validator registers itself here so it is not built twice.</param>
        private void InitValidator(System.Type clazz, IDictionary<System.Type, IClassValidator> nestedClassValidators)
        {
            entityValidators = new List<ValidatorDef>();
            membersToValidate = new List<Member>();
            childGetters = new List<MemberInfo>();
            defaultInterpolator = new DefaultMessageInterpolatorAggregator();
            defaultInterpolator.Initialize(messageBundle, defaultMessageBundle, culture);

            //build the class hierarchy to look for members in
            nestedClassValidators.Add(clazz, this);
            HashSet<System.Type> classes = new HashSet<System.Type>();
            AddSuperClassesAndInterfaces(clazz, classes);

            // Create the IClassMapping for each class of the validator
            var classesMaps = new List<IClassMapping>(classes.Count);
            foreach (System.Type type in classes)
            {
                IClassMapping mapping = factory.ClassMappingFactory.GetClassMapping(type, validatorMode);
                if (mapping != null)
                    classesMaps.Add(mapping);
                else
                    log.Warn("Validator not found in mode " + validatorMode + " for class " + clazz.AssemblyQualifiedName);
            }

            //Check on all selected classes
            foreach (IClassMapping map in classesMaps)
            {
                foreach (Attribute classAttribute in map.GetClassAttributes())
                {
                    ValidateClassAtribute(classAttribute);
                }

                foreach (MemberInfo member in map.GetMembers())
                {
                    var memberAttributes = map.GetMemberAttributes(member);
                    CreateMemberAttributes(member, memberAttributes);
                    CreateChildValidator(member, memberAttributes);

                    foreach (Attribute memberAttribute in memberAttributes)
                    {
                        IValidator propertyValidator = CreateOrGetValidator(memberAttribute);

                        if (propertyValidator != null)
                        {
                            var tagable = memberAttribute as ITagableRule;
                            membersToValidate.Add(new Member
                            {
                                ValidatorDef = new ValidatorDef(propertyValidator, tagable != null ? tagable.TagCollection : null),
                                Getter = member
                            });
                        }
                    }
                }
            }
        }
Example #14
		/// <returns>An array of <see cref="SqlType" /> for the parameters of the query.</returns>
		protected SqlType[] GetParameterTypes(QueryParameters parameters, bool addLimit, bool addOffset)
		{
			List<IType> paramTypeList = new List<IType>();
			int span = 0;

			for (int index = 0; index < parameters.PositionalParameterTypes.Length; index++)
			{
				int location = parameters.PositionalParameterLocations[index];
				location = parameters.FindAdjustedParameterLocation(location);
				IType type = parameters.PositionalParameterTypes[index];
				ArrayHelper.SafeSetValue(paramTypeList, location, type);
				span += type.GetColumnSpan(Factory);
			}

			for (int index = 0; index < parameters.FilteredParameterTypes.Count; index++)
			{
				int location = parameters.FilteredParameterLocations[index];
				IType type = parameters.FilteredParameterTypes[index];
				ArrayHelper.SafeSetValue(paramTypeList, location, type);
				span += type.GetColumnSpan(Factory);
			}

			if (parameters.NamedParameters != null && parameters.NamedParameters.Count > 0)
			{
				// convert the named parameters to an array of types
				foreach (KeyValuePair<string, TypedValue> namedParameter in parameters.NamedParameters)
				{
					string name = namedParameter.Key;
					TypedValue typedval = namedParameter.Value;
					int[] locs = GetNamedParameterLocs(name);
					span += typedval.Type.GetColumnSpan(Factory) * locs.Length;

					for (int i = 0; i < locs.Length; i++)
					{
						int location = locs[i];
						location = parameters.FindAdjustedParameterLocation(location);

						// can still clash with positional parameters
						//  could consider throwing an exception to locate problem (NH-1098)
						while ((location < paramTypeList.Count) && (paramTypeList[location] != null))
							location++;

						ArrayHelper.SafeSetValue(paramTypeList, location, typedval.Type);
					}
				}
			}

			if (addLimit && Factory.Dialect.SupportsVariableLimit)
			{
				if (Factory.Dialect.BindLimitParametersFirst)
				{
					paramTypeList.Insert(0, NHibernateUtil.Int32);
					if (addOffset)
					{
						paramTypeList.Insert(0, NHibernateUtil.Int32);
					}
				}
				else
				{
					paramTypeList.Add(NHibernateUtil.Int32);
					if (addOffset)
					{
						paramTypeList.Add(NHibernateUtil.Int32);
					}
				}

				span += addOffset ? 2 : 1;
			}

			return ConvertITypesToSqlTypes(paramTypeList, span);
		}
		public override bool AfterInitialize(ICollectionPersister persister)
		{
			// NH Different behavior : NH-739
			// it would be nice to prevent this overhead, but the add operations are queued in code where the ICollectionPersister is not available
			bool result;
			if (persister.IsOneToMany && HasQueuedOperations)
			{
				int additionStartFrom = bag.Count;
				IList additionQueue = new List<object>(additionStartFrom);
				foreach (object o in QueuedAdditionIterator)
				{
					if (o != null)
					{
						for (int i = 0; i < bag.Count; i++)
						{
							// we are using ReferenceEquals to be sure that it is exactly the same queued instance
							if (ReferenceEquals(o, bag[i]))
							{
								additionQueue.Add(o);
								break;
							}
						}
					}
				}

				result = base.AfterInitialize(persister);

				if(!result)
				{
					// removing duplicated additions
					foreach (object o in additionQueue)
					{
						for (int i = additionStartFrom; i < bag.Count; i++)
						{
							if (ReferenceEquals(o, bag[i]))
							{
								bag.RemoveAt(i);
								break;
							}
						}
					}
				}
			}
			else
			{
				result = base.AfterInitialize(persister);
			}
			return result;
		}
		void IDeserializationCallback.OnDeserialization(object sender)
		{
			log.Debug("Deserialization callback persistent-context");

			// during deserialization, we need to reconnect all proxies and
			// collections to this session, as well as the EntityEntry and
			// CollectionEntry instances; these associations are transient
			// because serialization is used for different things.
			parentsByChild = IdentityMap.Instantiate(InitCollectionSize);

			// OnDeserialization() must be called manually on all Dictionaries and Hashtables,
			// otherwise they are still empty at this point (the .NET deserialization code calls
			// OnDeserialization() on them AFTER it calls the current method).
			entitiesByKey.OnDeserialization(sender);
			entitiesByUniqueKey.OnDeserialization(sender);
			((IDeserializationCallback)entityEntries).OnDeserialization(sender);
			proxiesByKey.OnDeserialization(sender);
			entitySnapshotsByKey.OnDeserialization(sender);
			((IDeserializationCallback)arrayHolders).OnDeserialization(sender);
			((IDeserializationCallback)collectionEntries).OnDeserialization(sender);
			collectionsByKey.OnDeserialization(sender);

			// If nullifiableEntityKeys is once used in the current method, HashedSets will need
			// an OnDeserialization() method.
			//nullifiableEntityKeys.OnDeserialization(sender);

			if (unownedCollections != null)
			{
				unownedCollections.OnDeserialization(sender);
			}

			// TODO NH: "reconnect" EntityKey with session.factory and create a test for serialization of StatefulPersistenceContext
			foreach (DictionaryEntry collectionEntry in collectionEntries)
			{
				try
				{
					((IPersistentCollection)collectionEntry.Key).SetCurrentSession(session);
					CollectionEntry ce = (CollectionEntry)collectionEntry.Value;
					if (ce.Role != null)
					{
						ce.AfterDeserialize(Session.Factory);
					}

				}
				catch (HibernateException he)
				{
					throw new InvalidOperationException(he.Message);
				}
			}

			List<EntityKey> keysToRemove = new List<EntityKey>();
			foreach (KeyValuePair<EntityKey, INHibernateProxy> p in proxiesByKey)
			{
				if (p.Value != null)
				{
					(p.Value).HibernateLazyInitializer.Session = session;
				}
				else
				{
					// the proxy was pruned during the serialization process because the target had been instantiated.
					keysToRemove.Add(p.Key);
				}
			}
			for (int i = 0; i < keysToRemove.Count; i++)
				proxiesByKey.Remove(keysToRemove[i]);

			foreach (EntityEntry e in entityEntries.Values)
			{
				try
				{
					e.Persister = session.Factory.GetEntityPersister(e.EntityName);
				}
				catch (MappingException me)
				{
					throw new InvalidOperationException(me.Message);
				}
			}
		}
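
// A minimal sketch of the manual OnDeserialization chaining described in the comments above:
// a Dictionary<,> field is not yet usable while the containing object's callback runs, because
// .NET invokes the dictionary's own IDeserializationCallback only afterwards, so the owner
// forwards the call explicitly before touching the dictionary. Type and field names here are
// hypothetical.
using System;
using System.Collections.Generic;
using System.Runtime.Serialization;

[Serializable]
class OwnerWithDictionary : IDeserializationCallback
{
	private readonly Dictionary<string, object> entries = new Dictionary<string, object>();

	void IDeserializationCallback.OnDeserialization(object sender)
	{
		// force the dictionary to rebuild its hash buckets now, so it can be enumerated below
		entries.OnDeserialization(sender);

		foreach (KeyValuePair<string, object> pair in entries)
		{
			Console.WriteLine("restored entry: " + pair.Key);
		}
	}
}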
		void IDeserializationCallback.OnDeserialization(object sender)
		{
			log.Debug("Deserialization callback persistent-context");
			// during deserialization, we need to reconnect all proxies and
			// collections to this session, as well as the EntityEntry and
			// CollectionEntry instances; these associations are transient
			// because serialization is used for different things.

			// TODO NH: "reconnect" EntityKey with session.factory and create a test for serialization of StatefulPersistenceContext
			foreach (DictionaryEntry collectionEntry in collectionEntries)
			{
				try
				{
					((IPersistentCollection)collectionEntry.Key).SetCurrentSession(session);
					CollectionEntry ce = (CollectionEntry)collectionEntry.Value;
					if (ce.Role != null)
					{
						ce.AfterDeserialize(Session.Factory);
					}

				}
				catch (HibernateException he)
				{
					throw new InvalidOperationException(he.Message);
				}
			}

			List<EntityKey> keysToRemove = new List<EntityKey>();
			foreach (KeyValuePair<EntityKey, INHibernateProxy> p in proxiesByKey)
			{
				if (p.Value != null)
				{
					(p.Value).HibernateLazyInitializer.Session = session;
				}
				else
				{
					// the proxy was pruned during the serialization process because the target had been instantiated.
					keysToRemove.Add(p.Key);
				}
			}
			for (int i = 0; i < keysToRemove.Count; i++)
				proxiesByKey.Remove(keysToRemove[i]);

			foreach (EntityEntry e in entityEntries.Values)
			{
				try
				{
					e.Persister = session.Factory.GetEntityPersister(e.EntityName);
				}
				catch (MappingException me)
				{
					throw new InvalidOperationException(me.Message);
				}
			}
		}
		public ICollection GetQueuedOrphans(string entityName)
		{
			if (HasQueuedOperations)
			{
				List<object> additions = new List<object>(operationQueue.Count);
				List<object> removals = new List<object>(operationQueue.Count);
				for (int i = 0; i < operationQueue.Count; i++)
				{
					IDelayedOperation op = operationQueue[i];
					if (op.AddedInstance != null)
					{
						additions.Add(op.AddedInstance);
					}
					if (op.Orphan != null)
					{
						removals.Add(op.Orphan);
					}
				}
				return GetOrphans(removals, additions, entityName, session);
			}

			return CollectionHelper.EmptyCollection;
		}
		public override IEnumerable GetDeletes(ICollectionPersister persister, bool indexIsFormula)
		{
			IList deletes = new List<object>();
			Array sn = (Array) GetSnapshot();
			int snSize = sn.Length;
			int arraySize = array.Length;
			int end;
			if (snSize > arraySize)
			{
				for (int i = arraySize; i < snSize; i++)
				{
					deletes.Add(i);
				}
				end = arraySize;
			}
			else
			{
				end = snSize;
			}
			for (int i = 0; i < end; i++)
			{
				if (array.GetValue(i) == null && sn.GetValue(i) != null)
				{
					deletes.Add(i);
				}
			}
			return deletes;
		}
		/// <summary> 
		/// Given a collection of entity instances that used to
		/// belong to the collection, and a collection of instances
		/// that currently belong, return a collection of orphans
		/// </summary>
		protected static ICollection GetOrphans(ICollection oldElements, ICollection currentElements, string entityName,
		                                        ISessionImplementor session)
		{
			// short-circuit(s)
			if (currentElements.Count == 0)
			{
				// no new elements, the old list contains only Orphans
				return oldElements;
			}
			if (oldElements.Count == 0)
			{
				// no old elements, so there can be no orphans either
				return oldElements;
			}

			IType idType = session.Factory.GetEntityPersister(entityName).IdentifierType;

			// create the collection holding the orphans
			List<object> res = new List<object>();

			// collect EntityIdentifier(s) of the *current* elements - add them into a HashSet for fast access
			HashedSet<TypedValue> currentIds = new HashedSet<TypedValue>();
			foreach (object current in currentElements)
			{
				if (current != null && ForeignKeys.IsNotTransient(entityName, current, null, session))
				{
					object currentId = ForeignKeys.GetEntityIdentifierIfNotUnsaved(entityName, current, session);
					currentIds.Add(new TypedValue(idType, currentId, session.EntityMode));
				}
			}

			// iterate over the *old* list
			foreach (object old in oldElements)
			{
				object oldId = ForeignKeys.GetEntityIdentifierIfNotUnsaved(entityName, old, session);
				if (!currentIds.Contains(new TypedValue(idType, oldId, session.EntityMode)))
				{
					res.Add(old);
				}
			}

			return res;
		}
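
// A hedged reduction of the orphan detection above: old elements are orphans when their
// identifier is not among the identifiers of the current elements. Plain HashSet membership
// stands in for the TypedValue/IType.IsEqual comparison, and getId is a placeholder for the
// persister's identifier extraction; all names below are hypothetical.
using System;
using System.Collections.Generic;
using System.Linq;

static class OrphanSketch
{
	public static List<TEntity> FindOrphans<TEntity, TId>(
		IEnumerable<TEntity> oldElements,
		IEnumerable<TEntity> currentElements,
		Func<TEntity, TId> getId)
	{
		// collect the identifiers of the *current* elements for fast lookup
		var currentIds = new HashSet<TId>(currentElements.Select(getId));

		// every old element whose identifier is no longer present is an orphan
		return oldElements.Where(old => !currentIds.Contains(getId(old))).ToList();
	}
}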
Example #21
		private IList DoQuery(ISessionImplementor session, QueryParameters queryParameters, bool returnProxies)
		{
			RowSelection selection = queryParameters.RowSelection;
			int maxRows = HasMaxRows(selection) ? selection.MaxRows : int.MaxValue;

			int entitySpan = EntityPersisters.Length;

			List<object> hydratedObjects = entitySpan == 0 ? null : new List<object>(entitySpan * 10);

			IDbCommand st = PrepareQueryCommand(queryParameters, false, session);

			IDataReader rs = GetResultSet(st, queryParameters.HasAutoDiscoverScalarTypes, queryParameters.Callable, selection,
			                              session);

			// would be great to move all this below here into another method that could also be used
			// from the new scrolling stuff.
			//
			// Would need to change the way the max-row stuff is handled (i.e. put it behind an
			// interface) so that this loop would not need to know when to stop reading rows itself
			LockMode[] lockModeArray = GetLockModes(queryParameters.LockModes);
			EntityKey optionalObjectKey = GetOptionalObjectKey(queryParameters, session);

			bool createSubselects = IsSubselectLoadingEnabled;
			List<EntityKey[]> subselectResultKeys = createSubselects ? new List<EntityKey[]>() : null;
			IList results = new List<object>();

			try
			{
				HandleEmptyCollections(queryParameters.CollectionKeys, rs, session);
				EntityKey[] keys = new EntityKey[entitySpan]; // we can reuse it each time

				if (log.IsDebugEnabled)
				{
					log.Debug("processing result set");
				}

				int count;
				for (count = 0; count < maxRows && rs.Read(); count++)
				{
					if (log.IsDebugEnabled)
					{
						log.Debug("result set row: " + count);
					}

					object result = GetRowFromResultSet(rs, session, queryParameters, lockModeArray, optionalObjectKey, hydratedObjects,
					                                    keys, returnProxies);
					results.Add(result);

					if (createSubselects)
					{
						subselectResultKeys.Add(keys);
						keys = new EntityKey[entitySpan]; //can't reuse in this case
					}
				}

				if (log.IsDebugEnabled)
				{
					log.Debug(string.Format("done processing result set ({0} rows)", count));
				}
			}
			catch (Exception e)
			{
				e.Data["actual-sql-query"] = st.CommandText;
				throw;
			}
			finally
			{
				session.Batcher.CloseCommand(st, rs);
			}

			InitializeEntitiesAndCollections(hydratedObjects, rs, session, queryParameters.ReadOnly);

			if (createSubselects)
			{
				CreateSubselects(subselectResultKeys, queryParameters, session);
			}

			return results;
		}
		public static void IdentityRemove(IList list, object obj, string entityName, ISessionImplementor session)
		{
			if (obj != null && ForeignKeys.IsNotTransient(entityName, obj, null, session))
			{
				IType idType = session.Factory.GetEntityPersister(entityName).IdentifierType;

				object idOfCurrent = ForeignKeys.GetEntityIdentifierIfNotUnsaved(entityName, obj, session);
				List<object> toRemove = new List<object>(list.Count);
				foreach (object current in list)
				{
					if (current == null)
					{
						continue;
					}
					object idOfOld = ForeignKeys.GetEntityIdentifierIfNotUnsaved(entityName, current, session);
					if (idType.IsEqual(idOfCurrent, idOfOld, session.EntityMode, session.Factory))
					{
						toRemove.Add(current);
					}
				}
				foreach (object ro in toRemove)
				{
					list.Remove(ro);
				}
			}
		}
Example #23
		/// <summary>
		/// Generate a sequence of <c>LEFT OUTER JOIN</c> clauses for the given associations.
		/// </summary>
		protected JoinFragment MergeOuterJoins(IList<OuterJoinableAssociation> associations)
		{
			IList<OuterJoinableAssociation> sortedAssociations = new List<OuterJoinableAssociation>();

			var indices = GetTopologicalSortOrder(_dependentAliases);
			for (int index = indices.Length - 1; index >= 0; index--)
			{
				sortedAssociations.Add(associations[indices[index]]);
			}

			JoinFragment outerjoin = Dialect.CreateOuterJoinFragment();

			OuterJoinableAssociation last = null;
			foreach (OuterJoinableAssociation oj in sortedAssociations)
			{
				if (last != null && last.IsManyToManyWith(oj))
				{
					oj.AddManyToManyJoin(outerjoin, (IQueryableCollection) last.Joinable);
				}
				else
				{
					oj.AddJoins(outerjoin);
					// NH Different behavior : NH1179 and NH1293
					// Apply filters in Many-To-One association
					if (enabledFiltersForManyToOne.Count > 0)
					{
						string manyToOneFilterFragment = oj.Joinable.FilterFragment(oj.RHSAlias, enabledFiltersForManyToOne);
						bool joinClauseDoesNotContainsFilterAlready =
							outerjoin.ToFromFragmentString.IndexOfCaseInsensitive(manyToOneFilterFragment) == -1;
						if (joinClauseDoesNotContainsFilterAlready)
						{
							outerjoin.AddCondition(manyToOneFilterFragment);
						}
					}
				}
				last = oj;
			}

			return outerjoin;
		}
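
// The topological sort used by MergeOuterJoins is not shown in this excerpt. Below is a
// minimal sketch of one way GetTopologicalSortOrder could work (Kahn's algorithm) over the
// DependentAlias entries built in AddAssociation; the real method's exact contract, including
// the direction in which the caller consumes the indices, may differ. Cyclic dependencies are
// simply dropped in this sketch.
using System;
using System.Collections.Generic;

class DependentAlias
{
	public string Alias { get; set; }
	public string[] DependsOn { get; set; }
}

static class TopologicalSortSketch
{
	// returns indices ordered so that every alias appears after the aliases it depends on
	public static int[] GetTopologicalSortOrder(IList<DependentAlias> aliases)
	{
		var indexByAlias = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase);
		for (int i = 0; i < aliases.Count; i++)
			indexByAlias[aliases[i].Alias] = i;

		var inDegree = new int[aliases.Count];
		var dependents = new List<int>[aliases.Count];
		for (int i = 0; i < aliases.Count; i++)
			dependents[i] = new List<int>();

		for (int i = 0; i < aliases.Count; i++)
		{
			foreach (string dependency in aliases[i].DependsOn ?? new string[0])
			{
				int j;
				if (indexByAlias.TryGetValue(dependency, out j))
				{
					dependents[j].Add(i); // alias i must come after alias j
					inDegree[i]++;
				}
			}
		}

		var result = new List<int>(aliases.Count);
		var ready = new Queue<int>();
		for (int i = 0; i < aliases.Count; i++)
			if (inDegree[i] == 0) ready.Enqueue(i);

		while (ready.Count > 0)
		{
			int current = ready.Dequeue();
			result.Add(current);
			foreach (int dependent in dependents[current])
				if (--inDegree[dependent] == 0) ready.Enqueue(dependent);
		}
		return result.ToArray();
	}
}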
        public override IAuditWorkUnit Dispatch(IWorkUnitMergeVisitor first)
        {
            var original = first as PersistentCollectionChangeWorkUnit;

            if (original != null)
            {

                // Merging the collection changes in both work units.

                // First building a map from the ids of the collection-entry-entities from the "second" collection changes,
                // to the PCCD objects. That way, we will later be able to check if an "original" collection change
                // should be added, or if it is overshadowed by a new one.
                var newChangesIdMap = new Dictionary<IDictionary<string, object>, PersistentCollectionChangeData>(new DictionaryComparer<string, object>());
                foreach (var persistentCollectionChangeData in CollectionChanges)
                {
                    newChangesIdMap.Add(
                            OriginalId(persistentCollectionChangeData),
                            persistentCollectionChangeData);
                }

                // This will be the list with the resulting (merged) changes.
                var mergedChanges = new List<PersistentCollectionChangeData>();

                // Including only those original changes, which are not overshadowed by new ones.
                foreach (var originalCollectionChangeData in original.CollectionChanges)
                {
                    var originalOriginalId = OriginalId(originalCollectionChangeData);

                    if (!newChangesIdMap.ContainsKey(originalOriginalId))
                    {
                        mergedChanges.Add(originalCollectionChangeData);
                    }
                    else
                    {
                        // If the changes collide, check whether the original change is a DEL and the new one a subsequent ADD.
                        // If so, remove the change altogether.
                        var revTypePropName = VerCfg.AuditEntCfg.RevisionTypePropName;
                        if((RevisionType)newChangesIdMap[originalOriginalId].Data[revTypePropName] == RevisionType.Added &&
                            (RevisionType)originalCollectionChangeData.Data[revTypePropName] == RevisionType.Deleted)
                        {
                            newChangesIdMap.Remove(originalOriginalId);
                        }
                    }
                }

                // Finally adding all of the new changes to the end of the list
                // (the map values may differ from CollectionChanges because of the last operation above)
                mergedChanges = mergedChanges.Concat(newChangesIdMap.Values).ToList();

                return new PersistentCollectionChangeWorkUnit(SessionImplementor, EntityName, VerCfg, EntityId, mergedChanges,
                        referencingPropertyName);
            }
            throw new Exception("Trying to merge a " + first + " with a PersistentCollectionChangeWorkUnit. " +
                                "This is not really possible.");
        }
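
// The merge rule implemented above, reduced to its essence as a stand-alone sketch: newer
// changes are indexed by id, older changes survive only when no newer change targets the same
// id, and the newer changes are appended afterwards. The Added/Deleted cancellation handled
// above is omitted here, and all type and method names below are placeholders.
using System;
using System.Collections.Generic;
using System.Linq;

static class OvershadowMergeSketch
{
	public static List<TChange> Merge<TId, TChange>(
		IList<TChange> olderChanges,
		IList<TChange> newerChanges,
		Func<TChange, TId> idOf)
	{
		// index the newer changes by id so collisions can be detected quickly
		var newerById = newerChanges.ToDictionary(idOf);

		// keep only the older changes that are not overshadowed by a newer one
		var merged = olderChanges
			.Where(change => !newerById.ContainsKey(idOf(change)))
			.ToList();

		merged.AddRange(newerChanges); // new changes go to the end of the merged list
		return merged;
	}
}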