Example #1
 private Set<ShardId> GetShardIdSet()
 {
     Set<ShardId> hashedSet = new HashedSet<ShardId>();
     hashedSet.Add(new ShardId(1));
     hashedSet.Add(new ShardId(2));
     hashedSet.Add(new ShardId(3));
     return hashedSet;
 }
 public ISet<Instrument> GetAllUniqueInstruments()
 {
     ISet<Instrument> instruments = new HashedSet<Instrument>();
     foreach (Transaction transaction in _transactionList)
     {
         instruments.Add(transaction.Instrument);
     }
     return instruments;
 }
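Both methods rely on Iesi set semantics: Add is a no-op when an equal element (by Equals/GetHashCode) is already present, which is why GetAllUniqueInstruments yields each instrument only once. A minimal standalone sketch of that behaviour, using string in place of Instrument and assuming the Iesi.Collections.Generic namespace these snippets build on:

 using System;
 using Iesi.Collections.Generic;

 static class HashedSetDemo
 {
     static void Main()
     {
         // Iesi's ISet<T>.Add returns false, and leaves the set unchanged,
         // when an equal element is already present.
         ISet<string> instruments = new HashedSet<string>();
         Console.WriteLine(instruments.Add("MSFT")); // True  - newly added
         Console.WriteLine(instruments.Add("MSFT")); // False - duplicate ignored
         Console.WriteLine(instruments.Count);       // 1
     }
 }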
Example #3
        /// <summary>
        /// Constructor for creating a new radiology report for the specified procedure.
        /// </summary>
        /// <param name="procedure">The procedure being reported.</param>
        public Report(Procedure procedure)
        {
            _procedures = new HashedSet<Procedure>();
            _parts = new List<ReportPart>();

            _procedures.Add(procedure);
            procedure.Reports.Add(this);

            // create the main report part
            ReportPart mainReport = new ReportPart(this, 0);
            _parts.Add(mainReport);
        }
		public virtual void OnMerge(MergeEvent @event)
		{
			EventCache copyCache = new EventCache();
			
			OnMerge(@event, copyCache);
			
			// TODO: iteratively get transient entities and retry merge until one of the following conditions:
			//   1) transientCopyCache.size() == 0
			//   2) transientCopyCache.size() is not decreasing and copyCache.size() is not increasing
			
			// TODO: find out if retrying can add entities to copyCache (don't think it can...)
			// For now, just retry once; throw TransientObjectException if there are still any transient entities
			
			IDictionary transientCopyCache = this.GetTransientCopyCache(@event, copyCache);
			
			if (transientCopyCache.Count > 0)
			{
				RetryMergeTransientEntities(@event, transientCopyCache, copyCache);
				
				// find any entities that are still transient after retry
				transientCopyCache = this.GetTransientCopyCache(@event, copyCache);
				
				if (transientCopyCache.Count > 0)
				{
					ISet<string> transientEntityNames = new HashedSet<string>();
					
					foreach (object transientEntity in transientCopyCache.Keys)
					{
						string transientEntityName = @event.Session.GuessEntityName(transientEntity);
						
						transientEntityNames.Add(transientEntityName);
						
						log.InfoFormat(
							"transient instance could not be processed by merge: {0} [{1}]",
							transientEntityName,
							transientEntity.ToString());
					}
					
					throw new TransientObjectException("one or more objects is an unsaved transient instance - save transient instance(s) before merging: " + transientEntityNames);
				}
			}

			copyCache.Clear();
			copyCache = null;
		}
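The TODO above sketches a stronger contract than the single retry that follows it: keep retrying until no transient entities remain, or until neither cache is making progress. A hedged sketch of that loop, reusing the method's own helpers (GetTransientCopyCache, RetryMergeTransientEntities); the loop structure is an assumption, not NHibernate's shipped code:

		// Sketch: retry until 1) no transient entities remain, or
		// 2) transientCopyCache stops shrinking and copyCache stops growing.
		IDictionary transientCopyCache = GetTransientCopyCache(@event, copyCache);
		while (transientCopyCache.Count > 0)
		{
			int transientBefore = transientCopyCache.Count;
			int copiedBefore = copyCache.Count;

			RetryMergeTransientEntities(@event, transientCopyCache, copyCache);
			transientCopyCache = GetTransientCopyCache(@event, copyCache);

			if (transientCopyCache.Count >= transientBefore && copyCache.Count <= copiedBefore)
			{
				// no progress: whatever is left is genuinely transient
				throw new TransientObjectException(
					"save transient instance(s) before merging - merge made no further progress");
			}
		}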
Example #5
        public DocumentBuilder(DocumentMapping classMapping, Analyzer defaultAnalyzer, IDirectoryProvider[] directoryProviders,
                               IIndexShardingStrategy shardingStrategy)
        {
            analyzer = new ScopedAnalyzer();
            this.directoryProviders = directoryProviders;
            this.shardingStrategy = shardingStrategy;

            if (classMapping == null) throw new AssertionFailure("Unable to build a DocumentBuilder with a null class");

            rootClassMapping = classMapping;

            Set<System.Type> processedClasses = new HashedSet<System.Type>();
            processedClasses.Add(classMapping.MappedClass);
            CollectAnalyzers(rootClassMapping, defaultAnalyzer, true, string.Empty, processedClasses);
            //processedClasses.remove( clazz ); for the sake of completeness
            analyzer.GlobalAnalyzer = defaultAnalyzer;
            if (idMapping == null)
                throw new SearchException("No document id for: " + classMapping.MappedClass.Name);
        }
		public IList TransformList(IList list)
		{
			IList result = (IList)Activator.CreateInstance(list.GetType());
			ISet<Identity> distinct = new HashedSet<Identity>();

			for (int i = 0; i < list.Count; i++)
			{
				object entity = list[i];
				if (distinct.Add(new Identity(entity)))
				{
					result.Add(entity);
				}
			}

			if (log.IsDebugEnabled)
			{
				log.Debug(string.Format("transformed: {0} rows to: {1} distinct results",
				                        list.Count, result.Count));
			}
			return result;
		}
        public DocumentBuilder(System.Type clazz, Analyzer defaultAnalyzer, IDirectoryProvider[] directoryProviders,
            IIndexShardingStrategy shardingStrategy)
        {
            analyzer = new ScopedAnalyzer();
            beanClass = clazz;
            this.directoryProviders = directoryProviders;
            this.shardingStrategy = shardingStrategy;

            if (clazz == null) throw new AssertionFailure("Unable to build a DocumentBuilder with a null class");

            rootPropertiesMetadata = new PropertiesMetadata();
            rootPropertiesMetadata.boost = GetBoost(clazz);
            rootPropertiesMetadata.analyzer = defaultAnalyzer;

            Set<System.Type> processedClasses = new HashedSet<System.Type>();
            processedClasses.Add(clazz);
            InitializeMembers(clazz, rootPropertiesMetadata, true, string.Empty, processedClasses);
            //processedClasses.remove( clazz ); for the sake of completeness
            analyzer.GlobalAnalyzer = rootPropertiesMetadata.analyzer;
            if (idKeywordName == null)
                throw new SearchException("No document id for: " + clazz.Name);
        }
		/// <summary> 
		/// Given a collection of entity instances that used to
		/// belong to the collection, and a collection of instances
		/// that currently belong, return a collection of orphans
		/// </summary>
		protected static ICollection GetOrphans(ICollection oldElements, ICollection currentElements, string entityName,
		                                        ISessionImplementor session)
		{
			// short-circuit(s)
			if (currentElements.Count == 0)
			{
				// no new elements; the old list contains only orphans
				return oldElements;
			}
			if (oldElements.Count == 0)
			{
				// no old elements, so there are no orphans either
				return oldElements;
			}

			IType idType = session.Factory.GetEntityPersister(entityName).IdentifierType;

			// create the collection holding the orphans
			List<object> res = new List<object>();

			// collect EntityIdentifier(s) of the *current* elements - add them into a HashSet for fast access
			HashedSet<TypedValue> currentIds = new HashedSet<TypedValue>();
			foreach (object current in currentElements)
			{
				if (current != null && ForeignKeys.IsNotTransient(entityName, current, null, session))
				{
					object currentId = ForeignKeys.GetEntityIdentifierIfNotUnsaved(entityName, current, session);
					currentIds.Add(new TypedValue(idType, currentId, session.EntityMode));
				}
			}

			// iterate over the *old* list
			foreach (object old in oldElements)
			{
				object oldId = ForeignKeys.GetEntityIdentifierIfNotUnsaved(entityName, old, session);
				if (!currentIds.Contains(new TypedValue(idType, oldId, session.EntityMode)))
				{
					res.Add(old);
				}
			}

			return res;
		}
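Stripped of the session and identifier-type machinery, GetOrphans is a set difference keyed by entity identifier: hash the current identifiers, then keep every old element whose identifier is absent. A minimal generic sketch of the same idea, with a key selector standing in for ForeignKeys.GetEntityIdentifierIfNotUnsaved:

		using System;
		using System.Collections.Generic;
		using System.Linq;

		static class OrphanHelper
		{
			// Old elements whose key does not occur among the current elements.
			public static List<T> GetOrphans<T, TKey>(
				IEnumerable<T> oldElements, IEnumerable<T> currentElements, Func<T, TKey> keyOf)
			{
				var currentIds = new HashSet<TKey>(currentElements.Select(keyOf));
				return oldElements.Where(e => !currentIds.Contains(keyOf(e))).ToList();
			}
		}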
		public IRStatistics Evaluate(RecommenderBuilder recommenderBuilder,
		                             DataModel dataModel,
		                             int at,
		                             double relevanceThreshold,
		                             double evaluationPercentage) 
		{

			if (recommenderBuilder == null) {
				throw new ArgumentNullException("recommenderBuilder is null");
			}
			if (dataModel == null) {
				throw new ArgumentNullException("dataModel is null");
			}
			if (at < 1) {
				throw new ArgumentException("at must be at least 1");
			}
			if (double.IsNaN(evaluationPercentage) || evaluationPercentage <= 0.0 || evaluationPercentage > 1.0) 
			{
				throw new ArgumentException("Invalid evaluationPercentage: " + evaluationPercentage);
			}
			if (double.IsNaN(relevanceThreshold)) {
				throw new ArgumentException("Invalid relevanceThreshold: " + evaluationPercentage);
			}

			RunningAverage precision = new FullRunningAverage();
			RunningAverage recall = new FullRunningAverage();
			foreach (User user in dataModel.GetUsers()) 
			{
				Object id = user.ID;
				if (random.NextDouble() < evaluationPercentage) 
				{
					ICollection<Item> relevantItems = new HashedSet<Item>(/* at */);
                    Preference[] prefs = user.GetPreferencesAsArray();

					foreach (Preference pref in prefs) 
					{
						if (pref.Value >= relevanceThreshold) 
                        {
							relevantItems.Add(pref.Item);
						}
					}
					int numRelevantItems = relevantItems.Count;
					if (numRelevantItems > 0) 
                    {
						ICollection<User> trainingUsers = new List<User>(dataModel.GetNumUsers());
						foreach (User user2 in dataModel.GetUsers()) 
                        {
							if (id.Equals(user2.ID)) 
							{
								ICollection<Preference> trainingPrefs = new List<Preference>();
                                prefs = user2.GetPreferencesAsArray();
								foreach (Preference pref in prefs) 
								{
									if (!relevantItems.Contains(pref.Item)) 
									{
										trainingPrefs.Add(pref);
									}
								}
								if (trainingPrefs.Count > 0) 
								{
									User trainingUser = new GenericUser<String>(id.ToString(), trainingPrefs);
									trainingUsers.Add(trainingUser);
								}
							} 
                            else 
                            {
								trainingUsers.Add(user2);
							}

						}
						DataModel trainingModel = new GenericDataModel(trainingUsers);
						Recommender recommender = recommenderBuilder.BuildRecommender(trainingModel);

						try 
						{
							trainingModel.GetUser(id);
						} 
                        catch (NoSuchElementException) 
                        {
							continue; // Oops we excluded all prefs for the user -- just move on
						}
						
						int intersectionSize = 0;
						foreach (RecommendedItem recommendedItem in recommender.Recommend(id, at)) 
						{
							if (relevantItems.Contains(recommendedItem.Item)) 
							{
								intersectionSize++;
							}
						}
						precision.AddDatum((double) intersectionSize / (double) at);
						recall.AddDatum((double) intersectionSize / (double) numRelevantItems);					
					}
				}
			}

			return new IRStatisticsImpl(precision.Average, recall.Average);
		}
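The statistics follow the standard information-retrieval definitions at a cutoff: precision@N = hits / N and recall = hits / |relevant|. A quick worked check of the arithmetic used above:

			// With at = 5 recommendations, 8 relevant items, and 3 recommended
			// items that turn out to be relevant:
			int at = 5, numRelevantItems = 8, intersectionSize = 3;
			double precision = (double) intersectionSize / at;            // 3/5 = 0.6
			double recall = (double) intersectionSize / numRelevantItems; // 3/8 = 0.375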
		private IDictionary<string, string[]> BindPropertyResults(string alias, HbmReturnDiscriminator discriminatorSchema,
			HbmReturnProperty[] returnProperties, PersistentClass pc)
		{
			Dictionary<string, string[]> propertyresults = new Dictionary<string, string[]>();
			// a dedicated SQL property-result type might be cleaner, but a plain dictionary is exactly what is required at the moment

			if (discriminatorSchema != null)
			{
				propertyresults["class"] = GetResultColumns(discriminatorSchema).ToArray();
			}

			List<HbmReturnProperty> properties = new List<HbmReturnProperty>();
			List<string> propertyNames = new List<string>();

			foreach (HbmReturnProperty returnPropertySchema in returnProperties ?? new HbmReturnProperty[0])
			{
				string name = returnPropertySchema.name;
				if (pc == null || name.IndexOf('.') == -1)
				{
					// not dotted (or no persistent class to resolve against):
					// a regular property
					properties.Add(returnPropertySchema);
					propertyNames.Add(name);
				}
				else
				{
					// Reorder properties
					// 1. get the parent property
					// 2. list all the properties following the expected one in the parent property
					// 3. calculate the lowest index and insert the property

					int dotIndex = name.LastIndexOf('.');
					string reducedName = name.Substring(0, dotIndex);
					IValue value = pc.GetRecursiveProperty(reducedName).Value;
					IEnumerable<Mapping.Property> parentPropIter;
					if (value is Component)
					{
						Component comp = (Component) value;
						parentPropIter = comp.PropertyIterator;
					}
					else if (value is ToOne)
					{
						ToOne toOne = (ToOne) value;
						PersistentClass referencedPc = mappings.GetClass(toOne.ReferencedEntityName);
						if (toOne.ReferencedPropertyName != null)
							try
							{
								parentPropIter =
									((Component) referencedPc.GetRecursiveProperty(toOne.ReferencedPropertyName).Value).PropertyIterator;
							}
							catch (InvalidCastException e)
							{
								throw new MappingException("dotted notation reference neither a component nor a many/one to one", e);
							}
						else
							try
							{
								parentPropIter = ((Component) referencedPc.IdentifierProperty.Value).PropertyIterator;
							}
							catch (InvalidCastException e)
							{
								throw new MappingException("dotted notation reference neither a component nor a many/one to one", e);
							}
					}
					else
						throw new MappingException("dotted notation reference neither a component nor a many/one to one");
					bool hasFollowers = false;
					List<string> followers = new List<string>();
					foreach (Mapping.Property prop in parentPropIter)
					{
						string currentPropertyName = prop.Name;
						string currentName = reducedName + '.' + currentPropertyName;
						if (hasFollowers)
							followers.Add(currentName);
						if (name.Equals(currentName))
							hasFollowers = true;
					}

					int index = propertyNames.Count;
					int followersSize = followers.Count;
					for (int loop = 0; loop < followersSize; loop++)
					{
						string follower = followers[loop];
						int currentIndex = GetIndexOfFirstMatchingProperty(propertyNames, follower);
						index = currentIndex != -1 && currentIndex < index ? currentIndex : index;
					}
					propertyNames.Insert(index, name);
					properties.Insert(index, returnPropertySchema);
				}
			}

			ISet<string> uniqueReturnProperty = new HashedSet<string>();
			foreach (HbmReturnProperty returnPropertySchema in properties)
			{
				string name = returnPropertySchema.name;
				if ("class".Equals(name))
					throw new MappingException(
						"class is not a valid property name to use in a <return-property>, use <return-discriminator> instead"
						);
				//TODO: validate existence of a property with the chosen name (second pass)
				List<string> allResultColumns = GetResultColumns(returnPropertySchema);

				if (allResultColumns.Count == 0)
					throw new MappingException(
						"return-property for alias " + alias +
							" must specify at least one column or return-column name"
						);
				if (uniqueReturnProperty.Contains(name))
					throw new MappingException(
						"duplicate return-property for property " + name +
							" on alias " + alias
						);
				uniqueReturnProperty.Add(name);

				// the issue here is that for <return-join/> representing an entity collection,
				// the collection element values (the property values of the associated entity)
				// are represented as 'element.{propertyname}'.  Thus the StringHelper.root()
				// here puts everything under 'element' (which additionally has significant
				// meaning).  Probably what we need to do is to something like this instead:
				//      String root = StringHelper.root( name );
				//      String key = root; // by default
				//      if ( !root.equals( name ) ) {
				//	        // we had a dot
				//          if ( !root.equals( alias ) {
				//              // the root does not apply to the specific alias
				//              if ( "elements".equals( root ) {
				//                  // we specifically have a <return-join/> representing an entity collection
				//                  // and this <return-property/> is one of that entity's properties
				//                  key = name;
				//              }
				//          }
				//      }
				// but I am not clear enough on the intended purpose of this code block, especially
				// in relation to the "Reorder properties" code block above... 
				//			String key = StringHelper.root( name );
				string key = name;
				string[] intermediateResults;
				if (!propertyresults.TryGetValue(key,out intermediateResults))
					propertyresults[key] = allResultColumns.ToArray();
				else
					ArrayHelper.AddAll(intermediateResults, allResultColumns); // TODO: intermediateResults not used after this
			}

			Dictionary<string, string[]> newPropertyResults = new Dictionary<string, string[]>();

			foreach (KeyValuePair<string, string[]> entry in propertyresults)
			{
				newPropertyResults[entry.Key] = entry.Value;
			}
			return newPropertyResults.Count == 0 ? (IDictionary<string, string[]>)new CollectionHelper.EmptyMapClass<string, string[]>() : newPropertyResults;
		}
Example #11
 /// <summary>
 /// Compute depths for all dirEdges via breadth-first traversal of nodes in graph.
 /// </summary>
 /// <param name="startEdge">Edge to start processing with.</param>
 // <FIX> MD - use iteration & queue rather than recursion, for speed and robustness
 private void ComputeDepths(DirectedEdge startEdge)
 {
     ISet nodesVisited = new HashedSet();
     Queue nodeQueue = new Queue();
     Node startNode = startEdge.Node;
     nodeQueue.Enqueue(startNode);
     nodesVisited.Add(startNode);
     startEdge.Visited = true;
     while (nodeQueue.Count != 0)
     {
         Node n = (Node) nodeQueue.Dequeue();
         nodesVisited.Add(n);
         // compute depths around node, starting at this edge since it has depths assigned
         ComputeNodeDepth(n);
         // add all adjacent nodes to process queue, unless the node has been visited already
         IEnumerator i = ((DirectedEdgeStar)n.Edges).GetEnumerator();
         while (i.MoveNext())
         {
             DirectedEdge de = (DirectedEdge) i.Current;
             DirectedEdge sym = de.Sym;
             if (sym.IsVisited) continue;
             Node adjNode = sym.Node;
             if (!(nodesVisited.Contains(adjNode)))
             {
                 nodeQueue.Enqueue(adjNode);
                 nodesVisited.Add(adjNode);
             }
         }
     }
 }
		public void DefaultModifiableWithQueryForEntity()
		{
			Container cOrig = CreateContainer();
			ISet<object> expectedInitializedObjects =
					new HashedSet<object>(
						new object[]
						{
							cOrig,
							//cOrig.NoProxyInfo,
							cOrig.ProxyInfo,
							cOrig.NonLazyInfo,
							//cOrig.NoProxyOwner,
							cOrig.ProxyOwner,
							cOrig.NonLazyOwner,
							cOrig.LazyDataPoints.First(),
							cOrig.NonLazyJoinDataPoints.First(),
							cOrig.NonLazySelectDataPoints.First()
						});

			ISet<object> expectedReadOnlyObjects = new HashedSet<object>();
	
			ISession s = OpenSession();
			Assert.That(s.DefaultReadOnly, Is.False);
			ITransaction t = s.BeginTransaction();
			s.Save(cOrig);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			s.DefaultReadOnly = true;
			Assert.That(s.DefaultReadOnly, Is.True);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			t.Commit();
			s.Close();
	
			s = OpenSession();
			t = s.BeginTransaction();
			Assert.That(s.DefaultReadOnly, Is.False);
			Container c = s.CreateQuery("from Container where id=" + cOrig.Id).UniqueResult<Container>();
			expectedInitializedObjects =
					new HashedSet<object>(
						new object[]
						{
							c,
							c.NonLazyInfo,
							//c.NoProxyOwner,
							c.ProxyOwner,
							c.NonLazyOwner,
							c.NonLazyJoinDataPoints.First(),
							c.NonLazySelectDataPoints.First()
						});

			expectedReadOnlyObjects = new HashedSet<object>();
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
//			Assert.That(NHibernateUtil.IsInitialized(c.NoProxyInfo), Is.False);
//			NHibernateUtil.Initialize(c.NoProxyInfo);
//			expectedInitializedObjects.Add(c.NoProxyInfo);
//			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			Assert.That(NHibernateUtil.IsInitialized(c.ProxyInfo), Is.False);
			NHibernateUtil.Initialize(c.ProxyInfo);
			expectedInitializedObjects.Add(c.ProxyInfo);
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			Assert.That(NHibernateUtil.IsInitialized(c.LazyDataPoints), Is.False);
			NHibernateUtil.Initialize(c.LazyDataPoints);
			expectedInitializedObjects.Add(c.LazyDataPoints.First());
			//expectedReadOnlyObjects.Add(c.LazyDataPoints.First());
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			t.Commit();
			s.Close();
			s = OpenSession();
			t = s.BeginTransaction();
			s.CreateQuery("delete from DataPoint").ExecuteUpdate();
			s.CreateQuery("delete from Container").ExecuteUpdate();
			s.CreateQuery("delete from Info").ExecuteUpdate();
			s.CreateQuery("delete from Owner").ExecuteUpdate();
			t.Commit();
			s.Close();
		}
Example #13
 private void CheckPropertyDuplication()
 {
     HashedSet<string> names = new HashedSet<string>();
     foreach (Property prop in PropertyIterator)
     {
         if (!names.Add(prop.Name))
             throw new MappingException("Duplicate property mapping of " + prop.Name + " found in " + EntityName);
     }
 }
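The duplicate check leans on the set's Add returning false for an element that is already present, which avoids a separate Contains lookup. The same idiom carries over unchanged to the BCL's HashSet<T>; a small sketch with a hypothetical list of property names:

 using System;
 using System.Collections.Generic;

 static class DuplicateCheck
 {
     public static void EnsureUnique(IEnumerable<string> propertyNames)
     {
         var names = new HashSet<string>();
         foreach (string name in propertyNames)
         {
             if (!names.Add(name)) // Add returns false on a duplicate
                 throw new InvalidOperationException("Duplicate property mapping of " + name);
         }
     }
 }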
		/// <summary>
		/// SQL UPDATE query
		/// <example>
		/// Example:
		/// <code>
		/// using  HyperNetDatabase.R2;
		/// ...
		/// Database  db  =  new  Database();
		/// db.Open("file.hnd");  //  creates  or  opens  database
		/// ...
		/// string StockName = "peppers";
		/// DataTable  result  =  db.Update("Stock", 
		/// new object[,]{ {"NAME",StockName}, {"QTY",0.5m} },
		/// new object[,]{ {"NAME","=","pepperoni"} } 
		/// );
		/// ...
		/// </code>
		/// Is the same as: UPDATE Stock SET NAME=@StockName, QTY=0.5 WHERE NAME='pepperoni'
		/// </example>	
		/// <code>
		/// 
		/// </code>
		/// <include file='../XML_DOC.xml' path='REPEATED_COMMENTS/WHERE_SINTAX/*' />	
		/// <code>
		/// 
		/// </code>
		/// <include file='../XML_DOC.xml' path='REPEATED_COMMENTS/SET_EXPRESSION/*' />	
		/// </summary>
		/// <param name="From_TableName"></param>
		/// <param name="Set">SET expression</param>
		/// <param name="Where_NameCondValue">WHERE expresion</param>
		/// <returns></returns>
		public void Update(string From_TableName, object[,] Set, object[,] Where_NameCondValue)
		{

				lock(this.TableBlocking)
				{
					this.RaiseExceptionIfClosed();
					string TableName=From_TableName;
					QueryCacheDestroy(TableName);


					// Build table fields without deletion field
					Hashtable htTableFields = new Hashtable(); 
					if(true)
					{
						Field[] flds = GetFields(TableName);
						foreach(Field f in flds) if(f.Name!=Database.DeletedFieldName) htTableFields[f.Name]=f;
					}

					// Check 'Set'
					Hashtable SetFields = new Hashtable();
					for(int n=0;n<Set.GetLength(0);n++)
					{
						string fname = Set[n,0].ToString();
						if(!htTableFields.ContainsKey(fname))
							throw new Exception("Field '"+fname+"' do not exist in this table.");
						if(SetFields.ContainsKey(fname))
							throw new Exception("Field '"+fname+"' is repeated in the set clause of this update command.");
						Field f = (htTableFields[fname] as Field);
						object val = Set[n,1];
						Variant v = Variant.Object2Variant( val, f.type );
						object obj = v.obj;
						SetFields[fname]=obj;
					}

					// Check constraints
					foreach(string fname in SetFields.Keys)
					{ 
						Field f = htTableFields[fname] as Field;
						if(f.bIndexed&&f.bUnique)
						{
							Index ndx = this.GetIndex(TableName,f);
							if(ndx.ExistsKey( SetFields[fname] ))
								throw new Exception("Insert violates '"+f.Name+"' field for table '"+TableName+"'.");
						}
					}

					// Get the rowids
					Set ROWIDS;
					ExecuteWhere(From_TableName,Where_NameCondValue,out ROWIDS);

					// Set fields
					if(ROWIDS==null) throw new Exception("WHERE condition returned a null row set.");
					
					// Data set
					int tid = (int)TableName2TID[TableName];
					TableNameDef tnd = TID2Def[tid] as TableNameDef;

					// Create roll-back to prevent blackouts

					// make log file of old data
					string lfn = DatabaseFilePath+".hlg";
					FileStream lf;
					try
					{
						lf = new FileStream(lfn,FileMode.Create,FileAccess.Write,FileShare.None);
					}
					catch
					{
						throw new Exception("Insufficient disk space.");
					}
					BinaryWriter lfw=null;
					try
					{
						lfw = new BinaryWriter(lf,System.Text.Encoding.Unicode);
						lfw.Write( (bool)false ); // not valid
						lfw.Flush();
						lfw.Write( (byte)1 ); // overwrite pages operation 
						Set PAGES = new HashedSet();
						foreach(string name in SetFields.Keys)
						{
							Field f = htTableFields[name] as Field;
							int valSize = (int)f.DataSize();
							long Capacity = (PageSize-ContentPageDataOffset)/valSize;
							ArrayList pages = f.DataFID;
							foreach(long rowid in ROWIDS)
							{
								long npage = rowid / Capacity;
								int page = (int)pages[(int)npage];
								PAGES.Add( page );
							}
						}
						lfw.Write( (int)PAGES.Count ); // num of pages involved
						foreach(int page in PAGES)
						{
							br.BaseStream.Position = (page*PageSize);
							byte[] buf = br.ReadBytes( Database.PageSize );
							lfw.Write( (int)page ); // page id
							lfw.Write( buf ); // page
						}
						lfw.Flush();
						try
						{
							lfw.BaseStream.Position=0;
							lfw.Write( (bool)true ); // valid
							lfw.Flush();
						}
						catch
						{
							// aborting log file
							try
							{
								lfw.BaseStream.SetLength(0);
							}
							catch
							{
							}
							throw;
						}
					}
					catch
					{
						try
						{
							if(lfw!=null)
								lfw.Close();
						}
						catch
						{
						}
						throw new Exception("Error while writing rollback, update operation cancelled. (Insufficient disk space?)");
					}

					// Do the changes
					foreach(string name in SetFields.Keys)
					{	
						Field f = htTableFields[name] as Field;
						int valSize = (int)f.DataSize();
						long Capacity = (PageSize-ContentPageDataOffset)/valSize;
						ArrayList pages = f.DataFID;
						if((pages.Count*Capacity)<tnd.rownum)
							throw new Exception("Row num corrupted.");


						foreach(long rowid in ROWIDS)
						{
							object oldkey;
							try
							{
								long npage = rowid / Capacity;
								long offset = rowid % Capacity;
								int page = (int)pages[(int)npage];
								//br.BaseStream.Position = (page*PageSize)+ContentPageDataOffset+offset*valSize;
								//oldkey = f.ReadData(br);
								oldkey = f.ReadData( this.PageReader(page,ContentPageDataOffset+offset*valSize) );
								br.BaseStream.Position = (page*PageSize)+ContentPageDataOffset+offset*valSize;
								Variant v = Variant.Object2Variant( SetFields[f.Name], f.type);
								f.WriteData(bw,v,false); // CAUTION
								this.InvalidatePage(page);
							}
							catch(Exception ex)
							{
								this.Close();
								this.LogToFile(ex.Message,ex.StackTrace);
								throw new Exception("Fatal error while writting data into database.");
							}

							if(f.bIndexed)
							{
								Index ndx;
								try
								{
									ndx = GetIndex(TableName,f);
								}
								catch(Exception ex)
								{
									this.Close();
									this.LogToFile(ex.Message,ex.StackTrace);
									throw new Exception("Fatal error while reading index database.");
								}
								object key = SetFields[f.Name];
								try
								{	
									if((key as IComparable).CompareTo(oldkey as IComparable)!=0)
									{
										//if(f.type==FieldType.ftDateTime)
											ndx.RemoveByRowid(rowid);
										//else
											ndx.Add(key,rowid,f.Name);
									}
								}
								catch(Exception ex)
								{
									this.Close();
									this.LogToFile(ex.Message,ex.StackTrace);
									throw new Exception("Fatal error while changing key. Table:"+TableName+", Field:"+f.Name+", OldKey:"+oldkey.ToString()+", NewKey:"+key.ToString()+".");
								}
							}
						}
						bw.BaseStream.Flush();
					}

					// clear log
					lfw.BaseStream.SetLength(0);
					lfw.Flush();
					lfw.Close();
				}

		}
Example #15
        public void PostInitialize(ISet<System.Type> indexedClasses)
        {
            // this method does not require synchronization
            Type plainClass = rootClassMapping.MappedClass;
            ISet<Type> tempMappedSubclasses = new HashedSet<System.Type>();

            // together with the caller this is O(n^2), but I think it's still faster than building the class hierarchy upward for each class
            foreach (Type currentClass in indexedClasses)
            {
                if (plainClass.IsAssignableFrom(currentClass))
                {
                    tempMappedSubclasses.Add(currentClass);
                }
            }

            mappedSubclasses = tempMappedSubclasses;
        }
        private static void InitialiseBuiltIn()
        {
            HashedSet<string> builtin;

            if (_builtin == null)
            {
                lock (_synLock)
                {
                    if (_builtin == null)
                    {
                        builtin = new HashedSet<string>();
                        builtin.Add("NaN");
                        builtin.Add("top");

                        _builtin = builtin;
                    }
                }
            }
        }
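InitialiseBuiltIn is a hand-rolled double-checked lock; for that pattern to be reliable the _builtin field should be volatile, so the assignment cannot become visible before the set is fully built. A hedged alternative sketch using Lazy<T>, whose default ExecutionAndPublication mode gives the same initialize-once guarantee without explicit locking (the field name is carried over; the BCL HashSet stands in for HashedSet):

        using System;
        using System.Collections.Generic;

        static class BuiltIns
        {
            // Lazy<T> runs the factory at most once, even under concurrent access.
            private static readonly Lazy<HashSet<string>> _builtin =
                new Lazy<HashSet<string>>(() => new HashSet<string> { "NaN", "top" });

            public static ISet<string> Builtin
            {
                get { return _builtin.Value; }
            }
        }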
        private void saveCoures()
        {
            try
            {
                string ProfessionID = context.Request.Form.Get("Profession");
                string FacultyID = context.Request.Form.Get("Faculty");

                DepartmentService ds = new DepartmentService();
                Profession profession = ds.getProfessionByID(ProfessionID);
                Faculty faculty = ds.getFacultyByID(FacultyID);
                if (profession != null && faculty != null)
                {
                    ISet<Coures> couresSet = new HashedSet<Coures>();
                    string[] couresArr = context.Request.Form.GetValues("Coures");
                    CouresService cs = new CouresService();
                    foreach (string c in couresArr)
                    {
                        Coures coures = cs.getCouresByID(c);
                        if (coures != null)
                            couresSet.Add(coures);
                    }

                    ExamPlan p = new ExamPlan();
                    setValue(p, context);
                    p.Profession = profession;
                    p.Faculty = faculty;
                    p.CouresSet = couresSet;
                    PlanService ps = new PlanService();
                    ps.save(p);
                    context.Response.Write("1");
                }
            }
            catch (Exception)
            {
                context.Response.Write("0");
            }
        }
		public void ExistingModifiableAfterSetSessionReadOnly()
		{
			Container cOrig = CreateContainer();

			ISet<object> expectedInitializedObjects =
					new HashedSet<object>(
						new object[]
						{
							cOrig,
							//cOrig.NoProxyInfo,
							cOrig.ProxyInfo,
							cOrig.NonLazyInfo,
							//cOrig.NoProxyOwner,
							cOrig.ProxyOwner,
							cOrig.NonLazyOwner,
							cOrig.LazyDataPoints.First(),
							cOrig.NonLazyJoinDataPoints.First(),
							cOrig.NonLazySelectDataPoints.First()
						});

			ISet<object> expectedReadOnlyObjects = new HashedSet<object>();
	
			ISession s = OpenSession();
			Assert.That(s.DefaultReadOnly, Is.False);
			ITransaction t = s.BeginTransaction();
			s.Save(cOrig);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			s.DefaultReadOnly = true;
			Assert.That(s.DefaultReadOnly, Is.True);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			t.Commit();
	
			t = s.BeginTransaction();
			Assert.That(s.DefaultReadOnly, Is.True);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			Container c = s.Load<Container>(cOrig.Id);
			Assert.That(cOrig, Is.SameAs(c));
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			c = s.Get<Container>(cOrig.Id);
			Assert.That(cOrig, Is.SameAs(c));
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			s.Refresh(cOrig);
			Assert.That(cOrig, Is.SameAs(c));
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			
			// NH-specific: The following line is required to evict DataPoint(Id=1) from the Container.LazyDataPoint collection.
			// This behaviour would seem to be necessary 'by design', as a comment in EvictCascadingAction states, "evicts don't
			// cascade to uninitialized collections".
			// If LazyDataPoint(Id=1) is not evicted, it has a status of Loaded, not ReadOnly, and causes the test to fail further
			// down.
			// Another way to get this test to pass is s.Clear().
			NHibernateUtil.Initialize(cOrig.LazyDataPoints);
			
			s.Evict(cOrig);

			c = s.Get<Container>(cOrig.Id);
			Assert.That(cOrig, Is.Not.SameAs(c));
		
			expectedInitializedObjects =
					new HashedSet<object>(
						new object[]
						{
							c,
							c.NonLazyInfo,
							//c.NoProxyOwner,
							c.ProxyOwner,
							c.NonLazyOwner,
							c.NonLazyJoinDataPoints.First(),
							c.NonLazySelectDataPoints.First()
						});
			
			expectedReadOnlyObjects =
					new HashedSet<object>(
						new object[]
						{
							c,
							//c.NoProxyInfo,
							c.ProxyInfo,
							c.NonLazyInfo,
							//c.NoProxyOwner,
							c.ProxyOwner,
							c.NonLazyOwner,
							//c.getLazyDataPoints(),
							c.NonLazyJoinDataPoints.First(),
							c.NonLazySelectDataPoints.First()
						});

			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
//			Assert.That(NHibernateUtil.IsInitialized(c.NoProxyInfo), Is.False);
//			NHibernateUtil.Initialize(c.NoProxyInfo);
//			expectedInitializedObjects.Add(c.NoProxyInfo);
//			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			Assert.That(NHibernateUtil.IsInitialized(c.ProxyInfo), Is.False);
			NHibernateUtil.Initialize(c.ProxyInfo);
			expectedInitializedObjects.Add(c.ProxyInfo);
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			
			Assert.That(NHibernateUtil.IsInitialized(c.LazyDataPoints), Is.False);
			NHibernateUtil.Initialize(c.LazyDataPoints);
			expectedInitializedObjects.Add(c.LazyDataPoints.First());
			expectedReadOnlyObjects.Add(c.LazyDataPoints.First());
			
			// The following check fails if the NH-specific change (above) is not made. More specifically it fails
			// when asserting that the c.LazyDataPoints.First() is ReadOnly
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			
			t.Commit();
			s.Close();
			
			s = OpenSession();
			t = s.BeginTransaction();
			s.CreateQuery("delete from DataPoint").ExecuteUpdate();
			s.CreateQuery("delete from Container").ExecuteUpdate();
			s.CreateQuery("delete from Info").ExecuteUpdate();
			s.CreateQuery("delete from Owner").ExecuteUpdate();
			t.Commit();
			s.Close();
		}
		public void ExistingReadOnlyAfterSetSessionModifiableExistingProxyReadOnly()
		{
			Container cOrig = CreateContainer();
			ISet<object> expectedInitializedObjects =
					new HashedSet<object>(
						new object[]
						{
							cOrig,
							//cOrig.NoProxyInfo,
							cOrig.ProxyInfo,
							cOrig.NonLazyInfo,
							//cOrig.NoProxyOwner,
							cOrig.ProxyOwner,
							cOrig.NonLazyOwner,
							cOrig.LazyDataPoints.First(),
							cOrig.NonLazyJoinDataPoints.First(),
							cOrig.NonLazySelectDataPoints.First()
						});

			ISet<object> expectedReadOnlyObjects = new HashedSet<object>();
			DataPoint lazyDataPointOrig = cOrig.LazyDataPoints.First();
			ISession s = OpenSession();
			Assert.That(s.DefaultReadOnly, Is.False);
			ITransaction t = s.BeginTransaction();
			s.Save(cOrig);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			s.DefaultReadOnly = true;
			Assert.That(s.DefaultReadOnly, Is.True);
			CheckContainer(cOrig, expectedInitializedObjects, expectedReadOnlyObjects, s);
			t.Commit();
			s.Close();
	
			s = OpenSession();
			t = s.BeginTransaction();
			s.DefaultReadOnly = true;
			Container c = s.Get<Container>(cOrig.Id);
			Assert.That(cOrig, Is.Not.SameAs(c));
			
			expectedInitializedObjects =
					new HashedSet<object>(
						new object[]
						{
							c,
							c.NonLazyInfo,
							//c.NoProxyOwner,
							c.ProxyOwner,
							c.NonLazyOwner,
							c.NonLazyJoinDataPoints.First(),
							c.NonLazySelectDataPoints.First()
						});

			expectedReadOnlyObjects =
					new HashedSet<object>(
						new object[]
						{
							c,
							//c.NoProxyInfo,
							c.ProxyInfo,
							c.NonLazyInfo,
							//c.NoProxyOwner,
							c.ProxyOwner,
							c.NonLazyOwner,
							//c.getLazyDataPoints(),
							c.NonLazyJoinDataPoints.First(),
							c.NonLazySelectDataPoints.First()
						});

			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			s.DefaultReadOnly = false;
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
//			Assert.That(NHibernateUtil.IsInitialized(c.NoProxyInfo), Is.False);
//			NHibernateUtil.Initialize(c.NoProxyInfo);
//			expectedInitializedObjects.Add(c.NoProxyInfo);
//			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			Assert.That(NHibernateUtil.IsInitialized(c.ProxyInfo), Is.False);
			NHibernateUtil.Initialize(c.ProxyInfo);
			expectedInitializedObjects.Add(c.ProxyInfo);
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			s.DefaultReadOnly = true;
			DataPoint lazyDataPoint = s.Load<DataPoint>(lazyDataPointOrig.Id);
			s.DefaultReadOnly = false;
			Assert.That(NHibernateUtil.IsInitialized(c.LazyDataPoints), Is.False);
			NHibernateUtil.Initialize(c.LazyDataPoints);
			Assert.That(lazyDataPoint, Is.SameAs(c.LazyDataPoints.First()));
			expectedInitializedObjects.Add(c.LazyDataPoints.First());
			expectedReadOnlyObjects.Add(lazyDataPoint);
			CheckContainer(c, expectedInitializedObjects, expectedReadOnlyObjects, s);
			t.Commit();
			s.Close();
			s = OpenSession();
			t = s.BeginTransaction();
			s.CreateQuery("delete from DataPoint").ExecuteUpdate();
			s.CreateQuery("delete from Container").ExecuteUpdate();
			s.CreateQuery("delete from Info").ExecuteUpdate();
			s.CreateQuery("delete from Owner").ExecuteUpdate();
			t.Commit();
			s.Close();
		}
		protected internal virtual SqlString GenerateLazySelectString()
		{
			if (!entityMetamodel.HasLazyProperties)
				return null;

			HashedSet<int> tableNumbers = new HashedSet<int>();
			List<int> columnNumbers = new List<int>();
			List<int> formulaNumbers = new List<int>();
			for (int i = 0; i < lazyPropertyNames.Length; i++)
			{
				// all this only really needs to consider properties
				// of this class, not its subclasses, but since we
				// are reusing code used for sequential selects, we
				// use the subclass closure
				int propertyNumber = GetSubclassPropertyIndex(lazyPropertyNames[i]);

				int tableNumber = GetSubclassPropertyTableNumber(propertyNumber);
				tableNumbers.Add(tableNumber);

				int[] colNumbers = subclassPropertyColumnNumberClosure[propertyNumber];
				for (int j = 0; j < colNumbers.Length; j++)
				{
					if (colNumbers[j] != -1)
					{
						columnNumbers.Add(colNumbers[j]);
					}
				}
				int[] formNumbers = subclassPropertyFormulaNumberClosure[propertyNumber];
				for (int j = 0; j < formNumbers.Length; j++)
				{
					if (formNumbers[j] != -1)
					{
						formulaNumbers.Add(formNumbers[j]);
					}
				}
			}

			if (columnNumbers.Count == 0 && formulaNumbers.Count == 0)
			{
				// only one-to-one is lazy fetched
				return null;
			}

			return RenderSelect(tableNumbers.ToArray(), columnNumbers.ToArray(), formulaNumbers.ToArray());
		}
Example #21
        /// <summary>
        /// Marks all edges from the graph which are "dangles".
        /// Dangles are edges which are incident on a node of degree 1.
        /// This process is recursive, since removing a dangling edge
        /// may result in another edge becoming a dangle.
        /// In order to handle large recursion depths efficiently,
        /// an explicit recursion stack is used.
        /// </summary>
        /// <returns>A List containing the LineStrings that formed dangles.</returns>
        public IList DeleteDangles()
        {
            IList nodesToRemove = FindNodesOfDegree(1);
            ISet dangleLines = new HashedSet();

            Stack nodeStack = new Stack();
            for (IEnumerator i = nodesToRemove.GetEnumerator(); i.MoveNext(); )
                nodeStack.Push(i.Current);

            while (nodeStack.Count != 0)
            {
                Node node = (Node) nodeStack.Pop();

                DeleteAllEdges(node);
                IList nodeOutEdges = node.OutEdges.Edges;
                for (IEnumerator i = nodeOutEdges.GetEnumerator(); i.MoveNext(); )
                {
                    PolygonizeDirectedEdge de = (PolygonizeDirectedEdge) i.Current;
                    // delete this edge and its sym
                    de.Marked = true;
                    PolygonizeDirectedEdge sym = (PolygonizeDirectedEdge) de.Sym;
                    if (sym != null) sym.Marked = true;

                    // save the line as a dangle
                    PolygonizeEdge e = (PolygonizeEdge) de.Edge;
                    dangleLines.Add(e.Line);

                    Node toNode = de.ToNode;
                    // add the toNode to the list to be processed, if it is now a dangle
                    if (GetDegreeNonDeleted(toNode) == 1)
                        nodeStack.Push(toNode);
                }
            }
            return new ArrayList(dangleLines);
        }