/// <summary>
/// Demonstrates the basic HashedSet operations: Add, Find, Remove,
/// Count, Union, Intersect and Clear.
/// </summary>
static void Main()
{
    var numbers = new HashedSet<int>();
    foreach (int value in new[] { 5, 3, -4, 12, 0, -50, 10 })
    {
        numbers.Add(value);
    }

    Console.WriteLine("Set contains 12 -> {0}", numbers.Find(12));
    Console.WriteLine("Set contains 13 -> {0}", numbers.Find(13));

    numbers.Remove(10);
    Console.WriteLine("Removed 10\nSet contains 10 -> {0}", numbers.Find(10));
    Console.WriteLine("Set contains {0} items", numbers.Count);
    Console.WriteLine("Set 1: {0}", numbers);

    var other = new HashedSet<int>();
    foreach (int value in new[] { -4, 15, 0, -122, 35 })
    {
        other.Add(value);
    }
    Console.WriteLine("Set 2: {0}", other);

    // Union and Intersect mutate the receiving set in place.
    numbers.Union(other);
    Console.WriteLine("Set after union: {0}", numbers);

    numbers.Intersect(other);
    Console.WriteLine("Set after intersect: {0}", numbers);

    numbers.Clear();
    Console.WriteLine("Set contains {0} items after clear", numbers.Count);
}
/// <summary>
/// Demonstrates duplicate rejection plus IntersectsWith/Union on two
/// overlapping integer sets.
/// </summary>
public static void Main()
{
    // Each value 1..10 is added three times; the set keeps one copy each.
    var set = new HashedSet<int>();
    for (int value = 1; value < 11; value++)
    {
        for (int repeat = 0; repeat < 3; repeat++)
        {
            set.Add(value);
        }
    }
    System.Console.WriteLine(set);
    System.Console.WriteLine(set.Count);

    var otherSet = new HashedSet<int>();
    for (int value = 5; value < 16; value++)
    {
        otherSet.Add(value);
    }
    System.Console.WriteLine(otherSet);
    System.Console.WriteLine(otherSet.Count);

    System.Console.WriteLine(set.IntersectsWith(otherSet));
    System.Console.WriteLine(set.Union(otherSet));
}
/// <summary>
/// Demonstrates a string set: duplicate adds, removal, then the
/// intersection and union of two name sets.
/// </summary>
public static void Main(string[] args)
{
    // "Pesho" is added twice (kept once) and "Gosho" is removed again,
    // so three distinct students remain.
    var students = new HashedSet<string>();
    students.Add("Pesho");
    students.Add("Pesho");
    students.Add("Gosho");
    students.Remove("Gosho");
    students.Add("Misho");
    students.Add("Ivan");
    Console.WriteLine("Student count: {0}", students.Count);

    var users = new HashedSet<string>();
    users.Add("Mariq");
    users.Add("Pesho");
    users.Add("Misho");

    HashedSet<string> common = students.Intersect(users);
    Console.WriteLine("Intersection:");
    foreach (string name in common)
    {
        Console.WriteLine(name);
    }

    HashedSet<string> combined = students.Union(users);
    Console.WriteLine("Union: ");
    foreach (string name in combined)
    {
        Console.WriteLine(name);
    }
}
/// <summary>
/// Captures a mapping document together with the class names it contains
/// and the class names those entries still require before the document
/// can be processed.
/// </summary>
/// <param name="document">The parsed mapping document.</param>
/// <param name="classEntries">The class entries extracted from the document.</param>
public MappingsQueueEntry(NamedXmlDocument document, IEnumerable<ClassExtractor.ClassEntry> classEntries)
{
    this.document = document;
    containedClassNames = GetClassNames(classEntries);
    // Required names are computed against the contained ones so that
    // classes defined in this very document are not treated as missing.
    requiredClassNames = GetRequiredClassNames(classEntries, containedClassNames);
}
/* 5 Implement the data structure "set" in a class HashedSet<T> using your class HashTable<K,T>
 * to hold the elements. Implement all standard set operations like Add(T), Find(T), Remove(T),
 * Count, Clear(), union and intersect.
 * */
static void Main(string[] args)
{
    // Basic Add/Find/Remove behaviour, including duplicate insertion.
    var numbers = new HashedSet<int>();
    Debug.Assert(numbers.Count == 0);
    Debug.Assert(!numbers.Find(1));

    numbers.Add(1);
    Debug.Assert(numbers.Count == 1);
    Debug.Assert(numbers.Find(1));

    numbers.Add(2);
    Debug.Assert(numbers.Count == 2);
    Debug.Assert(numbers.Find(2));

    // Re-adding an existing element must not grow the set.
    numbers.Add(1);
    Debug.Assert(numbers.Count == 2);
    Debug.Assert(numbers.Find(1));

    numbers.Remove(1);
    Debug.Assert(numbers.Count == 1);
    Debug.Assert(!numbers.Find(1));
    Debug.Assert(numbers.Find(2));

    // Set algebra: intersection and union of two literal sets.
    var intersection = new HashedSet<int> { 1, 2, 3, 4, 5, 6 }.Intersect(new HashedSet<int> { 2, 4, 6, 8, 10 });
    Debug.Assert(intersection.SameContents(new[] { 2, 4, 6 }, i => i));

    var union = new HashedSet<int> { 1, 2, 3, 4, 5, 6 }.Union(new HashedSet<int> { 2, 4, 6, 8, 10 });
    Debug.Assert(union.SameContents(new[] { 1, 2, 3, 4, 5, 6, 8, 10 }, i => i));
}
// Persists a Person whose dynamic "Properties" map holds a set of phone
// numbers, then reloads it in a fresh session and verifies the set
// round-trips through the mapping intact.
public void test()
{
    using (ISession s = OpenSession())
    using (ITransaction tx = s.BeginTransaction())
    {
        Person person = new Person("1");
        person.Name = "John Doe";
        var set = new HashedSet<object>();
        set.Add("555-1234");
        set.Add("555-4321");
        person.Properties.Add("Phones", set);
        s.Save(person);
        tx.Commit();
    }

    // Second session: load the entity back and check the mapped set.
    using (ISession s = OpenSession())
    using (ITransaction tx = s.BeginTransaction())
    {
        Person person = (Person)s.CreateCriteria(typeof(Person)).UniqueResult();
        Assert.AreEqual("1", person.ID);
        Assert.AreEqual("John Doe", person.Name);
        Assert.AreEqual(1, person.Properties.Count);
        Assert.That(person.Properties["Phones"], Is.InstanceOf<ISet<object>>());
        Assert.IsTrue(((ISet<object>) person.Properties["Phones"]).Contains("555-1234"));
        Assert.IsTrue(((ISet<object>) person.Properties["Phones"]).Contains("555-4321"));
    }
}
/// <summary>
/// Demonstrates Count, Remove, Union and Intersect on two sets of names,
/// printing the merged and the shared friends.
/// </summary>
static void Main()
{
    var myBestFriends = new HashedSet<string>();
    myBestFriends.Add("Ivan");
    myBestFriends.Add("Daniel");
    myBestFriends.Add("Cecilia");
    Console.WriteLine(myBestFriends.Count);

    myBestFriends.Remove("Cecilia");
    Console.WriteLine(myBestFriends.Count);

    var yourBestFriends = new HashedSet<string>();
    yourBestFriends.Add("Petar");
    yourBestFriends.Add("Daniel");
    yourBestFriends.Add("Monika");

    // Union keeps a single copy of the shared friend ("Daniel").
    HashedSet<string> allBestFriends = myBestFriends.Union(yourBestFriends);
    Console.WriteLine("All best friends: ");
    foreach (var entry in allBestFriends.setOfData)
    {
        Console.WriteLine("{0}", entry.Value);
    }

    HashedSet<string> mutualBestFriends = myBestFriends.Intersect(yourBestFriends);
    Console.WriteLine("Mutual best friends: ");
    foreach (var entry in mutualBestFriends.setOfData)
    {
        Console.WriteLine("{0}", entry.Value);
    }
}
/// <summary>
/// Initializes a new task: fresh id, empty title, empty link collection,
/// and creation/modification timestamps both set to "now".
/// </summary>
public Task()
{
    CreatedDate = ModifiedDate = DateTimeHelper.Now;
    Id = Guid.NewGuid();
    _title = "";
    Links = new HashedSet<Link>();
}
/// <summary>
/// Creates a product line for the given quotation with an initially
/// empty set of edition quotations.
/// </summary>
/// <param name="quotation">The owning quotation.</param>
/// <param name="name">Product name.</param>
/// <param name="group">Product group label.</param>
public ProductQuotation(Quotation quotation, string name, string group)
{
    Quotation = quotation;
    Name = name;
    Group = group;
    Editions = new HashedSet<EditionQuotation>();
}
// Demo: persists three T3 rows plus a T1 and a T2 that both reference the
// SAME T3 collection, all inside a single transaction.
static void Main(string[] args)
{
    ISessionFactory sf = CreateSessionFactory();
    using(ISession s = sf.OpenSession())
    {
        using(ITransaction t = s.BeginTransaction())
        {
            ISet<T3> t3 = new HashedSet<T3>() { new T3() {str="a"}, new T3() {str="b"}, new T3() {str="c"} };
            // The collection elements are saved before the owners reference them.
            foreach(T3 tt3 in t3)
                s.Save(tt3);
            T1 t1 = new T1() { str = "t1", t3 = t3 };
            T2 t2 = new T2() { str = "t2", t3 = t3 };
            s.Save(t1);
            s.Save(t2);
            t.Commit();
            //IList<T1> k = s.QueryOver<T1>().List();
            //Console.WriteLine(k[0].id);
            //s.Delete(k[0]);
            //t.Commit();
        }
    }
    // Keep the console window open until a key is pressed.
    Console.ReadKey();
}
public void InitializerShouldCreateEmptyHashedSet()
{
    // A freshly constructed set must contain no elements.
    var emptySet = new HashedSet<int>();

    Assert.AreEqual(0, emptySet.Count);
}
/// <summary>
/// Find with a key that was never added must report the key as absent.
/// </summary>
public void TestFindWithInvalidKey()
{
    var table = new HashedSet<string>();
    table.Add("Pesho");

    var value = table.Find("Peho");

    // The original test computed the result but never verified it, so it
    // could not fail; assert the expected outcome explicitly (Find returns
    // a bool, as exercised by TestFindShouldProperlyWork).
    Assert.AreEqual(false, value);
}
public void TestFindShouldProperlyWork()
{
    // An element that was added must be reported as present.
    var set = new HashedSet<string>();
    set.Add("Pesho");

    Assert.AreEqual(true, set.Find("Pesho"));
}
/// <summary>
/// Prints two disjoint name sets, their intersections and unions in both
/// orders, then exercises Remove/Find/Count on the first set.
/// </summary>
public static void Main()
{
    var firstSet = new HashedSet<string>();
    var secondSet = new HashedSet<string>();
    firstSet.Add("Pesho");
    firstSet.Add("Gosho");
    firstSet.Add("Tosho");
    secondSet.Add("Ivan");
    secondSet.Add("Petkan");
    secondSet.Add("Dragan");

    Console.WriteLine(firstSet);
    Console.WriteLine(secondSet);

    // Intersection and union are printed both ways around.
    Console.WriteLine(firstSet.Intersect(secondSet));
    Console.WriteLine(secondSet.Intersect(firstSet));
    Console.WriteLine(firstSet.Union(secondSet));
    Console.WriteLine(secondSet.Union(firstSet));

    firstSet.Remove("Pesho");
    firstSet.Remove("Tosho");
    Console.WriteLine(firstSet);
    Console.WriteLine(firstSet.Find("Tosho"));
    Console.WriteLine(firstSet.Find("Gosho"));
    Console.WriteLine(firstSet.Count);
}
public void AddShouldNotThrowExceptionWhenTheSameKeyIsAlreadyPresent()
{
    // The test passes as long as the duplicate insert completes without throwing.
    var set = new HashedSet<string>();
    set.Add("gosho");
    set.Add("gosho");
}
/// <summary>
/// Returns the distinct elements of <paramref name="source"/>, using a
/// <see cref="HashedSet"/> to drop duplicates.
/// </summary>
public IEnumerable Distinct(IEnumerable source)
{
    var unique = new HashedSet();
    foreach (object element in source)
    {
        unique.Add(element);
    }
    return unique;
}
/// <summary>
/// Builds a set with a duplicate entry, merges a second set into it via
/// Union, and prints the resulting elements.
/// </summary>
static void Main()
{
    var attributes = new HashedSet<string>();
    attributes.Add("string");
    attributes.Add("str");
    attributes.Add("strength");
    attributes.Add("string"); // duplicate - silently ignored by the set

    var moreAttributes = new HashedSet<string>();
    moreAttributes.Add("strength");
    moreAttributes.Add("dexterity");
    moreAttributes.Add("intelligence");

    // Merge the second set into the first, then print the contents.
    attributes.Union(moreAttributes);
    foreach (var attribute in attributes)
    {
        Console.WriteLine(attribute);
    }
}
/// <summary>
/// Returns a collection of <see cref="ClassEntry" /> containing
/// information about all classes in this stream.
/// </summary>
/// <param name="document">A validated <see cref="XmlDocument"/> representing
/// a mapping file.</param>
/// <returns>A set with one <see cref="ClassEntry"/> per class-like element
/// (class, subclass, joined-subclass, union-subclass).</returns>
public static ICollection GetClassEntries(XmlDocument document)
{
    XmlNamespaceManager nsmgr = HbmBinder.BuildNamespaceManager(document.NameTable);

    // Since the document is validated, no error checking is done in this method.
    HashedSet classEntries = new HashedSet();

    XmlNode root = document.DocumentElement;
    // assembly/namespace attributes on the root qualify unqualified class names.
    string assembly = XmlHelper.GetAttributeValue(root, "assembly");
    string @namespace = XmlHelper.GetAttributeValue(root, "namespace");

    // Select every mapped class element anywhere in the document.
    XmlNodeList classNodes = document.SelectNodes(
        "//" + HbmConstants.nsClass +
        "|//" + HbmConstants.nsSubclass +
        "|//" + HbmConstants.nsJoinedSubclass +
        "|//" + HbmConstants.nsUnionSubclass,
        nsmgr
        );

    foreach (XmlNode classNode in classNodes)
    {
        string name = XmlHelper.GetAttributeValue(classNode, "name");
        string extends = XmlHelper.GetAttributeValue(classNode, "extends");
        ClassEntry ce = new ClassEntry(extends, name, assembly, @namespace);
        classEntries.Add(ce);
    }

    return classEntries;
}
/// <summary>
/// Exercises Add/Remove/Find/Count, Union, Intersect and Clear on sets
/// of floats.
/// </summary>
public static void Main()
{
    var firstSet = new HashedSet<float>();
    foreach (float value in new[] { 1f, 1.4f, 1.7f, 2f, 2.2f })
    {
        firstSet.Add(value);
    }
    firstSet.Remove(1.7f);
    Console.WriteLine(firstSet.Find(1f));
    Console.WriteLine(firstSet.Count);

    // Two identical sets to run Union and Intersect against the first.
    var secondSet = new HashedSet<float>();
    var thirdSet = new HashedSet<float>();
    foreach (float value in new[] { 1f, 2f, 3f, 5f })
    {
        secondSet.Add(value);
        thirdSet.Add(value);
    }

    secondSet.Union(firstSet);
    thirdSet.Intersect(firstSet);
    firstSet.Clear();
}
/// <summary>
/// Returns a collection of <see cref="ClassEntry" /> containing
/// information about all classes in this stream.
/// </summary>
/// <param name="document">A validated <see cref="XmlDocument"/> representing
/// a mapping file.</param>
/// <returns>A set with one <see cref="ClassEntry"/> per class-like element
/// (class, subclass, joined-subclass, union-subclass).</returns>
public static ICollection GetClassEntries(XmlDocument document)
{
    // TODO this should be extracted into a utility method since there's similar
    // code in Configuration
    XmlNamespaceManager nsmgr = new XmlNamespaceManager(document.NameTable);
    nsmgr.AddNamespace(HbmConstants.nsPrefix, Configuration.MappingSchemaXMLNS);

    // Since the document is validated, no error checking is done in this method.
    HashedSet classEntries = new HashedSet();

    XmlNode root = document.DocumentElement;
    // assembly/namespace attributes on the root qualify unqualified class names.
    string assembly = XmlHelper.GetAttributeValue(root, "assembly");
    string @namespace = XmlHelper.GetAttributeValue(root, "namespace");

    // Select every mapped class element anywhere in the document.
    XmlNodeList classNodes = document.SelectNodes(
        "//" + HbmConstants.nsClass +
        "|//" + HbmConstants.nsSubclass +
        "|//" + HbmConstants.nsJoinedSubclass +
        "|//" + HbmConstants.nsUnionSubclass,
        nsmgr
        );

    foreach (XmlNode classNode in classNodes)
    {
        string name = XmlHelper.GetAttributeValue(classNode, "name");
        string extends = XmlHelper.GetAttributeValue(classNode, "extends");
        ClassEntry ce = new ClassEntry(extends, name, assembly, @namespace);
        classEntries.Add(ce);
    }

    return classEntries;
}
/// <summary>
/// Initializes a product with empty image/comment/recommendation
/// collections; products are visible by default.
/// </summary>
public Product()
{
    Images = new List<Image>();
    Comments = new List<Comment>();
    Recomended = new HashedSet<Product>();
    IsVisible = true;
}
/// <summary>
/// Create an action that will evict collection and entity regions based on queryspaces (table names).
/// </summary>
/// <param name="session">The session the bulk operation runs in.</param>
/// <param name="querySpaces">The table names the operation touches.</param>
public BulkOperationCleanupAction(ISessionImplementor session, ISet<string> querySpaces)
{
    //from H3.2 TODO: cache the autodetected information and pass it in instead.
    this.session = session;

    // Start from the explicitly supplied spaces; spaces of affected
    // entities are accumulated into the same set below.
    ISet<string> tmpSpaces = new HashedSet<string>(querySpaces);
    ISessionFactoryImplementor factory = session.Factory;
    IDictionary acmd = factory.GetAllClassMetadata();
    foreach (DictionaryEntry entry in acmd)
    {
        string entityName = ((System.Type) entry.Key).FullName;
        IEntityPersister persister = factory.GetEntityPersister(entityName);
        string[] entitySpaces = persister.QuerySpaces;

        if (AffectedEntity(querySpaces, entitySpaces))
        {
            // Second-level cache entries for this entity must be evicted.
            if (persister.HasCache)
            {
                affectedEntityNames.Add(persister.EntityName);
            }
            // Collections this entity participates in are affected too.
            ISet roles = session.Factory.GetCollectionRolesByEntityParticipant(persister.EntityName);
            if (roles != null)
            {
                affectedCollectionRoles.AddAll(roles);
            }
            for (int y = 0; y < entitySpaces.Length; y++)
            {
                tmpSpaces.Add(entitySpaces[y]);
            }
        }
    }
    spaces = new List<string>(tmpSpaces);
}
public void Union_Test()
{
    // {1,2,3} union {3,4,5} -> {1,2,3,4,5}; element 3 appears only once.
    var set = new HashedSet<int>();
    var otherSet = new HashedSet<int>();
    set.Add(1);
    set.Add(2);
    set.Add(3);
    otherSet.Add(3);
    otherSet.Add(4);
    otherSet.Add(5);

    // Union mutates the receiving set in place.
    set.Union(otherSet);

    var actual = new StringBuilder();
    foreach (var element in set)
    {
        actual.Append(element + " ");
    }

    Assert.AreEqual("1 2 3 4 5 ", actual.ToString());
}
// Verifies that SchemaMetadataUpdater copies every reserved word reported
// by the live database schema into the dialect's keyword list.
public void UpdateReservedWordsInDialect()
{
    var reservedDb = new HashedSet<string>();
    var configuration = TestConfigurationHelper.GetDefaultConfiguration();
    var dialect = Dialect.Dialect.GetDialect(configuration.Properties);
    var connectionHelper = new ManagedProviderConnectionHelper(configuration.Properties);
    connectionHelper.Prepare();
    try
    {
        // Collect the reserved words straight from the database metadata,
        // normalized to lower case for comparison.
        var metaData = dialect.GetDataBaseSchema(connectionHelper.Connection);
        foreach (var rw in metaData.GetReservedWords())
        {
            reservedDb.Add(rw.ToLowerInvariant());
        }
    }
    finally
    {
        // Always release the connection, even if metadata retrieval fails.
        connectionHelper.Release();
    }
    var sf = (ISessionFactoryImplementor) configuration.BuildSessionFactory();
    SchemaMetadataUpdater.Update(sf);

    // Every db-reported keyword must now appear among the dialect keywords.
    var match = reservedDb.Intersect(sf.Dialect.Keywords);
    Assert.That(match, Is.EquivalentTo(reservedDb));
}
/// <summary>
/// Describes a native SQL query: its text, its declared returns and the
/// query spaces (tables) it touches. The hash code is computed once and
/// cached because instances are used as (immutable) cache keys.
/// </summary>
public NativeSQLQuerySpecification(
    string queryString,
    ISQLQueryReturn[] sqlQueryReturns,
    ICollection querySpaces)
{
    this.queryString = queryString;
    this.sqlQueryReturns = sqlQueryReturns;

    if (querySpaces == null)
    {
        this.querySpaces = new HashedSet();
    }
    else
    {
        ISet tmp = new HashedSet();
        tmp.AddAll(querySpaces);
        // Can't use ImmutableSet here because it doesn't implement GetHashCode properly.
        this.querySpaces = tmp;
    }

    // pre-determine and cache the hashcode
    int hashCode = queryString.GetHashCode();
    unchecked
    {
        hashCode = 29 * hashCode + this.querySpaces.GetHashCode();
        if (this.sqlQueryReturns != null)
        {
            hashCode = 29 * hashCode + sqlQueryReturns.Length;
        }
    }
    this.hashCode = hashCode;
}
public void HashedSetUnionTestSameElementsInBoth()
{
    // Two sets holding the identical elements 0..4.
    const int elementCount = 5;
    var firstSet = new HashedSet<int>();
    var secondSet = new HashedSet<int>();
    for (int i = 0; i < elementCount; i++)
    {
        firstSet.Add(i);
        secondSet.Add(i);
    }

    Assert.AreEqual(elementCount, firstSet.Count, "Incorrect set count!");
    Assert.AreEqual(elementCount, secondSet.Count, "Incorrect set count!");

    // Union with an identical set must leave the contents unchanged.
    firstSet.Union(secondSet);
    for (int i = 0; i < elementCount; i++)
    {
        Assert.IsTrue(firstSet.Contains(i), "Incorrect union!");
    }
    Assert.AreEqual(elementCount, firstSet.Count, "Incorrect amount of elements after union");
}
public void ShouldRemoveElementsCorrectly()
{
    var set = new HashedSet<int>();

    set.Add(5);
    Assert.AreEqual(1, set.Count);

    // Removing the only element leaves the set empty again.
    set.Remove(5);
    Assert.AreEqual(0, set.Count);
}
/// <summary>
/// Returns all Edges that connect the two nodes (which are assumed to be different).
/// </summary>
/// <param name="node0">First endpoint node.</param>
/// <param name="node1">Second endpoint node.</param>
/// <returns>The edges incident to both nodes.</returns>
public static IList getEdgesBetween(Node node0, Node node1)
{
    // Start from node0's edges and keep only those also leaving node1.
    ISet commonEdges = new HashedSet(DirectedEdge.ToEdges(node0.OutEdges.Edges));
    commonEdges.RetainAll(DirectedEdge.ToEdges(node1.OutEdges.Edges));
    return new ArrayList(commonEdges);
}
/// <summary>
/// Initializes an empty, unnamed blend: no drinks, zero acidity,
/// a default strength and no upcharge.
/// </summary>
public Blend()
{
    Name = String.Empty;
    Drinks = new HashedSet<Drink>();
    Acidity = 0;
    Strength = new Strength();
    Upcharge = 0M;
}
public void TestAddMethod()
{
    var set = new HashedSet<int>();

    // Add reports true for new elements and false for duplicates.
    Assert.IsTrue(set.Add(1));
    Assert.IsTrue(set.Add(2));
    Assert.IsFalse(set.Add(2));

    Assert.AreEqual(2, set.Count);
}
public void TestInitialSizeToBeZero()
{
    // Pre-sizing the backing storage must not affect the element count.
    var set = new HashedSet<string>(100);

    Assert.AreEqual(0, set.Count);
}
/// <summary>
/// Initializes the model with an empty set of instance properties.
/// </summary>
public InstanceModel()
{
    InstanceProperties = new HashedSet<InstancePropertyModel>();
}
/// <summary>
/// Initializes the order with an empty set of order items.
/// </summary>
public MP_EkmOrder()
{
    OrderItems = new HashedSet<MP_EkmOrderItem>();
}
/// <summary>
/// Reads LESS files from <paramref name="directory"/>, recording each
/// imported file path in <paramref name="importFilePaths"/>.
/// </summary>
public CassetteLessFileReader(IDirectory directory, HashedSet<string> importFilePaths)
{
    this.directory = directory;
    this.importFilePaths = importFilePaths;
}
/// <summary>
/// Builds the proxy factory for an entity: collects every interface the
/// proxy must implement (the entity's own proxy interface plus those of
/// its subclasses), resolves the identifier accessors on the proxy
/// interface, and post-instantiates the factory. Returns null when proxy
/// creation fails, so the entity falls back to no proxying.
/// </summary>
protected override IProxyFactory BuildProxyFactory(PersistentClass persistentClass, IGetter idGetter, ISetter idSetter)
{
    bool needAccesorCheck = true; // NH specific (look the comment below)

    // determine the id getter and setter methods from the proxy interface (if any)
    // determine all interfaces needed by the resulting proxy
    var proxyInterfaces = new HashedSet<System.Type> { typeof(INHibernateProxy) };

    System.Type _mappedClass = persistentClass.MappedClass;
    System.Type _proxyInterface = persistentClass.ProxyInterface;

    if (_proxyInterface != null && !_mappedClass.Equals(_proxyInterface))
    {
        if (!_proxyInterface.IsInterface)
        {
            throw new MappingException("proxy must be either an interface, or the class itself: " + EntityName);
        }
        needAccesorCheck = false; // NH (the proxy is an interface all properties can be overridden)
        proxyInterfaces.Add(_proxyInterface);
    }

    if (_mappedClass.IsInterface)
    {
        needAccesorCheck = false; // NH (the mapped class is an interface all properties can be overridden)
        proxyInterfaces.Add(_mappedClass);
    }

    // Subclasses may declare their own proxy interfaces; the proxy must
    // implement those as well.
    foreach (Subclass subclass in persistentClass.SubclassIterator)
    {
        System.Type subclassProxy = subclass.ProxyInterface;
        System.Type subclassClass = subclass.MappedClass;
        if (subclassProxy != null && !subclassClass.Equals(subclassProxy))
        {
            if (!subclassProxy.IsInterface)
            {
                throw new MappingException("proxy must be either an interface, or the class itself: " + subclass.EntityName);
            }
            proxyInterfaces.Add(subclassProxy);
        }
    }

    /*
     * NH Different Implementation (for Error logging):
     * - Check if the logger is enabled
     * - Don't need nothing to check if the mapped-class or proxy is an interface
     */
    if (log.IsErrorEnabled && needAccesorCheck)
    {
        LogPropertyAccessorsErrors(persistentClass);
    }
    /**********************************************************/

    MethodInfo idGetterMethod = idGetter == null ? null : idGetter.Method;
    MethodInfo idSetterMethod = idSetter == null ? null : idSetter.Method;

    // Map the identifier accessors onto the proxy interface, when present.
    MethodInfo proxyGetIdentifierMethod = idGetterMethod == null || _proxyInterface == null
        ? null
        : ReflectHelper.TryGetMethod(_proxyInterface, idGetterMethod);
    MethodInfo proxySetIdentifierMethod = idSetterMethod == null || _proxyInterface == null
        ? null
        : ReflectHelper.TryGetMethod(_proxyInterface, idSetterMethod);

    IProxyFactory pf = BuildProxyFactoryInternal(persistentClass, idGetter, idSetter);
    try
    {
        pf.PostInstantiate(EntityName, _mappedClass, proxyInterfaces, proxyGetIdentifierMethod, proxySetIdentifierMethod,
                           persistentClass.HasEmbeddedIdentifier ? (IAbstractComponentType)persistentClass.Identifier.Type : null);
    }
    catch (HibernateException he)
    {
        // Proxy generation failure is non-fatal: log and fall back to null.
        log.Warn("could not create proxy factory for:" + EntityName, he);
        pf = null;
    }
    return (pf);
}
/// <summary>
/// Initializes the order with an empty set of order items.
/// </summary>
public MP_TeraPeakOrder()
{
    OrderItems = new HashedSet<MP_TeraPeakOrderItem>();
}
/// <summary>
/// Initializes the child with an empty set of sensitivities.
/// </summary>
public Child()
{
    Sensitivitieses = new HashedSet<Sensitivity>();
}
/// <summary>
/// Initializes the transaction with an empty set of transaction items.
/// </summary>
public MP_PayPalTransaction()
{
    TransactionItems = new HashedSet<MP_PayPalTransactionItem2>();
}
/// <summary>
/// Constructs a ShardedSessionFactoryImpl
/// </summary>
/// <param name="shardIds"> The ids of the shards with which this SessionFactory should be associated.</param>
/// <param name="sessionFactoryShardIdMap">Mapping of SessionFactories to shard ids.
/// When using virtual shards, this map associates SessionFactories (physical
/// shards) with virtual shards (shard ids). Map cannot be empty.
/// Map keys cannot be null. Map values cannot be null or empty.</param>
/// <param name="shardStrategyFactory">factory that knows how to create the <see cref="IShardStrategy"/>
/// that will be used for all shard-related operations</param>
/// <param name="classesWithoutTopLevelSaveSupport"> All classes that cannot be saved
/// as top-level objects</param>
/// <param name="checkAllAssociatedObjectsForDifferentShards">Flag that controls
/// whether or not we do full cross-shard relationshp checking (very slow)</param>
/// <exception cref="HibernateException">When the same shard id is mapped twice.</exception>
public ShardedSessionFactoryImpl(
    ICollection<ShardId> shardIds,
    IDictionary<ISessionFactoryImplementor, Set<ShardId>> sessionFactoryShardIdMap,
    IShardStrategyFactory shardStrategyFactory,
    ISet<System.Type> classesWithoutTopLevelSaveSupport,
    bool checkAllAssociatedObjectsForDifferentShards)
{
    Preconditions.CheckNotNull(sessionFactoryShardIdMap);
    Preconditions.CheckArgument(!(sessionFactoryShardIdMap.Count == 0));
    Preconditions.CheckNotNull(shardStrategyFactory);
    Preconditions.CheckNotNull(classesWithoutTopLevelSaveSupport);

    sessionFactories = new List<ISessionFactoryImplementor>(sessionFactoryShardIdMap.Keys);
    this.sessionFactoryShardIdMap = new Dictionary<ISessionFactoryImplementor, Set<ShardId>>();
    fullSessionFactoryShardIdMap = sessionFactoryShardIdMap;
    // Defensive copy: callers keep ownership of the original set.
    this.classesWithoutTopLevelSaveSupport = new HashedSet<System.Type>(classesWithoutTopLevelSaveSupport);
    this.checkAllAssociatedObjectsForDifferentShards = checkAllAssociatedObjectsForDifferentShards;

    // uniqueShardIds doubles as a duplicate detector (Add returns false
    // for an id that was already seen).
    Set<ShardId> uniqueShardIds = new HashedSet<ShardId>();
    ISessionFactoryImplementor controlSessionFactoryToSet = null;
    foreach (var entry in sessionFactoryShardIdMap)
    {
        ISessionFactoryImplementor implementor = entry.Key;
        Preconditions.CheckNotNull(implementor);
        Set<ShardId> shardIdSet = entry.Value;
        Preconditions.CheckNotNull(shardIdSet);
        Preconditions.CheckState(!(shardIdSet.Count == 0));
        foreach (ShardId shardId in shardIdSet)
        {
            //TODO: we should change it so we specify control shard in configuration
            if (shardId.Id == CONTROL_SHARD_ID)
            {
                controlSessionFactoryToSet = implementor;
            }
            if (!uniqueShardIds.Add(shardId))
            {
                string msg = string.Format("Cannot have more than one shard with shard id {0}.", shardId.Id);
                log.Error(msg);
                throw new HibernateException(msg);
            }
            if (shardIds.Contains(shardId))
            {
                if (!this.sessionFactoryShardIdMap.ContainsKey(implementor))
                {
                    this.sessionFactoryShardIdMap.Add(implementor, new HashedSet<ShardId>());
                }
                this.sessionFactoryShardIdMap[implementor].Add(shardId);
            }
        }
    }

    // make sure someone didn't associate a session factory with a shard id
    // that isn't in the full list of shards
    foreach (ShardId shardId in shardIds)
    {
        Preconditions.CheckState(uniqueShardIds.Contains(shardId));
    }
    controlSessionFactory = controlSessionFactoryToSet;
    // now that we have all our shard ids, construct our shard strategy
    shardStrategy = shardStrategyFactory.NewShardStrategy(shardIds);
    SetupIdGenerators();
}
/// <summary>
/// Initializes the object restricted to the given set of dialect scopes.
/// </summary>
/// <param name="dialectScopes">Dialect names this database object applies to.</param>
protected AbstractAuxiliaryDatabaseObject(HashedSet<string> dialectScopes)
{
    this.dialectScopes = dialectScopes;
}
/// <summary>
/// Initializes the object with no dialect restriction (empty scope set
/// means it applies to every dialect).
/// </summary>
protected AbstractAuxiliaryDatabaseObject()
{
    dialectScopes = new HashedSet<string>();
}
/// <summary>
/// Initializes the order with an empty set of order lines.
/// </summary>
public Order()
{
    OrderLines = new HashedSet<OrderLine>();
}
// Regression test for collections over a multi-table class hierarchy:
// a Lower instance holds a set containing both a Top and a Multi, plus
// several self-references, and everything must round-trip through
// save / reload / delete.
public void MultiTableCollections()
{
    // MySQL is excluded from this test.
    if (Dialect is MySQLDialect)
    {
        return;
    }

    ISession s = OpenSession();
    ITransaction t = s.BeginTransaction();
    Assert.AreEqual(0, s.CreateQuery("from s in class Top").List().Count);
    Multi multi = new Multi();
    multi.ExtraProp = "extra";
    multi.Name = "name";
    Top simp = new Top();
    simp.Date = DateTime.Now;
    simp.Name = "simp";
    object mid;
    object sid;
    // Sybase/MsSql dialects generate ids on Save; for the others the ids
    // are supplied explicitly.
    if ((Dialect is SybaseDialect) || (Dialect is MsSql2000Dialect))
    {
        mid = s.Save(multi);
        sid = s.Save(simp);
    }
    else
    {
        mid = 123L;
        sid = 1234L;
        s.Save(multi, mid);
        s.Save(simp, sid);
    }
    Lower ls = new Lower();
    ls.Other = ls;
    ls.Another = ls;
    ls.YetAnother = ls;
    ls.Name = "Less Simple";
    ISet dict = new HashedSet();
    ls.Set = dict;
    dict.Add(multi);
    dict.Add(simp);
    object id;
    if ((Dialect is SybaseDialect) || (Dialect is MsSql2000Dialect))
    {
        id = s.Save(ls);
    }
    else
    {
        id = 2L;
        s.Save(ls, id);
    }
    t.Commit();
    s.Close();
    Assert.AreSame(ls, ls.Other);
    Assert.AreSame(ls, ls.Another);
    Assert.AreSame(ls, ls.YetAnother);

    // Reload in a fresh session and verify the graph and the set contents.
    s = OpenSession();
    t = s.BeginTransaction();
    ls = (Lower)s.Load(typeof(Lower), id);
    Assert.AreSame(ls, ls.Other);
    Assert.AreSame(ls, ls.Another);
    Assert.AreSame(ls, ls.YetAnother);
    Assert.AreEqual(2, ls.Set.Count);
    int foundMulti = 0;
    int foundSimple = 0;
    // Multi is counted by both checks, so 2 Top-typed and 1 Multi-typed.
    foreach (object obj in ls.Set)
    {
        if (obj is Top)
        {
            foundSimple++;
        }
        if (obj is Multi)
        {
            foundMulti++;
        }
    }
    Assert.AreEqual(2, foundSimple);
    Assert.AreEqual(1, foundMulti);
    Assert.AreEqual(3, s.Delete("from s in class Top"));
    t.Commit();
    s.Close();
}
/// <summary>
/// Initializes the level with empty domain and excluded-point sets.
/// </summary>
public ConnectLevel()
{
    Domains = new HashedSet<ConnectDomain>();
    NotIn = new HashedSet<Point>();
}
/**
 * Determines the successor suffixes for a given suffix. Successor
 * suffixes are obtained:
 * a) from suffix sets
 * b) as plain single entries
 * c) by copying directly from another suffix's successor suffixes.
 * Additionally, if priority suffixes are specified, they are placed at
 * the very front of the successor-suffix list.
 *
 * @param ekElement : the suffix xml element.
 * @return list of suffix references.
 * @param anaEk the main suffix the successors are attached to
 */
private List<Ek> ardisilEkleriOlustur(Ek anaEk, XmlElement ekElement)
{
    Set<Ek> ardisilEkSet = new HashedSet<Ek>();
    XmlElement ardisilEklerEl = (XmlElement)ekElement.SelectNodes("ardisil-ekler")[0];
    if (ardisilEklerEl == null)
    {
        // No successor element at all: the suffix simply has no successors.
        return(new List<Ek>());
    }

    // add the single suffixes.
    XmlNodeList tekArdisilEkler = ardisilEklerEl.SelectNodes("aek");
    foreach (XmlElement element in tekArdisilEkler)
    {
        String ekAdi = element.InnerText;
        Ek ek = this.ekler[ekAdi];
        if (ek == null)
        {
            exit(anaEk.ad() + " icin ardisil ek bulunamiyor! " + ekAdi);
        }
        ardisilEkSet.Add(ek);
    }

    // add the members of the referenced suffix sets.
    XmlNodeList kumeEkler = ardisilEklerEl.SelectNodes("kume");
    foreach (XmlElement element in kumeEkler)
    {
        String kumeAdi = element.InnerText;
        Set<Ek> kumeEkleri = ekKumeleri[kumeAdi];
        if (kumeEkleri == null)
        {
            exit("kume bulunamiyor..." + kumeAdi);
        }
        ardisilEkSet.AddAll(kumeEkleri);
    }

    // if present, copy the successor suffixes of another suffix.
    XmlAttribute attr = ardisilEklerEl.GetAttributeNode("kopya-ek");
    if (attr != null)
    {
        String kopyaEkadi = attr.Value;
        Ek ek = this.ekler[kopyaEkadi];
        if (ek == null)
        {
            exit(anaEk.ad() + " icin kopyalanacak ek bulunamiyor! " + kopyaEkadi);
        }
        ardisilEkSet.AddAll(ek.ardisilEkler());
    }

    List<Ek> ardisilEkler = new List<Ek>(ardisilEkSet.Count);

    // if priority suffixes exist, read them and put them at the head of
    // the successor list. This is purely a performance-related step.
    XmlElement oncelikliEklerEl = (XmlElement)ekElement.SelectNodes("oncelikli-ekler")[0];
    if (oncelikliEklerEl != null)
    {
        XmlNodeList oncelikliEkler = oncelikliEklerEl.SelectNodes("oek");
        foreach (XmlElement element in oncelikliEkler)
        {
            String ekAdi = element.InnerText;
            Ek ek = this.ekler[ekAdi];
            if (ek == null)
            {
                exit(anaEk.ad() + " icin oncelikli ek bulunamiyor! " + ekAdi);
            }
            if (ardisilEkSet.Contains(ek))
            {
                // Move the suffix from the set to the front section of the list.
                ardisilEkler.Add(ek);
                ardisilEkSet.Remove(ek);
            }
            else
            {
                logger.Warn(anaEk.ad() + "icin oncelikli ek:" + ekAdi + " bu ekin ardisil eki degil!");
            }
        }
    }
    // Append the remaining (non-priority) successors.
    ardisilEkler.AddRange(ardisilEkSet);
    return(ardisilEkler);
}
// Scans every configured server/project, classifies each project's build
// status, and refreshes the tray icon, balloon tips and tooltip to
// reflect the overall (worst) state.
private void DoUpdateNotifier()
{
    BuildStatusEnum? worstBuildStatus = null;
    bool buildInProgress = false;
    bool buildIsStuck = false;
    var errorProjects = new HashedSet<Project>();
    var regressingProjects = new HashedSet<Project>();
    var progressingAndErrorProjects = new HashedSet<Project>();
    var interestingProjects = new HashedSet<Project>();
    int totalProjectCount = 0;
    foreach (Server server in ConfigurationService.Servers)
    {
        foreach (Project project in server.Projects)
        {
            totalProjectCount++;
            BuildStatus status = GetProjectStatus(project);
            // Track the worst status seen across all projects.
            if (worstBuildStatus == null || status.Value > worstBuildStatus)
            {
                worstBuildStatus = status.Value;
            }
            if (status.Value >= BuildStatusEnum.Failed)
            {
                errorProjects.Add(project);
            }
            if (status.Value > BuildStatusEnum.Successful)
            {
                progressingAndErrorProjects.Add(project);
            }
            if (status.IsInProgress)
            {
                buildInProgress = true;
                progressingAndErrorProjects.Add(project);
            }
            if (status.IsStuck)
            {
                buildIsStuck = true;
            }
            if (IsRegressing(project))
            {
                regressingProjects.Add(project);
            }
            // Remember the details so the next pass can detect regressions.
            lastProjectsBuildDetails[project] = project.AllBuildDetails;
            if (project.Activity.HasBuildActivity)
            {
                interestingProjects.Add(project);
            }
        }
    }
    // No projects at all: report an unknown overall status.
    if (worstBuildStatus == null)
    {
        worstBuildStatus = BuildStatusEnum.Unknown;
    }
#if false // tests
    lastBuildStatus++;
    if (lastBuildStatus > BuildStatus.Failed_BuildInProgress)
    {
        lastBuildStatus = 0;
    }
    worstBuildStatus = lastBuildStatus;
    Console.WriteLine("tray:" + lastBuildStatus);
#endif
    BuildStatus buildStatus = new BuildStatus(worstBuildStatus.Value, buildInProgress, buildIsStuck);
    UpdateIcon(buildStatus);
    UpdateBalloonTip(errorProjects, regressingProjects);
    UpdateTrayTooltip(progressingAndErrorProjects, totalProjectCount);
    ShowBallowTip(interestingProjects);
    lastBuildStatus = buildStatus;
}
// Builds the property-name -> result-column-alias map for a native SQL
// query <return> element. Dotted property names (paths into components /
// to-one associations) are re-ordered so that each property is inserted
// before any sibling property that follows it in its parent's property
// order; duplicate and invalid property names are rejected.
private IDictionary<string, string[]> BindPropertyResults(string alias, HbmReturnDiscriminator discriminatorSchema,
    HbmReturnProperty[] returnProperties, PersistentClass pc)
{
    Dictionary<string, string[]> propertyresults = new Dictionary<string, string[]>();
    // maybe a concrete SQLpropertyresult type, but Map is exactly what is required at the moment
    if (discriminatorSchema != null)
    {
        propertyresults["class"] = GetResultColumns(discriminatorSchema).ToArray();
    }

    List<HbmReturnProperty> properties = new List<HbmReturnProperty>();
    List<string> propertyNames = new List<string>();
    foreach (HbmReturnProperty returnPropertySchema in returnProperties ?? new HbmReturnProperty[0])
    {
        string name = returnPropertySchema.name;
        if (pc == null || name.IndexOf('.') == -1)
        {
            //if dotted and not load-collection nor return-join
            //regular property
            properties.Add(returnPropertySchema);
            propertyNames.Add(name);
        }
        else
        {
            // Reorder properties
            // 1. get the parent property
            // 2. list all the properties following the expected one in the parent property
            // 3. calculate the lowest index and insert the property
            int dotIndex = name.LastIndexOf('.');
            string reducedName = name.Substring(0, dotIndex);
            IValue value = pc.GetRecursiveProperty(reducedName).Value;
            IEnumerable<Mapping.Property> parentPropIter;
            if (value is Component)
            {
                Component comp = (Component)value;
                parentPropIter = comp.PropertyIterator;
            }
            else if (value is ToOne)
            {
                ToOne toOne = (ToOne)value;
                PersistentClass referencedPc = mappings.GetClass(toOne.ReferencedEntityName);
                if (toOne.ReferencedPropertyName != null)
                {
                    try
                    {
                        parentPropIter = ((Component)referencedPc.GetRecursiveProperty(toOne.ReferencedPropertyName).Value).PropertyIterator;
                    }
                    catch (InvalidCastException e)
                    {
                        throw new MappingException("dotted notation reference neither a component nor a many/one to one", e);
                    }
                }
                else
                {
                    try
                    {
                        parentPropIter = ((Component)referencedPc.IdentifierProperty.Value).PropertyIterator;
                    }
                    catch (InvalidCastException e)
                    {
                        throw new MappingException("dotted notation reference neither a component nor a many/one to one", e);
                    }
                }
            }
            else
            {
                throw new MappingException("dotted notation reference neither a component nor a many/one to one");
            }
            // Collect the names of the sibling properties that come after this one.
            bool hasFollowers = false;
            List<string> followers = new List<string>();
            foreach (Mapping.Property prop in parentPropIter)
            {
                string currentPropertyName = prop.Name;
                string currentName = reducedName + '.' + currentPropertyName;
                if (hasFollowers)
                {
                    followers.Add(currentName);
                }
                if (name.Equals(currentName))
                {
                    hasFollowers = true;
                }
            }
            // Insert before the earliest follower already present in the list.
            int index = propertyNames.Count;
            int followersSize = followers.Count;
            for (int loop = 0; loop < followersSize; loop++)
            {
                string follower = followers[loop];
                int currentIndex = GetIndexOfFirstMatchingProperty(propertyNames, follower);
                index = currentIndex != -1 && currentIndex < index ? currentIndex : index;
            }
            propertyNames.Insert(index, name);
            properties.Insert(index, returnPropertySchema);
        }
    }

    // Second pass: validate each property and record its result columns.
    ISet<string> uniqueReturnProperty = new HashedSet<string>();
    foreach (HbmReturnProperty returnPropertySchema in properties)
    {
        string name = returnPropertySchema.name;
        if ("class".Equals(name))
        {
            throw new MappingException(
                "class is not a valid property name to use in a <return-property>, use <return-discriminator> instead"
                );
        }
        //TODO: validate existing of property with the chosen name. (secondpass )
        List<string> allResultColumns = GetResultColumns(returnPropertySchema);

        if (allResultColumns.Count == 0)
        {
            throw new MappingException(
                "return-property for alias " + alias + " must specify at least one column or return-column name"
                );
        }
        if (uniqueReturnProperty.Contains(name))
        {
            throw new MappingException(
                "duplicate return-property for property " + name + " on alias " + alias
                );
        }
        uniqueReturnProperty.Add(name);

        // the issue here is that for <return-join/> representing an entity collection,
        // the collection element values (the property values of the associated entity)
        // are represented as 'element.{propertyname}'. Thus the StringHelper.root()
        // here puts everything under 'element' (which additionally has significant
        // meaning). Probably what we need to do is to something like this instead:
        // String root = StringHelper.root( name );
        // String key = root; // by default
        // if ( !root.equals( name ) ) {
        //     // we had a dot
        //     if ( !root.equals( alias ) {
        //         // the root does not apply to the specific alias
        //         if ( "elements".equals( root ) {
        //             // we specifically have a <return-join/> representing an entity collection
        //             // and this <return-property/> is one of that entity's properties
        //             key = name;
        //         }
        //     }
        // }
        // but I am not clear enough on the intended purpose of this code block, especially
        // in relation to the "Reorder properties" code block above...
        // String key = StringHelper.root( name );
        string key = name;
        string[] intermediateResults;
        if (!propertyresults.TryGetValue(key, out intermediateResults))
        {
            propertyresults[key] = allResultColumns.ToArray();
        }
        else
        {
            ArrayHelper.AddAll(intermediateResults, allResultColumns); // TODO: intermediateResults not used after this
        }
    }

    Dictionary<string, string[]> newPropertyResults = new Dictionary<string, string[]>();
    foreach (KeyValuePair<string, string[]> entry in propertyresults)
    {
        newPropertyResults[entry.Key] = entry.Value;
    }
    return(newPropertyResults.Count == 0
        ? (IDictionary<string, string[]>) new CollectionHelper.EmptyMapClass<string, string[]>()
        : newPropertyResults);
}
/// <summary>
/// Finds all clauses in <paramref name="clauses"/> that are subsumed by some
/// smaller clause in the same set (i.e. a clause with fewer literals for which
/// <c>Subsumes</c> returns true).
/// </summary>
/// <param name="clauses">The clauses to examine.</param>
/// <returns>The subset of <paramref name="clauses"/> that are subsumed; empty when the input is empty.</returns>
public static ISet <Clause> FindSubsumedClauses(ISet <Clause> clauses)
{
    ISet<Clause> subsumed = new HashedSet<Clause>();

    // Group the clauses by their # of literals.
    // Keep track of the min and max # of literals.
    int min = int.MaxValue;
    int max = 0;
    IDictionary<int, ISet<Clause>> clausesGroupedBySize = new Dictionary<int, ISet<Clause>>();
    foreach (Clause c in clauses)
    {
        int size = c.GetNumberLiterals();
        if (size < min)
        {
            min = size;
        }
        if (size > max)
        {
            max = size;
        }

        // BUGFIX: the original read clausesGroupedBySize[size] and then checked for
        // null — a direct Java Map.get translation. .NET's Dictionary indexer throws
        // KeyNotFoundException for a missing key, so the first clause of each new
        // size crashed. Use TryGetValue instead.
        ISet<Clause> cforsize;
        if (!clausesGroupedBySize.TryGetValue(size, out cforsize))
        {
            cforsize = new HashedSet<Clause>();
            clausesGroupedBySize[size] = cforsize;
        }
        cforsize.Add(c);
    }

    // Check if each smaller clause subsumes any of the larger clauses.
    for (int i = min; i < max; i++)
    {
        // Ensure there are clauses with this # of literals
        // (BUGFIX: TryGetValue instead of a throwing indexer, as above).
        ISet<Clause> scs;
        if (clausesGroupedBySize.TryGetValue(i, out scs))
        {
            for (int j = i + 1; j <= max; j++)
            {
                // Ensure there are clauses with this # of literals
                ISet<Clause> lcs;
                if (clausesGroupedBySize.TryGetValue(j, out lcs))
                {
                    foreach (Clause sc in scs)
                    {
                        // Don't bother checking clauses that are already subsumed.
                        if (!subsumed.Contains(sc))
                        {
                            foreach (Clause lc in lcs)
                            {
                                if (subsumed.Contains(lc))
                                {
                                    continue;
                                }
                                if (sc.Subsumes(lc))
                                {
                                    subsumed.Add(lc);
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    return subsumed;
}
/// <summary>
/// Builds the set of strings canonically equivalent to <paramref name="segment"/>.
/// For each code point in the segment it queries NormalizerImpl.GetCanonStartSet for
/// code points whose canonical decomposition begins with that code point, and where
/// the rest of the decomposition matches (via Extract) it substitutes the composite
/// code point, collecting every resulting variant. Ported from ICU4J (Java); the
/// ILOG.J2CsMapping wrappers are artifacts of that machine translation.
/// </summary>
/// <param name="segment">The segment to find canonical equivalents for; it is always included in the result.</param>
/// <returns>A set containing <paramref name="segment"/> itself plus all discovered equivalents.</returns>
private ILOG.J2CsMapping.Collections.ISet GetEquivalents2(String segment)
{
    ILOG.J2CsMapping.Collections.ISet result = new HashedSet();

    if (PROGRESS)
    {
        System.Console.Out.WriteLine("Adding: " + IBM.ICU.Impl.Utility.Hex(segment));
    }

    // The segment itself is always one of its own equivalents.
    ILOG.J2CsMapping.Collections.Generics.Collections.Add(result, segment);

    StringBuilder workingBuffer = new StringBuilder();

    // cycle through all the characters
    // NOTE: cp starts at 0, so the first iteration advances by GetCharCount(0);
    // cp is only assigned a real value inside the loop body before the increment runs.
    int cp = 0;
    int[] range = new int[2];
    for (int i = 0; i < segment.Length; i += IBM.ICU.Text.UTF16.GetCharCount(cp))
    {
        // see if any character is at the start of some decomposition
        cp = IBM.ICU.Text.UTF16.CharAt(segment, i);
        USerializedSet starts = new USerializedSet();
        if (!IBM.ICU.Impl.NormalizerImpl.GetCanonStartSet(cp, starts))
        {
            continue;
        }
        int j = 0;
        // if so, see which decompositions match
        int rangeCount = starts.CountRanges();
        for (j = 0; j < rangeCount; ++j)
        {
            starts.GetRange(j, range);
            int end = range[1];
            // Try every candidate composite code point in this range.
            for (int cp2 = range[0]; cp2 <= end; ++cp2)
            {
                // Extract appears to return the remainder of the segment once cp2's
                // decomposition has been matched at position i, or null when it does
                // not match — TODO confirm against the Extract implementation.
                ILOG.J2CsMapping.Collections.ISet remainder = Extract(cp2, segment, i, workingBuffer);
                if (remainder == null)
                {
                    continue;
                }
                // there were some matches, so add all the possibilities to the set.
                // ((i) - (0) == i; another artifact of the Java-to-C# translation)
                String prefix = segment.Substring(0, (i) - (0));
                prefix += IBM.ICU.Text.UTF16.ValueOf(cp2);
                // int el = -1;
                IIterator iter = new ILOG.J2CsMapping.Collections.IteratorAdapter(remainder.GetEnumerator());
                while (iter.HasNext())
                {
                    String item = (String)iter.Next();
                    String toAdd = prefix;
                    toAdd += item;
                    ILOG.J2CsMapping.Collections.Generics.Collections.Add(result, toAdd);
                    // if (PROGRESS) printf("Adding: %s\n", UToS(Tr(*toAdd)));
                }
            }
        }
    }
    return(result);
    // (The original ICU4J Java implementation — using UnicodeSet/AT_START — was
    // previously retained here as a block comment; see ICU4J's
    // CanonicalIterator.getEquivalents2 for the reference version.)
}
/// <summary>
/// Downloads and parses the Jenkins build-details XML for the given build URL and
/// maps it onto a <see cref="BuildDetails"/> instance.
/// </summary>
/// <param name="credentials">Credentials used for the HTTP request.</param>
/// <param name="buildUrl">Base URL of the build; when null, null is returned.</param>
/// <param name="ignoreUntrustedCertificate">Whether untrusted SSL certificates are accepted.</param>
/// <returns>The populated details, or null when <paramref name="buildUrl"/> is null.</returns>
private BuildDetails GetBuildDetails(Credentials credentials, string buildUrl, bool ignoreUntrustedCertificate)
{
    if (buildUrl == null)
    {
        return null;
    }
    var url = NetUtils.ConcatUrls(buildUrl, "/api/xml", JenkinsService.buildDetailsFilter);
    if (logger.IsDebugEnabled)
    {
        logger.Debug("Getting build details from " + url);
    }
    var xmlStr = DownloadString(credentials, url, true, ignoreUntrustedCertificate);
    if (logger.IsTraceEnabled)
    {
        logger.Trace("XML: " + xmlStr);
    }
    var xml = new XmlDocument();
    xml.LoadXml(xmlStr);
    var number = xml.SelectSingleNode("/*/number").InnerText;
    var fullDisplayName = xml.SelectSingleNode("/*/fullDisplayName").InnerText;
    var timestamp = xml.SelectSingleNode("/*/timestamp").InnerText;
    var estimatedDuration = xml.SelectSingleNode("/*/estimatedDuration").InnerText;
    var duration = xml.SelectSingleNode("/*/duration").InnerText;
    var xmlResult = xml.SelectSingleNode("/*/result");
    var result = xmlResult == null ? string.Empty : xmlResult.InnerText;
    var userNodes = xml.SelectNodes("/*/culprit/fullName");
    // The /1000 suggests these values are epoch/duration milliseconds; the integer
    // division truncates to whole seconds — presumably acceptable here, TODO confirm
    // against the Jenkins remote-access API.
    var ts = TimeSpan.FromSeconds(long.Parse(timestamp) / 1000);
    var date = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    date = date.Add(ts);
    var estimatedts = TimeSpan.FromSeconds(long.Parse(estimatedDuration) / 1000);
    // BUGFIX: this line previously parsed estimatedDuration again (copy/paste), so
    // res.Duration always mirrored res.EstimatedDuration. Parse the actual duration.
    var durationts = TimeSpan.FromSeconds(long.Parse(duration) / 1000);
    Spring.Collections.Generic.ISet <string> users = new HashedSet <string>();
    foreach (XmlNode userNode in userNodes)
    {
        var userName = StringUtils.ExtractUserName(userNode.InnerText);
        users.Add(userName);
    }
    var res = new BuildDetails();
    BuildCauses.FillInBuildCauses(res, xml);
    res.Number = int.Parse(number);
    res.DisplayName = fullDisplayName;
    res.Time = date;
    res.EstimatedDuration = estimatedts;
    res.Duration = durationts;
    res.Result = BuildStatus.StringToBuildStatus(result);
    res.Users = users;
    ClaimService.FillInBuildDetails(res, xml);
    if (logger.IsDebugEnabled)
    {
        logger.Debug("Done getting build details");
    }
    return res;
}
/// <summary>
/// Initializes a new instance with an empty <c>Items</c> set, so the collection
/// is never null.
/// </summary>
public A()
{
    Items = new HashedSet<B>();
}
/// <summary>
/// Builds the session factory from a compiled <see cref="Configuration"/>:
/// copies configuration state, starts the cache provider, creates identifier
/// generators plus entity and collection persisters (with their second-level
/// cache strategies), registers named queries and result-set mappings, runs
/// PostInstantiate on all persisters, registers the factory for serialization,
/// performs the configured schema export/update/validation, and wires up the
/// query cache and the current-session context. The order of these steps is
/// significant — do not reorder.
/// </summary>
/// <param name="cfg">Configuration holding mappings, named queries, filters, imports, etc.</param>
/// <param name="mapping">Mapping metadata used while building persisters.</param>
/// <param name="settings">Runtime settings (dialect, cache provider, schema options, ...).</param>
/// <param name="listeners">Event listeners to attach to this factory.</param>
public SessionFactoryImpl(Configuration cfg, IMapping mapping, Settings settings, EventListeners listeners)
{
    Init();
    log.Info("building session factory");

    // Copy configuration state so later mutation of cfg does not affect this factory.
    properties = new Dictionary<string, string>(cfg.Properties);
    interceptor = cfg.Interceptor;
    this.settings = settings;
    sqlFunctionRegistry = new SQLFunctionRegistry(settings.Dialect, cfg.SqlFunctions);
    eventListeners = listeners;
    filters = new Dictionary<string, FilterDefinition>(cfg.FilterDefinitions);
    if (log.IsDebugEnabled)
    {
        log.Debug("Session factory constructed with filter configurations : " + CollectionPrinter.ToString(filters));
    }
    if (log.IsDebugEnabled)
    {
        log.Debug("instantiating session factory with properties: " + CollectionPrinter.ToString(properties));
    }

    try
    {
        if (settings.IsKeywordsImportEnabled)
        {
            SchemaMetadataUpdater.Update(this);
        }
        if (settings.IsAutoQuoteEnabled)
        {
            SchemaMetadataUpdater.QuoteTableAndColumns(cfg);
        }
    }
    catch (NotSupportedException)
    {
        // Ignore if the Dialect does not provide DataBaseSchema
    }

    #region Caches
    settings.CacheProvider.Start(properties);
    #endregion

    #region Generators
    // One identifier generator per root entity; inherited classes are skipped.
    identifierGenerators = new Dictionary<string, IIdentifierGenerator>();
    foreach (PersistentClass model in cfg.ClassMappings)
    {
        if (!model.IsInherited)
        {
            IIdentifierGenerator generator =
                model.Identifier.CreateIdentifierGenerator(settings.Dialect, settings.DefaultCatalogName,
                                                           settings.DefaultSchemaName, (RootClass)model);
            identifierGenerators[model.EntityName] = generator;
        }
    }
    #endregion

    #region Persisters
    // Cache strategies are keyed by region so all classes sharing a region share one strategy.
    Dictionary<string, ICacheConcurrencyStrategy> caches = new Dictionary<string, ICacheConcurrencyStrategy>();
    entityPersisters = new Dictionary<string, IEntityPersister>();
    implementorToEntityName = new Dictionary<System.Type, string>();

    Dictionary<string, IClassMetadata> classMeta = new Dictionary<string, IClassMetadata>();

    foreach (PersistentClass model in cfg.ClassMappings)
    {
        model.PrepareTemporaryTables(mapping, settings.Dialect);
        string cacheRegion = model.RootClazz.CacheRegionName;
        ICacheConcurrencyStrategy cache;
        if (!caches.TryGetValue(cacheRegion, out cache))
        {
            cache = CacheFactory.CreateCache(model.CacheConcurrencyStrategy, cacheRegion, model.IsMutable, settings, properties);
            if (cache != null)
            {
                caches.Add(cacheRegion, cache);
                allCacheRegions.Add(cache.RegionName, cache.Cache);
            }
        }
        IEntityPersister cp = PersisterFactory.CreateClassPersister(model, cache, this, mapping);
        entityPersisters[model.EntityName] = cp;
        classMeta[model.EntityName] = cp.ClassMetadata;
        if (model.HasPocoRepresentation)
        {
            implementorToEntityName[model.MappedClass] = model.EntityName;
        }
    }
    classMetadata = new UnmodifiableDictionary<string, IClassMetadata>(classMeta);

    // Maps entity name -> collection roles in which that entity participates as
    // index or element (presumably used to find collections affected by an entity
    // change — confirm against collectionRolesByEntityParticipant's consumers).
    Dictionary<string, ISet<string>> tmpEntityToCollectionRoleMap = new Dictionary<string, ISet<string>>();
    collectionPersisters = new Dictionary<string, ICollectionPersister>();
    foreach (Mapping.Collection model in cfg.CollectionMappings)
    {
        ICacheConcurrencyStrategy cache =
            CacheFactory.CreateCache(model.CacheConcurrencyStrategy, model.CacheRegionName, model.Owner.IsMutable, settings, properties);
        if (cache != null)
        {
            allCacheRegions[cache.RegionName] = cache.Cache;
        }
        ICollectionPersister persister = PersisterFactory.CreateCollectionPersister(cfg, model, cache, this);
        collectionPersisters[model.Role] = persister;

        // Register the association target of a map-style index, if any.
        IType indexType = persister.IndexType;
        if (indexType != null && indexType.IsAssociationType && !indexType.IsAnyType)
        {
            string entityName = ((IAssociationType)indexType).GetAssociatedEntityName(this);
            ISet<string> roles;
            if (!tmpEntityToCollectionRoleMap.TryGetValue(entityName, out roles))
            {
                roles = new HashedSet<string>();
                tmpEntityToCollectionRoleMap[entityName] = roles;
            }
            roles.Add(persister.Role);
        }
        // Register the association target of the collection element, if any.
        IType elementType = persister.ElementType;
        if (elementType.IsAssociationType && !elementType.IsAnyType)
        {
            string entityName = ((IAssociationType)elementType).GetAssociatedEntityName(this);
            ISet<string> roles;
            if (!tmpEntityToCollectionRoleMap.TryGetValue(entityName, out roles))
            {
                roles = new HashedSet<string>();
                tmpEntityToCollectionRoleMap[entityName] = roles;
            }
            roles.Add(persister.Role);
        }
    }
    Dictionary<string, ICollectionMetadata> tmpcollectionMetadata = new Dictionary<string, ICollectionMetadata>(collectionPersisters.Count);
    foreach (KeyValuePair<string, ICollectionPersister> collectionPersister in collectionPersisters)
    {
        tmpcollectionMetadata.Add(collectionPersister.Key, collectionPersister.Value.CollectionMetadata);
    }
    collectionMetadata = new UnmodifiableDictionary<string, ICollectionMetadata>(tmpcollectionMetadata);
    collectionRolesByEntityParticipant = new UnmodifiableDictionary<string, ISet<string>>(tmpEntityToCollectionRoleMap);
    #endregion

    #region Named Queries
    namedQueries = new Dictionary<string, NamedQueryDefinition>(cfg.NamedQueries);
    namedSqlQueries = new Dictionary<string, NamedSQLQueryDefinition>(cfg.NamedSQLQueries);
    sqlResultSetMappings = new Dictionary<string, ResultSetMappingDefinition>(cfg.SqlResultSetMappings);
    #endregion

    imports = new Dictionary<string, string>(cfg.Imports);

    #region after *all* persisters and named queries are registered
    foreach (IEntityPersister persister in entityPersisters.Values)
    {
        persister.PostInstantiate();
    }
    foreach (ICollectionPersister persister in collectionPersisters.Values)
    {
        persister.PostInstantiate();
    }
    #endregion

    #region Serialization info
    name = settings.SessionFactoryName;
    try
    {
        uuid = (string)UuidGenerator.Generate(null, null);
    }
    catch (Exception)
    {
        throw new AssertionFailure("Could not generate UUID");
    }
    SessionFactoryObjectFactory.AddInstance(uuid, name, this, properties);
    #endregion

    log.Debug("Instantiated session factory");

    #region Schema management
    if (settings.IsAutoCreateSchema)
    {
        new SchemaExport(cfg).Create(false, true);
    }
    if (settings.IsAutoUpdateSchema)
    {
        new SchemaUpdate(cfg).Execute(false, true);
    }
    if (settings.IsAutoValidateSchema)
    {
        new SchemaValidator(cfg, settings).Validate();
    }
    if (settings.IsAutoDropSchema)
    {
        // Keep the export so the schema can be dropped later (not run now).
        schemaExport = new SchemaExport(cfg);
    }
    #endregion

    #region Obtaining TransactionManager
    // not ported yet
    #endregion

    currentSessionContext = BuildCurrentSessionContext();

    if (settings.IsQueryCacheEnabled)
    {
        updateTimestampsCache = new UpdateTimestampsCache(settings, properties);
        queryCache = settings.QueryCacheFactory.GetQueryCache(null, updateTimestampsCache, settings, properties);
        queryCaches = new ThreadSafeDictionary<string, IQueryCache>(new Dictionary<string, IQueryCache>());
    }
    else
    {
        updateTimestampsCache = null;
        queryCache = null;
        queryCaches = null;
    }

    #region Checking for named queries
    if (settings.IsNamedQueryStartupCheckingEnabled)
    {
        IDictionary<string, HibernateException> errors = CheckNamedQueries();
        if (errors.Count > 0)
        {
            StringBuilder failingQueries = new StringBuilder("Errors in named queries: ");
            foreach (KeyValuePair<string, HibernateException> pair in errors)
            {
                failingQueries.Append('{').Append(pair.Key).Append('}');
                log.Error("Error in named query: " + pair.Key, pair.Value);
            }
            throw new HibernateException(failingQueries.ToString());
        }
    }
    #endregion

    Statistics.IsStatisticsEnabled = settings.IsStatisticsEnabled;

    // EntityNotFoundDelegate
    IEntityNotFoundDelegate enfd = cfg.EntityNotFoundDelegate;
    if (enfd == null)
    {
        enfd = new DefaultEntityNotFoundDelegate();
    }
    entityNotFoundDelegate = enfd;
}
/// <summary>
/// Computes information-retrieval statistics (precision/recall at <paramref name="at"/>)
/// for a recommender over the given data model. For each sampled user, preferences with
/// value at or above <paramref name="relevanceThreshold"/> are treated as the relevant
/// items; those items are withheld from a training model, a recommender is built from it,
/// and the top-<paramref name="at"/> recommendations are compared against the withheld items.
/// </summary>
/// <param name="recommenderBuilder">Builds the recommender under evaluation from a training model.</param>
/// <param name="dataModel">The full data model to evaluate against.</param>
/// <param name="at">Number of top recommendations to consider; must be at least 1.</param>
/// <param name="relevanceThreshold">Preference value at or above which an item counts as relevant; must not be NaN.</param>
/// <param name="evaluationPercentage">Fraction of users to sample, in (0, 1].</param>
/// <returns>The aggregated precision and recall.</returns>
/// <exception cref="ArgumentNullException">When a required argument is null.</exception>
/// <exception cref="ArgumentException">When a numeric argument is out of range.</exception>
public IRStatistics Evaluate(RecommenderBuilder recommenderBuilder, DataModel dataModel, int at, double relevanceThreshold, double evaluationPercentage)
{
    if (recommenderBuilder == null)
    {
        throw new ArgumentNullException("recommenderBuilder is null");
    }
    if (dataModel == null)
    {
        throw new ArgumentNullException("dataModel is null");
    }
    if (at < 1)
    {
        throw new ArgumentException("at must be at least 1");
    }
    if (double.IsNaN(evaluationPercentage) || evaluationPercentage <= 0.0 || evaluationPercentage > 1.0)
    {
        throw new ArgumentException("Invalid evaluationPercentage: " + evaluationPercentage);
    }
    if (double.IsNaN(relevanceThreshold))
    {
        // BUGFIX: the message previously reported evaluationPercentage instead of
        // the offending relevanceThreshold value.
        throw new ArgumentException("Invalid relevanceThreshold: " + relevanceThreshold);
    }
    RunningAverage precision = new FullRunningAverage();
    RunningAverage recall = new FullRunningAverage();
    foreach (User user in dataModel.GetUsers())
    {
        Object id = user.ID;
        // Sample only a fraction of the users to keep evaluation cheap.
        if (random.NextDouble() < evaluationPercentage)
        {
            ICollection<Item> relevantItems = new HashedSet<Item>(/* at */);
            Preference[] prefs = user.GetPreferencesAsArray();
            foreach (Preference pref in prefs)
            {
                if (pref.Value >= relevanceThreshold)
                {
                    relevantItems.Add(pref.Item);
                }
            }
            int numRelevantItems = relevantItems.Count;
            if (numRelevantItems > 0)
            {
                ICollection<User> trainingUsers = new List<User>(dataModel.GetNumUsers());
                foreach (User user2 in dataModel.GetUsers())
                {
                    if (id.Equals(user2.ID))
                    {
                        // For the user under evaluation, keep only the non-relevant
                        // preferences in the training data.
                        ICollection<Preference> trainingPrefs = new List<Preference>();
                        prefs = user2.GetPreferencesAsArray();
                        foreach (Preference pref in prefs)
                        {
                            if (!relevantItems.Contains(pref.Item))
                            {
                                trainingPrefs.Add(pref);
                            }
                        }
                        if (trainingPrefs.Count > 0)
                        {
                            User trainingUser = new GenericUser<String>(id.ToString(), trainingPrefs);
                            trainingUsers.Add(trainingUser);
                        }
                    }
                    else
                    {
                        trainingUsers.Add(user2);
                    }
                }
                DataModel trainingModel = new GenericDataModel(trainingUsers);
                Recommender recommender = recommenderBuilder.BuildRecommender(trainingModel);
                try
                {
                    trainingModel.GetUser(id);
                }
                catch (NoSuchElementException)
                {
                    continue; // Oops we excluded all prefs for the user -- just move on
                }
                // Precision@at = hits / at; recall = hits / number of relevant items.
                int intersectionSize = 0;
                foreach (RecommendedItem recommendedItem in recommender.Recommend(id, at))
                {
                    if (relevantItems.Contains(recommendedItem.Item))
                    {
                        intersectionSize++;
                    }
                }
                precision.AddDatum((double)intersectionSize / (double)at);
                recall.AddDatum((double)intersectionSize / (double)numRelevantItems);
            }
        }
    }
    return new IRStatisticsImpl(precision.Average, recall.Average);
}
/// <summary>
/// Persistence round-trip test for <c>Container</c>: saves a container whose lists
/// contain nulls and duplicates, then across several sessions mutates the one-to-many,
/// many-to-many, component and composite collections, verifying the counts and values
/// NHibernate loads back after each commit, and finally clears and deletes everything.
/// The assertion values depend on the exact statement order — do not reorder.
/// </summary>
public void Container()
{
    // Session 1: build and save the object graph.
    ISession s = OpenSession();
    ITransaction t = s.BeginTransaction();
    Container c = new Container();
    Simple x = new Simple();
    x.Count = 123;
    Simple y = new Simple();
    y.Count = 456;
    s.Save(x, (long)1);
    s.Save(y, (long)0);
    // Both lists deliberately contain a null element in the middle.
    IList o2m = new ArrayList();
    o2m.Add(x);
    o2m.Add(null);
    o2m.Add(y);
    IList m2m = new ArrayList();
    m2m.Add(x);
    m2m.Add(null);
    m2m.Add(y);
    c.OneToMany = o2m;
    c.ManyToMany = m2m;
    IList comps = new ArrayList();
    Container.ContainerInnerClass ccic = new Container.ContainerInnerClass();
    ccic.Name = "foo";
    ccic.Simple = x;
    comps.Add(ccic);
    comps.Add(null);
    ccic = new Container.ContainerInnerClass();
    ccic.Name = "bar";
    ccic.Simple = y;
    comps.Add(ccic);
    var compos = new HashedSet <Container.ContainerInnerClass> {
        ccic
    };
    c.Composites = compos;
    c.Components = comps;
    One one = new One();
    Many many = new Many();
    one.Manies = new HashedSet <Many> {
        many
    };
    many.One = one;
    ccic.Many = many;
    ccic.One = one;
    s.Save(one);
    s.Save(many);
    s.Save(c);
    t.Commit();
    s.Close();

    // Session 2: reload, verify initial counts, then shuffle/remove elements.
    s = OpenSession();
    t = s.BeginTransaction();
    c = (Container)s.Load(typeof(Container), c.Id);
    ccic = (Container.ContainerInnerClass)c.Components[2];
    Assert.AreEqual(ccic.One, ccic.Many.One);
    Assert.AreEqual(3, c.Components.Count);
    Assert.AreEqual(1, c.Composites.Count);
    Assert.AreEqual(3, c.OneToMany.Count);
    Assert.AreEqual(3, c.ManyToMany.Count);
    for (int i = 0; i < 3; i++)
    {
        Assert.AreEqual(c.ManyToMany[i], c.OneToMany[i]);
    }
    object o1 = c.OneToMany[0];
    object o2 = c.OneToMany[2];
    c.OneToMany.RemoveAt(2);
    c.OneToMany[0] = o2;
    c.OneToMany[1] = o1;
    o1 = c.Components[2];
    c.Components.RemoveAt(2);
    c.Components[0] = o1;
    c.ManyToMany[0] = c.ManyToMany[2];
    c.Composites.Add((Container.ContainerInnerClass)o1);
    t.Commit();
    s.Close();

    // Session 3: verify the mutations were persisted, then rename elements.
    s = OpenSession();
    t = s.BeginTransaction();
    c = (Container)s.Load(typeof(Container), c.Id);
    Assert.AreEqual(1, c.Components.Count);     //WAS: 2 - h2.0.3 comment
    Assert.AreEqual(2, c.Composites.Count);
    Assert.AreEqual(2, c.OneToMany.Count);
    Assert.AreEqual(3, c.ManyToMany.Count);
    Assert.IsNotNull(c.OneToMany[0]);
    Assert.IsNotNull(c.OneToMany[1]);
    ((Container.ContainerInnerClass)c.Components[0]).Name = "a different name";
    IEnumerator enumer = c.Composites.GetEnumerator();
    enumer.MoveNext();
    ((Container.ContainerInnerClass)enumer.Current).Name = "once again";
    t.Commit();
    s.Close();

    // Session 4: verify the renames, then clear all collections and delete
    // the associated entities.
    s = OpenSession();
    t = s.BeginTransaction();
    c = (Container)s.Load(typeof(Container), c.Id);
    Assert.AreEqual(1, c.Components.Count);     //WAS: 2 -> h2.0.3 comment
    Assert.AreEqual(2, c.Composites.Count);
    Assert.AreEqual("a different name", ((Container.ContainerInnerClass)c.Components[0]).Name);
    enumer = c.Composites.GetEnumerator();
    bool found = false;
    while (enumer.MoveNext())
    {
        if (((Container.ContainerInnerClass)enumer.Current).Name.Equals("once again"))
        {
            found = true;
        }
    }
    Assert.IsTrue(found);
    c.OneToMany.Clear();
    c.ManyToMany.Clear();
    c.Composites.Clear();
    c.Components.Clear();
    s.Delete("from s in class Simple");
    s.Delete("from m in class Many");
    s.Delete("from o in class One");
    t.Commit();
    s.Close();

    // Session 5: verify everything is empty and clean up the container itself.
    s = OpenSession();
    t = s.BeginTransaction();
    c = (Container)s.Load(typeof(Container), c.Id);
    Assert.AreEqual(0, c.Components.Count);
    Assert.AreEqual(0, c.Composites.Count);
    Assert.AreEqual(0, c.OneToMany.Count);
    Assert.AreEqual(0, c.ManyToMany.Count);
    s.Delete(c);
    t.Commit();
    s.Close();
}
/// <summary>
/// Creates a role with empty user and permission sets, so neither collection
/// is ever null.
/// </summary>
public Role()
{
    Permissions = new HashedSet<Permission>();
    Users = new HashedSet<User>();
}
/// <summary>
/// Creates a flight program with an empty set of program locations.
/// </summary>
public FlightProgram()
{
    Locations = new HashedSet<ProgramLocation>();
}
/// <summary>
/// Creates a collector that records only references whose type is one of
/// <paramref name="typesToCollect"/>.
/// </summary>
/// <param name="typesToCollect">The asset reference types this collector accepts.</param>
public BundleReferenceCollector(params AssetReferenceType[] typesToCollect)
{
    validTypes = new HashedSet<AssetReferenceType>(typesToCollect);
    CollectedReferences = new List<CollectedReference>();
}
/// <summary>
/// Creates a post with an empty comment set, so the collection is never null.
/// </summary>
public Post()
{
    Comments = new HashedSet<Comment>();
}
/// <summary>
/// Describes a single assignment in an UPDATE statement's SET clause: validates the
/// assignment AST node, resolves the table name(s) the assigned property belongs to,
/// and collects the parameter specifications appearing on the right-hand side.
/// </summary>
/// <param name="eq">The EQ node representing "property = value".</param>
/// <param name="persister">The persister of the entity being updated.</param>
/// <exception cref="QueryException">
/// When <paramref name="eq"/> is not an EQ node, or its left side is not a property/field reference.
/// </exception>
public AssignmentSpecification(IASTNode eq, IQueryable persister)
{
    if (eq.Type != HqlSqlWalker.EQ)
    {
        throw new QueryException("assignment in set-clause not associated with equals");
    }

    _eq = eq;
    _factory = persister.Factory;

    // Needed to bump this up to DotNode, because that is the only thing which currently
    // knows about the property-ref path in the correct format; it is either this, or
    // recurse over the DotNodes constructing the property path just like DotNode does
    // internally
    DotNode lhs;
    try
    {
        lhs = (DotNode)eq.GetFirstChild();
    }
    catch (InvalidCastException e)
    {
        // BUGFIX: corrected the "assigment" typo in the user-facing error message.
        throw new QueryException(
            string.Format("Left side of assignment should be a case sensitive property or a field (depending on mapping); found '{0}'", eq.GetFirstChild()), e);
    }
    var rhs = (SqlNode)lhs.NextSibling;

    ValidateLhs(lhs);

    string propertyPath = lhs.PropertyPath;
    var temp = new HashedSet<string>();
    // yuck!
    var usep = persister as UnionSubclassEntityPersister;
    if (usep != null)
    {
        // Union-subclass persisters spread the update over every table in the closure.
        temp.AddAll(persister.ConstraintOrderedTableNameClosure);
    }
    else
    {
        temp.Add(persister.GetSubclassTableName(persister.GetSubclassPropertyTableNumber(propertyPath)));
    }
    _tableNames = new ImmutableSet<string>(temp);

    if (rhs == null)
    {
        _hqlParameters = new IParameterSpecification[0];
    }
    else if (IsParam(rhs))
    {
        _hqlParameters = new[] { ((ParameterNode)rhs).HqlParameterSpecification };
    }
    else
    {
        // Collect every parameter node nested anywhere under the RHS expression.
        var parameterList = ASTUtil.CollectChildren(rhs, IsParam);
        _hqlParameters = new IParameterSpecification[parameterList.Count];
        int i = 0;
        foreach (ParameterNode parameterNode in parameterList)
        {
            _hqlParameters[i++] = parameterNode.HqlParameterSpecification;
        }
    }
}
/// <summary>
/// Creates a connect domain with an empty set of points, so the collection is
/// never null.
/// </summary>
public ConnectDomain()
{
    Points = new HashedSet<Point>();
}