public void CollectionChangedAdapter_DeletingItemFromRealm_ShouldRaiseReset()
{
    // Arrange: seed the Realm with three ordered objects, keeping a reference to the last one.
    OrderedObject lastObject = null;
    _realm.Write(() =>
    {
        for (var order = 0; order < 3; order++)
        {
            lastObject = _realm.CreateObject<OrderedObject>();
            lastObject.Order = order;
        }
    });

    var query = _realm.All<OrderedObject>();

    Exception observedError = null;
    var observedActions = new List<NotifyCollectionChangedAction>();
    var observable = query.ToNotifyCollectionChanged(e => observedError = e);

    // Pin the adapter so the GC cannot collect it while the event loop runs.
    var pin = GCHandle.Alloc(observable);
    observable.CollectionChanged += (o, e) => observedActions.Add(e.Action);
    try
    {
        // Act: delete the tracked object, then pump the event loop so the notification fires.
        _realm.Write(() => _realm.Remove(lastObject));
        TestHelpers.RunEventLoop(TimeSpan.FromMilliseconds(100));

        // Assert: no error surfaced, the adapter mirrors the query, and exactly one Reset was raised.
        Assert.That(observedError, Is.Null);
        Assert.That(observable, Is.EquivalentTo(query));
        Assert.That(observedActions, Is.EquivalentTo(new[] { NotifyCollectionChangedAction.Reset }));
    }
    finally
    {
        pin.Free();
    }
}
// Moves two items in sequence and verifies the coalesced change notification.
// NOTE(review): the method name has a typo ("Itemss") — left as-is because NUnit
// test names are referenced by test runners/CI filters.
[TestCase(1, 3, 1, 3, NotifyCollectionChangedAction.Move)] // a b c d e -> c d a b e
public void ListMove_MultipleMovedItemssTests(int oldIndex1, int newIndex1, int oldIndex2, int newIndex2, NotifyCollectionChangedAction expectedAction)
{
    OrderedObject object1 = null;
    OrderedObject object2 = null;

    // Perform both moves inside one write; TestMoves returns the single
    // NotifyCollectionChangedEventArgs that the collection raised for it.
    var args = TestMoves(items =>
    {
        object1 = items[oldIndex1];
        items.Move(object1, newIndex1);
        object2 = items[oldIndex2];
        items.Move(object2, newIndex2);
    }, expectedAction);

    if (expectedAction == NotifyCollectionChangedAction.Move)
    {
        // The coalesced Move reports the range starting at the smaller of the two
        // source/destination indices; one side then needs a -1 correction because
        // the first move shifts the second item's index before it is moved.
        var oldStartIndex = Math.Min(oldIndex1, oldIndex2);
        var newStartIndex = Math.Min(newIndex1, newIndex2);
        if (oldStartIndex < newStartIndex)
        {
            // x was moved from before to after y, then y was moved to after x, which results in index being adjusted by -1.
            newStartIndex--;
        }
        else
        {
            // x was moved from after to before y, then y was moved to before x, which results in index being adjusted by -1.
            oldStartIndex--;
        }

        Assert.That(args.OldStartingIndex, Is.EqualTo(oldStartIndex));
        Assert.That(args.NewStartingIndex, Is.EqualTo(newStartIndex));
        Assert.That(args.OldItems, Is.EquivalentTo(new[] { object1, object2 }));
        Assert.That(args.NewItems, Is.EquivalentTo(new[] { object1, object2 }));
    }
}
public void CollectionChanged_WhenTransactionHasBothAddAndRemove_ShouldReset()
{
    // Seed a single object that the filtered, ordered query will match.
    var seeded = new OrderedObject { Order = 0, IsPartOfResults = true };
    _realm.Write(() => _realm.Add(seeded));

    Exception caught = null;
    _realm.Error += (sender, e) => caught = e.GetException();

    var collection = _realm.All<OrderedObject>()
                           .Where(o => o.IsPartOfResults)
                           .OrderBy(o => o.Order)
                           .AsRealmCollection();

    // Pin the collection so it survives garbage collection across event-loop pumps.
    var pin = GCHandle.Alloc(collection);
    try
    {
        // Drain the initial notification before subscribing, so only the
        // transaction under test is recorded.
        TestHelpers.RunEventLoop();

        var received = new List<NotifyCollectionChangedEventArgs>();
        collection.CollectionChanged += (sender, e) => received.Add(e);

        Assert.That(caught, Is.Null);

        // One transaction containing both an add and a remove must collapse to Reset.
        _realm.Write(() =>
        {
            _realm.Add(new OrderedObject { Order = 1, IsPartOfResults = true });
            _realm.Remove(seeded);
        });

        TestHelpers.RunEventLoop();

        Assert.That(caught, Is.Null);
        Assert.That(received.Count, Is.EqualTo(1));
        Assert.That(received[0].Action, Is.EqualTo(NotifyCollectionChangedAction.Reset));
    }
    finally
    {
        pin.Free();
    }
}
public void Results_WhenTransactionHasBothAddAndRemove_ShouldReset()
{
    AsyncContext.Run(async delegate
    {
        // INotifyCollectionChanged has no way to express a mixed add+remove batch
        // without corrupting indices, so such a transaction must surface as a Reset.
        var seeded = new OrderedObject { Order = 0, IsPartOfResults = true };
        _realm.Write(() => _realm.Add(seeded));

        Exception caught = null;
        _realm.Error += (sender, e) => caught = e.Exception;

        var collection = _realm.All<OrderedObject>()
                               .Where(o => o.IsPartOfResults)
                               .OrderBy(o => o.Order)
                               .AsRealmCollection();

        // Let the initial notification drain before we start recording.
        await Task.Yield();

        var collectionEvents = new List<NotifyCollectionChangedEventArgs>();
        collection.CollectionChanged += (sender, e) => collectionEvents.Add(e);

        var changedProperties = new List<string>();
        collection.PropertyChanged += (sender, e) => changedProperties.Add(e.PropertyName);

        Assert.That(caught, Is.Null);

        _realm.Write(() =>
        {
            _realm.Add(new OrderedObject { Order = 1, IsPartOfResults = true });
            _realm.Remove(seeded);
        });

        await Task.Delay(MillisecondsToWaitForCollectionNotification);

        Assert.That(caught, Is.Null);
        Assert.That(collectionEvents.Count, Is.EqualTo(1));
        Assert.That(collectionEvents[0].Action, Is.EqualTo(NotifyCollectionChangedAction.Reset));
        Assert.That(changedProperties.Count, Is.EqualTo(2));
        Assert.That(changedProperties, Is.EquivalentTo(new[] { "Count", "Item[]" }));
    });
}
public void ListMove_SingleMovedItemTests(int oldIndex, int newIndex)
{
    OrderedObject moved = null;

    // Perform a single move and capture the resulting change notification.
    var changeArgs = TestMoves(items =>
    {
        moved = items[oldIndex];
        items.Move(moved, newIndex);
    }, NotifyCollectionChangedAction.Move);

    // The Move notification must point from the source index to the destination
    // index and carry the moved object in both the old and new item lists.
    Assert.That(changeArgs.OldStartingIndex, Is.EqualTo(oldIndex));
    Assert.That(changeArgs.NewStartingIndex, Is.EqualTo(newIndex));
    Assert.That(changeArgs.OldItems, Is.EquivalentTo(new[] { moved }));
    Assert.That(changeArgs.NewItems, Is.EquivalentTo(new[] { moved }));
}
public void Results_WhenTransactionHasBothAddAndRemove_ShouldReset()
{
    AsyncContext.Run(async delegate
    {
        // A single write that both adds and removes cannot be represented as
        // discrete INotifyCollectionChanged events without breaking the indices,
        // so the collection is expected to raise one Reset instead.
        var initialObject = new OrderedObject { Order = 0, IsPartOfResults = true };
        _realm.Write(() =>
        {
            _realm.Add(initialObject);
        });

        Exception reportedError = null;
        _realm.Error += (sender, e) => reportedError = e.Exception;

        var results = _realm.All<OrderedObject>()
                            .Where(o => o.IsPartOfResults)
                            .OrderBy(o => o.Order)
                            .AsRealmCollection();

        // Yield once so the initial notification is delivered before subscribing.
        await Task.Yield();

        var collectionChanges = new List<NotifyCollectionChangedEventArgs>();
        results.CollectionChanged += (sender, e) => collectionChanges.Add(e);

        var propertyNames = new List<string>();
        results.PropertyChanged += (sender, e) => propertyNames.Add(e.PropertyName);

        Assert.That(reportedError, Is.Null);

        _realm.Write(() =>
        {
            _realm.Add(new OrderedObject { Order = 1, IsPartOfResults = true });
            _realm.Remove(initialObject);
        });

        await Task.Delay(MillisecondsToWaitForCollectionNotification);

        Assert.That(reportedError, Is.Null);
        Assert.That(collectionChanges.Count, Is.EqualTo(1));
        Assert.That(collectionChanges[0].Action, Is.EqualTo(NotifyCollectionChangedAction.Reset));
        Assert.That(propertyNames.Count, Is.EqualTo(2));
        Assert.That(propertyNames, Is.EquivalentTo(new[] { "Count", "Item[]" }));
    });
}
public static Type TYPE_TO_MONITOR; // = typeof(ImageButtonSkin);

// ReSharper restore MemberCanBePrivate.Global
// ReSharper restore FieldCanBeMadeReadOnly.Global
// ReSharper restore UnassignedField.Global
// ReSharper restore InconsistentNaming
// ReSharper restore UnusedMember.Global
#endif

/**
 * Walks the inheritance chain of the given type and returns an ordered map of
 * class names, starting with the type's own full name and continuing with each
 * base class name until a stop class (e.g. mx.core::Component) is reached.
 * Results are cached per class name in StyleManager.TypeHierarchyCache.
 *
 * Param: type - the type to be introspected (must not be null)
 * Returns: the cached or freshly computed hierarchy map (name -> true)
 */
internal static OrderedObject<bool> GetTypeHierarchy(Type type/*, bool qualified*/)
{
    StyleManager styleManager = StyleManager.Instance;
    string className = type.FullName;

    // FIX: single TryGetValue instead of the original ContainsKey + indexer
    // double lookup; a cached-but-null entry is still rebuilt, as before.
    OrderedObject<bool> hierarchy;
    if (!styleManager.TypeHierarchyCache.TryGetValue(className, out hierarchy) || hierarchy == null)
    {
        hierarchy = new OrderedObject<bool>();
        styleManager.TypeHierarchyCache[className] = hierarchy;

        while (!IsStopClass(type))
        {
            // FIX: the original looped with an empty body once `type` became
            // null (the null check was *inside* the loop), spinning forever if
            // IsStopClass(null) is false. Bail out instead.
            if (null == type)
            {
                break;
            }

            try
            {
                hierarchy.Add(className, true);
                type = type.BaseType;
                if (null != type)
                {
                    className = type.FullName;
                }
            }
            catch (Exception)
            {
                // Best-effort, mirroring the ActionScript original that swallowed
                // ReferenceError — but FIX: stop walking rather than retrying the
                // same failing Add on every iteration (unused `ex` also removed).
                break;
            }
        }

        #region Monitor

#if DEBUG
        if (null != TYPE_TO_MONITOR)
        {
            if (type == TYPE_TO_MONITOR)
                Debug.Log(string.Format(@"### {0} type hierarchy ###
{1}", TYPE_TO_MONITOR, hierarchy));
        }
#endif

        #endregion
    }

    return hierarchy;
}
/// <summary>
/// Main entry point of the application.
/// </summary>
/// <param name="pArguments">Optional: the first argument is the output directory for the generated XML files.</param>
static void Main(string[] pArguments)
{
    // Default to the temp directory; override with the first argument when it
    // names an existing directory.
    msOutputPath = Path.GetTempPath();
    if (pArguments.Length == 1 && Directory.Exists(pArguments[0]))
    {
        // FIX: the original indexed pArguments[1] after checking Length == 1,
        // which is a guaranteed IndexOutOfRangeException — arguments are 0-based.
        msOutputPath = pArguments[0];
    }

    Console.WriteLine("XSerialization.TestApp [path]");
    Console.WriteLine("If the path is not specified, the program will used Path.GetTempPath()");

    // Build two unique round-trip file paths inside the chosen output directory.
    // FIX: use Path.Combine instead of raw string concatenation so a path
    // without a trailing separator still produces a valid file name.
    string lFirstPath = Path.Combine(msOutputPath, Guid.NewGuid() + ".xml");
    string lSecondPath = Path.Combine(msOutputPath, Guid.NewGuid() + ".xml");

    object lModel = OrderedObject.InitializeTest0();

    // FIX: pass the computed paths instead of the hard-coded @"k:\first.xml" /
    // @"k:\second.xml", which left lFirstPath/lSecondPath dead and failed on any
    // machine without a K: drive.
    TestResult lResult1 = DoTest("OrderedObject.InitializeTest0", lFirstPath, lSecondPath, lModel);
    if (lResult1.Status != TestStatus.Sucess)
    {
        Console.WriteLine("First file : " + lResult1.FirstFilename);
        Console.WriteLine("Second file : " + lResult1.SecondFilename);
    }

    Console.WriteLine("The test " + lResult1.Name + " has status : " + lResult1.Status);
    Console.WriteLine("The test serialization average time is " + lResult1.SerializationPerformanceInfos.MeanTime.TotalMilliseconds + " ms");
    Console.WriteLine("The test serialization min time is " + lResult1.SerializationPerformanceInfos.Min.TotalMilliseconds + " ms");
    Console.WriteLine("The test serialization max time is " + lResult1.SerializationPerformanceInfos.Max.TotalMilliseconds + " ms");
    Console.WriteLine("The test deserialization average time is " + lResult1.DeserializationPerformanceInfos.MeanTime.TotalMilliseconds + " ms");
    Console.WriteLine("The test deserialization min time is " + lResult1.DeserializationPerformanceInfos.Min.TotalMilliseconds + " ms");
    Console.WriteLine("The test deserialization max time is " + lResult1.DeserializationPerformanceInfos.Max.TotalMilliseconds + " ms");
    Console.WriteLine("The test init average time is " + lResult1.InitPerformanceInfos.MeanTime.TotalMilliseconds + " ms");
    Console.WriteLine("The test init min time is " + lResult1.InitPerformanceInfos.Min.TotalMilliseconds + " ms");
    Console.WriteLine("The test init max time is " + lResult1.InitPerformanceInfos.Max.TotalMilliseconds + " ms");
    Console.WriteLine("The test " + lResult1.Name + " has status : " + lResult1.Status);
}