/// <summary>
/// Verifies that a batch import participates in the ambient transaction:
/// rows are visible inside the scope, and everything is rolled back when
/// the scope is disposed without being completed.
/// </summary>
public void ET_Repository_BatchImport_Transaction()
{
    int count = BATCH_IMPORT_DATA_SIZE;
    var bookRepo = RF.Concrete<BookRepository>();
    var chapterRepo = RF.Concrete<ChapterRepository>();
    Assert.AreEqual(0, bookRepo.CountAll());

    using (RF.TransactionScope(bookRepo))
    {
        var list = new BookList();
        for (int index = 0; index < count; index++)
        {
            list.Add(new Book
            {
                ChapterList = { new Chapter(), new Chapter(), }
            });
        }

        var importer = bookRepo.CreateImporter();
        importer.Save(list);

        // Inside the scope the imported roots and children are queryable.
        Assert.AreEqual(count, bookRepo.CountAll());
        Assert.AreEqual(count * 2, chapterRepo.CountAll());
    }

    // The scope was never completed, so the import rolls back entirely.
    Assert.AreEqual(0, chapterRepo.CountAll());
    Assert.AreEqual(0, bookRepo.CountAll());
}
/// <summary>
/// Imports a row count that is a non-integer multiple of the base data size
/// with an explicit BatchSize, so the importer must split the work across
/// several batches plus a partial final batch.
/// </summary>
public void ET_Repository_BatchImport_BatchSize()
{
    int total = (int)(2.345 * BATCH_IMPORT_DATA_SIZE);
    var repo = RF.Concrete<BookRepository>();

    using (RF.TransactionScope(repo))
    {
        var list = new BookList();
        for (int index = 0; index < total; index++)
        {
            list.Add(new Book());
        }

        var importer = repo.CreateImporter();
        importer.BatchSize = 1000;
        importer.Save(list);

        Assert.AreEqual(total, repo.CountAll());
    }
}
/// <summary>
/// Round-trips an aggregate through the importer: create all books with two
/// chapters each, then modify every root and its first chapter and import
/// again as an update; re-query two of the roots to confirm the updates stuck.
/// </summary>
public void ET_Repository_BatchImport_CDU_U_Aggt()
{
    int count = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();

    using (RF.TransactionScope(repo))
    {
        var list = new BookList();
        for (int index = 0; index < count; index++)
        {
            list.Add(new Book
            {
                ChapterList = { new Chapter(), new Chapter(), }
            });
        }

        var importer = repo.CreateImporter();
        importer.Save(list);
        Assert.AreEqual(count, repo.CountAll());
        Assert.AreEqual(count * 2, RF.Concrete<ChapterRepository>().CountAll());

        // Dirty every root and one child, then import again as updates.
        for (int index = 0; index < count; index++)
        {
            list[index].Code = index.ToString();
            list[index].ChapterList[0].Name = index.ToString();
        }
        importer.Save(list);

        // Spot-check the first and last aggregates straight from the store.
        var fetched = repo.GetByIdList(new object[] { list[0].Id, list[list.Count - 1].Id });
        Assert.AreEqual("0", fetched[0].Code);
        Assert.AreEqual("0", fetched[0].ChapterList[0].Name);
        Assert.AreEqual((count - 1).ToString(), fetched[fetched.Count - 1].Code);
        Assert.AreEqual((count - 1).ToString(), fetched[fetched.Count - 1].ChapterList[0].Name);
    }
}
/// <summary>
/// Verifies persistence-status transitions across a batch import cycle:
/// after the initial import every root and child is Unchanged; after local
/// edits they become Modified; after a second import they are Unchanged again.
/// </summary>
public void ET_Repository_BatchImport_CDU_U_Status_Aggt()
{
    int size = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var books = new BookList();
        for (int i = 0; i < size; i++)
        {
            var book = new Book { ChapterList = { new Chapter(), new Chapter(), } };
            books.Add(book);
        }
        var importer = repo.CreateImporter();
        importer.Save(books);

        // After saving, both the roots and their children should be Unchanged.
        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.Unchanged, book.PersistenceStatus);
            foreach (var chapter in book.ChapterList)
            {
                // BUG FIX: the original asserted book.PersistenceStatus here,
                // so the chapters' status was never actually checked.
                Assert.AreEqual(PersistenceStatus.Unchanged, chapter.PersistenceStatus);
            }
        }

        // Touch each root and its first chapter to dirty them.
        for (int i = 0; i < size; i++)
        {
            books[i].Code = i.ToString();
            books[i].ChapterList[0].Name = i.ToString();
        }
        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.Modified, book.PersistenceStatus);
            Assert.AreEqual(PersistenceStatus.Modified, book.ChapterList[0].PersistenceStatus);
        }

        // A second import persists the updates and resets the status.
        importer.Save(books);
        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.Unchanged, book.PersistenceStatus);
            Assert.AreEqual(PersistenceStatus.Unchanged, book.ChapterList[0].PersistenceStatus);
        }
    }
}
// Checks entity status after a batch delete: rows are imported, the list is
// cleared, and the (now-empty-or-delete-tracking) list is saved again.
// NOTE(review): the final loop iterates `books` AFTER Clear(); if Clear()
// empties the collection the loop body — and its PersistenceStatus.New
// assertion — never executes and this test asserts nothing about status.
// Confirm whether BookList.Clear keeps tracked items, or whether the loop
// was meant to run over a snapshot taken before Clear().
public void ET_Repository_BatchImport_CDU_D_Status() { int size = BATCH_IMPORT_DATA_SIZE; var repo = RF.Concrete<BookRepository>(); using (RF.TransactionScope(repo)) { var books = new BookList(); for (int i = 0; i < size; i++) { var book = new Book(); books.Add(book); } repo.CreateImporter().Save(books); books.Clear(); repo.CreateImporter().Save(books); for (int i = 0, c = books.Count; i < c; i++) { var book = books[i]; Assert.AreEqual(PersistenceStatus.New, book.PersistenceStatus); } } }
/// <summary>
/// Verifies batch delete via the importer: import all rows, clear the list,
/// save again, and confirm the table ends up empty.
/// </summary>
public void ET_Repository_BatchImport_CDU_D()
{
    int total = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();

    using (RF.TransactionScope(repo))
    {
        var list = new BookList();
        for (int index = 0; index < total; index++)
        {
            list.Add(new Book());
        }
        repo.CreateImporter().Save(list);

        // Saving the cleared list should remove every previously saved row.
        list.Clear();
        repo.CreateImporter().Save(list);
        Assert.AreEqual(0, repo.CountAll());
    }
}
/// <summary>
/// Verifies that GetByIdList handles a very large id list (5500 ids — beyond
/// common SQL parameter/IN-clause limits), returning one entity per id in
/// the requested order.
/// </summary>
public void ORM_Query_GetByIdList_5000()
{
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var books = new BookList();
        for (int i = 0; i < 6000; i++)
        {
            var book = new Book();
            books.Add(book);
        }
        repo.CreateImporter().Save(books);

        // Query by the ids of the first 5500 of the 6000 saved books.
        var idList = new object[5500];
        for (int i = 0; i < 5500; i++)
        {
            idList[i] = books[i].Id;
        }
        var bookList = repo.GetByIdList(idList);

        // FIX: Assert.AreEqual takes (expected, actual); the original passed
        // them reversed, which produces a misleading failure message.
        Assert.AreEqual(idList.Length, bookList.Count);
        Assert.AreEqual(books[0].Id, bookList[0].Id);
        Assert.AreEqual(books[bookList.Count - 1].Id, bookList[bookList.Count - 1].Id);
    }
}
/// <summary>
/// Measures the time to save Config_LineCount books inside a single
/// transaction and asserts an average of at most 2 ms per row; optionally
/// records the timing (encoded in a file name under D:\) when
/// Config_FlushResultToFile is set.
/// NOTE(review): the sibling test uses a flag named FlushResultToFile —
/// confirm whether the two flags are intentionally distinct.
/// </summary>
public void ORM_Performance_Insert_Transaction()
{
    var repo = RF.Concrete<BookRepository>();
    try
    {
        var rows = new BookList();
        for (int index = 0; index < Config_LineCount; index++)
        {
            rows.Add(new Book { Name = index.ToString() });
        }

        var timer = new System.Diagnostics.Stopwatch();
        timer.Start();
        using (var tran = RF.TransactionScope(repo))
        {
            repo.Save(rows);
            tran.Complete();
        }
        timer.Stop();

        if (Config_FlushResultToFile)
        {
            // The measurement is embedded in the file name; content is a placeholder.
            System.IO.File.WriteAllText(@"D:\2.1 事务中 添加 " + Config_LineCount + " 行数据耗时(ms):" + timer.Elapsed.TotalMilliseconds + ",平均一行需要:" + timer.Elapsed.TotalMilliseconds / Config_LineCount, "1");
        }

        Assert.IsTrue(timer.Elapsed.TotalMilliseconds < 2 * Config_LineCount, "添加一行数据,不能超过 2 ms。");
    }
    finally
    {
        this.DeleteAllBooks();
    }
}
/// <summary>
/// Measures the time to save 100 books without an explicit transaction and
/// asserts an average of at most 2 ms per row (200 ms total); optionally
/// records the timing (encoded in a file name under D:\) when
/// FlushResultToFile is set.
/// </summary>
public void ORM_Performance_Insert()
{
    var repo = RF.Concrete<BookRepository>();
    try
    {
        var rows = new BookList();
        for (int index = 0; index < 100; index++)
        {
            rows.Add(new Book { Name = index.ToString() });
        }

        var timer = new System.Diagnostics.Stopwatch();
        timer.Start();
        repo.Save(rows);
        timer.Stop();

        if (FlushResultToFile)
        {
            // The measurement is embedded in the file name; content is a placeholder.
            System.IO.File.WriteAllText(@"D:\1.1 添加 100 行数据耗时(ms):" + timer.Elapsed.TotalMilliseconds + ",平均一行需要:" + timer.Elapsed.TotalMilliseconds / 100, "1");
        }

        Assert.IsTrue(timer.Elapsed.TotalMilliseconds < 200, "添加一行数据,不能超过 2 ms。");
    }
    finally
    {
        this.DeleteAllBooks();
    }
}