public void ET_Repository_BatchImport_BatchSize()
{
    // Import more rows than a single batch holds (2.345 batches' worth),
    // so the importer is forced to split the save into multiple batches.
    int total = (int)(2.345 * BATCH_IMPORT_DATA_SIZE);
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var list = new BookList();
        for (int n = 0; n < total; n++)
        {
            list.Add(new Book());
        }

        var importer = repo.CreateImporter();
        importer.BatchSize = 1000;
        importer.Save(list);

        // Every row must land regardless of how the batches were split.
        Assert.AreEqual(total, repo.CountAll());
    }
}
public void ET_Repository_CDUQ_D_Clear()
{
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        // Persist three new books.
        var list = new BookList { new Book(), new Book(), new Book() };
        repo.Save(list);
        Assert.IsTrue(repo.CountAll() == 3);

        // Clearing the list marks all items for deletion; saving applies it.
        list.Clear();
        repo.Save(list);
        Assert.IsTrue(repo.CountAll() == 0);
    }
}
public void ET_Property_ToDataTable()
{
    // Verifies that an entity list converts to a DataTable with one row per
    // entity and the property values preserved in order.
    var list = new BookList
    {
        new Book { Name = "book1", Code = "001" },
        new Book { Name = "book2", Code = "002" },
        new Book { Name = "book3", Code = "003" },
    };

    var table = list.ToDataTable();

    // Fix: MSTest's Assert.AreEqual(expected, actual) takes the expected value
    // first; the original had the arguments reversed, which makes failure
    // messages report the expected/actual values swapped.
    Assert.AreEqual(3, table.Rows.Count);
    Assert.AreEqual("book1", table.Rows[0]["Name"]);
    Assert.AreEqual("001", table.Rows[0]["Code"]);
    Assert.AreEqual("book2", table.Rows[1]["Name"]);
    Assert.AreEqual("002", table.Rows[1]["Code"]);
    Assert.AreEqual("book3", table.Rows[2]["Name"]);
    Assert.AreEqual("003", table.Rows[2]["Code"]);
}
public void ET_Repository_BatchImport_CDU_U_Status_Aggt()
{
    // After a batch import, both the aggregate roots and their children must
    // end up Unchanged; after modification they turn Modified; after re-import
    // they return to Unchanged.
    int size = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var books = new BookList();
        for (int i = 0; i < size; i++)
        {
            var book = new Book
            {
                ChapterList = { new Chapter(), new Chapter(), }
            };
            books.Add(book);
        }

        var importer = repo.CreateImporter();
        importer.Save(books);

        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.Unchanged, book.PersistenceStatus);
            foreach (var chapter in book.ChapterList)
            {
                // Fix: the original asserted book.PersistenceStatus here, so the
                // chapters' status after import was never actually checked.
                Assert.AreEqual(PersistenceStatus.Unchanged, chapter.PersistenceStatus);
            }
        }

        // Touch one property on each book and on its first chapter.
        for (int i = 0; i < size; i++)
        {
            books[i].Code = i.ToString();
            books[i].ChapterList[0].Name = i.ToString();
        }

        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.Modified, book.PersistenceStatus);
            Assert.AreEqual(PersistenceStatus.Modified, book.ChapterList[0].PersistenceStatus);
        }

        importer.Save(books);

        // Saving the modifications resets everything back to Unchanged.
        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.Unchanged, book.PersistenceStatus);
            Assert.AreEqual(PersistenceStatus.Unchanged, book.ChapterList[0].PersistenceStatus);
        }
    }
}
public void ET_Repository_BatchImport_Transaction()
{
    // A batch import inside an uncommitted transaction scope must be
    // rolled back entirely — for the roots and their children alike.
    int size = BATCH_IMPORT_DATA_SIZE;
    var bookRepo = RF.Concrete<BookRepository>();
    var chapterRepo = RF.Concrete<ChapterRepository>();
    Assert.AreEqual(0, bookRepo.CountAll());

    using (RF.TransactionScope(bookRepo))
    {
        var aggregates = new BookList();
        for (int n = 0; n < size; n++)
        {
            aggregates.Add(new Book
            {
                ChapterList = { new Chapter(), new Chapter(), }
            });
        }

        bookRepo.CreateImporter().Save(aggregates);

        // Visible inside the transaction.
        Assert.AreEqual(size, bookRepo.CountAll());
        Assert.AreEqual(size * 2, chapterRepo.CountAll());
    }
    // Scope disposed without Complete() — everything is rolled back.

    Assert.AreEqual(0, chapterRepo.CountAll());
    Assert.AreEqual(0, bookRepo.CountAll());
}
// Checks entity persistence status after a batch-imported deletion.
public void ET_Repository_BatchImport_CDU_D_Status()
{
    int size = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        // Import `size` books, then clear the list and save again to delete them.
        var books = new BookList();
        for (int i = 0; i < size; i++) { var book = new Book(); books.Add(book); }
        repo.CreateImporter().Save(books);
        books.Clear();
        repo.CreateImporter().Save(books);
        // NOTE(review): if Clear() removes all items from `books`, then
        // books.Count is 0 here and this assertion loop never executes — the
        // PersistenceStatus.New expectation is never actually verified. The
        // intended target may be the list's deleted-items collection; confirm
        // against BookList/EntityList semantics.
        for (int i = 0, c = books.Count; i < c; i++)
        {
            var book = books[i];
            Assert.AreEqual(PersistenceStatus.New, book.PersistenceStatus);
        }
    }
}
public void ET_Repository_BatchImport_CDU_U_Aggt()
{
    // Batch-import aggregates (book + 2 chapters each), then batch-update
    // them and verify the changes round-trip through the database.
    int size = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var books = new BookList();
        for (int n = 0; n < size; n++)
        {
            books.Add(new Book
            {
                ChapterList = { new Chapter(), new Chapter(), }
            });
        }

        var importer = repo.CreateImporter();
        importer.Save(books);
        Assert.AreEqual(size, repo.CountAll());
        Assert.AreEqual(size * 2, RF.Concrete<ChapterRepository>().CountAll());

        // Modify every root and the first child of each, then re-import.
        for (int n = 0; n < size; n++)
        {
            books[n].Code = n.ToString();
            books[n].ChapterList[0].Name = n.ToString();
        }
        importer.Save(books);

        // Read back the first and last aggregates and check the updates stuck.
        var res = repo.GetByIdList(new object[] { books[0].Id, books[books.Count - 1].Id });
        Assert.AreEqual("0", res[0].Code);
        Assert.AreEqual("0", res[0].ChapterList[0].Name);
        var lastCode = (size - 1).ToString();
        Assert.AreEqual(lastCode, res[res.Count - 1].Code);
        Assert.AreEqual(lastCode, res[res.Count - 1].ChapterList[0].Name);
    }
}
public void ET_Repository_BatchImport_CDU_D()
{
    // Batch-import a set of books, then clear the list and save again —
    // the importer must delete every previously imported row.
    int size = BATCH_IMPORT_DATA_SIZE;
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var books = new BookList();
        for (int n = 0; n < size; n++)
        {
            books.Add(new Book());
        }
        repo.CreateImporter().Save(books);

        books.Clear();
        repo.CreateImporter().Save(books);

        Assert.AreEqual(0, repo.CountAll());
    }
}
public void ORM_Query_GetByIdList_5000()
{
    // Verifies GetByIdList handles an id list well above common SQL
    // parameter-count limits (the "5000" in the test name).
    // Fix: the two magic numbers were each repeated twice in the original;
    // name them once so they cannot drift apart.
    const int insertCount = 6000; // rows inserted up front
    const int queryCount = 5500;  // ids queried back in one call
    var repo = RF.Concrete<BookRepository>();
    using (RF.TransactionScope(repo))
    {
        var books = new BookList();
        for (int i = 0; i < insertCount; i++)
        {
            books.Add(new Book());
        }
        repo.CreateImporter().Save(books);

        var idList = new object[queryCount];
        for (int i = 0; i < queryCount; i++)
        {
            idList[i] = books[i].Id;
        }

        var bookList = repo.GetByIdList(idList);

        // Fix: expected value goes first in Assert.AreEqual (the original
        // passed them reversed, swapping expected/actual in failure output).
        Assert.AreEqual(idList.Length, bookList.Count);
        Assert.AreEqual(books[0].Id, bookList[0].Id);
        Assert.AreEqual(books[bookList.Count - 1].Id, bookList[bookList.Count - 1].Id);
    }
}
public void ORM_Performance_Insert_Transaction()
{
    // Performance guard: inserting Config_LineCount rows inside one committed
    // transaction must average under 2 ms per row.
    var repo = RF.Concrete<BookRepository>();
    try
    {
        var bookList = new BookList();
        for (int n = 0; n < Config_LineCount; n++)
        {
            bookList.Add(new Book { Name = n.ToString() });
        }

        var watch = System.Diagnostics.Stopwatch.StartNew();
        using (var tran = RF.TransactionScope(repo))
        {
            repo.Save(bookList);
            tran.Complete();
        }
        watch.Stop();

        var elapsedMs = watch.Elapsed.TotalMilliseconds;
        if (Config_FlushResultToFile)
        {
            // Timing is recorded in the file NAME (content is a placeholder),
            // matching the other performance tests in this file.
            System.IO.File.WriteAllText(@"D:\2.1 事务中 添加 " + Config_LineCount + " 行数据耗时(ms):" + elapsedMs + ",平均一行需要:" + elapsedMs / Config_LineCount, "1");
        }

        Assert.IsTrue(elapsedMs < 2 * Config_LineCount, "添加一行数据,不能超过 2 ms。");
    }
    finally
    {
        this.DeleteAllBooks();
    }
}
// Verifies that AggregateSQL eagerly loads a full object tree — children,
// grandchildren, and a foreign-key reference — in one query, so that walking
// the tree afterwards triggers no lazy-load database access.
public void ORM_AggtSQL_LoadReferenceEntities()
{
    using (RF.TransactionScope(UnitTestEntityRepositoryDataProvider.DbSettingName))
    {
        // Seed data: a section owner and an aggregate book referencing it.
        var so = new SectionOwner();
        RF.Save(so);
        var book = CreateAggtBook(so);
        RF.Save(book);

        var api = AggregateSQL.Instance;
        // Build eager-load options: Book -> ChapterList -> SectionList,
        // ordering sections by their owner's name, and also fetching the
        // SectionOwner FK reference. NOTE: the chain is order-sensitive.
        var loadOptions = api
            .BeginLoadOptions<Book>()
                .LoadChildren(pp => pp.ChapterList)
            .Continue<Chapter>().LoadChildren(c => c.SectionList)
            .Order<Section>().By(v => v.SectionOwner.Name)
                .LoadFK(v => v.SectionOwner);
        var sql = api.GenerateQuerySQL(loadOptions, book.Id);

        // Eagerly load the whole object tree via the aggregate SQL.
        var entities = new BookList();
        api.LoadEntities(entities, sql, loadOptions);

        // No-lazy-load check: record the DB access count, walk every level of
        // the tree (including the FK reference), and assert the count did not
        // change.
        var count = Logger.DbAccessedCount;
        foreach (Book book2 in entities)
        {
            foreach (Chapter chapter in book2.ChapterList)
            {
                foreach (Section section in chapter.SectionList)
                {
                    var so2 = section.SectionOwner;
                }
            }
        }
        Assert.IsTrue(Logger.DbAccessedCount == count, "由于数据已经全部加载完成,所以这里不会发生懒加载。");
    }
}
public void ORM_Performance_Insert()
{
    // Performance guard: a plain (non-transactional) save of 100 rows must
    // finish in under 200 ms, i.e. 2 ms per row on average.
    var repo = RF.Concrete<BookRepository>();
    try
    {
        var bookList = new BookList();
        for (int n = 0; n < 100; n++)
        {
            bookList.Add(new Book { Name = n.ToString() });
        }

        var watch = System.Diagnostics.Stopwatch.StartNew();
        repo.Save(bookList);
        watch.Stop();

        var elapsedMs = watch.Elapsed.TotalMilliseconds;
        // NOTE(review): this test gates on FlushResultToFile while the
        // transactional variant uses Config_FlushResultToFile — confirm
        // whether both flags are intended to exist.
        if (FlushResultToFile)
        {
            // Timing is recorded in the file NAME; the content is a placeholder.
            System.IO.File.WriteAllText(@"D:\1.1 添加 100 行数据耗时(ms):" + elapsedMs + ",平均一行需要:" + elapsedMs / 100, "1");
        }

        Assert.IsTrue(elapsedMs < 200, "添加一行数据,不能超过 2 ms。");
    }
    finally
    {
        this.DeleteAllBooks();
    }
}