Represents the options to use for an Insert or InsertBatch operation.
        public void TestInsertUpdateAndSaveWithElementNameStartingWithDollarSign()
        {
            // Element names starting with "$" are normally rejected client-side;
            // CheckElementNames = false bypasses that driver check.
            var server = Configuration.TestServer;
            var database = Configuration.TestDatabase;
            var collection = Configuration.TestCollection;
            collection.Drop();

            // The server rejects "$"-prefixed names only at the top level,
            // so "$x" is nested under "v".
            var document = new BsonDocument();
            document.Add("_id", 1);
            document.Add("v", new BsonDocument("$x", 1));
            var insertOptions = new MongoInsertOptions { CheckElementNames = false };
            collection.Insert(document, insertOptions);
            document = collection.FindOne();
            Assert.AreEqual(1, document["v"]["$x"].AsInt32);

            // Round-trip via Update with a full-document replacement.
            document["v"]["$x"] = 2;
            var updateOptions = new MongoUpdateOptions { CheckElementNames = false };
            collection.Update(Query.EQ("_id", 1), Update.Replace(document), updateOptions);
            document = collection.FindOne();
            Assert.AreEqual(2, document["v"]["$x"].AsInt32);

            // Round-trip via Save, reusing the insert options.
            document["v"]["$x"] = 3;
            collection.Save(document, insertOptions);
            document = collection.FindOne();
            Assert.AreEqual(3, document["v"]["$x"].AsInt32);
        }
        public void TestInsertUpdateAndSaveWithElementNameStartingWithDollarSign()
        {
            // Starting with server version 2.5.2 the server got stricter about
            // dollar signs in element names, so skip on 2.6.0 and newer.
            var server = Configuration.TestServer;
            if (server.BuildInfo.Version >= new Version(2, 6, 0))
            {
                return;
            }

            var database = Configuration.TestDatabase;
            var collection = Configuration.TestCollection;
            collection.Drop();

            // "$x" is nested under "v" because the server rejects "$"-prefixed
            // names only at the top level of a document.
            var document = new BsonDocument();
            document.Add("_id", 1);
            document.Add("v", new BsonDocument("$x", 1));
            var insertOptions = new MongoInsertOptions { CheckElementNames = false };
            collection.Insert(document, insertOptions);
            document = collection.FindOne();
            Assert.AreEqual(1, document["v"]["$x"].AsInt32);

            // Round-trip via Update with a full-document replacement.
            document["v"]["$x"] = 2;
            var updateOptions = new MongoUpdateOptions { CheckElementNames = false };
            collection.Update(Query.EQ("_id", 1), Update.Replace(document), updateOptions);
            document = collection.FindOne();
            Assert.AreEqual(2, document["v"]["$x"].AsInt32);

            // Round-trip via Save, reusing the insert options.
            document["v"]["$x"] = 3;
            collection.Save(document, insertOptions);
            document = collection.FindOne();
            Assert.AreEqual(3, document["v"]["$x"].AsInt32);
        }
        public void TestInsertUpdateAndSaveWithElementNameStartingWithDollarSign()
        {
            // Connects with safe mode enabled; "$x" is nested under "v" because
            // the server only rejects "$"-prefixed names at the top level.
            var server = MongoServer.Create("mongodb://localhost/?safe=true;slaveOk=true");
            var database = server["onlinetests"];
            var collection = database["test"];
            collection.Drop();

            var document = new BsonDocument();
            document.Add("_id", 1);
            document.Add("v", new BsonDocument("$x", 1)); // server doesn't allow "$" at top level
            var insertOptions = new MongoInsertOptions { CheckElementNames = false };
            collection.Insert(document, insertOptions);
            document = collection.FindOne();
            Assert.AreEqual(1, document["v"].AsBsonDocument["$x"].AsInt32);

            // Replace the whole document through Update, still skipping name checks.
            document["v"].AsBsonDocument["$x"] = 2;
            var updateOptions = new MongoUpdateOptions { CheckElementNames = false };
            collection.Update(Query.EQ("_id", 1), Update.Replace(document), updateOptions);
            document = collection.FindOne();
            Assert.AreEqual(2, document["v"].AsBsonDocument["$x"].AsInt32);

            // Save round-trips the document one more time.
            document["v"].AsBsonDocument["$x"] = 3;
            collection.Save(document, insertOptions);
            document = collection.FindOne();
            Assert.AreEqual(3, document["v"].AsBsonDocument["$x"].AsInt32);
        }
        public void TestInsertBatchMultipleBatchesWriteConcernEnabledContinueOnErrorTrue()
        {
            // Inserts three two-document sub-batches; the duplicate _id in the
            // middle sub-batch must surface as a WriteConcernException while the
            // remaining sub-batches are still inserted (ContinueOnError).
            var collectionName = Configuration.TestCollection.Name;
            var collectionSettings = new MongoCollectionSettings { WriteConcern = WriteConcern.Acknowledged };
            var collection = Configuration.TestDatabase.GetCollection<BsonDocument>(collectionName, collectionSettings);
            if (collection.Exists()) { collection.Drop(); }

            using (Configuration.TestDatabase.RequestStart())
            {
                var maxMessageLength = Configuration.TestServer.RequestConnection.ServerInstance.MaxMessageLength;

                var filler = new string('x', maxMessageLength / 3); // after overhead results in two documents per sub-batch
                var documents = new BsonDocument[]
                {
                    // first sub-batch
                    new BsonDocument { { "_id", 1 }, { "filler", filler } },
                    new BsonDocument { { "_id", 2 }, { "filler", filler } },
                    // second sub-batch
                    new BsonDocument { { "_id", 3 }, { "filler", filler } },
                    new BsonDocument { { "_id", 3 }, { "filler", filler } }, // duplicate _id error
                    // third sub-batch
                    new BsonDocument { { "_id", 4 }, { "filler", filler } },
                    new BsonDocument { { "_id", 5 }, { "filler", filler } },
                };

                try
                {
                    var options = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
                    collection.InsertBatch(documents, options);

                    // BUG FIX: without this the test silently passed when no
                    // exception was thrown, skipping every assert in the catch.
                    Assert.Fail("Expected WriteConcernException because of the duplicate _id.");
                }
                catch (WriteConcernException ex)
                {
                    // One WriteConcernResult per sub-batch; only the middle one
                    // (containing the duplicate) reports an error message.
                    var results = (IEnumerable<WriteConcernResult>)ex.Data["results"];
                    Assert.AreEqual(3, results.Count());

                    Assert.AreEqual(false, results.ElementAt(0).HasLastErrorMessage);
                    Assert.AreEqual(true, results.ElementAt(1).HasLastErrorMessage);
                    Assert.AreEqual(false, results.ElementAt(2).HasLastErrorMessage);
                }

                // All five distinct _ids made it in despite the duplicate.
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 1)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 2)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 3)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 4)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 5)));
            }
        }
        public void TestInsertBatchMultipleBatchesWriteConcernDisabledContinueOnErrorTrue()
        {
            // With an unacknowledged write concern InsertBatch returns null and
            // the duplicate _id in the middle sub-batch is skipped (ContinueOnError).
            var collectionName = Configuration.TestCollection.Name;
            var collectionSettings = new MongoCollectionSettings { WriteConcern = WriteConcern.Unacknowledged };
            var collection = Configuration.TestDatabase.GetCollection<BsonDocument>(collectionName, collectionSettings);
            if (collection.Exists())
            {
                collection.Drop();
            }

            using (Configuration.TestDatabase.RequestStart())
            {
                // Size the filler so that roughly two documents fit per sub-batch.
                var maxMessageLength = Configuration.TestServer.RequestConnection.ServerInstance.MaxMessageLength;
                var filler = new string('x', maxMessageLength / 3);

                var documents = new[]
                {
                    new BsonDocument { { "_id", 1 }, { "filler", filler } }, // sub-batch 1
                    new BsonDocument { { "_id", 2 }, { "filler", filler } },
                    new BsonDocument { { "_id", 3 }, { "filler", filler } }, // sub-batch 2
                    new BsonDocument { { "_id", 3 }, { "filler", filler } }, // duplicate _id error
                    new BsonDocument { { "_id", 4 }, { "filler", filler } }, // sub-batch 3
                    new BsonDocument { { "_id", 5 }, { "filler", filler } },
                };

                var options = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
                var result = collection.InsertBatch(documents, options);
                Assert.AreEqual(null, result);

                // All five distinct _ids were inserted despite the duplicate.
                for (var id = 1; id <= 5; id++)
                {
                    Assert.AreEqual(1, collection.Count(Query.EQ("_id", id)));
                }
            }
        }
        public void TestInsertBatchMultipleBatchesWriteConcernEnabledContinueOnErrorTrue()
        {
            // Inserts three two-document sub-batches with ContinueOnError; the
            // duplicate _id in the second sub-batch raises MongoDuplicateKeyException
            // but the remaining documents are still inserted.
            var collectionName = Configuration.TestCollection.Name;
            var collectionSettings = new MongoCollectionSettings { WriteConcern = WriteConcern.Acknowledged };
            var collection = Configuration.TestDatabase.GetCollection<BsonDocument>(collectionName, collectionSettings);
            if (collection.Exists()) { collection.Drop(); }

            using (Configuration.TestDatabase.RequestStart())
            {
                var maxMessageLength = Configuration.TestServer.RequestConnection.ServerInstance.MaxMessageLength;

                var filler = new string('x', maxMessageLength / 3); // after overhead results in two documents per sub-batch
                var documents = new BsonDocument[]
                {
                    // first sub-batch
                    new BsonDocument { { "_id", 1 }, { "filler", filler } },
                    new BsonDocument { { "_id", 2 }, { "filler", filler } },
                    // second sub-batch
                    new BsonDocument { { "_id", 3 }, { "filler", filler } },
                    new BsonDocument { { "_id", 3 }, { "filler", filler } }, // duplicate _id error
                    // third sub-batch
                    new BsonDocument { { "_id", 4 }, { "filler", filler } },
                    new BsonDocument { { "_id", 5 }, { "filler", filler } },
                };

                var options = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
                var exception = Assert.Throws<MongoDuplicateKeyException>(() => { collection.InsertBatch(documents, options); });
                var result = exception.WriteConcernResult;

                var expectedResult = new ExpectedWriteConcernResult()
                {
                    HasLastErrorMessage = true
                };
                CheckExpectedResult(expectedResult, result);

                // One WriteConcernResult per sub-batch when the server executed the
                // batch natively; only the middle sub-batch (with the duplicate)
                // carries an error message.
                var results = ((IEnumerable<WriteConcernResult>)exception.Data["results"]).ToArray();
                if (results.Length == 3)
                {
                    Assert.AreEqual(false, results[0].HasLastErrorMessage);
                    Assert.AreEqual(true, results[1].HasLastErrorMessage);
                    Assert.AreEqual(false, results[2].HasLastErrorMessage);
                }
                else
                {
                    // if the opcode was emulated there will just be one synthesized result
                    Assert.AreEqual(1, results.Length);
                    Assert.AreEqual(true, results[0].HasLastErrorMessage);
                }

                // All five distinct _ids were inserted despite the duplicate.
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 1)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 2)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 3)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 4)));
                Assert.AreEqual(1, collection.Count(Query.EQ("_id", 5)));
            }
        }
        public void TestInsertBatchContinueOnError()
        {
            // A unique index on "x" makes every second document in the batch fail.
            var collection = Configuration.TestCollection;
            collection.Drop();
            collection.CreateIndex(IndexKeys.Ascending("x"), IndexOptions.SetUnique(true));

            var batch = new BsonDocument[]
            {
                new BsonDocument("x", 1),
                new BsonDocument("x", 1), // duplicate
                new BsonDocument("x", 2),
                new BsonDocument("x", 2), // duplicate
                new BsonDocument("x", 3),
                new BsonDocument("x", 3) // duplicate
            };

            // Without ContinueOnError the batch stops at the first duplicate:
            // only the first document gets inserted.
            var exception = Assert.Throws<MongoDuplicateKeyException>(() => collection.InsertBatch(batch));
            var result = exception.WriteConcernResult;
            var expectedResult = new ExpectedWriteConcernResult { HasLastErrorMessage = true };
            CheckExpectedResult(expectedResult, result);

            Assert.AreEqual(1, collection.Count());
            Assert.AreEqual(1, collection.FindOne()["x"].AsInt32);

            // With ContinueOnError (servers >= 2.0) the server keeps going past
            // each duplicate, so all three unique values end up inserted.
            if (_server.BuildInfo.Version >= new Version(2, 0, 0))
            {
                var options = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
                exception = Assert.Throws<MongoDuplicateKeyException>(() => collection.InsertBatch(batch, options));
                result = exception.WriteConcernResult;
                expectedResult = new ExpectedWriteConcernResult { HasLastErrorMessage = true };
                CheckExpectedResult(expectedResult, result);

                Assert.AreEqual(3, collection.Count());
            }
        }
 public void TestCountWithReadPreferenceFromFind()
 {
     // Write with w = (number of members) so the secondaries are guaranteed
     // to have the data before counting with a secondary read preference.
     _collection.Drop();
     var memberCount = LegacyTestConfiguration.Server.Secondaries.Length + 1;
     var insertOptions = new MongoInsertOptions { WriteConcern = new WriteConcern(w: memberCount) };
     _collection.Insert(new BsonDocument("x", 1), insertOptions);
     _collection.Insert(new BsonDocument("x", 2), insertOptions);

     var cursor = _collection.Find(Query.EQ("x", 1)).SetReadPreference(ReadPreference.Secondary);
     Assert.AreEqual(1, cursor.Count());
 }
        private void Initialize()
        {
            lock (Sync)
            {
                var database = MongoDatabase.Create(_connectionString);

                // Create the capped collection on first use.
                if (!database.CollectionExists(_collectionName))
                {
                    var options = CollectionOptions
                        .SetCapped(true)
                        .SetAutoIndexId(true)
                        .SetMaxSize(_maxSize);

                    // int.MaxValue acts as the sentinel for "no document limit".
                    if (_maxDocuments != int.MaxValue)
                    {
                        options.SetMaxDocuments(_maxDocuments);
                    }

                    database.CreateCollection(_collectionName, options);
                }

                _collection = database.GetCollection(_collectionName);

                // Element-name checking is disabled so documents may contain names
                // (e.g. starting with "$") that the driver would otherwise reject.
                _mongoInsertOptions = new MongoInsertOptions { CheckElementNames = false };
            }
        }
 private void addNewUser()
 {
     // Persist the new user name with acknowledged (safe-mode) writes.
     var name = new BsonDocument { { "name", newName } };
     var options = new MongoInsertOptions(names) { SafeMode = SafeMode.True };
     names.Save(name, options);
 }
        public void TestInsertBatchContinueOnError() {
            // A unique index on "x" makes every second document in the batch fail.
            var collection = database["continueonerror"];
            collection.Drop();
            collection.CreateIndex(IndexKeys.Ascending("x"), IndexOptions.SetUnique(true));

            var batch = new BsonDocument[] {
                new BsonDocument("x", 1),
                new BsonDocument("x", 1), // duplicate
                new BsonDocument("x", 2),
                new BsonDocument("x", 2), // duplicate
                new BsonDocument("x", 3),
                new BsonDocument("x", 3) // duplicate
            };

            // Without ContinueOnError the batch stops at the first duplicate:
            // only the first document gets inserted.
            try {
                collection.InsertBatch(batch);
                // BUG FIX: previously the test silently passed if no exception
                // was thrown, because the asserts only lived in the catch block.
                Assert.Fail("Expected MongoSafeModeException because of the duplicate key.");
            } catch (MongoSafeModeException) {
                Assert.AreEqual(1, collection.Count());
                Assert.AreEqual(1, collection.FindOne()["x"].AsInt32);
            }

            // With ContinueOnError the server keeps going past each duplicate,
            // so all three unique values end up inserted.
            try {
                var options = new MongoInsertOptions(collection) { Flags = InsertFlags.ContinueOnError };
                collection.InsertBatch(batch, options);
                Assert.Fail("Expected MongoSafeModeException because of the duplicate keys.");
            } catch (MongoSafeModeException) {
                Assert.AreEqual(3, collection.Count());
            }
        }
Esempio n. 12
0
 public void Save(IEnumerable<Object> docs)
 {
     // Batch-insert the documents with acknowledged writes.
     var options = new MongoInsertOptions { WriteConcern = WriteConcern.Acknowledged };
     _collection.InsertBatch(docs, options);
 }
Esempio n. 13
0
        // Save the accumulated in-memory log objects to the database.
        // Returns: 0 if there was nothing to write (or no collection is
        // configured), 1 on success, -1 on error (strError holds the message).
        public int Flush(out string strError)
        {
            strError = "";

            // No collection configured: discard the cache and report nothing done.
            if (this.SearchLogCollection == null)
            {
                this._searchLogCache.Clear();
                return 0;
            }

            try
            {
                List<SearchLogItem> whole = new List<SearchLogItem>();

                // Move the objects to be written out of the cache first, so the
                // write lock is held as briefly as possible.
                if (this.m_lock.TryEnterWriteLock(m_nLockTimeout) == false)
                    throw new ApplicationException("锁定尝试中超时")
;
                try
                {
                    if (this._searchLogCache.Count == 0)
                        return 0;

                    whole.AddRange(this._searchLogCache);
                    this._searchLogCache.Clear();
                    // this.RemoveRange(0, nCount);
                }
                finally
                {
                    this.m_lock.ExitWriteLock();
                }

                // Inserting only needs the read lock: the cache itself is no longer
                // touched, only the local "whole" snapshot is written out.
                if (this.m_lock.TryEnterReadLock(m_nLockTimeout) == false)
                    throw new ApplicationException("锁定尝试中超时");
                try
                {
                    MongoCollection<SearchLogItem> db_items = this.SearchLogCollection;
                    // Fire-and-forget inserts (unacknowledged write concern).
                    MongoInsertOptions options = new MongoInsertOptions() { WriteConcern = WriteConcern.Unacknowledged };
                    foreach (SearchLogItem item in whole)
                    {
                        db_items.Insert(item, options);
                    }
                }
                finally
                {
                    this.m_lock.ExitReadLock();
                }

                // TODO: consider re-inserting the items back into the cache on failure?

                return 1;
            }
            catch (Exception ex)
            {
                strError = "检索日志写入数据库的过程发生错误: " + ex.Message;
                return -1;
            }
        }
Esempio n. 14
0
        // Serializes every in-memory collection to BSON and inserts each one
        // into MongoDB on its own long-running task, then waits for all of them.
        public void Flush()
        {
            long tobson = 0; // cumulative serialization time in milliseconds

            int index = 0;
            Task[] tasks = new Task[_inMemoryDatabase.Collections.Keys.Count];

            foreach (var pair in _inMemoryDatabase.Collections)
            {
                // Documents already carry their ids, so don't assign new ones.
                var mongoSettings = new MongoCollectionSettings { AssignIdOnInsert = false };

                var mongoCollection = Database.GetCollection(typeof(BsonDocument), pair.Key.CollectionName, mongoSettings);
                var inMemoryCollection = (IInMemoryCollection)pair.Value;

                // Time the BSON serialization separately from the insert.
                var stopwatch = Stopwatch.StartNew();
                var docs = BsonDocumentWrapper.CreateMultiple(inMemoryCollection.Documents.Values);
                stopwatch.Stop();
                tobson += stopwatch.ElapsedMilliseconds;

                tasks[index] = Task.Factory.StartNew(() =>
                {
                    var mongoInsertOptions = new MongoInsertOptions();
                    mongoInsertOptions.CheckElementNames = false;
                    mongoInsertOptions.WriteConcern = WriteConcern.Acknowledged;
                    // BUG FIX: the options were previously constructed but never
                    // passed to InsertBatch, so CheckElementNames = false and the
                    // acknowledged write concern had no effect.
                    mongoCollection.InsertBatch(docs, mongoInsertOptions);
                }, TaskCreationOptions.LongRunning);

                index++;
            }

            // Block until every per-collection insert task has completed.
            Task.WaitAll(tasks);

            //Console.WriteLine("Total time for serialization: {0:n0} ms", tobson);
        }
        private void Initialize()
        {
            lock (Sync)
            {
                var mongoUrl = MongoUrl.Create(_connectionString);
                var mongoClient = new MongoClient(mongoUrl);
                var server = mongoClient.GetServer();
                var database = server.GetDatabase(mongoUrl.DatabaseName);

                // Create the capped collection the first time through.
                if (!database.CollectionExists(_collectionName))
                {
                    var options = CollectionOptions
                        .SetCapped(true)
                        .SetAutoIndexId(true)
                        .SetMaxSize(_maxSize);

                    // int.MaxValue acts as the sentinel for "no document limit".
                    if (_maxDocuments != int.MaxValue)
                    {
                        options.SetMaxDocuments(_maxDocuments);
                    }

                    database.CreateCollection(_collectionName, options);
                }

                _collection = database.GetCollection(_collectionName);

                // Keep inserting the remaining documents even if one of them fails.
                _mongoInsertOptions = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
            }
        }
        // Parses up to 10 rounds of three critical-component scores, saves each
        // round to the "history" collection, and plots the three data series.
        // scores: flat triplets [cc1, cc2, cc3, cc1, ...]; -1/-2 mark end of data.
        private void chartCurrent(string[] scores, DateTime dateTime)
        {
            MongoCollection<BsonDocument> history = exerHist.GetCollection<BsonDocument>("history");

            // One data series per chart series.
            DataSeries<int, double> cc1 = new DataSeries<int, double>(cc1legend);
            DataSeries<int, double> cc2 = new DataSeries<int, double>(cc2legend);
            DataSeries<int, double> cc3 = new DataSeries<int, double>(cc3legend);

            bool perfect1 = true;
            bool perfect2 = true;
            bool perfect3 = true;

            int j = 0;

            // Options are loop-invariant, so build them once.
            var options = new MongoInsertOptions(history) { SafeMode = SafeMode.True };

            for (int i = 1; i <= 10; i++)
            {
                double cc1v = double.Parse(scores[(i * 3) - 3]);
                double cc2v = double.Parse(scores[(i * 3) - 2]);
                double cc3v = double.Parse(scores[(i * 3) - 1]);

                // -1 / -2 are sentinel values marking the end of valid data.
                // (Short-circuit || replaces the original bitwise |.)
                if (cc1v == -1 || cc1v == -2)
                {
                    j = i;
                    break;
                }

                // Substitute a small positive value for zero so the point is
                // still visible on the chart.
                if (cc1v == 0)
                {
                    cc1v = 0.05;
                }
                if (cc2v == 0)
                {
                    cc2v = 0.05;
                }
                if (cc3v == 0)
                {
                    cc3v = 0.05;
                }

                BsonDocument rep = new BsonDocument {
                { "name", user.Content.ToString() },
                { "exercise", exercisesCB.SelectedValue.ToString() },
                { "cc1", cc1v },
                { "cc2", cc2v },
                { "cc3", cc3v },
                { "time", dateTime.AddSeconds(i)},
                {"timeid", dateTime}
                };
                history.Save(rep, options);

                // A component is no longer "perfect" once any round scores
                // 0 (mapped to 0.05) or 1.
                if (perfect1 && (cc1v == 0.05 || cc1v == 1))
                {
                    perfect1 = false;
                }
                if (perfect2 && (cc2v == 0.05 || cc2v == 1))
                {
                    perfect2 = false;
                }
                if (perfect3 && (cc3v == 0.05 || cc3v == 1))
                {
                    perfect3 = false;
                }

                cc1.Add(new DataPoint<int, double>() { X = i, Y = cc1v });
                cc2.Add(new DataPoint<int, double>() { X = i, Y = cc2v });
                cc3.Add(new DataPoint<int, double>() { X = i, Y = cc3v });
            }

            if (j == 1)
            {
                // No valid data at all: plot a flat zero line.
                for (int k = 1; k <= 5; k++)
                {
                    cc1.Add(new DataPoint<int, double>() { X = k, Y = 0 });
                    cc2.Add(new DataPoint<int, double>() { X = k, Y = 0 });
                    cc3.Add(new DataPoint<int, double>() { X = k, Y = 0 });
                }
                // BUG FIX: previously criticalComponent1 was cleared three times
                // while criticalComponent2/3 kept their stale feedback text.
                criticalComponent1.Text = "";
                criticalComponent2.Text = "";
                criticalComponent3.Text = "";
                date.Content = "N/A";
            }
            else
            {
                // For each component: praise if perfect, otherwise show the
                // stored feedback text and enable its detail button.
                if (perfect1)
                {
                    criticalComponent1.Text = cc1legend + ": Excellent!";
                    buttoncc1.IsEnabled = false;
                }
                else
                {
                    criticalComponent1.Text = criticalComponent1Feedback;
                    buttoncc1.IsEnabled = true;
                }
                if (perfect2)
                {
                    criticalComponent2.Text = cc2legend + ": Excellent!";
                    buttoncc2.IsEnabled = false;
                }
                else
                {
                    criticalComponent2.Text = criticalComponent2Feedback;
                    buttoncc2.IsEnabled = true;
                }
                if (perfect3)
                {
                    criticalComponent3.Text = cc3legend + ": Excellent!";
                    buttoncc3.IsEnabled = false;
                }
                else
                {
                    criticalComponent3.Text = criticalComponent3Feedback;
                    buttoncc3.IsEnabled = true;
                }
                date.Content = dateTime.ToLongDateString() + " " + dateTime.ToShortTimeString();
            }

            // Finally, associate the data series with the chart series.
            userchart.Series[0].DataSeries = cc1;
            userchart.Series[1].DataSeries = cc2;
            userchart.Series[2].DataSeries = cc3;
        }
Esempio n. 17
0
 static OrderAccess()
 {
     // Default to safe-mode (acknowledged) inserts with no special flags.
     DefaultInsertOptions = new MongoInsertOptions
     {
         Flags = InsertFlags.None,
         SafeMode = SafeMode.True
     };
 }
        public void TestInsertBatchMultipleBatchesWriteConcernDisabledContinueOnErrorTrue()
        {
            // With an unacknowledged write concern InsertBatch returns null; the
            // duplicate _id in the middle sub-batch is skipped via ContinueOnError.
            var collectionName = LegacyTestConfiguration.Collection.Name;
            var collectionSettings = new MongoCollectionSettings { WriteConcern = WriteConcern.Unacknowledged };
            var collection = LegacyTestConfiguration.Database.GetCollection<BsonDocument>(collectionName, collectionSettings);
            if (collection.Exists())
            {
                collection.Drop();
            }

            // Size the filler so that roughly two documents fit per sub-batch.
            var maxMessageLength = _primary.MaxMessageLength;
            var filler = new string('x', maxMessageLength / 3);

            var documents = new[]
                {
                    new BsonDocument { { "_id", 1 }, { "filler", filler } }, // sub-batch 1
                    new BsonDocument { { "_id", 2 }, { "filler", filler } },
                    new BsonDocument { { "_id", 3 }, { "filler", filler } }, // sub-batch 2
                    new BsonDocument { { "_id", 3 }, { "filler", filler } }, // duplicate _id error
                    new BsonDocument { { "_id", 4 }, { "filler", filler } }, // sub-batch 3
                    new BsonDocument { { "_id", 5 }, { "filler", filler } },
                };

            var options = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
            Assert.AreEqual(null, collection.InsertBatch(documents, options));

            // Writes are unacknowledged, so poll briefly until each _id appears.
            for (int id = 1; id <= 5; id++)
            {
                var current = id; // snapshot for the lambda
                if (!SpinWait.SpinUntil(() => collection.Count(Query.EQ("_id", current)) == 1, TimeSpan.FromSeconds(4)))
                {
                    Assert.Fail("_id {0} does not exist.", current);
                }
            }
        }
        public void TestInsertBatchContinueOnError()
        {
            // Exercises InsertBatch with and without InsertFlags.ContinueOnError
            // against a unique index on "x" (every second document is a duplicate).
            var collection = Configuration.TestCollection;
            collection.Drop();
            collection.CreateIndex(IndexKeys.Ascending("x"), IndexOptions.SetUnique(true));

            var batch = new BsonDocument[]
            {
                new BsonDocument("x", 1),
                new BsonDocument("x", 1), // duplicate
                new BsonDocument("x", 2),
                new BsonDocument("x", 2), // duplicate
                new BsonDocument("x", 3),
                new BsonDocument("x", 3) // duplicate
            };

            // try the batch without ContinueOnError: the batch stops at the first
            // duplicate, so only the first document is inserted.
            // NOTE(review): if InsertBatch does NOT throw, the asserts in the
            // catch are silently skipped — consider Assert.Throws instead.
            try
            {
                collection.InsertBatch(batch);
            }
            catch (WriteConcernException)
            {
                Assert.AreEqual(1, collection.Count());
                Assert.AreEqual(1, collection.FindOne()["x"].AsInt32);
            }

            // try the batch again with ContinueOnError (servers >= 2.0): the
            // server keeps going past each duplicate, so all three unique
            // values end up inserted.
            if (_server.BuildInfo.Version >= new Version(2, 0, 0))
            {
                try
                {
                    var options = new MongoInsertOptions { Flags = InsertFlags.ContinueOnError };
                    collection.InsertBatch(batch, options);
                }
                catch (WriteConcernException)
                {
                    Assert.AreEqual(3, collection.Count());
                }
            }
        }
Esempio n. 20
0
        //        private static void LoadHandlingEventData()
        //        {
        //            const string handlingEventSql =
        //                "insert into HandlingEvent (completionTime, registrationTime, type, location_id, voyage_id, cargo_id) " +
        //                "values (?, ?, ?, ?, ?, ?)";
        //
        //            var handlingEventArgs = new[]
        //                {
        //                    //XYZ (SESTO-FIHEL-DEHAM-CNHKG-JPTOK-AUMEL)
        //                    new object[] {Ts(0), Ts((0)), "RECEIVE", 1, null, 1},
        //                    new object[] {Ts((4)), Ts((5)), "LOAD", 1, 1, 1},
        //                    new object[] {Ts((14)), Ts((14)), "UNLOAD", 5, 1, 1},
        //                    new object[] {Ts((15)), Ts((15)), "LOAD", 5, 1, 1},
        //                    new object[] {Ts((30)), Ts((30)), "UNLOAD", 6, 1, 1},
        //                    new object[] {Ts((33)), Ts((33)), "LOAD", 6, 1, 1},
        //                    new object[] {Ts((34)), Ts((34)), "UNLOAD", 3, 1, 1},
        //                    new object[] {Ts((60)), Ts((60)), "LOAD", 3, 1, 1},
        //                    new object[] {Ts((70)), Ts((71)), "UNLOAD", 4, 1, 1},
        //                    new object[] {Ts((75)), Ts((75)), "LOAD", 4, 1, 1},
        //                    new object[] {Ts((88)), Ts((88)), "UNLOAD", 2, 1, 1},
        //                    new object[] {Ts((100)), Ts((102)), "CLAIM", 2, null, 1},
        //                    //ZYX (AUMEL - USCHI - DEHAM -)
        //                    new object[] {Ts((200)), Ts((201)), "RECEIVE", 2, null, 3},
        //                    new object[] {Ts((202)), Ts((202)), "LOAD", 2, 2, 3},
        //                    new object[] {Ts((208)), Ts((208)), "UNLOAD", 7, 2, 3},
        //                    new object[] {Ts((212)), Ts((212)), "LOAD", 7, 2, 3},
        //                    new object[] {Ts((230)), Ts((230)), "UNLOAD", 6, 2, 3},
        //                    new object[] {Ts((235)), Ts((235)), "LOAD", 6, 2, 3},
        //                    //ABC
        //                    new object[] {Ts((20)), Ts((21)), "CLAIM", 2, null, 2},
        //                    //CBA
        //                    new object[] {Ts((0)), Ts((1)), "RECEIVE", 2, null, 4},
        //                    new object[] {Ts((10)), Ts((11)), "LOAD", 2, 2, 4},
        //                    new object[] {Ts((20)), Ts((21)), "UNLOAD", 7, 2, 4},
        //                    //FGH
        //                    new object[] {Ts(100), Ts(160), "RECEIVE", 3, null, 5},
        //                    new object[] {Ts(150), Ts(110), "LOAD", 3, 3, 5},
        //                    //JKL
        //                    new object[] {Ts(200), Ts(220), "RECEIVE", 6, null, 6},
        //                    new object[] {Ts(300), Ts(330), "LOAD", 6, 3, 6},
        //                    new object[] {Ts(400), Ts(440), "UNLOAD", 5, 3, 6} // Unexpected event
        //                };
        //
        //            ExecuteUpdate(session, handlingEventSql, handlingEventArgs);
        //        }
        //
        //        private static void LoadCarrierMovementData()
        //        {
        //            const string voyageSql = "insert into Voyage (id, voyage_number) values (?, ?)";
        //            var voyageArgs = new[]
        //                {
        //                    new object[] {1, "0101"},
        //                    new object[] {2, "0202"},
        //                    new object[] {3, "0303"}
        //                };
        //            ExecuteUpdate(session, voyageSql, voyageArgs);
        //
        //            const string carrierMovementSql =
        //                "insert into CarrierMovement (id, voyage_id, departure_location_id, arrival_location_id, departure_time, arrival_time, cm_index) " +
        //                "values (?,?,?,?,?,?,?)";
        //
        //            var carrierMovementArgs = new[]
        //                {
        //                    // SESTO - FIHEL - DEHAM - CNHKG - JPTOK - AUMEL (voyage 0101)
        //                    new object[] {1, 1, 1, 5, Ts(1), Ts(2), 0},
        //                    new object[] {2, 1, 5, 6, Ts(1), Ts(2), 1},
        //                    new object[] {3, 1, 6, 3, Ts(1), Ts(2), 2},
        //                    new object[] {4, 1, 3, 4, Ts(1), Ts(2), 3},
        //                    new object[] {5, 1, 4, 2, Ts(1), Ts(2), 4},
        //                    // AUMEL - USCHI - DEHAM - SESTO - FIHEL (voyage 0202)
        //                    new object[] {7, 2, 2, 7, Ts(1), Ts(2), 0},
        //                    new object[] {8, 2, 7, 6, Ts(1), Ts(2), 1},
        //                    new object[] {9, 2, 6, 1, Ts(1), Ts(2), 2},
        //                    new object[] {6, 2, 1, 5, Ts(1), Ts(2), 3},
        //                    // CNHKG - AUMEL - FIHEL - DEHAM - SESTO - USCHI - JPTOK (voyage 0303)
        //                    new object[] {10, 3, 3, 2, Ts(1), Ts(2), 0},
        //                    new object[] {11, 3, 2, 5, Ts(1), Ts(2), 1},
        //                    new object[] {12, 3, 6, 1, Ts(1), Ts(2), 2},
        //                    new object[] {13, 3, 1, 7, Ts(1), Ts(2), 3},
        //                    new object[] {14, 3, 7, 4, Ts(1), Ts(2), 4}
        //                };
        //            ExecuteUpdate(session, carrierMovementSql, carrierMovementArgs);
        //        }
        //
        //        private static void LoadCargoData(ISession session)
        //        {
        //            const string cargoSql =
        //                "insert into Cargo (id, tracking_id, origin_id, spec_origin_id, spec_destination_id, spec_arrival_deadline, transport_status, current_voyage_id, last_known_location_id, is_misdirected, routing_status, calculated_at, unloaded_at_dest) " +
        //                "values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)";
        //
        //            var cargoArgs = new[]
        //                {
        //                    new object[]
        //                        {
        //                            1, "XYZ", 1, 1, 2, Ts(10), "IN_PORT", null, 1, false, "ROUTED", Ts(100),
        //                            false
        //                        },
        //                    new object[]
        //                        {
        //                            2, "ABC", 1, 1, 5, Ts(20), "IN_PORT", null, 1, false, "ROUTED", Ts(100),
        //                            false
        //                        },
        //                    new object[]
        //                        {
        //                            3, "ZYX", 2, 2, 1, Ts(30), "IN_PORT", null, 1, false, "NOT_ROUTED", Ts(100),
        //                            false
        //                        },
        //                    new object[]
        //                        {
        //                            4, "CBA", 5, 5, 1, Ts(40), "IN_PORT", null, 1, false, "MISROUTED", Ts(100),
        //                            false
        //                        },
        //                    new object[]
        //                        {
        //                            5, "FGH", 1, 3, 5, Ts(50), "IN_PORT", null, 1, false, "ROUTED", Ts(100),
        //                            false
        //                        }, // Cargo origin differs from spec origin
        //                    new object[]
        //                        {
        //                            6, "JKL", 6, 6, 4, Ts(60), "IN_PORT", null, 1, true, "ROUTED", Ts(100),
        //                            false
        //                        }
        //                };
        //            ExecuteUpdate(session, cargoSql, cargoArgs);
        //        }
        //
        //        private static void LoadLocationData()
        //        {
        //
        //            var locationArgs = new List<Location>
        //                {
        //                    new Location( new UnLocode("SESTO"), "Stockholm") {Id = 1},
        //                    new Location( new UnLocode("AUMEL"), "Melbourne") {Id = 2},
        //                    new Location( new UnLocode("CNHKG"), "Hongkong") {Id = 3},
        //                    new Location( new UnLocode("JPTOK"), "Tokyo") {Id = 4},
        //                    new Location( new UnLocode("FIHEL"), "Helsinki") {Id = 5},
        //                    new Location( new UnLocode("DEHAM"), "Hamburg") {Id = 6},
        //                    new Location( new UnLocode("USCHI"), "Chicago") {Id = 7},
        //                };
        //            ExecuteUpdate(session, locationSql, locationArgs);
        //        }
        //
        //        private static void LoadItineraryData(ISession session)
        //        {
        //            const string legSql =
        //                "insert into Leg (id, cargo_id, voyage_id, load_location_id, unload_location_id, load_time, unload_time, leg_index) " +
        //                "values (?,?,?,?,?,?,?,?)";
        //
        //            var legArgs = new[]
        //                {
        //                    // Cargo 5: Hongkong - Melbourne - Stockholm - Helsinki
        //                    new object[] {1, 5, 1, 3, 2, Ts(1), Ts(2), 0},
        //                    new object[] {2, 5, 1, 2, 1, Ts(3), Ts(4), 1},
        //                    new object[] {3, 5, 1, 1, 5, Ts(4), Ts(5), 2},
        //                    // Cargo 6: Hamburg - Stockholm - Chicago - Tokyo
        //                    new object[] {4, 6, 2, 6, 1, Ts(1), Ts(2), 0},
        //                    new object[] {5, 6, 2, 1, 7, Ts(3), Ts(4), 1},
        //                    new object[] {6, 6, 2, 7, 4, Ts(5), Ts(6), 2}
        //                };
        //            ExecuteUpdate(session, legSql, legArgs);
        //        }
        /// <summary>
        /// Seeds the Mongo shipping database with sample locations, voyages, cargos and
        /// handling events. Intended to be safe to run repeatedly: the batch inserts tolerate
        /// duplicate-key failures, and the two sample cargos (ABC123, JKL567) are only created
        /// when they do not already exist.
        /// </summary>
        /// <param name="handlingEventFactory">Factory used to build the sample handling events.</param>
        /// <param name="handlingEventRepository">Repository used to look up each cargo's handling history.</param>
        public static void LoadMongoData(HandlingEventFactory handlingEventFactory,
            IHandlingEventRepository handlingEventRepository)
        {
            Console.WriteLine("*** Loading data ***");

            var db = Utils.ShippingDb;
            var locations = GetLocationCollection();

            // ContinueOnError lets a batch keep inserting past duplicate-key errors, so a
            // re-run against an already-seeded database only adds the missing documents.
            var mongoInsertOptions = new MongoInsertOptions() { Flags = InsertFlags.ContinueOnError };
            try
            {
                locations.InsertBatch(SampleLocations.GetAll(), mongoInsertOptions);
            }
            catch (WriteConcernException)
            {
                // Expected on re-runs: with ContinueOnError the driver still surfaces the
                // duplicate-key failures as a write-concern error after inserting the rest.
            }

            // NOTE(review): the default document type here is Location but the collection
            // receives voyages; the call compiles because InsertBatch<T> is generic, yet the
            // type argument looks like a copy/paste slip — consider GetCollection<Voyage>("voyages").
            var voyages = db.GetCollection<Location>("voyages");
            try
            {
                voyages.InsertBatch(SampleVoyages.GetAll(), mongoInsertOptions);
            }
            catch (WriteConcernException)
            {
                // Expected on re-runs; see the locations batch above.
            }

            var cargo = db.GetCollection<Cargo>("cargo");
            var handlingEvents = db.GetCollection<HandlingEvent>("handlingEvents");

            // --- Sample cargo ABC123: Hongkong -> Helsinki via New York and Dallas ---
            var routeSpecification = new RouteSpecification(SampleLocations.HONGKONG,
                                                            SampleLocations.HELSINKI,
                                                            DateUtil.ToDate("2009-03-15"));
            var trackingId = new TrackingId("ABC123");
            var abc123exists = new CargoRepositoryMongo(db).Find(trackingId) != null;
            if (!abc123exists)
            {
                var abc123 = new Cargo(trackingId, routeSpecification);
                var itinerary = new Itinerary(
                    new List<Leg>
                        {
                            new Leg(SampleVoyages.HONGKONG_TO_NEW_YORK, SampleLocations.HONGKONG,
                                    SampleLocations.NEWYORK,
                                    DateUtil.ToDate("2009-03-02"), DateUtil.ToDate("2009-03-05")),
                            new Leg(SampleVoyages.NEW_YORK_TO_DALLAS, SampleLocations.NEWYORK, SampleLocations.DALLAS,
                                    DateUtil.ToDate("2009-03-06"), DateUtil.ToDate("2009-03-08")),
                            new Leg(SampleVoyages.DALLAS_TO_HELSINKI, SampleLocations.DALLAS, SampleLocations.HELSINKI,
                                    DateUtil.ToDate("2009-03-09"), DateUtil.ToDate("2009-03-12"))
                        });
                abc123.AssignToRoute(itinerary);

                cargo.Insert(abc123);

                // new DateTime() is default(DateTime) used as the registration time —
                // presumably a placeholder; TODO confirm intended value (e.g. DateTime.UtcNow).
                HandlingEvent event1 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-01"), trackingId, null, SampleLocations.HONGKONG.UnLocode,
                    HandlingType.RECEIVE
                    );
                handlingEvents.Insert(event1);

                HandlingEvent event2 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-02"), trackingId,
                    SampleVoyages.HONGKONG_TO_NEW_YORK.voyageNumber, SampleLocations.HONGKONG.UnLocode,
                    HandlingType.LOAD
                    );
                handlingEvents.Insert(event2);

                HandlingEvent event3 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-05"), trackingId,
                    SampleVoyages.HONGKONG_TO_NEW_YORK.voyageNumber, SampleLocations.NEWYORK.UnLocode,
                    HandlingType.UNLOAD
                    );
                handlingEvents.Insert(event3);

                // Re-derive delivery state from the events just inserted and persist it.
                HandlingHistory handlingHistory = handlingEventRepository.LookupHandlingHistoryOfCargo(trackingId);
                abc123.DeriveDeliveryProgress(handlingHistory);
                cargo.Save(abc123);
            }

            // --- Sample cargo JKL567: Hangzou -> Stockholm via New York and Dallas ---
            var trackingId1 = new TrackingId("JKL567");
            // BUG FIX: this lookup previously used trackingId (ABC123), so the existence
            // check tested the wrong cargo and JKL567 was re-created on every run.
            var jkl567exists = new CargoRepositoryMongo(db).Find(trackingId1) != null;
            if (!jkl567exists)
            {
                var routeSpecification1 = new RouteSpecification(SampleLocations.HANGZOU,
                                                                 SampleLocations.STOCKHOLM,
                                                                 DateUtil.ToDate("2009-03-18"));
                var jkl567 = new Cargo(trackingId1, routeSpecification1);

                var itinerary1 = new Itinerary(new List<Leg>
                    {
                        new Leg(SampleVoyages.HONGKONG_TO_NEW_YORK,
                                SampleLocations.HANGZOU, SampleLocations.NEWYORK,
                                DateUtil.ToDate("2009-03-03"),
                                DateUtil.ToDate("2009-03-05")),
                        new Leg(SampleVoyages.NEW_YORK_TO_DALLAS,
                                SampleLocations.NEWYORK, SampleLocations.DALLAS,
                                DateUtil.ToDate("2009-03-06"),
                                DateUtil.ToDate("2009-03-08")),
                        new Leg(SampleVoyages.DALLAS_TO_HELSINKI,
                                SampleLocations.DALLAS, SampleLocations.STOCKHOLM,
                                DateUtil.ToDate("2009-03-09"),
                                DateUtil.ToDate("2009-03-11"))
                    });
                jkl567.AssignToRoute(itinerary1);
                cargo.Insert(jkl567);

                HandlingEvent event21 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-01"), trackingId1, null, SampleLocations.HANGZOU.UnLocode,
                    HandlingType.RECEIVE);
                handlingEvents.Insert(event21);

                HandlingEvent event22 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-03"), trackingId1,
                    SampleVoyages.HONGKONG_TO_NEW_YORK.voyageNumber, SampleLocations.HANGZOU.UnLocode,
                    HandlingType.LOAD
                    );
                handlingEvents.Insert(event22);

                HandlingEvent event23 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-05"), trackingId1,
                    SampleVoyages.HONGKONG_TO_NEW_YORK.voyageNumber, SampleLocations.NEWYORK.UnLocode,
                    HandlingType.UNLOAD
                    );
                handlingEvents.Insert(event23);

                HandlingEvent event24 = handlingEventFactory.CreateHandlingEvent(
                    new DateTime(), DateUtil.ToDate("2009-03-06"), trackingId1,
                    SampleVoyages.HONGKONG_TO_NEW_YORK.voyageNumber, SampleLocations.NEWYORK.UnLocode,
                    HandlingType.LOAD
                    );
                handlingEvents.Insert(event24);

                // Re-derive delivery state from the events just inserted and persist it.
                HandlingHistory handlingHistory1 = handlingEventRepository.LookupHandlingHistoryOfCargo(trackingId1);
                jkl567.DeriveDeliveryProgress(handlingHistory1);

                cargo.Save(jkl567);
            }
        }