/// <summary>
/// Initializes a chunk from its BSON document representation as read back from the
/// chunks collection.
/// </summary>
/// <param name="doc">Document expected to contain _id, files_id, n and data elements.</param>
public GridChunk(Document doc)
{
    this.id = (Oid)doc["_id"];
    // The redundant (Object) cast was dropped; the indexer already returns object.
    this.filesId = doc["files_id"];
    // n may come back as any numeric BSON type, so convert rather than cast.
    this.n = Convert.ToInt32(doc["n"]);
    this.data = (Binary)doc["data"];
}
 public GridChunk(object filesId, int n, byte[] data)
 {
     //            OidGenerator oidGenerator = new OidGenerator();
     //            this.id = oidGenerator.Generate();
     this.filesId = filesId;
     this.n = n;
     this.data = new Binary(data);
 }
Example #3
        /// <summary>
        /// Verifies that a two-byte Binary value serializes to the expected BSON byte sequence.
        /// </summary>
        /// <remarks>
        /// The original test built an extra <c>Binary b</c> and set its Subtype but never used
        /// it; that dead local (and the redundant (byte) casts) have been removed.
        /// </remarks>
        public void TestSimpleBinaryStorage()
        {
            byte[] data = new byte[] { 1, 2 };

            BsonBinary binaryIn = new BsonBinary (new Binary (data));
            MemoryStream stream = new MemoryStream ();
            BsonWriter bsonWriter = new BsonWriter (stream);
            binaryIn.Write (bsonWriter);

            // Expected bytes: 06000000 (size) 02 (subtype) 02000000 (inner size) 0102 (payload).
            string hex = BitConverter.ToString (stream.ToArray());
            hex = hex.Replace ("-", "");
            Assert.AreEqual("0600000002020000000102",hex);
        }
 /// <summary>
 /// Inserting a single document over the message size limit must raise a MongoException;
 /// any other exception type fails the test.
 /// </summary>
 public void TestInsertLargerThan4MBDocument()
 {
     // Four 1MB binaries in one document push it past the 4MB limit.
     Binary oneMegabyte = new Binary(new byte[1024 * 1024]);
     Document big = new Document(){{"name", "Big Document"}, {"b1", oneMegabyte}, {"b2", oneMegabyte}, {"b3", oneMegabyte}, {"b4", oneMegabyte}};
     IMongoCollection inserts = DB["inserts"];

     bool sawMongoException = false;
     try
     {
         inserts.Insert(big);
     }
     catch(MongoException)
     {
         sawMongoException = true;
     }
     catch(Exception e)
     {
         Assert.Fail("Wrong Exception thrown " + e.GetType().Name);
     }

     Assert.IsTrue(sawMongoException, "Shouldn't be able to insert large document");
 }
 /// <summary>
 /// A bulk insert whose total size exceeds the message limit should still succeed
 /// (presumably the driver splits it into multiple messages — confirm), with every
 /// document inserted and no MongoException escaping.
 /// </summary>
 public void TestInsertBulkLargerThan4MBOfDocuments()
 {
     Binary b = new Binary(new byte[1024 * 1024 * 2]);
     IMongoCollection inserts = DB["inserts"];
     try{
         Document[] docs = new Document[10];
             //~20MB of documents in total (10 documents of roughly 2MB each)
         for(int x = 0; x < docs.Length; x++){
             docs[x] = new Document(){{"name", "bulk"}, {"b", b}, {"x", x}};
         }
         inserts.Insert(docs,true);
         long count = inserts.Count(new Document(){{"name", "bulk"}});
         Assert.AreEqual(docs.Length, count, "Wrong number of documents inserted");
     }catch(MongoException){
         Assert.Fail("MongoException should not have been thrown.");
     }
 }
        /// <summary>
        /// Makes sure that at least a skeleton chunk exists for every chunk number implied by
        /// the file's length.  If not, the MD5 calculation will fail on a sparse file.
        /// </summary>
        /// <remarks>
        /// Walks the stored chunks in ascending n order and inserts blank-filled chunks for any
        /// missing numbers.  Fixes an off-by-one in the original gap handling: after filling a
        /// gap below an existing chunk n, the counter was not advanced past n, so a later gap
        /// would re-insert a blank chunk at a number that already existed (e.g. existing
        /// chunks {1,3} produced a duplicate blank chunk 1).
        /// NOTE(review): holes after the highest stored chunk are still not filled — presumably
        /// the final chunk always exists once the stream is written; confirm.
        /// NOTE(review): the query uses this.GridFileInfo.Id while the insert uses
        /// this.gridFileInfo.Id — presumably the property wraps the field; confirm.
        /// </remarks>
        private void EnsureNoHoles()
        {
            // Highest chunk number implied by the file's recorded length.
            int highChunk = CalcChunkNum(this.GridFileInfo.Length);
            Document query = new Document().Append("files_id", this.GridFileInfo.Id)
                                            .Append("n", new Document()
                                            .Append("$lte",highChunk));
            Document sort = new Document().Append("n",1);
            // Only _id and n are needed; chunk payloads are not fetched.
            Document fields = new Document().Append("_id", 1).Append("n",1);

            // One shared blank payload reused for every filler chunk.
            Binary data = new Binary(this.blankBuffer);
            int i = 0;
            using (ICursor cur = chunks.Find(new Document().Append("query",query).Append("sort",sort),0,0,fields)){
                foreach(Document doc in cur.Documents){
                    int n = Convert.ToInt32(doc["n"]);
                    // Fill every missing chunk number below the one that exists.
                    while(i < n){
                        chunks.Insert(new Document().Append("files_id", this.gridFileInfo.Id)
                                      .Append("n", i)
                                      .Append("data", data)
                                      );
                        i++;
                    }
                    // Advance past the chunk that already exists at n.
                    i++;
                }
            }
        }
        /// <summary>
        /// Flushes any changes to the current chunk to the database.  It can be called in
        /// client code at any time or it will automatically be called on Close() and when the
        /// stream position moves off the bounds of the current chunk.
        /// </summary>
        /// <remarks>
        /// NOTE(review): chunkDirty is not reset here; presumably the caller clears it — confirm.
        /// </remarks>
        public override void Flush()
        {
            if(!chunkDirty) return;

            // Reuse the whole buffer when it is exactly one full chunk; otherwise copy out
            // only the bytes written so far (avoids an allocation + copy on the common path).
            byte[] payload;
            if(highestBuffPosition == buffer.Length){
                payload = buffer;
            }else{
                payload = new byte[highestBuffPosition];
                Array.Copy(buffer, payload, highestBuffPosition);
            }
            chunk["data"] = new Binary(payload);

            // A chunk that has been persisted before carries an _id; update it in place,
            // otherwise insert it as a new chunk.
            if(chunk.Contains("_id")){
                chunks.Update(chunk);
            }else{
                chunks.Insert(chunk);
            }

            this.gridFileInfo.Length = highestPosWritten;
        }
        /// <summary>
        /// Writing a document at the maximum BSON document size must fail with an
        /// ArgumentException; any other exception type fails the test.
        /// </summary>
        public void TestWritingTooLargeDocument()
        {
            MemoryStream ms = new MemoryStream();
            BsonWriter writer = new BsonWriter(ms);
            Document big = new Document().Append("x", new Binary(new byte[BsonInfo.MaxDocumentSize]));

            bool sawArgumentException = false;
            try
            {
                writer.Write(big);
            }
            catch(ArgumentException)
            {
                sawArgumentException = true;
            }
            catch(Exception e)
            {
                Assert.Fail("Wrong Exception thrown " + e.GetType().Name);
            }

            Assert.IsTrue(sawArgumentException, "Shouldn't be able to write large document");
        }
Example #9
        /// <summary>
        /// Decodes a single BSON element value of the given type from the underlying reader,
        /// advancing the position counter by the number of bytes consumed.
        /// </summary>
        /// <param name="typeNum">The BSON element type byte read from the stream.</param>
        /// <returns>The decoded value as a CLR/driver object.</returns>
        /// <exception cref="System.IO.InvalidDataException">
        /// Thrown when a CodeWScope element's declared size does not match the bytes read.
        /// </exception>
        /// <exception cref="ArgumentOutOfRangeException">Thrown for unrecognized type numbers.</exception>
        public Object ReadElementType(sbyte typeNum)
        {
            switch ((BsonDataType)typeNum) {
            case BsonDataType.Null:
            case BsonDataType.Undefined:
                // Null and Undefined carry no payload bytes.
                return MongoDBNull.Value;
            case BsonDataType.MinKey:
                return MongoMinKey.Value;
            case BsonDataType.MaxKey:
                return MongoMaxKey.Value;
            case BsonDataType.Boolean:
                position++;
                return reader.ReadBoolean ();
            case BsonDataType.Integer:
                position += 4;
                return reader.ReadInt32 ();
            case BsonDataType.Long:
                position += 8;
                return reader.ReadInt64 ();
            case BsonDataType.Date:
                // Dates are stored as milliseconds offset from the epoch field.
                position += 8;
                long millis = reader.ReadInt64 ();
                return epoch.AddMilliseconds (millis);
            case BsonDataType.Oid:
                position += 12;
                return new Oid (reader.ReadBytes (12));
            case BsonDataType.Number:
                position += 8;
                return reader.ReadDouble ();
            case BsonDataType.String:{
                // ReadLenString handles its own position bookkeeping.
                return ReadLenString ();
            }
            case BsonDataType.Obj:{
                // Embedded documents that look like DBRefs are materialized as DBRef objects.
                Document doc = this.ReadDocument();
                if(DBRef.IsDocumentDBRef(doc)){
                    return DBRef.FromDocument(doc);
                }
                return doc;
            }

            case BsonDataType.Array:{
                // BSON arrays are documents keyed by index; collapse to a typed array only
                // when every element shares one type, otherwise keep the document form.
                Document doc = this.ReadDocument();
                if (ElementsSameType (doc)) {
                    return ConvertToArray (doc);
                } else {
                    return doc;
                }
            }
            case BsonDataType.Regex:{
                // Pattern and options are two consecutive cstrings.
                MongoRegex r = new MongoRegex ();
                r.Expression = this.ReadString ();
                r.Options = this.ReadString ();
                return r;
            }
            case BsonDataType.Code:{
                Code c = new Code ();
                c.Value = ReadLenString();
                return c;
            }
            case BsonDataType.CodeWScope:{
                // Layout: total size, length-prefixed code string, scope document.  The
                // declared size is cross-checked against bytes actually consumed.
                int startpos = position;
                int size = reader.ReadInt32 ();
                position += 4;

                String val = this.ReadLenString();
                Document scope = this.ReadDocument();
                if (size != position - startpos) {
                    throw new System.IO.InvalidDataException (string.Format ("Should have read {0} bytes from stream but read {1} in CodeWScope", size, position - startpos));
                }
                return new CodeWScope (val, scope);
            }
            case BsonDataType.Binary:{
                int size = reader.ReadInt32 ();
                position += 4;
                byte subtype = reader.ReadByte ();
                position ++;
                // The General subtype carries an extra inner length prefix; the payload
                // length is taken from that inner value instead of the outer size.
                if (subtype == (byte)Binary.TypeCode.General) {
                    size = reader.ReadInt32 ();
                    position += 4;
                }
                byte[] bytes = reader.ReadBytes (size);
                position += size;
                Binary b = new Binary ();
                b.Bytes = bytes;
                b.Subtype = (Binary.TypeCode)subtype;
                return b;
            }
            default:
                throw new ArgumentOutOfRangeException (String.Format ("Type Number: {0} not recognized", typeNum));
            }
        }
Example #10
 /// <summary>
 /// ToString on a default-constructed Binary should not throw and should yield a value.
 /// </summary>
 public void TestToString()
 {
     Binary b = new Binary();
     // The original test constructed the instance but asserted nothing; at minimum
     // exercise ToString and verify it produces a non-null result.
     Assert.IsNotNull(b.ToString());
 }