/// <summary>
/// Builds a Get request for the supplied Bob key.
/// </summary>
/// <param name="key">Key of the record to fetch.</param>
/// <param name="fullGet">When true uses <c>GetSource.All</c>; otherwise <c>GetSource.Normal</c>.</param>
public GetRequest(Qoollo.BobClient.BobKey key, bool fullGet = false)
{
    // Optimized ByteString construction (replaces ByteString.CopyFrom).
    var keyBytes = ProtoBufByteStringHelper.CreateFromByteArrayOptimized(key.GetKeyBytes());
    Key = new BlobKey { Key = keyBytes };

    Options = new GetOptions {
        Source = fullGet ? GetSource.All : GetSource.Normal
    };
}
/// <summary>
/// Builds a Put request carrying <paramref name="data"/> under the given Bob key.
/// </summary>
/// <param name="key">Key under which the blob is stored.</param>
/// <param name="data">Raw blob payload.</param>
public PutRequest(Qoollo.BobClient.BobKey key, byte[] data)
{
    // Optimized ByteString construction (replaces ByteString.CopyFrom).
    Key = new BlobKey {
        Key = ProtoBufByteStringHelper.CreateFromByteArrayOptimized(key.GetKeyBytes())
    };

    // Stamp the blob meta with the current Unix time in seconds (UTC).
    var timestamp = unchecked((ulong)DateTimeOffset.UtcNow.ToUnixTimeSeconds());
    Data = new Blob {
        Data = ProtoBufByteStringHelper.CreateFromByteArrayOptimized(data),
        Meta = new BlobMeta { Timestamp = timestamp }
    };
}
// Enumerates every document in the ForestDB store and collects the blob keys of all
// attachments referenced by current (leaf) revisions.  Intended to run right after a
// compaction, so non-current revisions are assumed to have no bodies.
public ICollection <BlobKey> FindAllAttachmentKeys()
{
    var keys = new HashSet <BlobKey>();
    // Skip revision bodies during enumeration (loaded on demand below) but include
    // deleted docs, since a deleted-but-conflicted doc may still reference attachments.
    var options = C4EnumeratorOptions.DEFAULT;
    options.flags &= ~C4EnumeratorFlags.IncludeBodies;
    options.flags |= C4EnumeratorFlags.IncludeDeleted;
    var e = new CBForestDocEnumerator(Forest, null, null, options);
    foreach (var next in e) {
        var docInfo = next.DocumentInfo;
        // Nothing to collect if the doc has no attachments; a deleted doc only matters
        // when it is conflicted (other live branches may still reference attachments).
        if (!docInfo->HasAttachments || (docInfo->IsDeleted && !docInfo->IsConflicted)) {
            continue;
        }

        var doc = next.GetDocument();
        // Since db is assumed to have just been compacted, we know that non-current revisions
        // won't have any bodies. So only scan the current revs.
        do {
            if (doc->selectedRev.IsActive && doc->selectedRev.HasAttachments) {
                ForestDBBridge.Check(err => Native.c4doc_loadRevisionBody(doc, err));
                var body = doc->selectedRev.body;
                if (body.size > 0) {
                    // Parse the revision body and record the digest of each "_attachments" entry.
                    var rev = Manager.GetObjectMapper().ReadValue <IDictionary <string, object> >(body);
                    foreach (var entry in rev.Get("_attachments").AsDictionary <string, IDictionary <string, object> >()) {
                        try {
                            var key = new BlobKey(entry.Value.GetCast <string>("digest"));
                            keys.Add(key);
                        } catch (Exception) {
                            // A malformed digest shouldn't abort the whole scan; log and move on.
                            Log.W(TAG, "Invalid digest {0}; skipping", entry.Value.GetCast <string>("digest"));
                        }
                    }
                }
            }
        } while(Native.c4doc_selectNextLeafRevision(doc, true, true, null));
    }

    return(keys);
}
/// <summary>
/// Builds and sends a multipart PUT for a revision whose attachments are marked with
/// "follows" (their bodies are not inlined in the JSON).
/// </summary>
/// <param name="revision">The revision to upload.</param>
/// <returns>false when no attachment needs to follow (or a blob file is missing),
/// in which case the caller falls back to a plain JSON upload; true when the
/// multipart request was queued.</returns>
private bool UploadMultipartRevision(RevisionInternal revision)
{
    MultipartContent multiPart = null;
    var revProps = revision.GetProperties();
    var attachments = revProps.Get("_attachments").AsDictionary <string, object>();
    foreach (var attachmentKey in attachments.Keys) {
        var attachment = attachments.Get(attachmentKey).AsDictionary <string, object>();
        if (attachment.ContainsKey("follows")) {
            if (multiPart == null) {
                // First "follows" attachment: start the multipart body with the revision JSON.
                multiPart = new MultipartContent("related");
                try {
                    var json = Manager.GetObjectMapper().WriteValueAsString(revProps);
                    var utf8charset = Encoding.UTF8;
                    var jsonContent = new StringContent(json, utf8charset, "application/json");
                    multiPart.Add(jsonContent);
                } catch (IOException e) {
                    throw new ArgumentException("Not able to serialize revision properties into a multipart request content.", e);
                }
            }

            var blobStore = LocalDatabase.Attachments;
            var base64Digest = (string)attachment.Get("digest");
            var blobKey = new BlobKey(base64Digest);
            var inputStream = blobStore.BlobStreamForKey(blobKey);
            if (inputStream == null) {
                // Missing blob: abandon the multipart attempt entirely.
                Log.W(Tag, "Unable to find blob file for blobKey: " + blobKey + " - Skipping upload of multipart revision.");
                multiPart = null;
            } else {
                string contentType = null;
                if (attachment.ContainsKey("content_type")) {
                    contentType = (string)attachment.Get("content_type");
                } else if (attachment.ContainsKey("content-type")) {
                    // Legacy field name written by old clients; warn but don't use it.
                    var message = string.Format("Found attachment that uses content-type" +
                        " field name instead of content_type (see couchbase-lite-android" +
                        " issue #80): " + attachment);
                    Log.W(Tag, message);
                }

                var content = new StreamContent(inputStream);
                content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") {
                    FileName = Path.GetFileName(blobStore.PathForKey(blobKey))
                };
                // FIX: MediaTypeHeaderValue throws ArgumentException when given null, which
                // aborted the upload for attachments with no recorded content type.  Fall
                // back to the generic binary media type instead.
                content.Headers.ContentType = new MediaTypeHeaderValue(contentType ?? "application/octet-stream");
                multiPart.Add(content);
            }
        }
    }

    if (multiPart == null) {
        // No "follows" attachments (or a blob was missing): nothing to send as multipart.
        return false;
    }

    var path = string.Format("/{0}?new_edits=false", revision.GetDocId());

    // TODO: need to throttle these requests
    Log.D(Tag, "Uploading multipart request. Revision: " + revision);
    Log.D(Tag, "uploadMultipartRevision() calling asyncTaskStarted()");
    ChangesCount += 1;
    AsyncTaskStarted();
    SendAsyncMultipartRequest(HttpMethod.Put, path, multiPart, (result, e) => {
        try {
            if (e != null) {
                var httpError = e as HttpResponseException;
                if (httpError != null) {
                    if (httpError.StatusCode == System.Net.HttpStatusCode.UnsupportedMediaType) {
                        // Server rejected multipart (415); retry this revision as inline JSON.
                        dontSendMultipart = true;
                        UploadJsonRevision(revision);
                    }
                } else {
                    Log.E(Tag, "Exception uploading multipart request", e);
                    SetLastError(e);
                    RevisionFailed();
                }
            } else {
                Log.D(Tag, "Uploaded multipart request. Result: " + result);
                RemovePending(revision);
            }
        } finally {
            Log.D(Tag, "uploadMultipartRevision() calling asyncTaskFinished()");
            // TODO: calling addToCompleteChangesCount(1)
            AsyncTaskFinished(1);
        }
    });

    return true;
}
// Enumerates every document in the ForestDB store and collects the blob keys of all
// attachments referenced by current (leaf) revisions.
public ICollection<BlobKey> FindAllAttachmentKeys()
{
    var keys = new HashSet<BlobKey>();
    // Skip bodies while enumerating (loaded on demand below) but include deleted docs,
    // since a deleted-but-conflicted doc may still reference attachments.
    var options = C4EnumeratorOptions.DEFAULT;
    options.flags &= ~C4EnumeratorFlags.IncludeBodies;
    options.flags |= C4EnumeratorFlags.IncludeDeleted;
    var e = new CBForestDocEnumerator(Forest, null, null, options);
    foreach(var next in e) {
        var doc = next.Document;
        // Skip docs without attachments; a deleted doc only matters when conflicted.
        if (!doc->HasAttachments || (doc->IsDeleted && !doc->IsConflicted)) {
            continue;
        }

        // Since db is assumed to have just been compacted, we know that non-current revisions
        // won't have any bodies. So only scan the current revs.
        do {
            if(doc->selectedRev.IsActive && doc->selectedRev.HasAttachments) {
                ForestDBBridge.Check(err => Native.c4doc_loadRevisionBody(doc, err));
                var body = doc->selectedRev.body;
                if(body.size > 0) {
                    // Parse the revision body and record the digest of each "_attachments" entry.
                    var rev = Manager.GetObjectMapper().ReadValue<IDictionary<string, object>>(body);
                    foreach(var entry in rev.Get("_attachments").AsDictionary<string, IDictionary<string, object>>()) {
                        try {
                            var key = new BlobKey(entry.Value.GetCast<string>("digest"));
                            keys.Add(key);
                        } catch(Exception){
                            // A malformed digest shouldn't abort the scan; log and continue.
                            Log.W(TAG, "Invalid digest {0}; skipping", entry.Value.GetCast<string>("digest"));
                        }
                    }
                }
            }
        } while(Native.c4doc_selectNextLeafRevision(doc, true, true, null));
    }

    return keys;
}
// Builds and sends a multipart PUT for a revision whose attachments are marked with
// "follows" (bodies not inlined in the JSON).  Returns false when no attachment needs
// to follow -- the caller then falls back to a plain JSON upload -- and true when the
// multipart request was queued.  `length` accumulates an approximate payload size
// (JSON char count + attachment byte count) used only by the final trace log.
private bool UploadMultipartRevision(RevisionInternal revision)
{
    MultipartContent multiPart = null;
    var length = default(double);
    var revProps = revision.GetProperties();
    var attachments = revProps.Get("_attachments").AsDictionary <string, object>();
    foreach (var attachmentKey in attachments.Keys) {
        var attachment = attachments.Get(attachmentKey).AsDictionary <string, object>();
        if (attachment.ContainsKey("follows")) {
            if (multiPart == null) {
                // First "follows" attachment: start the multipart body with the revision JSON.
                multiPart = new MultipartContent("related");
                try {
                    var json = Manager.GetObjectMapper().WriteValueAsString(revProps);
                    var utf8charset = Encoding.UTF8;
                    //multiPart.Add(new StringContent(json, utf8charset, "application/json"), "param1");
                    var jsonContent = new StringContent(json, utf8charset, "application/json");
                    //jsonContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment");
                    multiPart.Add(jsonContent);
                    length += json.Length;
                } catch (Exception e) {
                    throw Misc.CreateExceptionAndLog(Log.To.Sync, e, TAG,
                        "Not able to serialize revision properties into a multipart request content.");
                }
            }

            var blobStore = LocalDatabase.Attachments;
            var base64Digest = (string)attachment.Get("digest");
            var blobKey = new BlobKey(base64Digest);
            var inputStream = blobStore.BlobStreamForKey(blobKey);
            if (inputStream == null) {
                // Missing blob file: abandon the multipart attempt entirely.
                Log.To.Sync.W(TAG, "Unable to find blob file for blobKey: {0} - Skipping upload of multipart revision.", blobKey);
                multiPart = null;
                length = 0;
            } else {
                string contentType = null;
                if (attachment.ContainsKey("content_type")) {
                    contentType = (string)attachment.Get("content_type");
                } else {
                    if (attachment.ContainsKey("content-type")) {
                        // Legacy field name written by old clients; warn but don't use it.
                        var message = string.Format("Found attachment that uses content-type" +
                            " field name instead of content_type (see couchbase-lite-android" +
                            " issue #80): " + attachment);
                        Log.To.Sync.W(TAG, message);
                    }
                }

                var content = new StreamContent(inputStream);
                content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment") {
                    FileName = attachmentKey
                };
                // Null content type falls back to the generic binary media type.
                content.Headers.ContentType = new MediaTypeHeaderValue(contentType ?? "application/octet-stream");
                multiPart.Add(content);
                length += inputStream.Length;
            }
        }
    }

    if (multiPart == null) {
        return(false);
    }

    var path = string.Format("/{0}?new_edits=false", revision.DocID);

    // TODO: need to throttle these requests
    Log.To.Sync.D(TAG, "{0} uploading multipart request. Revision: {1}", this, revision);
    SafeAddToChangesCount(1);
    SendAsyncMultipartRequest(HttpMethod.Put, path, multiPart, (result, e) => {
        if (e != null) {
            var httpError = Misc.Flatten(e) as HttpResponseException;
            if (httpError != null) {
                if (httpError.StatusCode == System.Net.HttpStatusCode.UnsupportedMediaType) {
                    // Server rejected multipart (415); send inline JSON from now on.
                    _dontSendMultipart = true;
                    UploadJsonRevision(revision);
                }
            } else {
                LastError = e;
                RevisionFailed();
            }
        } else {
            Log.To.Sync.V(TAG, "{0} sent multipart {1}", this, revision);
            SafeIncrementCompletedChangesCount();
            RemovePending(revision);
        }
    });

    Log.To.Sync.V(TAG, "{0} queuing revision (multipart, {1}kb)", this, length / 1024.0);
    return(true);
}
// Constructs an attachment from its metadata dictionary (the value stored under a name
// in "_attachments").  Exactly one of inline "data", "stub":true, or "follows":true
// must be present; anything else is rejected as a bad attachment.
public AttachmentInternal(string name, IDictionary <string, object> info)
    : this(name, info.GetCast <string>("content_type"))
{
    Length = info.GetCast <long>("length");
    EncodedLength = info.GetCast <long>("encoded_length");
    _digest = info.GetCast <string>("digest");
    if (_digest != null) {
        BlobKey = new BlobKey(_digest);
    }

    string encodingString = info.GetCast <string>("encoding");
    if (encodingString != null) {
        // "gzip" is the only attachment encoding this implementation understands.
        if (encodingString.Equals("gzip")) {
            Encoding = AttachmentEncoding.GZIP;
        } else {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadEncoding, TAG,
                "Invalid encoding type ({0}) in ctor", encodingString);
        }
    }

    var data = info.Get("data");
    if (data != null) {
        // If there's inline attachment data, decode and store it:
        if (data is string) {
            _data = Convert.FromBase64String((string)data);
        } else {
            _data = data as IEnumerable <byte>;
        }

        if (_data == null) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadEncoding, TAG,
                "Invalid data type ({0}) in ctor", data.GetType().Name);
        }

        SetPossiblyEncodedLength(_data.LongCount());
    } else if (info.GetCast <bool>("stub", false)) {
        // This item is just a stub; validate and skip it
        if (info.ContainsKey("revpos")) {
            var revPos = info.GetCast("revpos", -1);
            // PouchDB has a bug that generates "revpos":0; allow this (#627)
            if (revPos < 0) {
                throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
                    "Invalid revpos ({0}) in ctor", revPos);
            }

            RevPos = revPos;
        }
    } else if (info.GetCast <bool>("follows", false)) {
        // I can't handle this myself; my caller will look it up from the digest
        if (Digest == null) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
                "follows is true, but the attachment digest is null in ctor");
        }

        if (info.ContainsKey("revpos")) {
            var revPos = info.GetCast("revpos", -1);
            // PouchDB has a bug that generates "revpos":0; allow this (#627)
            if (revPos < 0) {
                throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
                    "Invalid revpos ({0}) in ctor", revPos);
            }

            RevPos = revPos;
        }
    } else {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
            "Neither data nor stub nor follows was specified on the attachment data");
    }
}
/// <summary>
/// Builds and sends a multipart PUT for a revision whose attachments are marked with
/// "follows" (bodies not inlined in the JSON).
/// </summary>
/// <param name="revision">The revision to upload.</param>
/// <returns>false when there is nothing to send as multipart (caller falls back to
/// plain JSON); true when the multipart request was queued.</returns>
private bool UploadMultipartRevision(RevisionInternal revision)
{
    MultipartFormDataContent multiPart = null;
    var revProps = revision.GetProperties();
    revProps.Put("_revisions", LocalDatabase.GetRevisionHistoryDict(revision));
    var attachments = (IDictionary <string, object>)revProps.Get("_attachments");
    foreach (var attachmentKey in attachments.Keys) {
        var attachment = (IDictionary <String, Object>)attachments.Get(attachmentKey);
        if (attachment.ContainsKey("follows")) {
            if (multiPart == null) {
                // First "follows" attachment: start the multipart body with the revision JSON.
                multiPart = new MultipartFormDataContent();
                try {
                    var json = Manager.GetObjectMapper().WriteValueAsString(revProps);
                    var utf8charset = Encoding.UTF8;
                    multiPart.Add(new StringContent(json, utf8charset, "application/json"), "param1");
                } catch (IOException e) {
                    throw new ArgumentException("Not able to serialize revision properties into a multipart request content.", e);
                }
            }

            var blobStore = LocalDatabase.Attachments;
            var base64Digest = (string)attachment.Get("digest");
            var blobKey = new BlobKey(base64Digest);
            var inputStream = blobStore.BlobStreamForKey(blobKey);
            if (inputStream == null) {
                // Missing blob file: abandon the multipart attempt entirely.
                Log.W(Tag, "Unable to find blob file for blobKey: " + blobKey + " - Skipping upload of multipart revision.");
                multiPart = null;
            } else {
                string contentType = null;
                if (attachment.ContainsKey("content_type")) {
                    contentType = (string)attachment.Get("content_type");
                } else if (attachment.ContainsKey("content-type")) {
                    // Legacy field name written by old clients; warn but don't use it.
                    var message = string.Format("Found attachment that uses content-type" +
                        " field name instead of content_type (see couchbase-lite-android" +
                        " issue #80): " + attachment);
                    Log.W(Tag, message);
                }

                var content = new StreamContent(inputStream);
                // FIX: MediaTypeHeaderValue throws ArgumentException when the stored
                // content type is null; fall back to the generic binary media type.
                content.Headers.ContentType = new MediaTypeHeaderValue(contentType ?? "application/octet-stream");
                multiPart.Add(content, attachmentKey);
            }
        }
    }

    if (multiPart == null) {
        return false;
    }

    var path = string.Format("/{0}?new_edits=false", revision.GetDocId());

    // TODO: need to throttle these requests
    // FIX: log message typo ("Uploadeding") corrected and the string literal rejoined
    // (it was split across a raw newline).
    Log.D(Tag, "Uploading multipart request. Revision: " + revision);
    Log.D(Tag, this + "|" + Thread.CurrentThread() + ": uploadMultipartRevision() calling asyncTaskStarted()");
    AsyncTaskStarted();
    SendAsyncMultipartRequest(HttpMethod.Put, path, multiPart, (result, e) => {
        try {
            if (e != null) {
                Log.E(Tag, "Exception uploading multipart request", e);
                LastError = e;
                RevisionFailed();
            } else {
                Log.D(Tag, "Uploaded multipart request. Result: " + result);
            }
        } finally {
            AsyncTaskFinished(1);
        }
    });

    return true;
}
/// <summary>Replaces the blob key backing this instance.</summary>
public virtual void SetBlobKey(BlobKey blobKey) => this.blobKey = blobKey;
// Reads the attachments rows for the given revision sequence and splices an
// "_attachments" dictionary into the already-serialized document JSON.
// Throws CouchbaseLiteException on query-preparation failure, digest corruption,
// or a SQLite stepping error.
//
// CREATE TABLE attachments (
//    sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE,
//    filename TEXT NOT NULL, key BLOB NOT NULL, type TEXT, length INTEGER NOT NULL,
//    revpos INTEGER DEFAULT 0, encoding INTEGER DEFAULT 0, encoded_length INTEGER );
private void AddAttachmentsToSequence(long sequence, List<byte> json)
{
    sqlite3_stmt attQuery = null;
    try {
        PrepareSQL(ref attQuery, "SELECT filename, key, type, length," +
            " revpos, encoding, encoded_length FROM attachments WHERE sequence=?");
    } catch(CouchbaseLiteException) {
        Log.W(TAG, "Failed to create SQLite query for attachments table in source database '{0}'", _path);
        throw;
    } catch(Exception e) {
        throw new CouchbaseLiteException(String.Format(
            "Error creating SQLite query for attachments table in source database '{0}'", _path), e) { Code = StatusCode.DbError };
    }

    raw.sqlite3_bind_int64(attQuery, 1, sequence);

    // FIX: the digest length check used to call SHA1.Create() for every row, creating
    // (and never disposing) a hash object per attachment.  Compute the expected key
    // length once up front instead.
    int digestLength;
    using (var sha1 = SHA1.Create()) {
        digestLength = sha1.HashSize / 8;
    }

    var attachments = new Dictionary<string, object>();
    int err;
    while (raw.SQLITE_ROW == (err = raw.sqlite3_step(attQuery))) {
        string name = raw.sqlite3_column_text(attQuery, 0);
        var key = raw.sqlite3_column_blob(attQuery, 1);
        string mimeType = raw.sqlite3_column_text(attQuery, 2);
        long length = raw.sqlite3_column_int64(attQuery, 3);
        int revpos = raw.sqlite3_column_int(attQuery, 4);
        int encoding = raw.sqlite3_column_int(attQuery, 5);
        long encodedLength = raw.sqlite3_column_int64(attQuery, 6);
        if (key.Length != digestLength) {
            raw.sqlite3_finalize(attQuery);
            throw new CouchbaseLiteException(String.Format(
                "Digest key length incorrect ({0})", Convert.ToBase64String(key)), StatusCode.CorruptError);
        }

        // The body itself "follows": it lives in the blob store keyed by this digest.
        var blobKey = new BlobKey(key);
        var att = new NonNullDictionary<string, object> {
            { "type", mimeType },
            { "digest", blobKey.Base64Digest() },
            { "length", length },
            { "revpos", revpos },
            { "follows", true },
            { "encoding", encoding != 0 ? "gzip" : null },
            { "encoded_length", encoding != 0 ? (object)encodedLength : null }
        };
        attachments[name] = att;
    }

    raw.sqlite3_finalize(attQuery);
    if (err != raw.SQLITE_DONE) {
        throw new CouchbaseLiteException(String.Format(
            "Failed to finalize attachment query ({0}: {1})", err, raw.sqlite3_errmsg(_sqlite)), SqliteErrToStatus(err).Code);
    }

    if (attachments.Count > 0) {
        // Splice attachment JSON into the document JSON: strip attJson's outer braces
        // and insert the contents just before the document's closing '}'.
        var attJson = Manager.GetObjectMapper().WriteValueAsBytes(new Dictionary<string, object> {
            { "_attachments", attachments }
        });
        if (json.Count > 2) {
            json.Insert(json.Count - 1, (byte)',');
        }

        json.InsertRange(json.Count - 1, attJson.Skip(1).Take(attJson.Count() - 2));
    }
}
// Reads the attachments rows for the given revision sequence and splices an
// "_attachments" dictionary into the already-serialized document JSON.
// Returns StatusCode.Ok on success, or an error Status on SQLite/corruption failure.
//
// CREATE TABLE attachments (
//    sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE,
//    filename TEXT NOT NULL, key BLOB NOT NULL, type TEXT, length INTEGER NOT NULL,
//    revpos INTEGER DEFAULT 0, encoding INTEGER DEFAULT 0, encoded_length INTEGER );
private Status AddAttachmentsToSequence(long sequence, List<byte> json)
{
    sqlite3_stmt attQuery = null;
    Status status = PrepareSQL(ref attQuery, "SELECT filename, key, type, length," +
        " revpos, encoding, encoded_length FROM attachments WHERE sequence=?");
    if (status.IsError) {
        return status;
    }

    raw.sqlite3_bind_int64(attQuery, 1, sequence);

    // FIX: the digest length check used to instantiate (and never dispose) a new SHA1
    // object on every row; compute the expected key length once instead.
    int digestLength;
    using (var sha1 = SHA1.Create()) {
        digestLength = sha1.HashSize / 8;
    }

    var attachments = new Dictionary<string, object>();
    int err;
    while (raw.SQLITE_ROW == (err = raw.sqlite3_step(attQuery))) {
        string name = raw.sqlite3_column_text(attQuery, 0);
        var key = raw.sqlite3_column_blob(attQuery, 1);
        string mimeType = raw.sqlite3_column_text(attQuery, 2);
        long length = raw.sqlite3_column_int64(attQuery, 3);
        int revpos = raw.sqlite3_column_int(attQuery, 4);
        int encoding = raw.sqlite3_column_int(attQuery, 5);
        long encodedLength = raw.sqlite3_column_int64(attQuery, 6);
        if (key.Length != digestLength) {
            raw.sqlite3_finalize(attQuery);
            return new Status(StatusCode.CorruptError);
        }

        // The body itself "follows": it lives in the blob store keyed by this digest.
        var blobKey = new BlobKey(key);
        var att = new NonNullDictionary<string, object> {
            { "type", mimeType },
            { "digest", blobKey.Base64Digest() },
            { "length", length },
            { "revpos", revpos },
            { "follows", true },
            { "encoding", encoding != 0 ? "gzip" : null },
            { "encoded_length", encoding != 0 ? (object)encodedLength : null }
        };
        attachments[name] = att;
    }

    raw.sqlite3_finalize(attQuery);
    if (err != raw.SQLITE_DONE) {
        return SqliteErrToStatus(err);
    }

    if (attachments.Count > 0) {
        // Splice attachment JSON into the document JSON: strip attJson's outer braces
        // and insert the contents just before the document's closing '}'.
        var attJson = Manager.GetObjectMapper().WriteValueAsBytes(new Dictionary<string, object> {
            { "_attachments", attachments }
        });
        if (json.Count > 2) {
            json.Insert(json.Count - 1, (byte)',');
        }

        json.InsertRange(json.Count - 1, attJson.Skip(1).Take(attJson.Count() - 2));
    }

    return new Status(StatusCode.Ok);
}
/// <summary>
/// Constructs an attachment from its metadata dictionary (the value stored under a
/// name in "_attachments"), delegating content-type handling to the primary ctor.
/// Exactly one of inline "data", "stub":true, or "follows":true must be present.
/// </summary>
public AttachmentInternal(string name, IDictionary <string, object> info)
    : this(name, info.GetCast <string>("content_type"))
{
    Length = info.GetCast <long>("length");
    EncodedLength = info.GetCast <long>("encoded_length");

    _digest = info.GetCast <string>("digest");
    if (_digest != null) {
        BlobKey = new BlobKey(_digest);
    }

    var encodingName = info.GetCast <string>("encoding");
    if (encodingName != null) {
        // "gzip" is the only encoding this implementation understands.
        if (!encodingName.Equals("gzip")) {
            throw new CouchbaseLiteException(StatusCode.BadEncoding);
        }

        Encoding = AttachmentEncoding.GZIP;
    }

    var inlineData = info.Get("data");
    if (inlineData != null) {
        // Inline body: either a base64 string or an already-decoded byte sequence.
        var asString = inlineData as string;
        _data = asString != null ? Convert.FromBase64String(asString) : inlineData as IEnumerable <byte>;
        if (_data == null) {
            throw new CouchbaseLiteException(StatusCode.BadEncoding);
        }

        SetPossiblyEncodedLength(_data.LongCount());
    } else if (info.GetCast <bool>("stub", false)) {
        // Stub: the body lives elsewhere; just validate the revision position if given.
        if (info.ContainsKey("revpos")) {
            var position = info.GetCast <int>("revpos");
            if (position <= 0) {
                throw new CouchbaseLiteException(StatusCode.BadAttachment);
            }

            RevPos = position;
        }
    } else if (info.GetCast <bool>("follows", false)) {
        // "follows": the body arrives in a separate MIME part; the digest is required
        // so the caller can locate it.
        if (Digest == null) {
            throw new CouchbaseLiteException(StatusCode.BadAttachment);
        }
    } else {
        throw new CouchbaseLiteException(StatusCode.BadAttachment);
    }
}
// Reads the attachments rows for the given revision sequence and splices an
// "_attachments" dictionary into the already-serialized document JSON.
// Returns StatusCode.Ok on success, or an error Status on SQLite/corruption failure.
//
// CREATE TABLE attachments (
//    sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE,
//    filename TEXT NOT NULL, key BLOB NOT NULL, type TEXT, length INTEGER NOT NULL,
//    revpos INTEGER DEFAULT 0, encoding INTEGER DEFAULT 0, encoded_length INTEGER );
private Status AddAttachmentsToSequence(long sequence, List <byte> json)
{
    sqlite3_stmt attQuery = null;
    Status status = PrepareSQL(ref attQuery, "SELECT filename, key, type, length," +
        " revpos, encoding, encoded_length FROM attachments WHERE sequence=?");
    if (status.IsError) {
        return(status);
    }

    raw.sqlite3_bind_int64(attQuery, 1, sequence);

    // FIX: the digest length check used to instantiate (and never dispose) a new SHA1
    // object on every row; compute the expected key length once instead.
    int digestLength;
    using (var sha1 = SHA1.Create()) {
        digestLength = sha1.HashSize / 8;
    }

    var attachments = new Dictionary <string, object>();
    int err;
    while (raw.SQLITE_ROW == (err = raw.sqlite3_step(attQuery))) {
        string name = raw.sqlite3_column_text(attQuery, 0);
        var key = raw.sqlite3_column_blob(attQuery, 1);
        string mimeType = raw.sqlite3_column_text(attQuery, 2);
        long length = raw.sqlite3_column_int64(attQuery, 3);
        int revpos = raw.sqlite3_column_int(attQuery, 4);
        int encoding = raw.sqlite3_column_int(attQuery, 5);
        long encodedLength = raw.sqlite3_column_int64(attQuery, 6);
        if (key.Length != digestLength) {
            raw.sqlite3_finalize(attQuery);
            return(new Status(StatusCode.CorruptError));
        }

        // The body itself "follows": it lives in the blob store keyed by this digest.
        var blobKey = new BlobKey(key);
        var att = new NonNullDictionary <string, object> {
            { "type", mimeType },
            { "digest", blobKey.Base64Digest() },
            { "length", length },
            { "revpos", revpos },
            { "follows", true },
            { "encoding", encoding != 0 ? "gzip" : null },
            { "encoded_length", encoding != 0 ? (object)encodedLength : null }
        };
        attachments[name] = att;
    }

    raw.sqlite3_finalize(attQuery);
    if (err != raw.SQLITE_DONE) {
        return(SqliteErrToStatus(err));
    }

    if (attachments.Count > 0) {
        // Splice attachment JSON into the document JSON: strip attJson's outer braces
        // and insert the contents just before the document's closing '}'.
        var attJson = Manager.GetObjectMapper().WriteValueAsBytes(new Dictionary <string, object> {
            { "_attachments", attachments }
        });
        if (json.Count > 2) {
            json.Insert(json.Count - 1, (byte)',');
        }

        json.InsertRange(json.Count - 1, attJson.Skip(1).Take(attJson.Count() - 2));
    }

    return(new Status(StatusCode.Ok));
}
/// <summary>
/// Builds and sends a multipart PUT for a revision whose attachments are marked with
/// "follows".  Returns false when there is nothing to send as multipart (caller falls
/// back to plain JSON); true when the multipart request was queued.
/// </summary>
private bool UploadMultipartRevision(RevisionInternal revision)
{
    MultipartEntity multiPart = null;
    IDictionary <string, object> revProps = revision.GetProperties();
    revProps.Put("_revisions", db.GetRevisionHistoryDict(revision));
    // TODO: refactor this to
    IDictionary <string, object> attachments = (IDictionary <string, object>)revProps.Get("_attachments");
    foreach (string attachmentKey in attachments.Keys) {
        IDictionary <string, object> attachment = (IDictionary <string, object>)attachments.Get(attachmentKey);
        if (attachment.ContainsKey("follows")) {
            if (multiPart == null) {
                // First "follows" attachment: start the multipart body with the revision JSON.
                multiPart = new MultipartEntity();
                try {
                    string json = Manager.GetObjectMapper().WriteValueAsString(revProps);
                    Encoding utf8charset = Sharpen.Extensions.GetEncoding("UTF-8");
                    multiPart.AddPart("param1", new StringBody(json, "application/json", utf8charset));
                } catch (IOException e) {
                    // FIX: ArgumentException has no (Exception) constructor; supply a message
                    // and pass the original exception as the inner exception.
                    throw new ArgumentException("Not able to serialize revision properties into a multipart request content.", e);
                }
            }

            BlobStore blobStore = this.db.GetAttachments();
            string base64Digest = (string)attachment.Get("digest");
            BlobKey blobKey = new BlobKey(base64Digest);
            InputStream inputStream = blobStore.BlobStreamForKey(blobKey);
            if (inputStream == null) {
                // Missing blob file: abandon the multipart attempt entirely.
                Log.W(Database.Tag, "Unable to find blob file for blobKey: " + blobKey + " - Skipping upload of multipart revision.");
                multiPart = null;
            } else {
                string contentType = null;
                if (attachment.ContainsKey("content_type")) {
                    contentType = (string)attachment.Get("content_type");
                } else if (attachment.ContainsKey("content-type")) {
                    // Legacy field name written by old clients; warn but don't use it.
                    string message = string.Format("Found attachment that uses content-type" +
                        " field name instead of content_type (see couchbase-lite-android" +
                        " issue #80): " + attachment);
                    Log.W(Database.Tag, message);
                }

                multiPart.AddPart(attachmentKey, new InputStreamBody(inputStream, contentType, attachmentKey));
            }
        }
    }

    if (multiPart == null) {
        return false;
    }

    // FIX: the path was built with a Java-style "%s" placeholder, which C#'s
    // string.Format ignores -- the document ID was never substituted and the literal
    // "/%s?new_edits=false" was sent as the request path.
    string path = string.Format("/{0}?new_edits=false", revision.GetDocId());

    // TODO: need to throttle these requests
    // FIX: log message typo ("Uploadeding") corrected and the literal rejoined.
    Log.D(Database.Tag, "Uploading multipart request. Revision: " + revision);
    Log.D(Database.Tag, this + "|" + Sharpen.Thread.CurrentThread() + ": uploadMultipartRevision() calling asyncTaskStarted()");
    AsyncTaskStarted();
    SendAsyncMultipartRequest("PUT", path, multiPart, new _RemoteRequestCompletionBlock_411(this));
    // TODO:
    return true;
}
// Reads the attachments rows for the given revision sequence and splices an
// "_attachments" dictionary into the already-serialized document JSON.
// Throws (via Misc.CreateExceptionAndLog) on query-preparation failure, digest
// corruption, or a SQLite stepping error.
//
// CREATE TABLE attachments (
//    sequence INTEGER NOT NULL REFERENCES revs(sequence) ON DELETE CASCADE,
//    filename TEXT NOT NULL, key BLOB NOT NULL, type TEXT, length INTEGER NOT NULL,
//    revpos INTEGER DEFAULT 0, encoding INTEGER DEFAULT 0, encoded_length INTEGER );
private void AddAttachmentsToSequence(long sequence, List <byte> json)
{
    sqlite3_stmt attQuery = null;
    try {
        PrepareSQL(ref attQuery, "SELECT filename, key, type, length," +
            " revpos, encoding, encoded_length FROM attachments WHERE sequence=?");
    } catch (CouchbaseLiteException) {
        Log.To.Upgrade.E(TAG, "Failed to create SQLite query for attachments table in " +
            "source database '{0}', rethrowing...", _path);
        throw;
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.Upgrade, e, TAG,
            "Error creating SQLite query for attachments table in source database '{0}'", _path);
    }

    raw.sqlite3_bind_int64(attQuery, 1, sequence);

    // FIX: the digest length check used to call SHA1.Create() for every row, creating
    // (and never disposing) a hash object per attachment; compute the length once.
    int digestLength;
    using (var sha1 = SHA1.Create()) {
        digestLength = sha1.HashSize / 8;
    }

    var attachments = new Dictionary <string, object>();
    int err;
    while (raw.SQLITE_ROW == (err = raw.sqlite3_step(attQuery))) {
        string name = raw.sqlite3_column_text(attQuery, 0);
        var key = raw.sqlite3_column_blob(attQuery, 1);
        string mimeType = raw.sqlite3_column_text(attQuery, 2);
        long length = raw.sqlite3_column_int64(attQuery, 3);
        int revpos = raw.sqlite3_column_int(attQuery, 4);
        int encoding = raw.sqlite3_column_int(attQuery, 5);
        long encodedLength = raw.sqlite3_column_int64(attQuery, 6);
        if (key.Length != digestLength) {
            raw.sqlite3_finalize(attQuery);
            throw Misc.CreateExceptionAndLog(Log.To.Upgrade, StatusCode.CorruptError, TAG,
                "Digest key length incorrect ({0})", Convert.ToBase64String(key));
        }

        // The body itself "follows": it lives in the blob store keyed by this digest.
        var blobKey = new BlobKey(key);
        var att = new NonNullDictionary <string, object> {
            { "type", mimeType },
            { "digest", blobKey.Base64Digest() },
            { "length", length },
            { "revpos", revpos },
            { "follows", true },
            { "encoding", encoding != 0 ? "gzip" : null },
            { "encoded_length", encoding != 0 ? (object)encodedLength : null }
        };
        attachments[name] = att;
    }

    raw.sqlite3_finalize(attQuery);
    if (err != raw.SQLITE_DONE) {
        throw Misc.CreateExceptionAndLog(Log.To.Upgrade, SqliteErrToStatus(err).Code, TAG,
            "Failed to finalize attachment query ({0}: {1})", err, raw.sqlite3_errmsg(_sqlite));
    }

    if (attachments.Count > 0) {
        // Splice attachment JSON into the document JSON: strip attJson's outer braces
        // and insert the contents just before the document's closing '}'.
        var attJson = Manager.GetObjectMapper().WriteValueAsBytes(new Dictionary <string, object> {
            { "_attachments", attachments }
        });
        if (json.Count > 2) {
            json.Insert(json.Count - 1, (byte)',');
        }

        json.InsertRange(json.Count - 1, attJson.Skip(1).Take(attJson.Count() - 2));
    }
}
/// <summary>
/// Builds and sends a multipart PUT for a revision whose attachments are marked with
/// "follows".  Returns false when there is nothing to send as multipart (caller falls
/// back to plain JSON); true when the multipart request was queued.
/// </summary>
private bool UploadMultipartRevision(RevisionInternal revision)
{
    MultipartEntity multiPart = null;
    IDictionary <string, object> revProps = revision.GetProperties();
    // TODO: refactor this to
    IDictionary <string, object> attachments = (IDictionary <string, object>)revProps.Get("_attachments");
    foreach (string attachmentKey in attachments.Keys) {
        IDictionary <string, object> attachment = (IDictionary <string, object>)attachments.Get(attachmentKey);
        if (attachment.ContainsKey("follows")) {
            if (multiPart == null) {
                // First "follows" attachment: start the multipart body with the revision JSON.
                multiPart = new MultipartEntity();
                try {
                    string json = Manager.GetObjectMapper().WriteValueAsString(revProps);
                    Encoding utf8charset = Sharpen.Extensions.GetEncoding("UTF-8");
                    multiPart.AddPart("param1", new StringBody(json, "application/json", utf8charset));
                } catch (IOException e) {
                    // FIX: ArgumentException has no (Exception) constructor; include a
                    // message and pass the original exception as the inner exception.
                    throw new ArgumentException("Not able to serialize revision properties into a multipart request content.", e);
                }
            }

            BlobStore blobStore = this.db.GetAttachments();
            string base64Digest = (string)attachment.Get("digest");
            BlobKey blobKey = new BlobKey(base64Digest);
            InputStream inputStream = blobStore.BlobStreamForKey(blobKey);
            if (inputStream == null) {
                // Missing blob file: abandon the multipart attempt entirely.
                // (The Sharpen Log wrapper handles Java-style %s placeholders.)
                Log.W(Log.TagSync, "Unable to find blob file for blobKey: %s - Skipping upload of multipart revision.", blobKey);
                multiPart = null;
            } else {
                string contentType = null;
                if (attachment.ContainsKey("content_type")) {
                    contentType = (string)attachment.Get("content_type");
                } else if (attachment.ContainsKey("content-type")) {
                    // Legacy field name written by old clients; warn but don't use it.
                    Log.W(Log.TagSync, "Found attachment that uses content-type" +
                        " field name instead of content_type (see couchbase-lite-android" +
                        " issue #80): %s", attachment);
                }

                multiPart.AddPart(attachmentKey, new InputStreamBody(inputStream, contentType, attachmentKey));
            }
        }
    }

    if (multiPart == null) {
        return false;
    }

    // FIX: the path used a Java-style "%s" placeholder, which C#'s string.Format
    // ignores -- the document ID was never substituted into the request path.
    string path = string.Format("/{0}?new_edits=false", revision.GetDocId());
    Log.D(Log.TagSync, "Uploading multipart request. Revision: %s", revision);
    AddToChangesCount(1);
    Log.V(Log.TagSync, "%s | %s: uploadMultipartRevision() calling asyncTaskStarted()", this, Sharpen.Thread.CurrentThread());
    AsyncTaskStarted();
    SendAsyncMultipartRequest("PUT", path, multiPart, new _RemoteRequestCompletionBlock_542(this, revision));
    // Server doesn't like multipart, eh? Fall back to JSON.
    //status 415 = "bad_content_type"
    return true;
}