/// <summary>Computes the SHA-1 blob key for the contents of a file.</summary>
/// <param name="file">The file whose contents are hashed.</param>
/// <returns>The computed key, or null if the SHA-1 algorithm is unavailable.</returns>
public static BlobKey KeyForBlobFromFile(FilePath file)
{
    MessageDigest md;
    try {
        md = MessageDigest.GetInstance("SHA-1");
    } catch (NoSuchAlgorithmException) {
        Log.E(Log.TagBlobStore, "Error, SHA-1 digest is unavailable.");
        return null;
    }

    try {
        FileInputStream fis = null;
        try {
            fis = new FileInputStream(file);
            byte[] buffer = new byte[65536];
            int lenRead = fis.Read(buffer);
            while (lenRead > 0) {
                md.Update(buffer, 0, lenRead);
                lenRead = fis.Read(buffer);
            }
        } finally {
            // Always release the stream; the original leaked it when Read threw.
            if (fis != null) {
                fis.Close();
            }
        }
    } catch (IOException) {
        // Preserved behavior: log and fall through to digest whatever was read.
        Log.E(Log.TagBlobStore, "Error reading tmp file to compute key");
    }

    // Digest() allocates its own result array; the original's 40-byte
    // pre-allocation was dead code and has been removed.
    byte[] sha1hash = md.Digest();
    return new BlobKey(sha1hash);
}
/// <summary>Deletes every blob file in the store whose key is not in <paramref name="keysToKeep"/>.</summary>
/// <param name="keysToKeep">Keys of the attachments that must survive.</param>
/// <returns>The number of files deleted.</returns>
public virtual int DeleteBlobsExceptWithKeys(IList<BlobKey> keysToKeep)
{
    int numDeleted = 0;
    FilePath file = new FilePath(path);
    FilePath[] contents = file.ListFiles();
    foreach (FilePath attachment in contents) {
        BlobKey attachmentKey = new BlobKey();
        // Only consider files whose names parse as blob keys. The original
        // ignored this return value and compared an uninitialized key, which
        // could delete unrelated files in the store directory.
        if (!GetKeyForFilename(attachmentKey, attachment.GetPath())) {
            continue;
        }
        if (!keysToKeep.Contains(attachmentKey)) {
            bool result = attachment.Delete();
            if (result) {
                ++numDeleted;
            } else {
                Log.E(Log.TagBlobStore, "Error deleting attachment: %s", attachment);
            }
        }
    }
    return numDeleted;
}
/// <summary>Stores a blob in the store, keyed by its SHA-1 digest.</summary>
/// <param name="data">The blob contents to persist.</param>
/// <param name="outKey">Receives the computed key for the blob.</param>
public void StoreBlob(byte[] data, BlobKey outKey)
{
    var computedKey = KeyForBlob(data);
    outKey.Bytes = computedKey.Bytes;
    var destPath = PathForKey(outKey);

    // If a non-offline copy already exists on disk there is nothing to write.
    var alreadyStored = File.Exists(destPath) &&
        (File.GetAttributes(destPath) & FileAttributes.Offline) == 0;
    if (alreadyStored) {
        Log.To.Database.V(TAG, "Blob {0} already exists in store, no action needed", computedKey.Base64Digest());
        return;
    }

    FileStream outputStream = null;
    try {
        outputStream = File.Open(destPath, FileMode.Create);
        outputStream.Write(data, 0, data.Length);
    } catch (FileNotFoundException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, StatusCode.AttachmentError, TAG,
            "Unable to get file for output");
    } catch (IOException ioe) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, ioe, StatusCode.AttachmentError, TAG,
            "Unable to write to output file");
    } finally {
        if (outputStream != null) {
            try {
                outputStream.Dispose();
            } catch (IOException) {
                // A failed dispose is non-fatal here; the write either
                // succeeded or already raised above.
            }
        }
    }
}
// Verifies that ChangeEncryptionKey re-encrypts existing blobs in place:
// stores a blob, swaps in a new SymmetricKey, confirms the content survives,
// reopens the store, and (when it started encrypted) also removes the key
// and checks the content again.
public void TestRekey()
{
    var item = Encoding.UTF8.GetBytes("this is an item");
    var key = new BlobKey();
    _store.StoreBlob(item, key);
    var newEncryptionKey = new SymmetricKey();
    // Message depends on whether the store was already encrypted.
    var addOrChange = _encrypt ? "Changing" : "Adding";
    Trace.WriteLine($"---- {addOrChange} key");
    _store.ChangeEncryptionKey(newEncryptionKey);
    var oldEncrypt = _encrypt;
    _store.EncryptionKey.Should().Be(newEncryptionKey, "because the key should have become the new one");
    _store.BlobForKey(key).Should().Equal(item, "because the content should be the same regardless of encryption");
    // Re-run the reopen test with encryption forced on, then restore the flag.
    _encrypt = true;
    TestReopen();
    _encrypt = oldEncrypt;
    if (_encrypt) {
        // Second leg: strip the key off an encrypted store and re-verify.
        Trace.WriteLine("---- Removing key");
        _store.ChangeEncryptionKey(null);
        _store.EncryptionKey.Should().BeNull("because the encryption was removed");
        _store.BlobForKey(key).Should().Equal(item, "because the content should be the same regardless of encryption");
        _encrypt = false;
        TestReopen();
    }
}
/// <summary>Deletes every blob file whose key is not in <paramref name="keysToKeep"/>.</summary>
/// <param name="keysToKeep">Keys that must not be deleted.</param>
/// <returns>The number of attachments removed from disk.</returns>
public int DeleteBlobsExceptWithKeys(ICollection<BlobKey> keysToKeep)
{
    var deletedCount = 0;
    var storeDir = new FilePath(_path);
    foreach (var blobFile in storeDir.ListFiles()) {
        var candidateKey = new BlobKey();
        // Skip files that are not valid blob files, and blobs we must keep.
        var parsed = GetKeyForFilename(candidateKey, blobFile.GetPath());
        if (!parsed || keysToKeep.Contains(candidateKey)) {
            continue;
        }
        if (blobFile.Delete()) {
            ++deletedCount;
        } else {
            Log.E(Database.TAG, "Error deleting attachment");
        }
    }
    return deletedCount;
}
/// <summary>Opens a read stream over the blob stored under the given key.</summary>
/// <param name="key">The key of the blob to open.</param>
/// <returns>An open stream, or null when the file is missing/unreadable or opening fails.</returns>
public Stream BlobStreamForKey(BlobKey key)
{
    var path = PathForKey(key);
    Log.D(Database.Tag, "Blob Path : " + path);
    var file = new FilePath(path);
    if (file.CanRead()) {
        try {
            // NOTE(review): opened ReadWrite/ReadWrite even though callers read —
            // presumably to tolerate concurrent writers; confirm before changing.
            return (new FileStream(file, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite));
        } catch (FileNotFoundException e) {
            // CanRead() raced with deletion.
            Log.E(Database.Tag, "Unexpected file not found in blob store", e);
            return (null);
        } catch (Exception e) {
            // Any other open failure falls through to the null return below.
            Log.E(Database.Tag, "Cannot new FileStream", e);
        }
    }
    return (null);
}
/// <summary>Value equality: two keys are equal when their digest bytes match element-wise.</summary>
public override bool Equals(object o)
{
    var other = o as BlobKey;
    if (other == null) {
        return false;
    }
    // Preserved behavior: a key with null bytes never compares equal,
    // even against another null-byte key.
    if (Bytes == null || other.Bytes == null) {
        return false;
    }
    if (Bytes.Length != other.Bytes.Length) {
        return false;
    }
    for (var i = 0; i < Bytes.Length; i++) {
        if (Bytes[i] != other.Bytes[i]) {
            return false;
        }
    }
    return true;
}
/// <summary>Checks whether the stored blob for the given key is gzip-compressed
/// by reading its first two bytes and comparing against the gzip magic number.</summary>
/// <param name="key">The key of the blob to inspect.</param>
/// <returns>true if the blob starts with the gzip magic bytes.</returns>
public bool IsGZipped(BlobKey key)
{
    int magic = 0;
    string path = PathForKey(key);
    FilePath file = new FilePath(path);
    if (file.CanRead()) {
        RandomAccessFile raf = null;
        try {
            raf = new RandomAccessFile(file, "r");
            // Little-endian: first byte is the low half of the magic word.
            magic = raf.Read() & unchecked ((0xff)) | ((raf.Read() << 8) & unchecked ((0xff00)));
        } catch (Exception e) {
#if PORTABLE
            Runtime.PrintStackTrace(e);
#else
            Runtime.PrintStackTrace(e, Console.Error);
#endif
        } finally {
            // Close in finally; the original leaked the handle when Read threw.
            if (raf != null) {
                try { raf.Close(); } catch (IOException) { }
            }
        }
    }
    return magic == GZIPInputStream.GzipMagic;
}
// Verifies that attachments saved into an encrypted database are not written
// to disk in cleartext: saves a doc with an attachment, locates the raw
// attachment file via its digest, and asserts the bytes differ from the body.
public void TestEncryptedAttachments()
{
    manager.RegisterEncryptionKey("letmein", "seekrit");
    var seekrit = default(Database);
    Assert.DoesNotThrow(() => seekrit = manager.GetDatabase("seekrit"), "Failed to create encrypted DB");
    // Save a doc with an attachment:
    var doc = seekrit.GetDocument("att");
    var body = Encoding.UTF8.GetBytes("This is a test attachment!");
    var rev = doc.CreateRevision();
    rev.SetAttachment("att.txt", "text/plain; charset=utf-8", body);
    var savedRev = rev.Save();
    Assert.IsNotNull(savedRev, "Saving doc failed");
    // Read the raw attachment file and make sure it's not cleartext:
    var digest = savedRev.GetProperty("_attachments").AsDictionary <string, object>().Get("att.txt")
        .AsDictionary <string, object>().GetCast <string>("digest");
    Assert.IsNotNull(digest);
    var attKey = default(BlobKey);
    Assert.DoesNotThrow(() => attKey = new BlobKey(digest));
    // Resolve the on-disk path from the digest-derived key and compare raw bytes.
    var path = seekrit.Attachments.PathForKey(attKey);
    var raw = File.ReadAllBytes(path);
    Assert.IsNotNull(raw);
    Assert.AreNotEqual(raw, body, "Oops, attachment was not encrypted");
    seekrit.Dispose();
}
/// <summary>Gets the on-disk size of the blob for a key, or 0 when no such blob exists.</summary>
public long GetSizeOfBlob(BlobKey key)
{
    var blobInfo = new FileInfo(PathForKey(key));
    if (!blobInfo.Exists) {
        return 0;
    }
    return blobInfo.Length;
}
/// <summary>Computes the SHA-1 blob key for the contents of a file path.</summary>
/// <param name="file">Path of the file whose contents are hashed.</param>
/// <returns>The computed key, or null if SHA-1 is unavailable or the file can't be read.</returns>
public static BlobKey KeyForBlobFromFile(string file)
{
    MessageDigest md;
    try {
        md = MessageDigest.GetInstance("SHA-1");
    } catch (NotSupportedException) {
        Log.To.Database.E(TAG, "Error, SHA-1 digest is unavailable.");
        return null;
    }
    try {
        using (var fis = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) {
            byte[] buffer = new byte[65536];
            int lenRead = fis.Read(buffer, 0, buffer.Length);
            while (lenRead > 0) {
                md.Update(buffer, 0, lenRead);
                lenRead = fis.Read(buffer, 0, buffer.Length);
            }
        }
    } catch (IOException e) {
        Log.To.Database.E(TAG, "Error reading tmp file to compute key (returning null)", e);
        return null;
    }
    // Digest() allocates its own array; the original's `new byte[40]`
    // pre-allocation was dead code and has been removed.
    byte[] sha1hash = md.Digest();
    return new BlobKey(sha1hash);
}
/// <summary>Computes the SHA-1 blob key for the contents of a file.</summary>
/// <param name="file">The file whose contents are hashed.</param>
/// <returns>The computed key, or null if SHA-1 is unavailable or reading fails.</returns>
public static BlobKey KeyForBlobFromFile(FileInfo file)
{
    MessageDigest md;
    try {
        md = MessageDigest.GetInstance("SHA-1");
    } catch (NoSuchAlgorithmException) {
        Log.E(Database.TAG, "Error, SHA-1 digest is unavailable.");
        return null;
    }
    try {
        // `using` guarantees the stream is closed; the original leaked it
        // whenever a read threw.
        using (var fis = new FileStream(file.FullName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite)) {
            byte[] buffer = new byte[65536];
            int lenRead = fis.Read(buffer, 0, buffer.Length);
            while (lenRead > 0) {
                md.Update(buffer, 0, lenRead);
                lenRead = fis.Read(buffer, 0, buffer.Length);
            }
        }
    } catch (IOException) {
        // Return null on a failed read instead of silently producing a digest
        // of partial data (matches the string-path overload's behavior).
        Log.E(Database.TAG, "Error reading tmp file to compute key");
        return null;
    }
    byte[] sha1hash = md.Digest();
    return new BlobKey(sha1hash);
}
/// <summary>Gets the size in bytes of the blob stored under the given key.</summary>
public long GetSizeOfBlob(BlobKey key)
{
    var blobFile = new FilePath(PathForKey(key));
    return blobFile.Length();
}
/// <summary>Value equality based on an element-wise comparison of the key bytes.</summary>
public override bool Equals(object o)
{
    var other = o as Couchbase.Lite.BlobKey;
    if (other == null) {
        return false;
    }
    return Arrays.Equals(GetBytes(), other.GetBytes());
}
/// <summary>Gets the filesystem path for a blob key. Returns null when the store
/// is encrypted, since raw paths are only meaningful for plaintext blobs.</summary>
public string PathForKey(BlobKey key)
{
    return EncryptionKey == null ? RawPathForKey(key) : null;
}
/// <summary>Reads the full contents of the blob stored under a key.</summary>
/// <returns>The blob bytes, or null when no blob exists for the key.</returns>
public byte[] BlobForKey(BlobKey key)
{
    using (var stream = BlobStreamForKey(key)) {
        return stream == null ? null : stream.ReadAllBytes();
    }
}
/// <summary>Call this after all the data has been added.</summary>
public void Finish()
{
    try {
        outStream.Flush();
        outStream.Dispose();
    } catch (IOException e) {
        // A failed flush/close is non-fatal; the running digests are complete.
        Log.To.Database.W(Tag, "Exception closing output stream, continuing...", e);
    }
    // Finalize both running digests now that no more data will arrive.
    md5DigestResult = md5Digest.Digest();
    blobKey = new BlobKey(sha1Digest.Digest());
}
/// <summary>Call this after all the data has been added.</summary>
public void Finish()
{
    try {
        outStream.Flush();
        outStream.Close();
    } catch (IOException e) {
        // Non-fatal: the digests below do not depend on the close succeeding.
        Log.W(Database.TAG, "Exception closing output stream", e);
    }
    // Seal both digests now that the stream is done.
    md5DigestResult = md5Digest.Digest();
    blobKey = new BlobKey(sha1Digest.Digest());
}
/// <summary>Decodes a blob-store filename back into its blob key.</summary>
/// <param name="outKey">Receives the decoded key on success.</param>
/// <param name="filename">Full path of the candidate blob file.</param>
/// <returns>true when the filename had the blob extension and was decoded.</returns>
public bool GetKeyForFilename(BlobKey outKey, string filename)
{
    if (!filename.EndsWith(FileExtension)) {
        return false;
    }
    // Strip the store directory prefix (plus path separator) and the
    // trailing extension, leaving only the hex digest.
    var start = _path.Length + 1;
    var hexLength = filename.Length - FileExtension.Length - start;
    outKey.Bytes = BlobKey.ConvertFromHex(filename.Substring(start, hexLength));
    return true;
}
/// <summary>Decodes a blob-store filename back into its blob key.</summary>
/// <param name="outKey">Receives the decoded key on success.</param>
/// <param name="filename">Full path of the candidate blob file.</param>
/// <returns>true when the filename had the blob extension and was decoded.</returns>
public bool GetKeyForFilename(BlobKey outKey, string filename)
{
    if (!filename.EndsWith(FileExtension)) {
        return (false);
    }
    //trim off extension
    // NOTE: Sharpen.Runtime.Substring takes (start, end) indices, Java-style —
    // not (start, length) like string.Substring.
    string rest = Sharpen.Runtime.Substring(filename, path.Length + 1, filename.Length
        - FileExtension.Length);
    outKey.SetBytes(BlobKey.ConvertFromHex(rest));
    return (true);
}
/// <summary>Call this after all the data has been added.</summary>
/// <remarks>Call this after all the data has been added.</remarks>
public virtual void Finish()
{
    try {
        outStream.Close();
    } catch (IOException e) {
        // A failed close is non-fatal; the accumulated digests remain valid.
        Log.W(Log.TagBlobStore, "Exception closing output stream", e);
    }
    // Finalize the digests once the stream is closed.
    md5DigestResult = md5Digest.Digest();
    blobKey = new BlobKey(sha1Digest.Digest());
}
/// <exception cref="System.Exception"></exception>
// Round-trips an asset through the BlobStoreWriter (append → finish → install)
// and checks the installed blob's size via the SHA-1 derived key.
public virtual void TestBasicOperation()
{
    BlobStore attachments = database.GetAttachments();
    InputStream attachmentStream = GetAsset("attachment.png");
    byte[] bytes = IOUtils.ToByteArray(attachmentStream);
    BlobStoreWriter blobStoreWriter = new BlobStoreWriter(attachments);
    blobStoreWriter.AppendData(bytes);
    blobStoreWriter.Finish();
    blobStoreWriter.Install();
    // The digest string from the writer must map back to the stored blob.
    string sha1DigestKey = blobStoreWriter.SHA1DigestString();
    BlobKey keyFromSha1 = new BlobKey(sha1DigestKey);
    NUnit.Framework.Assert.IsTrue(attachments.GetSizeOfBlob(keyFromSha1) == bytes.Length
        );
}
// Asserts the on-disk representation of a blob matches the store's encryption
// state: encrypted stores must not contain the cleartext anywhere in the raw
// file; unencrypted stores must contain exactly the cleartext.
private void Verify(BlobKey attKey, byte[] clearText)
{
    var path = _store.RawPathForKey(attKey);
    var raw = File.ReadAllBytes(path);
    if (_encrypt) {
        // Locate(...) == -1 means the cleartext does not occur as a substring.
        raw.Should().NotBeNull()
            .And.Match(x => x.Locate(clearText) == -1, "because encrypted contents should not contain cleartext");
    } else {
        raw.Should().NotBeNull()
            .And.Equal(clearText, "because the contents should serialize to disk correctly");
    }
}
/// <summary>Reads the full contents of the blob stored under a key.</summary>
/// <returns>The blob bytes, or null when the file cannot be read.</returns>
public byte[] BlobForKey(BlobKey key)
{
    var blobFile = new FilePath(PathForKey(key));
    try {
        return GetBytesFromFile(blobFile);
    } catch (IOException e) {
        Log.E(Database.Tag, "Error reading file", e);
        return null;
    }
}
/// <summary>Value equality based on the key's digest bytes. Keys with null
/// byte arrays never compare equal (preserved behavior).</summary>
public override bool Equals(object o)
{
    var other = o as BlobKey;
    if (other == null || Bytes == null || other.Bytes == null) {
        return false;
    }
    return Arrays.Equals(Bytes, other.Bytes);
}
// Stores a blob, then opens a second BlobStore over the same directory (and
// key) to prove the blob is readable from both instances and that the on-disk
// bytes match the encryption state (via Verify).
public void TestReopen()
{
    var item = Encoding.UTF8.GetBytes("this is an item");
    var key = new BlobKey();
    _store.StoreBlob(item, key);
    // A fresh store over the same path must see the same content.
    var store2 = new BlobStore(_storePath, _store.EncryptionKey);
    var readItem = store2.BlobForKey(key);
    readItem.Should().Equal(item, "because the contents of a key should be the same in the second store");
    readItem = _store.BlobForKey(key);
    readItem.Should().Equal(item, "because the contents of a key should be the same in the first store");
    Verify(key, item);
}
/// <summary>Stores a blob keyed by its SHA-1 digest.</summary>
/// <param name="data">The blob contents.</param>
/// <param name="outKey">Receives the computed key.</param>
/// <returns>true when the blob is already stored or was written successfully.</returns>
public bool StoreBlob(byte[] data, BlobKey outKey)
{
    BlobKey newKey = KeyForBlob(data);
    outKey.SetBytes(newKey.GetBytes());
    string path = PathForKey(outKey);
    FilePath file = new FilePath(path);
    // A readable file with this digest means the blob already exists.
    if (file.CanRead()) {
        return (true);
    }
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(file);
        fos.Write(data);
    } catch (FileNotFoundException e) {
        Log.E(Database.Tag, "Error opening file for output", e);
        return (false);
    } catch (IOException ioe) {
        Log.E(Database.Tag, "Error writing to file", ioe);
        return (false);
    } finally {
        if (fos != null) {
            try {
                fos.Close();
            } catch (IOException) {
                // ignore — a failed close after a successful write is non-fatal
            }
        }
    }
    return (true);
}
/// <summary>Computes the SHA-1 based key for a blob's contents.</summary>
/// <param name="data">The blob bytes to hash.</param>
/// <returns>The computed key, or null if SHA-1 is unavailable.</returns>
public static BlobKey KeyForBlob(byte[] data)
{
    MessageDigest md;
    try {
        md = MessageDigest.GetInstance("SHA-1");
    } catch (NoSuchAlgorithmException) {
        Log.E(Database.TAG, "Error, SHA-1 digest is unavailable.");
        return (null);
    }
    md.Update(data, 0, data.Length);
    // Digest() allocates the result; the original's 40-byte pre-allocation was dead code.
    byte[] sha1hash = md.Digest();
    return (new BlobKey(sha1hash));
}
/// <summary>Computes the SHA-1 based key for a blob's contents.</summary>
/// <param name="data">The blob bytes to hash.</param>
/// <returns>The computed key, or null if SHA-1 is unavailable.</returns>
public static BlobKey KeyForBlob(byte[] data)
{
    MessageDigest md;
    try {
        md = MessageDigest.GetInstance("SHA-1");
    } catch (NoSuchAlgorithmException) {
        Log.E(Database.Tag, "Error, SHA-1 digest is unavailable.");
        return null;
    }
    md.Update(data, 0, data.Length);
    // Digest() allocates the result; the original's 40-byte pre-allocation was dead code.
    byte[] sha1hash = md.Digest();
    return new BlobKey(sha1hash);
}
/// <summary>Enumerates the keys of every blob currently stored on disk.</summary>
/// <returns>A set of all valid blob keys in the store directory.</returns>
public ICollection <BlobKey> AllKeys()
{
    ICollection <BlobKey> result = new HashSet <BlobKey>();
    FilePath file = new FilePath(path);
    FilePath[] contents = file.ListFiles();
    foreach (FilePath attachment in contents) {
        if (attachment.IsDirectory()) {
            continue;
        }
        BlobKey attachmentKey = new BlobKey();
        // Only include files whose names parse as blob keys; the original
        // ignored this result and added empty keys for stray files.
        if (GetKeyForFilename(attachmentKey, attachment.GetPath())) {
            result.AddItem(attachmentKey);
        }
    }
    return (result);
}
/// <summary>Enumerates the keys of every blob currently stored on disk.</summary>
/// <returns>A set of all valid blob keys in the store directory.</returns>
public ICollection <BlobKey> AllKeys()
{
    // (Stray duplicate ';' after the initializer removed.)
    ICollection <BlobKey> result = new HashSet <BlobKey>();
    foreach (var attachment in Directory.GetFileSystemEntries(_path)) {
        if (File.GetAttributes(attachment).HasFlag(FileAttributes.Directory)) {
            continue;
        }
        BlobKey attachmentKey = new BlobKey();
        // Only include entries whose names parse as blob keys; the original
        // ignored this result and added empty keys for stray files.
        if (GetKeyForFilename(attachmentKey, attachment)) {
            result.Add(attachmentKey);
        }
    }
    return (result);
}
// Round-trips a PNG asset through the BlobStoreWriter (append → finish →
// install) and verifies both the expected SHA-1 digest string and that the
// installed blob reports the correct size.
public void TestBasicOperation()
{
    var attachmentStream = (InputStream)GetAsset("attachment.png");
    var memoryStream = new MemoryStream();
    attachmentStream.Wrapped.CopyTo(memoryStream);
    var bytes = memoryStream.ToArray();
    var attachments = database.Attachments;
    var blobStoreWriter = new BlobStoreWriter(attachments);
    blobStoreWriter.AppendData(bytes);
    blobStoreWriter.Finish();
    blobStoreWriter.Install();
    var sha1DigestKey = blobStoreWriter.SHA1DigestString();
    // Known digest of the test asset; guards against hashing regressions.
    Assert.IsTrue(sha1DigestKey.Contains("LmsoqJJ6LOn4YS60pYnvrKbBd64="));
    var keyFromSha1 = new BlobKey(sha1DigestKey);
    Assert.IsTrue(attachments.GetSizeOfBlob(keyFromSha1) == bytes.Length);
}
/// <summary>Deletes obsolete attachments from the sqliteDb and blob store.</summary>
private Status GarbageCollectAttachments()
{
    // First delete attachment rows for already-cleared revisions:
    // OPT: Could start after last sequence# we GC'd up to
    try {
        StorageEngine.ExecSQL("DELETE FROM attachments WHERE sequence IN (SELECT sequence from revs WHERE json IS null)");
    } catch (SQLException e) {
        // Non-fatal: a failed row cleanup just leaves extra keys in the live set below.
        Log.E(Tag, "Error deleting attachments", e);
    }
    // Now collect all remaining attachment IDs and tell the store to delete all but these:
    Cursor cursor = null;
    try {
        cursor = StorageEngine.RawQuery("SELECT DISTINCT key FROM attachments", CommandBehavior.SequentialAccess);
        cursor.MoveToNext();
        var allKeys = new AList<BlobKey>();
        while (!cursor.IsAfterLast()) {
            var key = new BlobKey(cursor.GetBlob(0));
            allKeys.AddItem(key);
            cursor.MoveToNext();
        }
        // Everything not referenced by a live attachment row is garbage.
        var numDeleted = Attachments.DeleteBlobsExceptWithKeys(allKeys);
        if (numDeleted < 0) {
            return new Status(StatusCode.InternalServerError);
        }
        Log.V(Tag, "Deleted " + numDeleted + " attachments");
        return new Status(StatusCode.Ok);
    } catch (SQLException e) {
        Log.E(Tag, "Error finding attachment keys in use", e);
        return new Status(StatusCode.InternalServerError);
    } finally {
        // The cursor is released on every path, including early returns.
        if (cursor != null) {
            cursor.Close();
        }
    }
}
/// <summary>Loads a named attachment for a given revision sequence from the
/// attachments table and opens a content stream over its blob.</summary>
/// <param name="sequence">Sequence number of the owning revision (must be positive).</param>
/// <param name="filename">Name of the attachment within the revision.</param>
/// <exception cref="CouchbaseLiteException">NotFound when no row matches;
/// InternalServerError on blob or SQL failure.</exception>
internal Attachment GetAttachmentForSequence (long sequence, string filename)
{
    Debug.Assert((sequence > 0));
    Debug.Assert((filename != null));
    Cursor cursor = null;
    var args = new [] { Convert.ToString(sequence), filename };
    try {
        cursor = StorageEngine.RawQuery("SELECT key, type FROM attachments WHERE sequence=? AND filename=?", args);
        if (!cursor.MoveToNext()) {
            throw new CouchbaseLiteException(StatusCode.NotFound);
        }
        var keyData = cursor.GetBlob(0);
        //TODO add checks on key here? (ios version)
        var key = new BlobKey(keyData);
        var contentStream = Attachments.BlobStreamForKey(key);
        if (contentStream == null) {
            // Row exists but the blob file is missing/unreadable — data corruption.
            Log.E(Tag, "Failed to load attachment");
            throw new CouchbaseLiteException(StatusCode.InternalServerError);
        } else {
            var result = new Attachment(contentStream, cursor.GetString(1));
            result.Compressed = Attachments.IsGZipped(key);
            return result;
        }
    } catch (SQLException) {
        throw new CouchbaseLiteException(StatusCode.InternalServerError);
    } finally {
        if (cursor != null) {
            cursor.Close();
        }
    }
}
/// <summary>Builds an attachment from its "_attachments" dictionary entry.
/// The entry must carry inline "data", be a "stub", or declare "follows";
/// anything else is rejected as a bad attachment.</summary>
/// <param name="name">The attachment's name within the document.</param>
/// <param name="info">The attachment metadata dictionary.</param>
public AttachmentInternal(string name, IDictionary<string, object> info)
    : this(name, info.GetCast<string>("content_type"))
{
    Length = info.GetCast<long>("length");
    EncodedLength = info.GetCast<long>("encoded_length");
    _digest = info.GetCast<string>("digest");
    if (_digest != null) {
        BlobKey = new BlobKey(_digest);
    }
    string encodingString = info.GetCast<string>("encoding");
    if (encodingString != null) {
        // "gzip" is the only supported encoding.
        if (encodingString.Equals("gzip")) {
            Encoding = AttachmentEncoding.GZIP;
        } else {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadEncoding, TAG,
                "Invalid encoding type ({0}) in ctor", encodingString);
        }
    }
    var data = info.Get("data");
    if (data != null) {
        // If there's inline attachment data, decode and store it:
        if (data is string) {
            _data = Convert.FromBase64String((string)data);
        } else {
            _data = data as IEnumerable<byte>;
        }
        if (_data == null) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadEncoding, TAG,
                "Invalid data type ({0}) in ctor", data.GetType().Name);
        }
        SetPossiblyEncodedLength(_data.LongCount());
    } else if (info.GetCast<bool>("stub", false)) {
        // This item is just a stub; validate and skip it
        if(info.ContainsKey("revpos")) {
            var revPos = info.GetCast("revpos", -1);
            // PouchDB has a bug that generates "revpos":0; allow this (#627)
            if (revPos < 0) {
                throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
                    "Invalid revpos ({0}) in ctor", revPos);
            }
            RevPos = revPos;
        }
    } else if (info.GetCast<bool>("follows", false)) {
        // I can't handle this myself; my caller will look it up from the digest
        if (Digest == null) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
                "follows is true, but the attachment digest is null in ctor");
        }
        if(info.ContainsKey("revpos")) {
            var revPos = info.GetCast("revpos", -1);
            // PouchDB has a bug that generates "revpos":0; allow this (#627)
            if (revPos < 0) {
                throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
                    "Invalid revpos ({0}) in ctor", revPos);
            }
            RevPos = revPos;
        }
    } else {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, TAG,
            "Neither data nor stub nor follows was specified on the attachment data");
    }
}
/// <summary>Builds an attachment from its "_attachments" dictionary entry.
/// The entry must carry inline "data", be a "stub", or declare "follows";
/// anything else raises BadAttachment.</summary>
/// <param name="name">The attachment's name within the document.</param>
/// <param name="info">The attachment metadata dictionary.</param>
public AttachmentInternal(string name, IDictionary<string, object> info)
    : this(name, info.GetCast<string>("content_type"))
{
    Length = info.GetCast<long>("length");
    EncodedLength = info.GetCast<long>("encoded_length");
    _digest = info.GetCast<string>("digest");
    if (_digest != null) {
        BlobKey = new BlobKey(_digest);
    }
    string encodingString = info.GetCast<string>("encoding");
    if (encodingString != null) {
        // "gzip" is the only supported encoding.
        if (encodingString.Equals("gzip")) {
            Encoding = AttachmentEncoding.GZIP;
        } else {
            throw new CouchbaseLiteException(StatusCode.BadEncoding);
        }
    }
    var data = info.Get("data");
    if (data != null) {
        // If there's inline attachment data, decode and store it:
        if (data is string) {
            _data = Convert.FromBase64String((string)data);
        } else {
            _data = data as IEnumerable<byte>;
        }
        if (_data == null) {
            throw new CouchbaseLiteException(StatusCode.BadEncoding);
        }
        SetPossiblyEncodedLength(_data.LongCount());
    } else if (info.GetCast<bool>("stub", false)) {
        // This item is just a stub; validate and skip it
        // Note: this ctor rejects revpos <= 0 (no PouchDB "revpos":0 allowance here).
        if(info.ContainsKey("revpos")) {
            var revPos = info.GetCast<int>("revpos");
            if (revPos <= 0) {
                throw new CouchbaseLiteException(StatusCode.BadAttachment);
            }
            RevPos = revPos;
        }
    } else if (info.GetCast<bool>("follows", false)) {
        // I can't handle this myself; my caller will look it up from the digest
        if (Digest == null) {
            throw new CouchbaseLiteException(StatusCode.BadAttachment);
        }
        if(info.ContainsKey("revpos")) {
            var revPos = info.GetCast<int>("revpos");
            if (revPos <= 0) {
                throw new CouchbaseLiteException(StatusCode.BadAttachment);
            }
            RevPos = revPos;
        }
    } else {
        throw new CouchbaseLiteException(StatusCode.BadAttachment);
    }
}
/// <summary>Resolves an attachment dictionary to the on-disk file URI of its
/// content, checking pending (not-yet-installed) attachments first, then the
/// blob store by digest.</summary>
/// <param name="attachmentDict">The attachment metadata dictionary.</param>
/// <returns>A file URI, or null when the database is closed, there is no
/// digest, or the path cannot be converted to a URI.</returns>
internal Uri FileForAttachmentDict(IDictionary<String, Object> attachmentDict)
{
    if (!IsOpen) {
        Log.W(TAG, "FileForAttachmentDict called on closed database");
        return null;
    }
    var digest = (string)attachmentDict.Get("digest");
    if (digest == null) {
        return null;
    }
    string path = null;
    var pending = PendingAttachmentsByDigest.Get(digest);
    if (pending != null) {
        // Attachment was uploaded in this batch and hasn't been installed yet.
        path = pending.FilePath;
    } else {
        // If it's an installed attachment, ask the blob-store for it:
        var key = new BlobKey(digest);
        path = Attachments.PathForKey(key);
    }
    Uri retVal = null;
    if (!Uri.TryCreate(path, UriKind.RelativeOrAbsolute, out retVal)) {
        return null;
    }
    return retVal;
}
/// <summary>Validates and installs the attachments of an incoming revision:
/// stores inline data, resolves "follows" parts, and replaces "stub" entries
/// with the parent revision's (or an ancestor's) attachment metadata.</summary>
/// <param name="rev">The revision whose attachments are processed (mutated in place).</param>
/// <param name="ancestry">Revision IDs of the ancestors, newest first; may be null.</param>
/// <returns>true on success (including the no-attachments no-op).</returns>
internal bool ProcessAttachmentsForRevision(RevisionInternal rev, IList<string> ancestry)
{
    var revAttachments = rev.GetAttachments();
    if (revAttachments == null) {
        return true; // no-op: no attachments
    }
    // Deletions can't have attachments:
    if (rev.IsDeleted() || revAttachments.Count == 0) {
        var body = rev.GetProperties();
        body.Remove("_attachments");
        rev.SetProperties(body);
        return true;
    }
    var prevRevId = ancestry != null && ancestry.Count > 0 ? ancestry[0] : null;
    int generation = RevisionInternal.GenerationFromRevID(prevRevId) + 1;
    IDictionary<string, object> parentAttachments = null;
    return rev.MutateAttachments((name, attachInfo) =>
    {
        AttachmentInternal attachment = null;
        try {
            attachment = new AttachmentInternal(name, attachInfo);
        } catch(CouchbaseLiteException) {
            // Invalid attachment metadata: drop this entry.
            return null;
        }
        if(attachment.EncodedContent != null) {
            // If there's inline attachment data, decode and store it:
            BlobKey blobKey = new BlobKey();
            if(!Attachments.StoreBlob(attachment.EncodedContent.ToArray(), blobKey)) {
                throw new CouchbaseLiteException(
                    String.Format("Failed to write attachment ' {0}'to disk", name), StatusCode.AttachmentError);
            }
            attachment.BlobKey = blobKey;
        } else if(attachInfo.GetCast<bool>("follows")) {
            // "follows" means the uploader provided the attachment in a separate MIME part.
            // This means it's already been registered in _pendingAttachmentsByDigest;
            // I just need to look it up by its "digest" property and install it into the store:
            InstallAttachment(attachment);
        } else if(attachInfo.GetCast<bool>("stub")) {
            // "stub" on an incoming revision means the attachment is the same as in the parent.
            if(parentAttachments == null && prevRevId != null) {
                // Lazily load (and cache) the parent's attachment dictionary.
                parentAttachments = GetAttachmentsFromDoc(rev.GetDocId(), prevRevId);
                if(parentAttachments == null) {
                    if(Attachments.HasBlobForKey(attachment.BlobKey)) {
                        // Parent revision's body isn't known (we are probably pulling a rev along
                        // with its entire history) but it's OK, we have the attachment already
                        return attachInfo;
                    }
                    // Fall back to searching the rest of the ancestry by revpos.
                    var ancestorAttachment = FindAttachment(name, attachment.RevPos, rev.GetDocId(), ancestry);
                    if(ancestorAttachment != null) {
                        return ancestorAttachment;
                    }
                    throw new CouchbaseLiteException(
                        String.Format("Unable to find 'stub' attachment {0} in history", name),
                        StatusCode.BadAttachment);
                }
            }
            var parentAttachment = parentAttachments == null ? null :
                parentAttachments.Get(name).AsDictionary<string, object>();
            if(parentAttachment == null) {
                throw new CouchbaseLiteException(
                    String.Format("Unable to find 'stub' attachment {0} in history", name),
                    StatusCode.BadAttachment);
            }
            return parentAttachment;
        }
        // Set or validate the revpos:
        if(attachment.RevPos == 0) {
            attachment.RevPos = generation;
        } else if(attachment.RevPos > generation) {
            // An attachment can't claim to come from a future generation.
            throw new CouchbaseLiteException(
                String.Format("Attachment specifies revision generation {0} but document is only at revision generation {1}",
                attachment.RevPos, generation), StatusCode.BadAttachment);
        }
        Debug.Assert(attachment.IsValid);
        return attachment.AsStubDictionary();
    });
}
/// <summary>Decodes a blob-store filename back into its blob key.</summary>
/// <param name="outKey">Receives the decoded key on success.</param>
/// <param name="filename">Full path of the candidate blob file.</param>
/// <returns>true when the filename had the blob extension and was decoded.</returns>
public bool GetKeyForFilename(BlobKey outKey, string filename)
{
    if (!filename.EndsWith(FileExtension)) {
        return false;
    }
    //trim off extension
    // NOTE: Sharpen.Runtime.Substring takes (start, end) indices, Java-style —
    // not (start, length) like string.Substring.
    string rest = Sharpen.Runtime.Substring(filename, path.Length + 1, filename.Length
        - FileExtension.Length);
    outKey.SetBytes(BlobKey.ConvertFromHex(rest));
    return true;
}
/// <summary>Copies a stream into the store: spools it to a temp file, computes
/// its SHA-1 key, then either discards the temp file (blob already stored) or
/// renames it into place.</summary>
/// <param name="inputStream">Source stream; closed on success.</param>
/// <param name="outKey">Receives the computed key, or null on failure.</param>
/// <returns>true when the blob ends up stored under its key.</returns>
public bool StoreBlobStream(Stream inputStream, out BlobKey outKey)
{
    FilePath tmp = null;
    try {
        tmp = FilePath.CreateTempFile(TmpFilePrefix, TmpFileExtension, new FilePath(this.path));
        FileOutputStream fos = new FileOutputStream(tmp);
        try {
            byte[] buffer = new byte[65536];
            int lenRead = ((InputStream)inputStream).Read(buffer);
            while (lenRead > 0) {
                fos.Write(buffer, 0, lenRead);
                lenRead = ((InputStream)inputStream).Read(buffer);
            }
        } finally {
            // Close the temp-file stream even when a read/write throws;
            // the original leaked it on that path.
            fos.Close();
        }
        inputStream.Close();
    } catch (IOException e) {
        // Typo fixed: the original logged "blog" instead of "blob".
        Log.E(Database.Tag, "Error writing blob to tmp file", e);
        outKey = null;
        return false;
    }
    outKey = KeyForBlobFromFile(tmp);
    var keyPath = PathForKey(outKey);
    var file = new FilePath(keyPath);
    if (file.CanRead()) {
        // object with this hash already exists, we should delete tmp file and return true
        tmp.Delete();
    } else {
        // does not exist, we should rename tmp file to this name
        tmp.RenameTo(file);
    }
    return true;
}
/// <summary>Reads the full contents of the blob stored under a key.</summary>
/// <returns>The blob bytes, or null when the file cannot be read.</returns>
public byte[] BlobForKey(BlobKey key)
{
    var blobPath = PathForKey(key);
    byte[] contents = null;
    try {
        contents = GetBytesFromFile(new FilePath(blobPath));
    } catch (IOException e) {
        Log.E(Database.Tag, "Error reading file", e);
    }
    return contents;
}
/// <summary>Validates and installs the attachments of an incoming revision:
/// stores inline data, resolves "follows" parts, and replaces "stub" entries
/// with the parent revision's attachment metadata. Failure details are
/// reported through <paramref name="status"/> rather than exceptions.</summary>
/// <param name="rev">The revision whose attachments are processed (mutated in place).</param>
/// <param name="prevRevId">The ID of the parent revision, or null.</param>
/// <param name="status">Receives the resulting status code; treated as Ok by default.</param>
/// <returns>true on success (including the no-attachments no-op).</returns>
internal bool ProcessAttachmentsForRevision(RevisionInternal rev, string prevRevId, Status status)
{
    if (status == null) {
        status = new Status();
    }
    status.Code = StatusCode.Ok;
    var revAttachments = rev.GetAttachments();
    if (revAttachments == null) {
        return true; // no-op: no attachments
    }
    // Deletions can't have attachments:
    if (rev.IsDeleted() || revAttachments.Count == 0) {
        var body = rev.GetProperties();
        body.Remove("_attachments");
        rev.SetProperties(body);
        return true;
    }
    int generation = RevisionInternal.GenerationFromRevID(prevRevId) + 1;
    IDictionary<string, object> parentAttachments = null;
    return rev.MutateAttachments((name, attachInfo) =>
    {
        AttachmentInternal attachment = null;
        try {
            attachment = new AttachmentInternal(name, attachInfo);
        } catch(CouchbaseLiteException e) {
            // Invalid attachment metadata: drop this entry.
            return null;
        }
        if(attachment.EncodedContent != null) {
            // If there's inline attachment data, decode and store it:
            BlobKey blobKey = new BlobKey();
            if(!Attachments.StoreBlob(attachment.EncodedContent.ToArray(), blobKey)) {
                status.Code = StatusCode.AttachmentError;
                return null;
            }
            attachment.BlobKey = blobKey;
        } else if(attachInfo.GetCast<bool>("follows")) {
            // "follows" means the uploader provided the attachment in a separate MIME part.
            // This means it's already been registered in _pendingAttachmentsByDigest;
            // I just need to look it up by its "digest" property and install it into the store:
            InstallAttachment(attachment, attachInfo);
        } else if(attachInfo.GetCast<bool>("stub")) {
            // "stub" on an incoming revision means the attachment is the same as in the parent.
            if(parentAttachments == null && prevRevId != null) {
                // Lazily load (and cache) the parent's attachment dictionary.
                parentAttachments = GetAttachmentsFromDoc(rev.GetDocId(), prevRevId, status);
                if(parentAttachments == null) {
                    // Preserve any more-specific error already recorded.
                    if(status.Code == StatusCode.Ok || status.Code == StatusCode.NotFound) {
                        status.Code = StatusCode.BadAttachment;
                    }
                    return null;
                }
            }
            var parentAttachment = parentAttachments == null ?
                null : parentAttachments.Get(name).AsDictionary<string, object>();
            if(parentAttachment == null) {
                status.Code = StatusCode.BadAttachment;
                return null;
            }
            return parentAttachment;
        }
        // Set or validate the revpos:
        if(attachment.RevPos == 0) {
            attachment.RevPos = generation;
        } else if(attachment.RevPos >= generation) {
            // An attachment can't claim to come from this or a future generation.
            status.Code = StatusCode.BadAttachment;
            return null;
        }
        Debug.Assert(attachment.IsValid);
        return attachment.AsStubDictionary();
    });
}
/// <summary>Deletes every blob file whose key is not in <paramref name="keysToKeep"/>.</summary>
/// <param name="keysToKeep">Keys of the attachments that must survive.</param>
/// <returns>The number of files deleted.</returns>
public int DeleteBlobsExceptWithKeys(IList<BlobKey> keysToKeep)
{
    int numDeleted = 0;
    FilePath file = new FilePath(path);
    FilePath[] contents = file.ListFiles();
    foreach (FilePath attachment in contents) {
        BlobKey attachmentKey = new BlobKey();
        // Only consider files whose names parse as blob keys; the original
        // ignored this result and compared an uninitialized key, which could
        // delete unrelated files in the store directory.
        if (!GetKeyForFilename(attachmentKey, attachment.GetPath())) {
            continue;
        }
        if (!keysToKeep.Contains(attachmentKey)) {
            bool result = attachment.Delete();
            if (result) {
                ++numDeleted;
            } else {
                // Typo fixed: the original logged "attachmetn".
                Log.E(Database.Tag, "Error deleting attachment");
            }
        }
    }
    return numDeleted;
}
/// <summary>Enumerates the keys of every blob currently stored on disk.</summary>
/// <returns>A set of all valid blob keys in the store directory.</returns>
public ICollection<BlobKey> AllKeys()
{
    ICollection<BlobKey> result = new HashSet<BlobKey>();
    FilePath file = new FilePath(path);
    FilePath[] contents = file.ListFiles();
    foreach (FilePath attachment in contents) {
        if (attachment.IsDirectory()) {
            continue;
        }
        BlobKey attachmentKey = new BlobKey();
        // Only include files whose names parse as blob keys; the original
        // ignored this result and added empty keys for stray files.
        if (GetKeyForFilename(attachmentKey, attachment.GetPath())) {
            result.AddItem(attachmentKey);
        }
    }
    return result;
}
/// <summary>Stores a blob keyed by its SHA-1 digest.</summary>
/// <param name="data">The blob contents.</param>
/// <param name="outKey">Receives the computed key.</param>
/// <returns>true when the blob is already stored or was written successfully.</returns>
public bool StoreBlob(byte[] data, BlobKey outKey)
{
    BlobKey newKey = KeyForBlob(data);
    outKey.SetBytes(newKey.GetBytes());
    string path = PathForKey(outKey);
    FilePath file = new FilePath(path);
    // A readable file with this digest means the blob already exists.
    if (file.CanRead()) {
        return true;
    }
    FileOutputStream fos = null;
    try {
        fos = new FileOutputStream(file);
        fos.Write(data);
    } catch (FileNotFoundException e) {
        Log.E(Database.Tag, "Error opening file for output", e);
        return false;
    } catch (IOException ioe) {
        Log.E(Database.Tag, "Error writing to file", ioe);
        return false;
    } finally {
        if (fos != null) {
            try {
                fos.Close();
            } catch (IOException) {
                // ignore — a failed close after a successful write is non-fatal
            }
        }
    }
    return true;
}
/// <summary>Constructs an "_attachments" dictionary for a revision, to be inserted in its JSON body.</summary>
/// <param name="sequence">Sequence number of the revision (must be positive).</param>
/// <param name="contentOptions">Controls whether data is inlined, stubbed, or marked "follows".</param>
/// <returns>Filename → attachment-metadata map, or null when there are no rows or on SQL error.</returns>
internal IDictionary<String, Object> GetAttachmentsDictForSequenceWithContent(long sequence, DocumentContentOptions contentOptions)
{
    Debug.Assert((sequence > 0));
    Cursor cursor = null;
    var args = new Object[] { sequence };
    try {
        cursor = StorageEngine.RawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", CommandBehavior.SequentialAccess, args);
        if (!cursor.MoveToNext()) {
            return null;
        }
        var result = new Dictionary<String, Object>();
        while (!cursor.IsAfterLast()) {
            var dataSuppressed = false;
            var filename = cursor.GetString(0);
            var keyData = cursor.GetBlob(1);
            var contentType = cursor.GetString(2);
            var length = cursor.GetInt(3);
            var revpos = cursor.GetInt(4);
            var key = new BlobKey(keyData);
            var digestString = "sha1-" + Convert.ToBase64String(keyData);
            var dataBase64 = (string) null;
            if (contentOptions.HasFlag(DocumentContentOptions.IncludeAttachments)) {
                if (contentOptions.HasFlag(DocumentContentOptions.BigAttachmentsFollow) &&
                    length >= Database.BigAttachmentLength) {
                    // Large attachments are delivered out-of-band ("follows").
                    dataSuppressed = true;
                } else {
                    byte[] data = Attachments.BlobForKey(key);
                    if (data != null) {
                        dataBase64 = Convert.ToBase64String(data);
                    } else {
                        // <-- very expensive
                        Log.W(Tag, "Error loading attachment");
                    }
                }
            }
            var attachment = new Dictionary<string, object>();
            // No inline data and not suppressed → emit a stub entry.
            if (!(dataBase64 != null || dataSuppressed)) {
                attachment["stub"] = true;
            }
            if (dataBase64 != null) {
                attachment["data"] = dataBase64;
            }
            if (dataSuppressed) {
                attachment.Put ("follows", true);
            }
            attachment["digest"] = digestString;
            attachment["content_type"] = contentType;
            attachment["length"] = length;
            attachment["revpos"] = revpos;
            result[filename] = attachment;
            cursor.MoveToNext();
        }
        return result;
    } catch (SQLException e) {
        Log.E(Tag, "Error getting attachments for sequence", e);
        return null;
    } finally {
        if (cursor != null) {
            cursor.Close();
        }
    }
}
/// <summary>Call this after all the data has been added.</summary>
/// <remarks>Call this after all the data has been added.</remarks>
public virtual void Finish()
{
    try {
        outStream.Close();
    } catch (IOException e) {
        // Closing failure is non-fatal; the digests are already complete.
        Log.W(Database.Tag, "Exception closing output stream", e);
    }
    // Finalize the digests now that the stream is closed.
    md5DigestResult = md5Digest.Digest();
    blobKey = new BlobKey(sha1Digest.Digest());
}
/// <summary>Inserts a row into the "attachments" table linking a named attachment to a revision sequence.</summary>
/// <param name="sequence">Sequence number of the owning revision row.</param>
/// <param name="name">Attachment filename.</param>
/// <param name="contentType">MIME type to record, may be null.</param>
/// <param name="revpos">Revision position at which the attachment was added.</param>
/// <param name="key">Blob-store key; when null, the key and length columns are left unset.</param>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
internal void InsertAttachmentForSequenceWithNameAndType(long sequence, string name, string contentType, int revpos, BlobKey key)
{
    try
    {
        // TODO: Create Add override and refactor to use initializer syntax.
        var values = new ContentValues();
        values["sequence"] = sequence;
        values["filename"] = name;
        values["type"] = contentType;
        values["revpos"] = revpos;
        if (key != null)
        {
            values.Put("key", key.GetBytes());
            values.Put("length", Attachments.GetSizeOfBlob(key));
        }
        var rowId = StorageEngine.Insert("attachments", null, values);
        if (rowId != -1)
        {
            return;
        }
        var msg = "Insert attachment failed (returned -1)";
        Log.E(Tag, msg);
        throw new CouchbaseLiteException(msg, StatusCode.InternalServerError);
    }
    catch (SQLException e)
    {
        Log.E(Tag, "Error inserting attachment", e);
        throw new CouchbaseLiteException(StatusCode.InternalServerError);
    }
}
/// <summary>Opens a read stream over the stored blob file for the given key.</summary>
/// <param name="key">Blob-store key identifying the attachment file.</param>
/// <returns>An open <see cref="FileStream"/>, or null when the file is unreadable or cannot be opened.</returns>
public Stream BlobStreamForKey(BlobKey key)
{
    var path = PathForKey(key);
    Log.D(Database.Tag, "Blob Path : " + path);
    var blobFile = new FilePath(path);
    // Guard clause: unreadable (or missing) file -> no stream.
    if (!blobFile.CanRead())
    {
        return null;
    }
    try
    {
        return new FileStream(blobFile, FileMode.Open, FileAccess.ReadWrite, FileShare.ReadWrite);
    }
    catch (FileNotFoundException e)
    {
        // CanRead() said yes but the file vanished in between.
        Log.E(Database.Tag, "Unexpected file not found in blob store", e);
        return null;
    }
    catch (Exception e)
    {
        Log.E(Database.Tag, "Cannot new FileStream", e);
        return null;
    }
}
/// <summary>Resolves an attachment metadata dictionary to the on-disk file holding its content.</summary>
/// <param name="attachmentDict">Attachment metadata; only its "digest" entry is consulted.</param>
/// <returns>A file URI for the attachment, or null when there is no digest or the path is malformed.</returns>
internal Uri FileForAttachmentDict(IDictionary<String, Object> attachmentDict)
{
    var digest = (string)attachmentDict.Get("digest");
    if (digest == null)
    {
        return null;
    }
    // Prefer a still-pending (not yet installed) attachment; otherwise ask the blob store.
    var pending = PendingAttachmentsByDigest.Get(digest);
    var filePath = pending != null
        ? pending.FilePath
        : Attachments.PathForKey(new BlobKey(digest));
    try
    {
        return new FilePath(filePath).ToURI().ToURL();
    }
    catch (UriFormatException)
    {
        // Malformed path -> treat as "no file".
        return null;
    }
}
/// <summary>Builds the "_attachments" metadata dictionary for a revision sequence (legacy EnumSet-based overload).</summary>
/// <remarks>
/// For each attachment row of the sequence, emits a per-filename dictionary containing
/// digest/content_type/length/revpos plus either inline base64 "data", a "follows" marker for
/// large attachments, or a "stub" marker. Returns null when there are no rows or on SQL error.
/// NOTE(review): when data is suppressed this version sets BOTH "stub" and "follows" on the
/// entry, whereas the newer DocumentContentOptions overload sets only "follows" — confirm
/// which behavior consumers expect before unifying.
/// </remarks>
public IDictionary<string, object> GetAttachmentsDictForSequenceWithContent(long sequence, EnumSet<Database.TDContentOptions> contentOptions)
{
    System.Diagnostics.Debug.Assert((sequence > 0));
    Cursor cursor = null;
    string[] args = new string[] { System.Convert.ToString(sequence) };
    try
    {
        cursor = database.RawQuery("SELECT filename, key, type, length, revpos FROM attachments WHERE sequence=?", args);
        // No attachment rows -> no dictionary.
        if (!cursor.MoveToNext())
        {
            return null;
        }
        IDictionary<string, object> result = new Dictionary<string, object>();
        while (!cursor.IsAfterLast())
        {
            bool dataSuppressed = false;
            int length = cursor.GetInt(3);
            byte[] keyData = cursor.GetBlob(1);
            BlobKey key = new BlobKey(keyData);
            // Digest string is the raw key bytes base64-encoded with a "sha1-" prefix.
            string digestString = "sha1-" + Base64.EncodeBytes(keyData);
            string dataBase64 = null;
            if (contentOptions.Contains(Database.TDContentOptions.TDIncludeAttachments))
            {
                if (contentOptions.Contains(Database.TDContentOptions.TDBigAttachmentsFollow) && length >= Database.kBigAttachmentLength)
                {
                    // Too large to inline; will be delivered as a separate MIME part.
                    dataSuppressed = true;
                }
                else
                {
                    byte[] data = attachments.BlobForKey(key);
                    if (data != null)
                    {
                        // <-- very expensive: base64-encodes the entire attachment in memory.
                        dataBase64 = Base64.EncodeBytes(data);
                    }
                    else
                    {
                        // Blob missing from the store; fall back to an entry without data.
                        Log.W(Database.Tag, "Error loading attachment");
                    }
                }
            }
            IDictionary<string, object> attachment = new Dictionary<string, object>();
            if (dataBase64 == null || dataSuppressed == true)
            {
                attachment.Put("stub", true);
            }
            if (dataBase64 != null)
            {
                attachment.Put("data", dataBase64);
            }
            if (dataSuppressed == true)
            {
                attachment.Put("follows", true);
            }
            attachment.Put("digest", digestString);
            string contentType = cursor.GetString(2);
            attachment.Put("content_type", contentType);
            attachment.Put("length", length);
            attachment.Put("revpos", cursor.GetInt(4));
            string filename = cursor.GetString(0);
            result.Put(filename, attachment);
            cursor.MoveToNext();
        }
        return result;
    }
    catch (SQLException e)
    {
        Log.E(Database.Tag, "Error getting attachments for sequence", e);
        return null;
    }
    finally
    {
        if (cursor != null)
        {
            cursor.Close();
        }
    }
}
/// <summary>
/// Given a revision, read its _attachments dictionary (if any), convert each attachment to a
/// AttachmentInternal object, and return a dictionary mapping names->CBL_Attachments.
/// </summary>
/// <remarks>
/// Inline "data" entries are base64-decoded and stored in the blob store; "follows" entries are
/// installed from the pending-attachments registry; anything else must be a valid "stub" (with a
/// positive revpos) and is skipped. Also records gzip encoding and length/revpos metadata.
/// </remarks>
/// <param name="rev">Revision whose "_attachments" property is to be parsed; deleted revisions yield an empty result.</param>
/// <returns>Map of attachment name to <see cref="AttachmentInternal"/>; empty when there are no attachments.</returns>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException">
/// On bad base64 data, blob-store failure, a non-stub entry without data, an invalid revpos, or an unknown encoding.
/// </exception>
internal IDictionary<String, AttachmentInternal> GetAttachmentsFromRevision(RevisionInternal rev)
{
    var revAttachments = rev.GetPropertyForKey("_attachments").AsDictionary<string, object>();
    if (revAttachments == null || revAttachments.Count == 0 || rev.IsDeleted())
    {
        return new Dictionary<string, AttachmentInternal>();
    }
    var attachments = new Dictionary<string, AttachmentInternal>();
    foreach (var name in revAttachments.Keys)
    {
        var attachInfo = revAttachments.Get(name).AsDictionary<string, object>();
        var contentType = (string)attachInfo.Get("content_type");
        var attachment = new AttachmentInternal(name, contentType);
        var newContentBase64 = (string)attachInfo.Get("data");
        if (newContentBase64 != null)
        {
            // If there's inline attachment data, decode and store it:
            byte[] newContents;
            try
            {
                newContents = StringUtils.ConvertFromUnpaddedBase64String(newContentBase64);
            }
            catch (IOException e)
            {
                throw new CouchbaseLiteException(e, StatusCode.BadEncoding);
            }
            attachment.Length = newContents.Length;
            var outBlobKey = new BlobKey();
            var storedBlob = Attachments.StoreBlob(newContents, outBlobKey);
            attachment.BlobKey = outBlobKey;
            if (!storedBlob)
            {
                throw new CouchbaseLiteException(StatusCode.AttachmentError);
            }
        }
        else
        {
            if (attachInfo.ContainsKey("follows") && ((bool)attachInfo.Get("follows")))
            {
                // "follows" means the uploader provided the attachment in a separate MIME part.
                // This means it's already been registered in _pendingAttachmentsByDigest;
                // I just need to look it up by its "digest" property and install it into the store:
                InstallAttachment(attachment, attachInfo);
            }
            else
            {
                // This item is just a stub; validate and skip it
                if (((bool)attachInfo.Get("stub")) == false)
                {
                    throw new CouchbaseLiteException("Expected this attachment to be a stub", StatusCode.BadAttachment);
                }
                var revPos = Convert.ToInt64(attachInfo.Get("revpos"));
                if (revPos <= 0)
                {
                    throw new CouchbaseLiteException("Invalid revpos: " + revPos, StatusCode.BadAttachment);
                }
                continue;
            }
        }
        // Handle encoded attachment:
        string encodingStr = (string)attachInfo.Get("encoding");
        if (encodingStr != null && encodingStr.Length > 0)
        {
            if (Runtime.EqualsIgnoreCase(encodingStr, "gzip"))
            {
                attachment.Encoding = AttachmentEncoding.GZIP;
            }
            else
            {
                // Fixed typo in the error message ("Unnkown" -> "Unknown").
                throw new CouchbaseLiteException("Unknown encoding: " + encodingStr, StatusCode.BadEncoding);
            }
            // The stored length is the encoded size; "length" (when present) is the decoded size.
            attachment.EncodedLength = attachment.Length;
            if (attachInfo.ContainsKey("length"))
            {
                attachment.Length = attachInfo.GetCast<long>("length");
            }
        }
        if (attachInfo.ContainsKey("revpos"))
        {
            var revpos = Convert.ToInt32(attachInfo.Get("revpos"));
            attachment.RevPos = revpos;
        }
        attachments[name] = attachment;
    }
    return attachments;
}
/// <summary>
/// Uploads a revision with "follows" attachments as a single multipart/related PUT:
/// the JSON revision body first, then one part per attachment streamed from the blob store.
/// </summary>
/// <param name="revision">Revision to upload; its "_attachments" dictionary drives the MIME parts.</param>
/// <returns>
/// true when a multipart request was dispatched; false when no attachment had "follows" set
/// (or a blob file was missing), in which case the caller must upload the revision another way.
/// </returns>
private bool UploadMultipartRevision(RevisionInternal revision)
{
    MultipartContent multiPart = null;
    var revProps = revision.GetProperties();
    var attachments = revProps.Get("_attachments").AsDictionary<string,object>();
    foreach (var attachmentKey in attachments.Keys)
    {
        var attachment = attachments.Get(attachmentKey).AsDictionary<string,object>();
        if (attachment.ContainsKey("follows"))
        {
            if (multiPart == null)
            {
                // Lazily create the container on the first "follows" attachment; the JSON
                // revision body must be the first part of the multipart/related payload.
                multiPart = new MultipartContent("related");
                try
                {
                    var json = Manager.GetObjectMapper().WriteValueAsString(revProps);
                    var utf8charset = Encoding.UTF8;
                    //multiPart.Add(new StringContent(json, utf8charset, "application/json"), "param1");
                    var jsonContent = new StringContent(json, utf8charset, "application/json");
                    //jsonContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment");
                    multiPart.Add(jsonContent);
                }
                catch (IOException e)
                {
                    throw new ArgumentException("Not able to serialize revision properties into a multipart request content.", e);
                }
            }
            var blobStore = LocalDatabase.Attachments;
            var base64Digest = (string)attachment.Get("digest");
            var blobKey = new BlobKey(base64Digest);
            var inputStream = blobStore.BlobStreamForKey(blobKey);
            if (inputStream == null)
            {
                // Missing blob: abandon the whole multipart attempt (multiPart = null makes
                // this method return false so the revision is uploaded some other way).
                Log.W(TAG, "Unable to find blob file for blobKey: " + blobKey + " - Skipping upload of multipart revision.");
                multiPart = null;
            }
            else
            {
                string contentType = null;
                if (attachment.ContainsKey("content_type"))
                {
                    contentType = (string)attachment.Get("content_type");
                }
                else
                {
                    if (attachment.ContainsKey("content-type"))
                    {
                        // Legacy/wrong field name from old clients; logged but intentionally not used.
                        var message = string.Format("Found attachment that uses content-type"
                            + " field name instead of content_type (see couchbase-lite-android"
                            + " issue #80): " + attachment);
                        Log.W(TAG, message);
                    }
                }
                var content = new StreamContent(inputStream);
                content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
                {
                    FileName = attachmentKey
                };
                content.Headers.ContentType = new MediaTypeHeaderValue(contentType ?? "application/octet-stream");
                multiPart.Add(content);
            }
        }
    }
    if (multiPart == null)
    {
        return false;
    }
    var path = string.Format("/{0}?new_edits=false", revision.GetDocId());
    // TODO: need to throttle these requests
    Log.D(TAG, "Uploading multipart request. Revision: " + revision);
    SafeAddToChangesCount(1);
    SendAsyncMultipartRequest(HttpMethod.Put, path, multiPart, (result, e) =>
    {
        if (e != null)
        {
            var httpError = e as HttpResponseException;
            if (httpError != null)
            {
                // 415 means the server can't take multipart: fall back to inline-JSON upload.
                if (httpError.StatusCode == System.Net.HttpStatusCode.UnsupportedMediaType)
                {
                    _dontSendMultipart = true;
                    UploadJsonRevision(revision);
                }
                // NOTE(review): HTTP errors other than 415 fall through silently here —
                // LastError is not set and RevisionFailed() is not called; confirm this is intended.
            }
            else
            {
                Log.E (TAG, "Exception uploading multipart request", e);
                LastError = e;
                RevisionFailed();
            }
        }
        else
        {
            Log.D (TAG, "Uploaded multipart request. Result: " + result);
            SafeIncrementCompletedChangesCount();
            RemovePending(revision);
        }
    });
    return true;
}
/// <summary>Checks whether the stored blob for the given key begins with the gzip magic number.</summary>
/// <param name="key">Blob-store key identifying the attachment file.</param>
/// <returns>true when the file's first two bytes match <c>GZIPInputStream.GzipMagic</c>; false otherwise (including unreadable files and read errors).</returns>
public bool IsGZipped(BlobKey key)
{
    var magic = 0;
    var path = PathForKey(key);
    var file = new FilePath(path);
    if (file.CanRead())
    {
        try
        {
            var raf = new RandomAccessFile(file, "r");
            try
            {
                // Little-endian: first byte is the low byte, second byte the high byte.
                magic = raf.Read() & unchecked((0xff)) | ((raf.Read() << 8) & unchecked((0xff00)));
            }
            finally
            {
                // Fix: close in finally so the handle is not leaked when Read() throws.
                raf.Close();
            }
        }
        catch (Exception e)
        {
            Runtime.PrintStackTrace(e, Console.Error);
        }
    }
    return magic == GZIPInputStream.GzipMagic;
}
/// <summary>
/// Legacy (Sharpen-ported) variant: reads a revision's "_attachments" dictionary and converts
/// each entry into an <c>AttachmentInternal</c>, storing inline data in the blob store,
/// installing "follows" parts from the pending registry, and validating/skipping stubs.
/// </summary>
/// <param name="rev">Revision to inspect; deleted revisions (or no attachments) yield an empty map.</param>
/// <returns>Map of attachment name to AttachmentInternal.</returns>
internal IDictionary<string, AttachmentInternal> GetAttachmentsFromRevision(RevisionInternal rev)
{
    IDictionary<string, object> revAttachments = (IDictionary<string, object>)rev.GetPropertyForKey("_attachments");
    if (revAttachments == null || revAttachments.Count == 0 || rev.IsDeleted())
    {
        return new Dictionary<string, AttachmentInternal>();
    }
    IDictionary<string, AttachmentInternal> attachments = new Dictionary<string, AttachmentInternal>();
    foreach (string name in revAttachments.Keys)
    {
        IDictionary<string, object> attachInfo = (IDictionary<string, object>)revAttachments.Get(name);
        string contentType = (string)attachInfo.Get("content_type");
        AttachmentInternal attachment = new AttachmentInternal(name, contentType);
        string newContentBase64 = (string)attachInfo.Get("data");
        if (newContentBase64 != null)
        {
            // If there's inline attachment data, decode and store it:
            byte[] newContents;
            try
            {
                newContents = Base64.Decode(newContentBase64);
            }
            catch (IOException e)
            {
                throw new CouchbaseLiteException(e, Status.BadEncoding);
            }
            attachment.SetLength(newContents.Length);
            BlobKey outBlobKey = new BlobKey();
            bool storedBlob = GetAttachments().StoreBlob(newContents, outBlobKey);
            attachment.SetBlobKey(outBlobKey);
            if (!storedBlob)
            {
                throw new CouchbaseLiteException(Status.StatusAttachmentError);
            }
        }
        else
        {
            if (attachInfo.ContainsKey("follows") && ((bool)attachInfo.Get("follows")) == true)
            {
                // "follows" means the uploader provided the attachment in a separate MIME part.
                // This means it's already been registered in _pendingAttachmentsByDigest;
                // I just need to look it up by its "digest" property and install it into the store:
                InstallAttachment(attachment, attachInfo);
            }
            else
            {
                // This item is just a stub; validate and skip it
                // NOTE(review): the cast throws NullReferenceException when "stub" is absent —
                // confirm whether a CouchbaseLiteException would be the intended failure mode.
                if (((bool)attachInfo.Get("stub")) == false)
                {
                    throw new CouchbaseLiteException("Expected this attachment to be a stub", Status.BadAttachment);
                }
                int revPos = ((int)attachInfo.Get("revpos"));
                if (revPos <= 0)
                {
                    throw new CouchbaseLiteException("Invalid revpos: " + revPos, Status.BadAttachment);
                }
                continue;
            }
        }
        // Handle encoded attachment:
        string encodingStr = (string)attachInfo.Get("encoding");
        if (encodingStr != null && encodingStr.Length > 0)
        {
            if (Sharpen.Runtime.EqualsIgnoreCase(encodingStr, "gzip"))
            {
                attachment.SetEncoding(AttachmentInternal.AttachmentEncoding.AttachmentEncodingGZIP);
            }
            else
            {
                // NOTE(review): "Unnkown" is a typo in this runtime message (cannot be changed in a
                // doc-only pass); the modern overload spells it the same way today.
                throw new CouchbaseLiteException("Unnkown encoding: " + encodingStr, Status.BadEncoding);
            }
            // Stored length is the encoded size; "length" (when present) is the decoded size.
            attachment.SetEncodedLength(attachment.GetLength());
            if (attachInfo.ContainsKey("length"))
            {
                Number attachmentLength = (Number)attachInfo.Get("length");
                attachment.SetLength(attachmentLength);
            }
        }
        if (attachInfo.ContainsKey("revpos"))
        {
            attachment.SetRevpos((int)attachInfo.Get("revpos"));
        }
        else
        {
            // Default revpos when the metadata omits it.
            attachment.SetRevpos(1);
        }
        attachments.Put(name, attachment);
    }
    return attachments;
}
/// <summary>Maps a blob key to the absolute path of its backing file in the store directory.</summary>
/// <param name="key">Blob-store key; its raw bytes are hex-encoded to form the filename.</param>
/// <returns>store path + separator + hex(key) + file extension.</returns>
public string PathForKey(BlobKey key)
{
    var hexKey = BlobKey.ConvertToHex(key.GetBytes());
    return path + FilePath.separator + hexKey + FileExtension;
}
/// <summary>Computes the blob-store key (SHA-1 digest) of a file's contents.</summary>
/// <param name="file">File to hash; read in 64 KiB chunks.</param>
/// <returns>
/// A <see cref="BlobKey"/> over the SHA-1 digest, or null when the SHA-1 algorithm is
/// unavailable. On a read error the digest of whatever was read so far is still returned
/// (preserving the original best-effort behavior).
/// </returns>
public static BlobKey KeyForBlobFromFile(FileInfo file)
{
    MessageDigest md;
    try
    {
        md = MessageDigest.GetInstance("SHA-1");
    }
    catch (NoSuchAlgorithmException)
    {
        Log.E(Database.Tag, "Error, SHA-1 digest is unavailable.");
        return null;
    }
    // (Removed a dead 40-byte pre-allocation: the digest array comes from md.Digest().)
    try
    {
        var fis = new FileInputStream(file);
        try
        {
            byte[] buffer = new byte[65536];
            int lenRead = fis.Read(buffer);
            while (lenRead > 0)
            {
                md.Update(buffer, 0, lenRead);
                lenRead = fis.Read(buffer);
            }
        }
        finally
        {
            // Fix: close in finally so the stream isn't leaked when Read() throws.
            fis.Close();
        }
    }
    catch (IOException)
    {
        // Fixed typo in the log message ("readin" -> "reading").
        Log.E(Database.Tag, "Error reading tmp file to compute key");
    }
    return new BlobKey(md.Digest());
}
/// <summary>
/// Verifies that BlobStoreWriter streams data into the blob store: appending "foo" must
/// produce the known SHA-1 digest, and after Install() the blob must be retrievable by key.
/// </summary>
public void TestStreamAttachmentBlobStoreWriter()
{
    var attachments = database.Attachments;
    var blobWriter = new BlobStoreWriter(attachments);
    var testBlob = "foo";
    blobWriter.AppendData(Encoding.UTF8.GetBytes(testBlob));
    blobWriter.Finish();

    // Fix: Assert.AreEqual takes (expected, actual) — the original had them swapped,
    // which produces a misleading failure message.
    var sha1Base64Digest = "sha1-C+7Hteo/D9vJXQ3UfzxbwnXaijM=";
    Assert.AreEqual(sha1Base64Digest, blobWriter.SHA1DigestString());

    // install it
    blobWriter.Install();

    // look it up in blob store and make sure it's there
    var blobKey = new BlobKey(sha1Base64Digest);
    var blob = attachments.BlobForKey(blobKey);
    // (Dropped a redundant .ToArray() on the byte[] returned by GetBytes.)
    Assert.IsTrue(Arrays.Equals(Encoding.UTF8.GetBytes(testBlob), blob));
}
/// <summary>Returns the on-disk size, in bytes, of the blob stored under the given key.</summary>
/// <param name="key">Blob-store key identifying the attachment file.</param>
/// <returns>The file length reported by the filesystem.</returns>
public long GetSizeOfBlob(BlobKey key)
{
    var blobFile = new FilePath(PathForKey(key));
    return blobFile.Length();
}