/// <summary>
/// Stores the given data in the blob store and writes the computed key into
/// <paramref name="outKey"/>. If a usable copy already exists on disk, nothing is written.
/// </summary>
public void StoreBlob(byte[] data, BlobKey outKey)
{
    var computedKey = KeyForBlob(data);
    outKey.Bytes = computedKey.Bytes;
    var targetPath = PathForKey(outKey);

    // Skip the write when a non-offline copy of this blob already exists on disk
    var alreadyStored = File.Exists(targetPath) &&
        ((File.GetAttributes(targetPath) & FileAttributes.Offline) == 0);
    if (alreadyStored) {
        Log.To.Database.V(TAG, "Blob {0} already exists in store, no action needed", computedKey.Base64Digest());
        return;
    }

    FileStream outputStream = null;
    try {
        outputStream = File.Open(targetPath, FileMode.Create);
        outputStream.Write(data, 0, data.Length);
    } catch (FileNotFoundException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, StatusCode.AttachmentError, TAG,
            "Unable to get file for output");
    } catch (IOException ioe) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, ioe, StatusCode.AttachmentError, TAG,
            "Unable to write to output file");
    } finally {
        if (outputStream != null) {
            try {
                outputStream.Dispose();
            } catch (IOException) {
                // Dispose failures are deliberately non-fatal here: either the data was
                // written successfully or an error has already been thrown above
            }
        }
    }
}
/// <summary>
/// Runs the <see cref="Couchbase.Lite.Query"/> and returns an enumerator over the result rows.
/// </summary>
/// <exception cref="T:Couchbase.Lite.CouchbaseLiteException">
/// Thrown if an issue occurs while executing the <see cref="Couchbase.Lite.Query"/>.
/// </exception>
public virtual QueryEnumerator Run()
{
    Log.To.Query.I(TAG, "{0} running...", this);
    Database.Open();

    ValueTypePtr<long> lastSequence = 0;
    var name = (View != null) ? View.Name : null;
    var options = QueryOptions;
    IEnumerable<QueryRow> resultRows = null;

    // Run the view query inside a transaction, recording the sequence it reflects
    var ok = Database.RunInTransaction(() =>
    {
        resultRows = Database.QueryViewNamed(name, options, 0, lastSequence);
        LastSequence = lastSequence;
        return true;
    });

    if (!ok) {
        throw Misc.CreateExceptionAndLog(Log.To.Query, StatusCode.DbError, TAG,
            "Failed to query view named {0}", name);
    }

    return new QueryEnumerator(Database, resultRows, lastSequence);
}
/// <summary>
/// Scans a zipped database archive for the first entry whose name carries a recognized
/// database extension, and returns the database name and extension.
/// </summary>
/// <param name="compressedStream">A seekable stream over the zip archive; rewound to the
/// beginning before returning.</param>
/// <returns>A tuple of (database name without extension, extension).</returns>
private Tuple<string, string> GetDbNameAndExtFromZip(Stream compressedStream)
{
    string dbName = null;
    string extension = null;
    using (var zipStream = new ZipInputStream(compressedStream) { IsStreamOwner = false }) {
        ZipEntry next;
        while ((next = zipStream.GetNextEntry()) != null) {
            // Only the last path segment matters.  Using Path.GetFileName instead of
            // materializing FileInfo/DirectoryInfo objects avoids needless allocation and
            // avoids exceptions from path validation on unusual entry names.  Directory
            // entries end with a separator, so trim it first to keep the old
            // DirectoryInfo.Name behavior (returning the final segment).
            var entryName = Path.GetFileName(next.Name.TrimEnd('/', '\\'));
            if (ContainsExtension(entryName)) {
                dbName = Path.GetFileNameWithoutExtension(entryName);
                extension = Path.GetExtension(entryName);
                break;
            }
        }
    }

    if (dbName == null) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadParam, TAG,
            "No database found in zip file");
    }

    // Rewind so the caller can read the archive again from the start
    compressedStream.Seek(0, SeekOrigin.Begin);
    return Tuple.Create(dbName, extension);
}
/// <summary>
/// Reads the entire input stream into the blob's output stream, updating the running
/// SHA-1/MD5 digests and the total length.  Takes ownership of (and always closes)
/// <paramref name="inputStream"/>.
/// </summary>
internal void Read(Stream inputStream)
{
    byte[] buffer = new byte[16384];
    int len;
    length = 0;
    try {
        while ((len = inputStream.Read(buffer, 0, buffer.Length)) > 0) {
            // The old code aliased `buffer` into a pointless `dataToWrite` local; write
            // the buffer directly.
            outStream.Write(buffer, 0, len);
            sha1Digest.Update(buffer, 0, len);
            md5Digest.Update(buffer, 0, len);
            length += len;
        }
    } catch (IOException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, Tag, "Unable to read from stream");
    } finally {
        try {
            inputStream.Close();
        } catch (IOException e) {
            Log.To.Database.W(Tag, "Exception closing input stream, continuing...", e);
        }
    }
}
/// <summary>
/// Advances the streaming JSON reader by one token.  Returns false (instead of throwing)
/// when the remote endpoint hangs up or the read is cancelled.
/// </summary>
/// <exception cref="T:Couchbase.Lite.CouchbaseLiteException">Thrown on a genuine parse error.</exception>
public bool Read()
{
    try {
        return _textReader?.Read() == true;
    } catch (IOException) {
        Log.To.NoDomain.I(Tag, "Remote endpoint hung up, returning false for Read()");
        return false;
    } catch (NullReferenceException) {
        // For some reason disposing the reader while it is blocked on a read will cause
        // an NRE
        Log.To.NoDomain.I(Tag, "Streaming read cancelled, returning false for Read()...");
        return false;
    } catch (JsonReaderException) {
        // Malformed JSON: use a dedicated catch clause rather than a type test inside
        // a general Exception handler
        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, StatusCode.BadJson, TAG,
            "Error reading from streaming parser");
    } catch (Exception e) {
        // A socket interrupted underneath the reader means the read was cancelled, not an error
        if ((e.InnerException as SocketException)?.SocketErrorCode == SocketError.Interrupted) {
            Log.To.NoDomain.I(Tag, "Streaming read cancelled, returning false for Read()...");
            return false;
        }

        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, TAG, "Error reading from streaming parser");
    }
}
/// <summary>
/// Deserializes a JSON string into an object of the requested type.
/// </summary>
/// <exception cref="T:Couchbase.Lite.CouchbaseLiteException">Thrown when the JSON is invalid.</exception>
public T DeserializeObject<T>(string json)
{
    try {
        return JsonConvert.DeserializeObject<T>(json, settings);
    } catch (JsonException e) {
        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, StatusCode.BadJson, TAG,
            "Error deserializing json ({0})",
            new SecureLogString(json, LogMessageSensitivity.PotentiallyInsecure));
    }
}
/// <summary>
/// Advances the streaming JSON reader by one token; returns false if no reader is attached.
/// </summary>
/// <exception cref="T:Couchbase.Lite.CouchbaseLiteException">Thrown when reading fails.</exception>
public bool Read()
{
    try {
        if (_textReader == null) {
            return false;
        }

        return _textReader.Read();
    } catch (Exception e) {
        if (e is JsonReaderException) {
            throw Misc.CreateExceptionAndLog(Log.To.NoDomain, StatusCode.BadJson, TAG,
                "Error reading from streaming parser");
        }

        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, TAG, "Error reading from streaming parser");
    }
}
/// <summary>
/// Goes through an _attachments dictionary and replaces any values that are Attachment objects
/// with proper JSON metadata dicts.
/// </summary>
/// <remarks>
/// Goes through an _attachments dictionary and replaces any values that are Attachment objects
/// with proper JSON metadata dicts. It registers the attachment bodies with the blob store and sets
/// the metadata 'digest' and 'follows' properties accordingly.
/// </remarks>
internal static IDictionary<string, object> InstallAttachmentBodies(IDictionary<String, Object> attachments, Database database)
{
    var updatedAttachments = new Dictionary<string, object>();
    // Iterate the pairs directly; the old code walked Keys and then did a second
    // TryGetValue lookup per key (and ignored its return value).
    foreach (var pair in attachments) {
        var name = pair.Key;
        var value = pair.Value;
        if (value is Attachment) {
            var attachment = (Attachment)value;
            var metadataMutable = new AttachmentMetadataDictionary(attachment.Metadata);
            var body = attachment.Body;
            if (body != null) {
                // Copy attachment body into the database's blob store:
                var writer = BlobStoreWriterForBody(body, database);
                metadataMutable[AttachmentMetadataDictionaryKeys.Length] = (long)writer.GetLength();
                metadataMutable[AttachmentMetadataDictionaryKeys.Digest] = writer.SHA1DigestString();
                metadataMutable[AttachmentMetadataDictionaryKeys.Follows] = true;
                var errMsg = metadataMutable.Validate();
                if (errMsg != null) {
                    throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadAttachment, Tag,
                        "Error installing attachment body ({0})", errMsg);
                }

                database.RememberAttachmentWriter(writer, writer.SHA1DigestString());
            }

            attachment.Dispose();
            updatedAttachments[name] = metadataMutable;
        } else if (value is AttachmentInternal) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.BadParam, Tag,
                "AttachmentInternal objects not expected here. Could indicate a bug");
        } else if (value != null) {
            updatedAttachments[name] = value;
        }
    }

    return updatedAttachments;
}
/// <summary>Appends data to the blob.</summary>
/// <remarks>Appends data to the blob. Call this when new data is available.</remarks>
public void AppendData(IEnumerable<Byte> data)
{
    var bytes = data.ToArray();
    // Keep the running length and digests in sync with everything written so far
    length += bytes.LongLength;
    sha1Digest.Update(bytes);
    md5Digest.Update(bytes);
    try {
        outStream.Write(bytes, 0, bytes.Length);
    } catch (IOException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, Tag, "Unable to write to stream");
    }
}
// Attempt to deserialize the stored JSON bytes into the object form on first use
private void LazyLoadObjectFromJson()
{
    if (_json == null) {
        Log.To.NoDomain.E(Tag, "Both json and object are null for this body, throwing...");
        throw new InvalidOperationException("Attempt to lazy load from a body with no data");
    }

    try {
        _jsonObject = Manager.GetObjectMapper().ReadValue<IDictionary<string, object>>(_json);
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, Tag, "Error deserializing {0}", this);
    }
}
/// <summary>
/// Deserializes the next object from the incremental parse stream.  Only valid after a
/// call to StartIncrementalParse; otherwise logs a warning and returns the default value.
/// </summary>
public T DeserializeNextObject<T>()
{
    if (_textReader == null) {
        Log.To.Sync.W(TAG, "DeserializeNextObject is only valid after a call to StartIncrementalParse, " +
            "returning null");
        return default(T);
    }

    try {
        var token = JToken.ReadFrom(_textReader);
        return token.ToObject<T>();
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, TAG, "Error deserializing from streaming parser");
    }
}
// Attempt to serialize _jsonObject into the JSON byte form on first use
private void LazyLoadJsonFromObject()
{
    if (_jsonObject == null) {
        Log.To.NoDomain.E(Tag, "Both json and object are null for this body, throwing...");
        throw new InvalidOperationException("Attempt to lazy load from a body with no data");
    }

    try {
        _json = Manager.GetObjectMapper().WriteValueAsBytes(_jsonObject).ToArray();
    } catch (Exception e) {
        // The previous message said "pretty JSON" — a copy-paste from the pretty-printing
        // path; this method serializes the body as plain JSON.
        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, Tag, "Error writing body as JSON");
    }
}
/// <summary>
/// Deserializes an object of the requested type from a JSON stream.
/// </summary>
/// <remarks>
/// NOTE(review): disposing the StreamReader here also closes the underlying stream —
/// confirm callers do not need <paramref name="json"/> afterwards.
/// </remarks>
public T Deserialize<T>(Stream json)
{
    using (var sr = new StreamReader(json))
    using (var jsonReader = new JsonTextReader(sr)) {
        var serializer = JsonSerializer.Create(settings);
        try {
            return serializer.Deserialize<T>(jsonReader);
        } catch (JsonException e) {
            throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, TAG, "Error deserializing json from stream");
        }
    }
}
/// <summary>
/// Deletes a database that can no longer be opened by other means (lost password, etc).
/// Note: will only work for databases created with version 1.2 or later.
/// </summary>
/// <param name="name">The name of the database to delete</param>
public void DeleteDatabase(string name)
{
    var dbFolder = new DirectoryInfo(Directory)
        .GetDirectories($"{name}.cblite2")
        .FirstOrDefault();
    if (dbFolder == null) {
        Log.To.Database.I(TAG, "{0} does not exist and cannot be deleted, returning...", name);
        return;
    }

    try {
        dbFolder.Delete(true);
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error deleting {0}", name);
    }
}
/// <summary>
/// Sets the attachment with the given name.
/// </summary>
/// <remarks>
/// Sets the <see cref="Couchbase.Lite.Attachment"/> with the given name.
/// The <see cref="Couchbase.Lite.Attachment"/> data will be written to
/// the <see cref="Couchbase.Lite.Database"/> when the
/// <see cref="Couchbase.Lite.Revision"/> is saved.
/// </remarks>
/// <param name="name">The name of the <see cref="Couchbase.Lite.Attachment"/> to set.</param>
/// <param name="contentType">The content-type of the <see cref="Couchbase.Lite.Attachment"/>.</param>
/// <param name="contentUrl">The URL of the <see cref="Couchbase.Lite.Attachment"/> content.</param>
public void SetAttachment(String name, String contentType, Uri contentUrl)
{
    try {
        byte[] inputBytes;
        var request = WebRequest.Create(contentUrl);
        using (var response = request.GetResponse())
        using (var inputStream = response.GetResponseStream()) {
            // The old code read inputStream.Length into an unused local; response streams
            // are typically non-seekable and throw NotSupportedException for Length, so
            // that call was both dead and dangerous.
            inputBytes = inputStream.ReadAllBytes();
        }

        SetAttachment(name, contentType, inputBytes);
    } catch (IOException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, Tag, "Error opening stream for url: {0}", contentUrl);
    }
}
/// <summary>
/// Replaces (or installs) a database from a v1 database file stream plus optional
/// attachment streams, upgrading it to the current format.
/// </summary>
/// <param name="name">The name to give the replaced database.</param>
/// <param name="databaseStream">A stream over the v1 database file.</param>
/// <param name="attachmentStreams">Optional attachment name → content streams.</param>
public void ReplaceDatabase(string name, Stream databaseStream, IDictionary<string, Stream> attachmentStreams)
{
    if (name == null) {
        Log.To.Database.E(TAG, "name cannot be null in ReplaceDatabase, throwing...");
        throw new ArgumentNullException("name");
    }

    if (databaseStream == null) {
        Log.To.Database.E(TAG, "databaseStream cannot be null in ReplaceDatabase, throwing...");
        throw new ArgumentNullException("databaseStream");
    }

    try {
        var tempPath = Path.Combine(Path.GetTempPath(), name + "-upgrade");
        if (System.IO.Directory.Exists(tempPath)) {
            System.IO.Directory.Delete(tempPath, true);
        }

        System.IO.Directory.CreateDirectory(tempPath);
        var dbFilePath = Path.Combine(tempPath, name + DatabaseSuffixv1);

        // `using` guarantees the file handle is released even if CopyTo throws;
        // the old code leaked the handle on a copy failure.
        using (var fileStream = File.OpenWrite(dbFilePath)) {
            databaseStream.CopyTo(fileStream);
        }

        var success = UpgradeDatabase(new FileInfo(dbFilePath));
        if (!success) {
            Log.To.Upgrade.W(TAG, "Unable to replace database (upgrade failed)");
            System.IO.Directory.Delete(tempPath, true);
            return;
        }

        System.IO.Directory.Delete(tempPath, true);

        var db = GetDatabase(name, true);
        var attachmentsPath = db.AttachmentStorePath;
        if (attachmentStreams != null) {
            StreamUtils.CopyStreamsToFolder(attachmentStreams, attachmentsPath);
        }
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error replacing database");
    }
}
/// <summary>
/// Returns a serialized JSON byte enumerable object containing the properties
/// of this object in human readable form.
/// </summary>
/// <returns>JSON bytes</returns>
public IEnumerable <Byte> AsPrettyJson()
{
    object properties = AsObject();
    if (properties != null) {
        ObjectWriter writer = Manager.GetObjectMapper().WriterWithDefaultPrettyPrinter();
        try {
            // Cache the pretty-printed bytes in _json so AsJson() below returns them
            _json = writer.WriteValueAsBytes(properties).ToArray();
        } catch (CouchbaseLiteException) {
            // NOTE(review): the message says "rethrowing..." but this handler swallows the
            // exception and execution falls through to return AsJson() below — either the
            // log text or a missing `throw;` looks wrong; confirm intended behavior.
            Log.To.NoDomain.E(Tag, "Error writing body as pretty JSON, rethrowing...");
        } catch (Exception e) {
            throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, Tag, "Error writing body as pretty JSON");
        }
    }
    return(AsJson());
}
/// <summary>
/// Creates (if necessary) and returns the temporary attachment directory for this store.
/// </summary>
/// <returns>The full path of the temporary attachment directory.</returns>
public string TempDir()
{
    var tempPath = Path.Combine(_path, "temp_attachments");
    try {
        Directory.CreateDirectory(tempPath);
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Unable to create directory for: {0}", tempPath);
    }

    // CreateDirectory can silently no-op in edge cases, so double-check the result
    if (!Directory.Exists(tempPath)) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.DbError, TAG,
            "Unable to create directory for: {0}", tempPath);
    }

    Log.To.Database.I(TAG, "{0} created temporary directory {1}", this, tempPath);
    return tempPath;
}
/// <summary>
/// Deserializes an object of the requested type from a JSON stream, leaving the
/// caller's stream open.
/// </summary>
public T Deserialize<T>(Stream json)
{
    // The reader is deliberately not given ownership of the stream (CloseInput = false),
    // and the StreamReader is not disposed, so the caller's stream stays open.
    var sr = new StreamReader(json);
    using (var jsonReader = new JsonTextReader(sr) { CloseInput = false }) {
        var serializer = JsonSerializer.Create(settings);
        try {
            return serializer.Deserialize<T>(jsonReader);
        } catch (JsonException e) {
            throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, StatusCode.BadJson, TAG,
                "Error deserializing json from stream");
        }
    }
}
/// <summary>
/// Advances the streaming JSON reader by one token.  Returns false if the read was
/// cancelled by a socket interruption; throws on parse errors.
/// </summary>
public bool Read()
{
    try {
        return _textReader != null && _textReader.Read();
    } catch (JsonReaderException) {
        // Malformed JSON: use a dedicated catch clause rather than a type test inside
        // a general Exception handler
        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, StatusCode.BadJson, TAG,
            "Error reading from streaming parser");
    } catch (Exception e) {
        // A socket interrupted underneath the reader means the read was cancelled, not an error
        if ((e.InnerException as SocketException)?.SocketErrorCode == SocketError.Interrupted) {
            Log.To.NoDomain.I(Tag, "Streaming read cancelled, returning false for Read()...");
            return false;
        }

        throw Misc.CreateExceptionAndLog(Log.To.NoDomain, e, TAG, "Error reading from streaming parser");
    }
}
/// <summary>
/// Writes or removes the on-disk encryption marker file for this attachment store.
/// </summary>
/// <param name="encrypted">True to write the marker, false to remove it.</param>
internal void MarkEncrypted(bool encrypted)
{
    var markerPath = Path.Combine(_path, ENCRYPTION_MARKER_FILENAME);
    if (encrypted) {
        try {
            File.WriteAllText(markerPath, ENCRYPTION_ALGORITHM);
        } catch (Exception e) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error enabling attachment encryption");
        }
    } else {
        try {
            File.Delete(markerPath);
        } catch (Exception e) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error disabling attachment encryption");
        }
    }
}
/// <summary>
/// Creates a writer for the given blob store, initializing the running digests and
/// opening the temporary output file.
/// </summary>
public BlobStoreWriter(BlobStore store)
{
    this.store = store;

    // Set up the running digests used to identify the blob once it is finished
    try {
        sha1Digest = MessageDigest.GetInstance("SHA-1");
        sha1Digest.Reset();
        md5Digest = MessageDigest.GetInstance("MD5");
        md5Digest.Reset();
    } catch (NotSupportedException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, Tag,
            "Could not get an instance of SHA-1 or MD5 for BlobStoreWriter.");
    }

    try {
        OpenTempFile();
    } catch (FileNotFoundException e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, Tag,
            "Unable to open temporary file for BlobStoreWriter.");
    }
}
/// <summary>
/// Verifies that the on-disk blob store's encryption state matches the configured key:
/// rejects an encrypted store opened without a key or with an unknown algorithm, and
/// retroactively encrypts a store that should have been encrypted but was not.
/// </summary>
private void VerifyExistingStore()
{
    var markerPath = Path.Combine(_path, ENCRYPTION_MARKER_FILENAME);
    var fileExists = File.Exists(markerPath);
    var encryptionAlg = default(string);
    try {
        encryptionAlg = fileExists ? File.ReadAllText(markerPath) : null;
    } catch (Exception e) {
        throw Misc.CreateExceptionAndLog(Log.To.Database, e, TAG, "Error verifying blob store");
    }

    if (encryptionAlg != null) {
        // "_encryption" file is present, so make sure we support its format & have a key:
        if (EncryptionKey == null) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Unauthorized, TAG,
                "Opening encrypted blob-store without providing a key");
        } else if (ENCRYPTION_ALGORITHM != encryptionAlg) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.Unauthorized, TAG,
                "Blob-store uses unrecognized encryption '{0}'", encryptionAlg);
        }
    } else if (!fileExists) {
        // No "_encryption" file was found, so on-disk store isn't encrypted:
        var encryptionKey = EncryptionKey;
        if (encryptionKey != null) {
            // This store was created before the db encryption fix, so its files are not
            // encrypted, even though they should be. Remedy that:
            // (log to the Database domain for consistency — the old code used NoDomain here
            // while every other call in this method uses Log.To.Database)
            Log.To.Database.I(TAG, "**** BlobStore should be encrypted; fixing it now...");
            EncryptionKey = null;
            ChangeEncryptionKey(encryptionKey);
        }
    }
}
// This is used by the listener
/// <summary>
/// Builds a Replication from a CouchDB-style JSON replication request body, reusing an
/// existing duplicate replicator when one is already active.
/// </summary>
internal Replication ReplicationWithProperties(IDictionary<string, object> properties)
{
    // Extract the parameters from the JSON request body:
    // http://wiki.apache.org/couchdb/Replication
    bool push, createTarget;
    var results = new Dictionary<string, object>() {
        { "database", null },
        { "remote", null },
        { "headers", null },
        { "authorizer", null }
    };

    Status result = ParseReplicationProperties(properties, out push, out createTarget, results);
    if (result.IsError) {
        throw Misc.CreateExceptionAndLog(Log.To.Listener, result.Code, TAG, "Failed to create replication");
    }

    object continuousObj = properties.Get("continuous");
    bool continuous = false;
    if (continuousObj is bool) {
        continuous = (bool)continuousObj;
    }

    var scheduler = new SingleTaskThreadpoolScheduler();
    Replication rep = null;
    if (push) {
        rep = new Pusher((Database)results["database"], (Uri)results["remote"], continuous, new TaskFactory(scheduler));
    } else {
        rep = new Puller((Database)results["database"], (Uri)results["remote"], continuous, new TaskFactory(scheduler));
    }

    rep.Filter = properties.Get("filter") as string;
    rep.FilterParams = properties.Get("query_params").AsDictionary<string, object>();
    rep.DocIds = properties.Get("doc_ids").AsList<string>();

    // Copy the parsed headers onto the replication, extracting cookies into the cookie
    // store instead of passing them through as raw headers.
    rep.Headers = new Dictionary<string, string>();
    var parsedHeaders = results.Get("headers").AsDictionary<string, string>();
    if (parsedHeaders != null) {
        foreach (var header in parsedHeaders) {
            if (header.Key.ToLowerInvariant() == "cookie") {
                var cookie = default(Cookie);
                if (CookieParser.TryParse(header.Value, ((Uri)results["remote"]).GetLeftPart(UriPartial.Authority), out cookie)) {
                    rep.SetCookie(cookie.Name, cookie.Value, cookie.Path, cookie.Expires, cookie.Secure, cookie.HttpOnly);
                } else {
                    Log.To.Listener.W(TAG, "Invalid cookie string received ({0}), ignoring...", header.Value);
                }
            } else {
                rep.Headers.Add(header.Key, header.Value);
            }
        }
    }
    // BUG FIX: the old code reassigned rep.Headers to the raw parsed header dictionary
    // here, clobbering the cookie-filtered dictionary built in the loop above and
    // re-introducing the Cookie header it had just extracted.

    rep.Authenticator = results.Get("authorizer") as IAuthenticator;
    if (push) {
        ((Pusher)rep).CreateTarget = createTarget;
    }

    var db = (Database)results["database"];

    // If this is a duplicate, reuse an existing replicator:
    var activeReplicators = default(IList<Replication>);
    var existing = default(Replication);
    if (db.ActiveReplicators.AcquireTemp(out activeReplicators)) {
        existing = activeReplicators.FirstOrDefault(x => x.LocalDatabase == rep.LocalDatabase &&
            x.RemoteUrl == rep.RemoteUrl && x.IsPull == rep.IsPull &&
            x.RemoteCheckpointDocID().Equals(rep.RemoteCheckpointDocID()));
    }

    return existing ?? rep;
}
/// <summary>
/// Initializes a Manager that stores Databases in the given directory.
/// </summary>
/// <param name="directoryFile"><see cref="System.IO.DirectoryInfo"/> object for initializing the Manager object.</param>
/// <param name="options">Option flags for initialization.</param>
/// <exception cref="T:System.IO.DirectoryNotFoundException">Thrown when there is an error while accessing or creating the given directory.</exception>
public Manager(DirectoryInfo directoryFile, ManagerOptions options)
{
    if (directoryFile == null) {
        Log.To.Database.E(TAG, "directoryFile cannot be null in ctor, throwing...");
        throw new ArgumentNullException("directoryFile");
    }

    this.directoryFile = directoryFile;
    Options = options ?? DefaultOptions;
    this.databases = new Dictionary<string, Database>();
    this.replications = new List<Replication>();
    Shared = new SharedState();

    //create the directory, but don't fail if it already exists
    if (!directoryFile.Exists) {
        directoryFile.Create();
        directoryFile.Refresh();
        if (!directoryFile.Exists) {
            throw Misc.CreateExceptionAndLog(Log.To.Database, StatusCode.InternalServerError, TAG,
                "Unable to create directory {0}", directoryFile);
        }
    }

    UpgradeOldDatabaseFiles(directoryFile);

#if __IOS__
    Foundation.NSString protection;
    // BUG FIX: read from the Options property (which falls back to DefaultOptions),
    // not the raw `options` parameter — `options` may legitimately be null and would
    // throw NullReferenceException here.
    switch (Options.FileProtection & Foundation.NSDataWritingOptions.FileProtectionMask) {
        case Foundation.NSDataWritingOptions.FileProtectionNone:
            protection = Foundation.NSFileManager.FileProtectionNone;
            break;
        case Foundation.NSDataWritingOptions.FileProtectionComplete:
            protection = Foundation.NSFileManager.FileProtectionComplete;
            break;
        case Foundation.NSDataWritingOptions.FileProtectionCompleteUntilFirstUserAuthentication:
            protection = Foundation.NSFileManager.FileProtectionCompleteUntilFirstUserAuthentication;
            break;
        default:
            protection = Foundation.NSFileManager.FileProtectionCompleteUnlessOpen;
            break;
    }

    var attributes = new Foundation.NSDictionary(Foundation.NSFileManager.FileProtectionKey, protection);
    Foundation.NSError error;
    Foundation.NSFileManager.DefaultManager.SetAttributes(attributes, directoryFile.FullName, out error);
#endif

    // BUG FIX: also use Options here instead of the possibly-null `options` parameter
    var scheduler = Options.CallbackScheduler;
    CapturedContext = new TaskFactory(scheduler);
    Log.To.TaskScheduling.I(TAG, "Callbacks will be scheduled on {0}", scheduler);
    workExecutor = new TaskFactory(new SingleTaskThreadpoolScheduler());
    _networkReachabilityManager = new NetworkReachabilityManager();
    _networkReachabilityManager.StartListening();
    StorageType = "SQLite";
    Log.To.Database.I(TAG, "Created {0}", this);
}
// This is used by the listener
/// <summary>
/// Builds a Replication from a CouchDB-style JSON replication request body, reusing an
/// existing duplicate replicator when one is already active.
/// </summary>
internal Replication ReplicationWithProperties(IDictionary<string, object> properties)
{
    // Extract the parameters from the JSON request body:
    // http://wiki.apache.org/couchdb/Replication
    bool push, createTarget;
    var results = new Dictionary<string, object>() {
        { "database", null },
        { "remote", null },
        { "headers", null },
        { "authorizer", null }
    };

    Status result = ParseReplicationProperties(properties, out push, out createTarget, results);
    if (result.IsError) {
        throw Misc.CreateExceptionAndLog(Log.To.Listener, result.Code, TAG, "Failed to create replication");
    }

    object continuousObj = properties.Get("continuous");
    var continuous = continuousObj is bool && (bool)continuousObj;

    var scheduler = new SingleTaskThreadpoolScheduler();
    var factory = new TaskFactory(scheduler);
    var localDb = (Database)results["database"];
    var remoteUri = (Uri)results["remote"];
    Replication rep = push
        ? (Replication)new Pusher(localDb, remoteUri, continuous, factory)
        : new Puller(localDb, remoteUri, continuous, factory);

    rep.Filter = properties.Get("filter") as string;
    rep.FilterParams = properties.Get("query_params") as IDictionary<string, object>;
    rep.DocIds = properties.Get("doc_ids") as IEnumerable<string>;
    rep.RequestHeaders = results.Get("headers") as IDictionary<string, object>;
    rep.Authenticator = results.Get("authorizer") as IAuthenticator;
    if (push) {
        ((Pusher)rep).CreateTarget = createTarget;
    }

    // If this is a duplicate, reuse an existing replicator:
    var existing = localDb.ActiveReplicators.FirstOrDefault(x =>
        x.LocalDatabase == rep.LocalDatabase && x.RemoteUrl == rep.RemoteUrl &&
        x.IsPull == rep.IsPull &&
        x.RemoteCheckpointDocID().Equals(rep.RemoteCheckpointDocID()));

    return existing ?? rep;
}