public HttpResponseMessage Post(string name)
{
    if (name.StartsWith(CopyPrefix))
    {
        var targetFileName = GetQueryStringValue("targetFilename");
        return Copy(name.Substring(CopyPrefix.Length), targetFileName);
    }

    name = FileHeader.Canonize(name);
    var metadata = GetFilteredMetadataFromHeaders(ReadInnerHeaders);
    var etag = GetEtag();

    Storage.Batch(accessor =>
    {
        Synchronizations.AssertFileIsNotBeingSynced(name);
        Historian.Update(name, metadata);
        Files.UpdateMetadata(name, metadata, etag);

        SynchronizationTask.Context.NotifyAboutWork();
    });

    return GetEmptyMessage(HttpStatusCode.NoContent);
}
public HttpResponseMessage Post(string name)
{
    name = FileHeader.Canonize(name);
    var metadata = GetFilteredMetadataFromHeaders(ReadInnerHeaders);
    var etag = GetEtag();

    Storage.Batch(accessor =>
    {
        Synchronizations.AssertFileIsNotBeingSynced(name);
        Historian.Update(name, metadata);
        Files.UpdateMetadata(name, metadata, etag);

        SynchronizationTask.Context.NotifyAboutWork();
    });

    return GetEmptyMessage(HttpStatusCode.NoContent);
}
public HttpResponseMessage Post(string name)
{
    name = FileHeader.Canonize(name);
    var metadata = GetFilteredMetadataFromHeaders(ReadInnerHeaders);
    var etag = GetEtag();

    Storage.Batch(accessor =>
    {
        Synchronizations.AssertFileIsNotBeingSynced(name);
        Historian.Update(name, metadata);
        Files.UpdateMetadata(name, metadata, etag);

        Synchronizations.StartSynchronizeDestinationsInBackground();
    });

    // Hack needed by jquery on the client side. We need to find a better solution for this
    return GetEmptyMessage(HttpStatusCode.NoContent);
}
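For illustration, a minimal client-side sketch of driving a metadata-update endpoint like the Post handlers above over plain HTTP. The /fs/Example/files/... route, the base address, and the Raven-Author header are assumptions made for this example only; the point is that custom request headers end up as the file's filtered metadata and the server answers 204 No Content.

// Hypothetical client-side sketch; not part of the controller code above.
using System;
using System.Net.Http;
using System.Threading.Tasks;

public static class MetadataUpdateSample
{
    public static async Task UpdateAuthorAsync()
    {
        using (var client = new HttpClient { BaseAddress = new Uri("http://localhost:8080") })
        {
            // Assumed route; adjust to the actual file system name and file path.
            var request = new HttpRequestMessage(HttpMethod.Post, "/fs/Example/files/docs/report.pdf");

            // Custom headers are what GetFilteredMetadataFromHeaders turns into file metadata.
            request.Headers.Add("Raven-Author", "jane.doe");

            var response = await client.SendAsync(request);
            response.EnsureSuccessStatusCode(); // the handlers above reply 204 No Content
        }
    }
}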
private void DeleteFiles(IEnumerable<string> keys, int totalResults, Action<string> progress)
{
    Storage.Batch(accessor =>
    {
        var files = keys.Select(accessor.ReadFile);
        foreach (var fileWithIndex in files.Select((value, i) => new { i, value }))
        {
            var file = fileWithIndex.value;
            var fileName = file.FullPath;

            try
            {
                Synchronizations.AssertFileIsNotBeingSynced(fileName);
            }
            catch (Exception)
            {
                // ignore files that are being synced
                continue;
            }

            var metadata = file.Metadata;
            if (metadata == null || metadata.Keys.Contains(SynchronizationConstants.RavenDeleteMarker))
            {
                continue;
            }

            Historian.Update(fileName, metadata);
            Files.IndicateFileToDelete(fileName, null);

            // don't create a tombstone for .downloading file
            if (!fileName.EndsWith(RavenFileNameHelper.DownloadingFileSuffix))
            {
                Files.PutTombstone(fileName, metadata);
                accessor.DeleteConfig(RavenFileNameHelper.ConflictConfigNameForFile(fileName)); // delete conflict item too
            }

            progress(string.Format("File {0}/{1} was deleted, name: '{2}'.", fileWithIndex.i, totalResults, fileName));
        }
    });
}
public HttpResponseMessage Post(string name)
{
    name = RavenFileNameHelper.RavenPath(name);

    var headers = this.GetFilteredMetadataFromHeaders(InnerHeaders);

    Historian.UpdateLastModified(headers);
    Historian.Update(name, headers);

    try
    {
        ConcurrencyAwareExecutor.Execute(() => Storage.Batch(accessor =>
        {
            AssertFileIsNotBeingSynced(name, accessor, true);
            accessor.UpdateFileMetadata(name, headers);
        }), ConcurrencyResponseException);
    }
    catch (FileNotFoundException)
    {
        log.Debug("Cannot update metadata because file '{0}' was not found", name);
        return GetEmptyMessage(HttpStatusCode.NotFound);
    }

    Search.Index(name, headers);

    Publisher.Publish(new FileChangeNotification { File = FilePathTools.Cannoicalise(name), Action = FileChangeAction.Update });

    StartSynchronizeDestinationsInBackground();

    log.Debug("Metadata of a file '{0}' was updated", name);

    // Hack needed by jquery on the client side. We need to find a better solution for this
    return GetEmptyMessage(HttpStatusCode.NoContent);
}
public HttpResponseMessage Delete(string name)
{
    name = FileHeader.Canonize(name);

    Storage.Batch(accessor =>
    {
        Synchronizations.AssertFileIsNotBeingSynced(name);

        var fileAndPages = accessor.GetFile(name, 0, 0);
        var metadata = fileAndPages.Metadata;

        if (metadata == null)
        {
            throw new FileNotFoundException();
        }

        if (metadata.Keys.Contains(SynchronizationConstants.RavenDeleteMarker))
        {
            throw new FileNotFoundException();
        }

        Historian.Update(name, metadata);
        Files.IndicateFileToDelete(name, GetEtag());

        if (name.EndsWith(RavenFileNameHelper.DownloadingFileSuffix) == false) // don't create a tombstone for .downloading file
        {
            Files.PutTombstone(name, metadata);
            accessor.DeleteConfig(RavenFileNameHelper.ConflictConfigNameForFile(name)); // delete conflict item too
        }
    });

    SynchronizationTask.Context.NotifyAboutWork();

    return GetEmptyMessage(HttpStatusCode.NoContent);
}
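The Delete and DeleteFiles handlers above share two rules: a file whose metadata carries the RavenDeleteMarker is treated as already gone, and temporary .downloading files never get a tombstone. A small restatement of those rules, assuming the same SynchronizationConstants, RavenFileNameHelper, and RavenJObject types used by the handlers are in scope; the helper class itself is hypothetical and not part of RavenFS.

// Hypothetical restatement of the tombstone rules used above; not RavenFS API.
using Raven.Json.Linq;

public static class TombstoneRules
{
    public static bool IsAlreadyDeleted(RavenJObject metadata)
    {
        // Missing metadata or an explicit delete marker both mean "file does not exist".
        return metadata == null || metadata.Keys.Contains(SynchronizationConstants.RavenDeleteMarker);
    }

    public static bool ShouldCreateTombstone(string fileName)
    {
        // Temporary ".downloading" files are deleted without a tombstone
        // (and without cleaning up a conflict configuration).
        return fileName.EndsWith(RavenFileNameHelper.DownloadingFileSuffix) == false;
    }
}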
public async Task<HttpResponseMessage> Put(string name, string uploadId = null)
{
    try
    {
        FileSystem.MetricsCounters.FilesPerSecond.Mark();

        name = RavenFileNameHelper.RavenPath(name);

        var headers = this.GetFilteredMetadataFromHeaders(InnerHeaders);

        Historian.UpdateLastModified(headers);
        headers["Creation-Date"] = DateTime.UtcNow.ToString("yyyy-MM-ddTHH:mm:ss.fffffff", CultureInfo.InvariantCulture);
        Historian.Update(name, headers);

        SynchronizationTask.Cancel(name);

        long? size = -1;
        ConcurrencyAwareExecutor.Execute(() => Storage.Batch(accessor =>
        {
            AssertFileIsNotBeingSynced(name, accessor, true);
            StorageOperationsTask.IndicateFileToDelete(name);

            // Prefer an explicit RavenFS-size header; otherwise fall back to Content-Length,
            // and record an unknown size (null) for chunked uploads.
            var contentLength = Request.Content.Headers.ContentLength;
            var sizeHeader = GetHeader("RavenFS-size");
            long sizeForParse;
            if (contentLength == 0 || long.TryParse(sizeHeader, out sizeForParse) == false)
            {
                size = contentLength;
                if (Request.Headers.TransferEncodingChunked ?? false)
                {
                    size = null;
                }
            }
            else
            {
                size = sizeForParse;
            }

            accessor.PutFile(name, size, headers);

            Search.Index(name, headers);
        }));

        log.Debug("Inserted a new file '{0}' with ETag {1}", name, headers.Value<Guid>(Constants.MetadataEtagField));

        using (var contentStream = await Request.Content.ReadAsStreamAsync())
        using (var readFileToDatabase = new ReadFileToDatabase(BufferPool, Storage, contentStream, name))
        {
            await readFileToDatabase.Execute();

            if (readFileToDatabase.TotalSizeRead != size)
            {
                // The body did not match the declared size: discard the partial file.
                Storage.Batch(accessor => { StorageOperationsTask.IndicateFileToDelete(name); });
                throw new HttpResponseException(HttpStatusCode.BadRequest);
            }

            Historian.UpdateLastModified(headers); // update with the final file size

            log.Debug("File '{0}' was uploaded. Starting to update file metadata and indexes", name);

            headers["Content-MD5"] = readFileToDatabase.FileHash;

            Storage.Batch(accessor => accessor.UpdateFileMetadata(name, headers));

            int totalSizeRead = readFileToDatabase.TotalSizeRead;
            headers["Content-Length"] = totalSizeRead.ToString(CultureInfo.InvariantCulture);

            Search.Index(name, headers);
            Publisher.Publish(new FileChangeNotification { Action = FileChangeAction.Add, File = FilePathTools.Cannoicalise(name) });

            log.Debug("Updates of '{0}' metadata and indexes were finished. New file ETag is {1}", name, headers.Value<Guid>(Constants.MetadataEtagField));

            StartSynchronizeDestinationsInBackground();
        }
    }
    catch (Exception ex)
    {
        if (uploadId != null)
        {
            Guid uploadIdentifier;
            if (Guid.TryParse(uploadId, out uploadIdentifier))
            {
                Publisher.Publish(new CancellationNotification { UploadId = uploadIdentifier, File = name });
            }
        }

        log.WarnException(string.Format("Failed to upload a file '{0}'", name), ex);

        var concurrencyException = ex as ConcurrencyException;
        if (concurrencyException != null)
        {
            throw ConcurrencyResponseException(concurrencyException);
        }

        throw;
    }

    return GetEmptyMessage(HttpStatusCode.Created);
}
public async Task PutAsync(string name, Etag etag, RavenJObject metadata, Func<Task<Stream>> streamAsync, PutOperationOptions options)
{
    try
    {
        FileSystem.MetricsCounters.FilesPerSecond.Mark();

        name = FileHeader.Canonize(name);

        if (options.PreserveTimestamps)
        {
            if (!metadata.ContainsKey(Constants.RavenCreationDate))
            {
                if (metadata.ContainsKey(Constants.CreationDate))
                {
                    metadata[Constants.RavenCreationDate] = metadata[Constants.CreationDate];
                }
                else
                {
                    throw new InvalidOperationException("Preserve Timestamps requires that the client includes the Raven-Creation-Date header.");
                }
            }

            Historian.UpdateLastModified(metadata, options.LastModified.HasValue ? options.LastModified.Value : DateTimeOffset.UtcNow);
        }
        else
        {
            metadata[Constants.RavenCreationDate] = DateTimeOffset.UtcNow;
            Historian.UpdateLastModified(metadata);
        }

        // TODO: To keep current filesystems working. We should remove when adding a new migration.
        metadata[Constants.CreationDate] = metadata[Constants.RavenCreationDate].Value<DateTimeOffset>().ToString("yyyy-MM-ddTHH:mm:ss.fffffffZ", CultureInfo.InvariantCulture);

        Historian.Update(name, metadata);

        long? size = -1;
        Storage.Batch(accessor =>
        {
            FileSystem.Synchronizations.AssertFileIsNotBeingSynced(name);
            AssertPutOperationNotVetoed(name, metadata);

            SynchronizationTask.Cancel(name);

            var contentLength = options.ContentLength;
            var contentSize = options.ContentSize;

            if (contentLength == 0 || contentSize.HasValue == false)
            {
                size = contentLength;
                if (options.TransferEncodingChunked)
                {
                    size = null;
                }
            }
            else
            {
                size = contentSize;
            }

            FileSystem.PutTriggers.Apply(trigger => trigger.OnPut(name, metadata));

            using (FileSystem.DisableAllTriggersForCurrentThread())
            {
                IndicateFileToDelete(name, etag);
            }

            var putResult = accessor.PutFile(name, size, metadata);

            FileSystem.PutTriggers.Apply(trigger => trigger.AfterPut(name, size, metadata));

            Search.Index(name, metadata, putResult.Etag);
        });

        Log.Debug("Inserted a new file '{0}' with ETag {1}", name, metadata.Value<string>(Constants.MetadataEtagField));

        using (var contentStream = await streamAsync())
        using (var readFileToDatabase = new ReadFileToDatabase(BufferPool, Storage, FileSystem.PutTriggers, contentStream, name, metadata))
        {
            await readFileToDatabase.Execute();

            if (readFileToDatabase.TotalSizeRead != size)
            {
                // The body did not match the declared size: discard the partial file.
                using (FileSystem.DisableAllTriggersForCurrentThread())
                {
                    IndicateFileToDelete(name, null);
                }

                throw new HttpResponseException(HttpStatusCode.BadRequest);
            }

            if (options.PreserveTimestamps == false)
            {
                Historian.UpdateLastModified(metadata); // update with the final file size.
            }

            Log.Debug("File '{0}' was uploaded. Starting to update file metadata and indexes", name);

            metadata["Content-MD5"] = readFileToDatabase.FileHash;

            MetadataUpdateResult updateMetadata = null;
            Storage.Batch(accessor => updateMetadata = accessor.UpdateFileMetadata(name, metadata, null));

            int totalSizeRead = readFileToDatabase.TotalSizeRead;
            metadata["Content-Length"] = totalSizeRead.ToString(CultureInfo.InvariantCulture);

            Search.Index(name, metadata, updateMetadata.Etag);
            Publisher.Publish(new FileChangeNotification { Action = FileChangeAction.Add, File = name });

            Log.Debug("Updates of '{0}' metadata and indexes were finished. New file ETag is {1}", name, metadata.Value<string>(Constants.MetadataEtagField));
        }
    }
    catch (Exception ex)
    {
        Log.WarnException(string.Format("Failed to upload a file '{0}'", name), ex);
        throw;
    }
}
public async Task<HttpResponseMessage> Put(string name, string uploadId = null, bool preserveTimestamps = false)
{
    try
    {
        FileSystem.MetricsCounters.FilesPerSecond.Mark();

        name = FileHeader.Canonize(name);

        var headers = this.GetFilteredMetadataFromHeaders(ReadInnerHeaders);

        if (preserveTimestamps)
        {
            if (!headers.ContainsKey(Constants.RavenCreationDate))
            {
                if (headers.ContainsKey(Constants.CreationDate))
                {
                    headers[Constants.RavenCreationDate] = headers[Constants.CreationDate];
                }
                else
                {
                    throw new InvalidOperationException("Preserve Timestamps requires that the client includes the Raven-Creation-Date header.");
                }
            }

            var lastModified = GetHeader(Constants.RavenLastModified);
            if (lastModified != null)
            {
                DateTimeOffset when;
                if (!DateTimeOffset.TryParse(lastModified, out when))
                {
                    when = DateTimeOffset.UtcNow;
                }

                Historian.UpdateLastModified(headers, when);
            }
            else
            {
                Historian.UpdateLastModified(headers);
            }
        }
        else
        {
            headers[Constants.RavenCreationDate] = DateTimeOffset.UtcNow;
            Historian.UpdateLastModified(headers);
        }

        // TODO: To keep current filesystems working. We should remove when adding a new migration.
        headers[Constants.CreationDate] = headers[Constants.RavenCreationDate].Value<DateTimeOffset>().ToString("yyyy-MM-ddTHH:mm:ss.fffffffZ", CultureInfo.InvariantCulture);

        Historian.Update(name, headers);

        SynchronizationTask.Cancel(name);

        long? size = -1;
        ConcurrencyAwareExecutor.Execute(() => Storage.Batch(accessor =>
        {
            AssertPutOperationNotVetoed(name, headers);
            AssertFileIsNotBeingSynced(name, accessor, true);

            var contentLength = Request.Content.Headers.ContentLength;
            var sizeHeader = GetHeader("RavenFS-size");
            long sizeForParse;
            if (contentLength == 0 || long.TryParse(sizeHeader, out sizeForParse) == false)
            {
                size = contentLength;
                if (Request.Headers.TransferEncodingChunked ?? false)
                {
                    size = null;
                }
            }
            else
            {
                size = sizeForParse;
            }

            FileSystem.PutTriggers.Apply(trigger => trigger.OnPut(name, headers));

            using (FileSystem.DisableAllTriggersForCurrentThread())
            {
                StorageOperationsTask.IndicateFileToDelete(name);
            }

            accessor.PutFile(name, size, headers);

            FileSystem.PutTriggers.Apply(trigger => trigger.AfterPut(name, size, headers));

            Search.Index(name, headers);
        }));

        log.Debug("Inserted a new file '{0}' with ETag {1}", name, headers.Value<Guid>(Constants.MetadataEtagField));

        using (var contentStream = await Request.Content.ReadAsStreamAsync())
        using (var readFileToDatabase = new ReadFileToDatabase(BufferPool, Storage, FileSystem.PutTriggers, contentStream, name, headers))
        {
            await readFileToDatabase.Execute();

            if (readFileToDatabase.TotalSizeRead != size)
            {
                StorageOperationsTask.IndicateFileToDelete(name);
                throw new HttpResponseException(HttpStatusCode.BadRequest);
            }

            if (!preserveTimestamps)
            {
                Historian.UpdateLastModified(headers); // update with the final file size.
            }

            log.Debug("File '{0}' was uploaded. Starting to update file metadata and indexes", name);

            headers["Content-MD5"] = readFileToDatabase.FileHash;

            Storage.Batch(accessor => accessor.UpdateFileMetadata(name, headers));

            int totalSizeRead = readFileToDatabase.TotalSizeRead;
            headers["Content-Length"] = totalSizeRead.ToString(CultureInfo.InvariantCulture);

            Search.Index(name, headers);
            Publisher.Publish(new FileChangeNotification { Action = FileChangeAction.Add, File = FilePathTools.Cannoicalise(name) });

            log.Debug("Updates of '{0}' metadata and indexes were finished. New file ETag is {1}", name, headers.Value<Guid>(Constants.MetadataEtagField));

            StartSynchronizeDestinationsInBackground();
        }
    }
    catch (Exception ex)
    {
        if (uploadId != null)
        {
            Guid uploadIdentifier;
            if (Guid.TryParse(uploadId, out uploadIdentifier))
            {
                Publisher.Publish(new CancellationNotification { UploadId = uploadIdentifier, File = name });
            }
        }

        log.WarnException(string.Format("Failed to upload a file '{0}'", name), ex);

        var concurrencyException = ex as ConcurrencyException;
        if (concurrencyException != null)
        {
            throw ConcurrencyResponseException(concurrencyException);
        }

        throw;
    }

    return GetEmptyMessage(HttpStatusCode.Created);
}
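All three Put variants above negotiate the upload size the same way: an explicit RavenFS-size header (or the ContentSize option) wins when Content-Length is non-zero, otherwise Content-Length is used, and a chunked transfer leaves the size unknown. Below is a standalone restatement of that decision under the same inputs the handlers read from the request; the helper name and class are hypothetical, written for illustration only.

// Hypothetical helper restating the size negotiation used by the Put handlers above.
public static class UploadSizeSample
{
    public static long? DetermineUploadSize(long? contentLength, string ravenFsSizeHeader, bool transferEncodingChunked)
    {
        long declaredSize;
        if (contentLength == 0 || long.TryParse(ravenFsSizeHeader, out declaredSize) == false)
        {
            // No usable RavenFS-size header (or an empty body was declared):
            // fall back to Content-Length, unless the body is chunked and the size is unknown.
            return transferEncodingChunked ? (long?)null : contentLength;
        }

        // A parseable RavenFS-size header overrides a non-zero Content-Length.
        return declaredSize;
    }
}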