/// <summary>
/// Initializes a wrapper that exposes exactly <paramref name="contentLength"/> bytes
/// of the given batch stream.
/// </summary>
/// <param name="stream">Underlying batch stream to read from.</param>
/// <param name="contentLength">Number of bytes this stream exposes; must be positive.</param>
internal StreamWithLength(BatchStream stream, int contentLength)
{
    Debug.Assert(null != stream, "null != stream");
    Debug.Assert(0 < contentLength, "0 < contentLength");

    this.length = contentLength;
    this.target = stream;
}
/// <summary>
/// Replaces an existing remote file with the contents of the local file.
/// Files smaller than <see cref="int.MaxValue"/> bytes are sent in a single upload;
/// larger files are patched onto the remote path in fixed-size chunks.
/// </summary>
/// <param name="destination">Remote directory that contains the file.</param>
/// <param name="fileInfo">Local file metadata (name, timestamps, length).</param>
/// <param name="file">Open stream over the local file's content.</param>
/// <param name="token">Token to cancel the remote operations.</param>
/// <returns>A sync result describing success or failure of the update.</returns>
private async Task<ItemSyncResult> UpdateFile(DirectoryItem destination, FileInfo fileInfo, FileStream file, CancellationToken token = default(CancellationToken))
{
    try
    {
        _logger.Verbose("Replacing File {file} on remote with local", fileInfo);

        if (fileInfo.Length < int.MaxValue)
        {
            // Small enough for a single upload request.
            await _hiDriveClient.File
                .Upload(fileInfo.Name,
                    dir_id: destination.Id,
                    modificationTime: fileInfo.LastWriteTimeUtc,
                    dirModificationTime: fileInfo.Directory.LastWriteTimeUtc,
                    mode: UploadMode.CreateOrUpdate)
                .ExecuteAsync(file, token);
        }
        else
        {
            // Large file: patch the remote file in fixed-size chunks.
            var path = CombinePaths(destination.Path, fileInfo.Name);
            const int batchSize = 0x10000000; // 256 MiB per chunk
            var chunkCount = (int)Math.Ceiling(fileInfo.Length / (double)batchSize);
            for (long chunk = 0; chunk < chunkCount; chunk++)
            {
                // NOTE(review): the final chunk is created with the full batchSize even
                // when fewer bytes remain — assumes BatchStream clamps to the end of the
                // underlying stream; confirm against BatchStream's implementation.
                var stream = new BatchStream(file, chunk * batchSize, batchSize);
                await _hiDriveClient.File
                    .Patch(path, modificationTime: fileInfo.LastWriteTimeUtc, offset: chunk * batchSize)
                    .ExecuteAsync(stream, token);
            }
        }

        return new ItemSyncResult(fileInfo.FullName, SyncAction.Updated, SyncState.Successful);
    }
    catch (ServiceException e)
    {
        _logger.Warning("Replacing File failed!", e);
        return new ItemSyncResult(fileInfo.FullName, SyncAction.Updated, SyncState.Failed, e);
    }
    catch (OperationCanceledException e)
    {
        // Consistent with UploadFile: report cancellation/timeout as a failed sync item
        // instead of letting it escape and abort the whole sync run.
        _logger.Warning("Replacing File timed out!");
        return new ItemSyncResult(fileInfo.FullName, SyncAction.Updated, SyncState.Failed, e);
    }
}
/// <summary>
/// Releases this content-stream wrapper. Depending on configuration this either
/// disposes the underlying batch stream outright, or skips past this part's
/// remaining content so the batch stream is positioned at the next part.
/// </summary>
/// <param name="disposing">true when called from Dispose(); false when called from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    base.Dispose(disposing);
    if (disposing && (null != this.target))
    {
        if (this.target.disposeWithContentStreamDispose)
        {
            // The batch stream's lifetime is tied to this content stream:
            // detach the back-reference and dispose the batch stream itself.
            this.target.contentStream = null;
            this.target.Dispose();
        }
        else if (0 < this.length)
        {
            // Unread content remains; seek past it so the underlying batch
            // stream lands on the start of the next part. Only possible while
            // a reader is still attached.
            if (null != this.target.reader)
            {
                this.target.Seek(this.length, SeekOrigin.Current);
            }
            this.length = 0;
        }
        this.target.ClearPreviousOperationInformation();
    }
    // Drop the reference unconditionally so later use of this wrapper fails fast.
    this.target = null;
}
/// <summary>
/// Sends the accumulated batch buffer over the socket.
/// </summary>
/// <returns>The result reported by the socket send.</returns>
public bool SendBatch()
{
    var payload = BatchStream.ToArray();
    return Socket.SendBuffer(payload);
}
/// <summary>
/// Tears down the connection: closes the binary stream, the client and the batch
/// stream, then marks the connection state as disconnected.
/// </summary>
public void Close()
{
    // Close in dependency order: reader/writer first, then the client it wraps.
    NetStreamBinary.Close();
    Client.Close();
    Client = null;
    BatchStream.Close();
    // Flag the owning client as disconnected last, once all resources are released.
    _client.ConnectionState = ConnectionState.Disconnected;
}
/// <summary>
/// Finalizes the current batch: backpatches the total size and command count into
/// the batch header, then restores the stream position to the end of the data.
/// No-op when no batch is in progress.
/// </summary>
public void EndBatch()
{
    if (!IsBatching)
    {
        return;
    }

    IsBatching = false;
    var endPosition = BatchStream.Position;

    // Backpatch the header fields: total size first, then the command count.
    BatchStream.Seek(BatchSizeOff, SeekOrigin.Begin);
    BatchWriter.Write((uint)endPosition);
    BatchStream.Seek(BatchCmdOff, SeekOrigin.Begin);
    BatchWriter.Write(BatchCmdCount);

    // Return to the end of the written batch data.
    BatchStream.Seek(endPosition, SeekOrigin.Begin);
}
/// <summary>Initializes a new <see cref="JsonSerializer"/> for the specified stream.</summary>
/// <param name="requestStream">Input stream from which JSON content must be read.</param>
/// <param name="encoding">Encoding to use for the stream.</param>
/// <param name="update">Indicates whether this is an update operation.</param>
/// <param name="dataService">Data service for which the deserializer will act.</param>
/// <param name="tracker">Tracker to use for modifications.</param>
internal JsonDeserializer(Stream requestStream, Encoding encoding, bool update, IDataService dataService, UpdateTracker tracker)
    : base(update, dataService, tracker)
{
    Debug.Assert(requestStream != null, "requestStream != null");

    // JsonReader relies on StreamReader.Peek(). On a non-seekable stream Peek()
    // always returns -1, which makes JsonReader believe all data has been read and
    // reject the payload as invalid, so non-seekable streams must be wrapped in a
    // BufferedStream to make them seekable. Batch streams are deliberately
    // excluded: our batch stream implementation handles Peek() correctly on its
    // own, and a BufferedStream would read a few characters past the batch
    // boundary — a case the batch stream implementation does not handle well.
    Stream streamForReader;
    if (requestStream.CanSeek || BatchStream.IsBatchStream(requestStream))
    {
        streamForReader = requestStream;
    }
    else
    {
        streamForReader = new BufferedStream(requestStream);
    }

    this.jsonReader = new JsonReader(new StreamReader(streamForReader, encoding));
}
/// <summary>
/// Initializes a new dummy host representing a single operation in a batch request.
/// </summary>
/// <param name="absoluteServiceUri">Absolute Uri to the service.</param>
/// <param name="batchStream">Batch stream which contains the header information.</param>
/// <param name="contentId">Content id for the given operation host.</param>
/// <param name="boundary">Response separator string.</param>
/// <param name="writer">Output writer.</param>
internal BatchServiceHost(Uri absoluteServiceUri, BatchStream batchStream, string contentId, string boundary, StreamWriter writer)
    : this(boundary, writer)
{
    Debug.Assert(absoluteServiceUri != null && absoluteServiceUri.IsAbsoluteUri, "absoluteServiceUri != null && absoluteServiceUri.IsAbsoluteUri");
    Debug.Assert(batchStream != null, "batchStream != null");

    this.absoluteServiceUri = absoluteServiceUri;
    this.absoluteRequestUri = RequestUriProcessor.GetAbsoluteUriFromReference(batchStream.ContentUri, absoluteServiceUri);
    this.requestHttpMethod = GetHttpMethodName(batchStream.State);
    this.requestStream = batchStream.GetContentStream();
    this.contentId = contentId;

    // Copy each header of the batch part into this host's request headers.
    foreach (var header in batchStream.ContentHeaders)
    {
        this.requestHeaders.Add(header.Key, header.Value);
    }
}
/// <summary>
/// Uploads a local file that does not yet exist on the remote.
/// Files smaller than <see cref="int.MaxValue"/> bytes are sent in a single upload;
/// larger files are created empty first and then patched in fixed-size chunks.
/// </summary>
/// <param name="destination">Remote directory to upload into.</param>
/// <param name="fileInfo">Local file metadata (name, timestamps, length).</param>
/// <param name="file">Open stream over the local file's content.</param>
/// <param name="token">Token to cancel the remote operations.</param>
/// <returns>A sync result describing success or failure of the upload.</returns>
private async Task<ItemSyncResult> UploadFile(DirectoryItem destination, FileInfo fileInfo, FileStream file, CancellationToken token = default(CancellationToken))
{
    try
    {
        _logger.Verbose("Uploading File {file} to remote", fileInfo);

        if (fileInfo.Length < int.MaxValue)
        {
            // Small enough for a single upload request.
            await _hiDriveClient.File
                .Upload(fileInfo.Name,
                    dir_id: destination.Id,
                    modificationTime: fileInfo.LastWriteTimeUtc,
                    dirModificationTime: fileInfo.Directory.LastWriteTimeUtc)
                .ExecuteAsync(file, token);
        }
        else
        {
            // Create an empty remote file first, then patch the content in chunks.
            var fileItem = await _hiDriveClient.File
                .Upload(fileInfo.Name,
                    dir_id: destination.Id,
                    modificationTime: fileInfo.LastWriteTimeUtc,
                    dirModificationTime: fileInfo.Directory.LastWriteTimeUtc)
                .ExecuteAsync(Stream.Null, token);

            const int batchSize = 0x10000000; // 256 MiB per chunk
            var chunkCount = (int)Math.Ceiling(fileInfo.Length / (double)batchSize);
            for (long chunk = 0; chunk < chunkCount; chunk++)
            {
                // NOTE(review): the final chunk is created with the full batchSize even
                // when fewer bytes remain — assumes BatchStream clamps to the end of the
                // underlying stream; confirm against BatchStream's implementation.
                var stream = new BatchStream(file, chunk * batchSize, batchSize);
                await _hiDriveClient.File
                    .Patch(pid: fileItem.Id, modificationTime: fileInfo.LastWriteTimeUtc, offset: chunk * batchSize)
                    .ExecuteAsync(stream, token);
            }
        }

        return new ItemSyncResult(fileInfo.FullName, SyncAction.Added, SyncState.Successful);
    }
    catch (ServiceException e)
    {
        _logger.Warning("Uploading File failed!", e);
        return new ItemSyncResult(fileInfo.FullName, SyncAction.Added, SyncState.Failed, e);
    }
    catch (OperationCanceledException e)
    {
        // Cancellation/timeout is reported as a failed sync item rather than propagated.
        _logger.Warning("Uploading File timed out!");
        return new ItemSyncResult(fileInfo.FullName, SyncAction.Added, SyncState.Failed, e);
    }
}
/// <summary>
/// Initializes a wrapper over a batch stream whose content length is not known
/// up front; the maximum length is passed to the base so reading is bounded only
/// by the delimiter.
/// </summary>
/// <param name="stream">Underlying batch stream to read from.</param>
internal StreamWithDelimiter(BatchStream stream)
    : base(stream, int.MaxValue)
{
}