/// <summary>
/// Initializes a new instance of the <see cref="OneDriveClient"/> class using the
/// given authentication token.
/// </summary>
/// <param name="token">The token used to authenticate requests. Must not be null.</param>
public OneDriveClient(TokenResponse token)
{
    Pre.ThrowIfArgumentNull(token, nameof(token));

    this.CurrentToken = token;

    Uri oneDriveBaseUri = new Uri(OneDriveApiBaseAddress);

    // Default client for OneDrive API calls.
    this.oneDriveHttpClient = new HttpClient
    {
        BaseAddress = oneDriveBaseUri,
    };

    // A second client that does not follow redirects automatically, so that
    // redirect responses can be examined by the caller.
    HttpClientHandler redirectSuppressingHandler = new HttpClientHandler
    {
        AllowAutoRedirect = false
    };

    this.oneDriveHttpClientNoRedirect = new HttpClient(redirectSuppressingHandler)
    {
        BaseAddress = oneDriveBaseUri,
    };

    // Client for calls against the Live API endpoint.
    this.liveHttpClient = new HttpClient
    {
        BaseAddress = new Uri(LiveApiBaseAddress),
    };
}
/// <summary>
/// Create a new instance of the <see cref="EncryptionManager"/> class
/// </summary>
/// <param name="encryptionCertificate">
/// The certificate that contains the secrets used for encryption. Note that
/// the private key must be present in this certificate in order for decryption
/// to be performed.
/// </param>
/// <param name="mode">The mode (encryption/decryption) of the encryption manager</param>
/// <param name="outputStream">The stream where the transformed content will be written</param>
/// <param name="sourceFileSize">The size, in bytes, of the source file being transformed</param>
public EncryptionManager(
    X509Certificate2 encryptionCertificate,
    EncryptionMode mode,
    Stream outputStream,
    long sourceFileSize)
{
    Pre.ThrowIfArgumentNull(encryptionCertificate, nameof(encryptionCertificate));
    Pre.ThrowIfArgumentNull(outputStream, nameof(outputStream));
    Pre.ThrowIfTrue(mode == EncryptionMode.None, "Encryption mode cannot be None");

    // Any valid encrypted file will have a minimum size (to include the header and
    // minimal encrypted content). Ensure that the source file is at least this size
    // before doing any further work when decrypting.
    if (mode == EncryptionMode.Decrypt)
    {
        Pre.Assert(sourceFileSize >= MinimumEncryptedFileSize, "sourceFileSize >= minimumEncryptedFileSize");
    }

    this.Mode = mode;
    this.encryptionCertificate = encryptionCertificate;
    this.outputStream = outputStream;
    this.sourceFileSize = sourceFileSize;

    // Hash providers used while transforming the content.
    this.sha1 = new SHA1Cng();
    this.md5 = new MD5Cng();

    this.Initialize();
}
/// <summary>
/// Initializes a new instance of the <see cref="FolderBrowserViewModel"/> class.
/// </summary>
/// <param name="syncAdapter">The adapter whose folders will be browsed. Must not be null.</param>
public FolderBrowserViewModel(AdapterBase syncAdapter)
{
    // Use nameof() instead of a string literal so the reported parameter name
    // survives rename refactorings (consistent with the other constructors).
    Pre.ThrowIfArgumentNull(syncAdapter, nameof(syncAdapter));

    // Closing the window and cancelling are both treated as a non-OK close.
    this.CloseWindowCommand = new DelegatedCommand(o => this.HandleClose(false));
    this.CancelCommand = new DelegatedCommand(o => this.HandleClose(false));

    // OK is only enabled when CanOkCommandExecute allows it.
    this.OKCommand = new DelegatedCommand(o => this.HandleClose(true), this.CanOkCommandExecute);

    this.syncAdapter = syncAdapter;
}
/// <summary>
/// Initializes a new instance of the <see cref="RestoreJob"/> class.
/// </summary>
/// <param name="relationship">The sync relationship that the restore belongs to</param>
/// <param name="syncEntries">The entries to be restored. Must not be null.</param>
/// <param name="restorePath">The path where the entries will be restored to. Must be non-empty.</param>
public RestoreJob(SyncRelationship relationship, List<SyncEntry> syncEntries, string restorePath)
    : base(relationship)
{
    Pre.ThrowIfArgumentNull(relationship, nameof(relationship));
    Pre.ThrowIfArgumentNull(syncEntries, nameof(syncEntries));
    Pre.ThrowIfStringNullOrWhiteSpace(restorePath, nameof(restorePath));

    this.restorePath = restorePath;
    this.syncEntries = syncEntries;
}
/// <summary>
/// Buffers the given data and, once at least one complete fragment has
/// accumulated, flushes it (sending the data to OneDrive).
/// </summary>
/// <param name="buffer">The buffer containing the data to write</param>
/// <param name="offset">The offset in <paramref name="buffer"/> at which the data begins</param>
/// <param name="count">The number of bytes to write</param>
public override void Write(byte[] buffer, int offset, int count)
{
    // Validate the arguments before the zero-length early return so that
    // invalid calls are always reported, per the Stream.Write contract.
    // The bounds check is written as 'count > buffer.Length - offset' to
    // avoid integer overflow in 'offset + count'.
    Pre.ThrowIfArgumentNull(buffer, nameof(buffer));
    Pre.ThrowIfTrue(offset < 0, "offset is negative");
    Pre.ThrowIfTrue(count < 0, "count is negative");
    Pre.ThrowIfTrue(count > buffer.Length - offset, "offset + count > buffer.Length");

    if (count == 0)
    {
        return;
    }

    // Writes are only allowed while the session is active.
    switch (this.UploadSession.State)
    {
        case OneDriveFileUploadState.Completed:
            throw new OneDriveException("Cannot write to completed upload session.");
        case OneDriveFileUploadState.Faulted:
            throw new OneDriveException("Cannot write to faulted upload session.");
        case OneDriveFileUploadState.Cancelled:
            throw new OneDriveException("Cannot write to cancelled upload session.");
    }

    // Check if the new data will be more than the file size specified in the session
    if (this.totalSize + count > this.UploadSession.Length)
    {
        Logger.Error(
            "OneDrive file upload overflow. File={0}, ParentId={1}, Length={2}, CurrentSize={3}, WriteSize={4}",
            this.UploadSession.ItemName,
            this.UploadSession.ParentId,
            this.UploadSession.Length,
            this.totalSize,
            count);

        throw new OneDriveException("More data was written to the stream than is allowed by the file.");
    }

    // Allocate a new buffer locally (since the buffer provided by the caller might not exist after the call
    // returns) and copy the given buffer into the local buffer.
    byte[] localBuffer = new byte[count];
    Buffer.BlockCopy(buffer, offset, localBuffer, 0, count);

    // Add the new buffer to the list of buffers and update the total size.
    this.buffers.Add(localBuffer);
    this.totalSize += count;

    // If the total size of the buffers is at least the fragment size, flush the data (sending it to OneDrive).
    if (this.AreFragmentAvailable())
    {
        this.Flush();
    }
}
/// <summary>
/// Validates the arguments and forwards the write to the base stream.
/// </summary>
/// <param name="buffer">The buffer containing the data to write</param>
/// <param name="offset">The offset in <paramref name="buffer"/> at which the data begins</param>
/// <param name="count">The number of bytes to write</param>
public override void Write(byte[] buffer, int offset, int count)
{
    // Validate the arguments before the zero-length early return so that
    // invalid calls are always reported, per the Stream.Write contract.
    // The bounds check is written as 'count > buffer.Length - offset' to
    // avoid integer overflow in 'offset + count'.
    Pre.ThrowIfArgumentNull(buffer, nameof(buffer));
    Pre.ThrowIfTrue(offset < 0, "offset is negative");
    Pre.ThrowIfTrue(count < 0, "count is negative");
    Pre.ThrowIfTrue(count > buffer.Length - offset, "offset + count > buffer.Length");

    if (count == 0)
    {
        return;
    }

    base.Write(buffer, offset, count);
}
/// <summary>
/// Read the encrypted file header information from a stream
/// </summary>
/// <param name="stream">The stream to read</param>
/// <returns>The encrypted header object</returns>
/// <exception cref="InvalidDataException">
/// Thrown when the header's embedded SHA1 checksum does not match the header content.
/// </exception>
public static EncryptedFileHeader ReadFromStream(Stream stream)
{
    Pre.ThrowIfArgumentNull(stream, nameof(stream));

    // Read the entire header from the stream
    byte[] buffer = stream.ReadByteArray(HeaderSize);

    // Verify the header's checksum
    using (SHA1CryptoServiceProvider sha1 = new SHA1CryptoServiceProvider())
    {
        byte[] computedHash = sha1.ComputeHash(buffer, 0, HeaderSize - ChecksumSize);
        byte[] headerHash = BufferUtil.CopyBytes(buffer, buffer.Length - ChecksumSize, ChecksumSize);

        if (!NativeMethods.ByteArrayEquals(computedHash, headerHash))
        {
            // InvalidDataException is the standard exception for corrupted
            // input data (replaces the previous bare Exception).
            throw new InvalidDataException("The header checksum failed");
        }
    }

    // Create a new stream for reading the header data. This will make it easier to process the
    // fields in the header and will avoid having to reposition the original stream.
    using (MemoryStream headerStream = new MemoryStream(buffer))
    {
        EncryptedFileHeader header = new EncryptedFileHeader();

        int encryptedKeyLength = headerStream.ReadInt32();

        // Ensure the key is not longer than the buffer
        Pre.Assert(encryptedKeyLength < HeaderSize, "encryptedKeyLength < HeaderSize");

        // Read the encrypted key field.
        header.EncryptedKey = headerStream.ReadByteArray(encryptedKeyLength);

        // Read the IV length. This should be 16 bytes (asserted below).
        int ivLength = headerStream.ReadInt32();
        Pre.Assert(ivLength == IVStorageSize, "ivLength == ivStorageSize");

        // Read the initialization vector
        header.IV = headerStream.ReadByteArray(ivLength);

        // Read the file sizes, thumbprint, and padding
        header.OriginalFileLength = headerStream.ReadInt64();
        header.EncryptedFileLength = headerStream.ReadInt64();
        header.CertificateThumbprint = headerStream.ReadByteArray(CertificateThumbprintSize);
        header.PaddingLength = headerStream.ReadInt16();

        return header;
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="EntryUpdateInfo"/> class.
/// </summary>
/// <param name="entry">The entry being updated. Must not be null.</param>
/// <param name="originatingAdapter">The adapter where the change originated. Must not be null.</param>
/// <param name="flags">The flags describing the change</param>
/// <param name="relativePath">The relative path of the entry</param>
public EntryUpdateInfo(SyncEntry entry, AdapterBase originatingAdapter, SyncEntryChangedFlags flags, string relativePath)
{
    // Use nameof() instead of string literals so the reported parameter names
    // survive rename refactorings (consistent with the other constructors).
    Pre.ThrowIfArgumentNull(entry, nameof(entry));
    Pre.ThrowIfArgumentNull(originatingAdapter, nameof(originatingAdapter));

    this.Entry = entry;
    this.OriginatingAdapter = originatingAdapter;
    this.Flags = flags;
    this.RelativePath = relativePath;

    // An entry with any change flag set starts in the NotStarted state; a
    // no-change entry keeps the default state.
    if (flags != SyncEntryChangedFlags.None)
    {
        this.State = EntryUpdateState.NotStarted;
    }
}
/// <summary>
/// Validates the arguments and writes the given data to the in-memory buffer.
/// </summary>
/// <param name="buffer">The buffer containing the data to write</param>
/// <param name="offset">The offset in <paramref name="buffer"/> at which the data begins</param>
/// <param name="count">The number of bytes to write</param>
/// <exception cref="InvalidOperationException">Thrown when a flush is in progress</exception>
public override void Write(byte[] buffer, int offset, int count)
{
    // Validate the arguments before the zero-length early return so that
    // invalid calls are always reported, per the Stream.Write contract.
    // The bounds check is written as 'count > buffer.Length - offset' to
    // avoid integer overflow in 'offset + count'.
    Pre.ThrowIfArgumentNull(buffer, nameof(buffer));
    Pre.ThrowIfTrue(offset < 0, "offset is negative");
    Pre.ThrowIfTrue(count < 0, "count is negative");
    Pre.ThrowIfTrue(count > buffer.Length - offset, "offset + count > buffer.Length");

    if (count == 0)
    {
        return;
    }

    if (this.flushInProgress)
    {
        throw new InvalidOperationException("Cannot write while a flush is in progress");
    }

    this.memoryStream.Write(buffer, offset, count);
}
/// <summary>
/// Initializes a new instance of the <see cref="GoogleDriveClient"/> class using
/// the given authentication token.
/// </summary>
/// <param name="token">The token used to authenticate requests. Must not be null.</param>
public GoogleDriveClient(TokenResponse token)
{
    Pre.ThrowIfArgumentNull(token, nameof(token));

    this.CurrentToken = token;

    // Transparently decompress gzip/deflate-encoded responses from the service.
    HttpClientHandler compressionHandler = new HttpClientHandler
    {
        AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate
    };

    HttpClient driveClient = new HttpClient(compressionHandler)
    {
        BaseAddress = new Uri(GoogleDriveApiBaseAddress)
    };

    // The "(gzip)" suffix in the User-Agent is presumably required by Google's
    // API guidance to receive gzip-encoded responses — confirm against the
    // Drive API performance documentation.
    driveClient.DefaultRequestHeaders.TryAddWithoutValidation("User-Agent", "SyncPro/1.0 (gzip)");

    this.googleDriveHttpClient = driveClient;
}
/// <summary>
/// Finalizes the writing of an item to Azure storage after all of its content has
/// been written to <paramref name="stream"/>, then records the adapter entry ID
/// for the item on the sync entry.
/// </summary>
/// <param name="stream">
/// The stream the item was uploaded through. Expected to be an
/// <see cref="AzureStorageUploadStream"/>; any other stream type fails the
/// precondition below (the 'as' cast yields null).
/// </param>
/// <param name="updateInfo">The update information for the entry being written</param>
public override void FinalizeItemWrite(Stream stream, EntryUpdateInfo updateInfo)
{
    AzureStorageUploadStream uploadStream = stream as AzureStorageUploadStream;
    Pre.ThrowIfArgumentNull(uploadStream, "uploadStream");

    // If there are any block IDs in the block list, then the file was uploaded using blocks (as opposed to
    // uploading the file as a single blob). For this, we need to call PutBlockList to finalize the creation
    // of the blob in storage.
    if (uploadStream.BlockList.Any())
    {
        // NOTE(review): blocking on .Result (sync-over-async) can deadlock when a
        // synchronization context is present and ties up a thread-pool thread —
        // consider making this method async. TODO confirm the calling context.
        HttpResponseMessage response =
            this.storageClient.PutBlockListAsync(
                this.TypedConfiguration.ContainerName,
                uploadStream.FileName,
                uploadStream.BlockList).Result;

        using (response)
        {
            if (!response.IsSuccessStatusCode)
            {
                throw new AzureStorageHttpException();
            }
        }
    }

    // Find the adapter-specific data for this adapter, creating and attaching it
    // to the entry if it does not exist yet.
    SyncEntryAdapterData adapterData =
        updateInfo.Entry.AdapterEntries.FirstOrDefault(a => a.AdapterId == this.Configuration.Id);

    if (adapterData == null)
    {
        adapterData = new SyncEntryAdapterData
        {
            SyncEntry = updateInfo.Entry,
            AdapterId = this.Configuration.Id
        };

        updateInfo.Entry.AdapterEntries.Add(adapterData);
    }

    // Record the adapter's unique ID for the file, derived from its relative path.
    adapterData.AdapterEntryId = GetUniqueIdForFile(updateInfo.RelativePath);
}
/// <summary>
/// Initializes a new instance of the <see cref="OneDriveFileUploadStream"/> class.
/// </summary>
/// <param name="client">The OneDrive client used to upload fragments. Must not be null.</param>
/// <param name="uploadSession">The session describing the upload. Must not be null.</param>
/// <param name="fragmentSize">
/// The size of each uploaded fragment. Must be a positive multiple of the base fragment size.
/// </param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="fragmentSize"/> is smaller than the base fragment size.
/// </exception>
/// <exception cref="ArgumentException">
/// Thrown when <paramref name="fragmentSize"/> is not a multiple of the base fragment size.
/// </exception>
public OneDriveFileUploadStream(OneDriveClient client, OneDriveUploadSession uploadSession, int fragmentSize)
{
    Pre.ThrowIfArgumentNull(client, nameof(client));
    Pre.ThrowIfArgumentNull(uploadSession, nameof(uploadSession));

    if (fragmentSize < fragmentSizeBase)
    {
        // The single-string ArgumentOutOfRangeException constructor treats its
        // argument as the PARAMETER NAME, not the message, so the parameter
        // name must be passed explicitly for the message to be reported.
        throw new ArgumentOutOfRangeException(
            nameof(fragmentSize),
            "The fragment size must be at least " + fragmentSizeBase);
    }

    if (fragmentSize % fragmentSizeBase != 0)
    {
        throw new ArgumentException(
            "The fragment size must be a multiple of " + fragmentSizeBase,
            nameof(fragmentSize));
    }

    this.client = client;
    this.UploadSession = uploadSession;
    this.fragmentSize = fragmentSize;

    // Track how many bytes still need to be uploaded for this session.
    this.bytesRemaining = uploadSession.Length;
}
/// <summary>
/// Converts an adapter unique ID into its item ID representation by decoding
/// the bytes as ASCII text.
/// </summary>
/// <param name="uniqueId">The unique ID bytes to convert. Must not be null.</param>
/// <returns>The ASCII string decoded from <paramref name="uniqueId"/></returns>
internal static string UniqueIdToItemId(byte[] uniqueId)
{
    Pre.ThrowIfArgumentNull(uniqueId, nameof(uniqueId));

    string itemId = Encoding.ASCII.GetString(uniqueId);
    return itemId;
}
/// <summary>
/// Initializes a new instance of the <see cref="JobBase"/> class.
/// </summary>
/// <param name="relationship">The sync relationship that the job belongs to. Must not be null.</param>
protected JobBase(SyncRelationship relationship)
{
    // Use nameof() instead of a string literal so the reported parameter name
    // survives rename refactorings (consistent with the other constructors).
    Pre.ThrowIfArgumentNull(relationship, nameof(relationship));

    this.Relationship = relationship;
}