/// <summary>
/// Verifies that every non-first segment starts right after a record boundary:
/// the two bytes immediately before the segment offset must both be newline
/// characters, and the segment itself must not begin with one.
/// </summary>
/// <param name="metadata">Upload metadata whose segments are checked.</param>
/// <param name="filePath">Path of the input file the offsets refer to.</param>
private void VerifySegmentsAreOnRecordBoundaries(UploadMetadata metadata, string filePath)
{
    using (var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read))
    {
        foreach (var segment in metadata.Segments)
        {
            if (segment.SegmentNumber <= 0)
            {
                // The first segment starts at offset 0; nothing to check before it.
                continue;
            }

            // Both bytes preceding the segment offset must be newlines.
            stream.Seek(segment.Offset - 2, SeekOrigin.Begin);

            var twoBack = (char)stream.ReadByte();
            Assert.True(IsNewline(twoBack), string.Format("Expecting a newline at offset {0}", stream.Position - 1));

            var oneBack = (char)stream.ReadByte();
            Assert.True(IsNewline(oneBack), string.Format("Expecting a newline at offset {0}", stream.Position - 1));

            // By test design we never have two consecutive identical newline
            // characters; we'd always have "\r\n", never "\r\r" or "\n\n".
            var firstSegmentChar = (char)stream.ReadByte();
            Assert.NotEqual(oneBack, firstSegmentChar);
        }
    }
}
/// <summary>
/// Asserts that every segment described by the metadata was fully uploaded
/// to the in-memory front end.
/// </summary>
/// <param name="metadata">Upload metadata listing all segments.</param>
/// <param name="fe">Front end that should contain the segment streams.</param>
private void VerifyTargetStreamsAreComplete(UploadMetadata metadata, InMemoryFrontEnd fe)
{
    foreach (var segmentMetadata in metadata.Segments)
    {
        VerifyTargetStreamIsComplete(segmentMetadata, metadata, fe);
    }
}
/// <summary>
/// Builds upload metadata for the small test file, split into the requested
/// number of segments with contiguous offsets.
/// </summary>
/// <param name="segmentCount">Number of segments to create.</param>
/// <returns>A fully-populated <see cref="UploadMetadata"/> instance.</returns>
private UploadMetadata CreateMetadata(int segmentCount)
{
    var metadataFilePath = Path.GetTempFileName();
    var metadata = new UploadMetadata()
    {
        MetadataFilePath = metadataFilePath,
        InputFilePath = _smallFilePath,
        FileLength = _smallFileContents.Length,
        SegmentCount = segmentCount,
        SegmentLength = UploadSegmentMetadata.CalculateSegmentLength(_smallFileContents.Length, segmentCount),
        Segments = new UploadSegmentMetadata[segmentCount],
        TargetStreamPath = "abc",
        UploadId = "123",
        IsBinary = true
    };

    // Lay the segments out back-to-back, each starting where the previous ended.
    long currentOffset = 0;
    for (int segmentNumber = 0; segmentNumber < segmentCount; segmentNumber++)
    {
        long segmentLength = UploadSegmentMetadata.CalculateSegmentLength(segmentNumber, metadata);
        metadata.Segments[segmentNumber] = new UploadSegmentMetadata()
        {
            SegmentNumber = segmentNumber,
            Offset = currentOffset,
            Status = SegmentUploadStatus.Pending,
            Length = segmentLength,
            Path = string.Format("{0}.{1}.segment{2}", metadata.TargetStreamPath, metadata.UploadId, segmentNumber)
        };
        currentOffset += segmentLength;
    }

    return metadata;
}
/// <summary>
/// Exercises UploadSegmentMetadata.CalculateSegmentLength(segmentNumber, metadata):
/// argument validation first, then a sweep over segment counts verifying the
/// per-segment lengths reconstruct the original file length.
/// </summary>
public void UploadSegmentMetadata_CalculateParticularSegmentLength()
{
    // Bad segment numbers: negative, or past the end of the segment list.
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        UploadSegmentMetadata.CalculateSegmentLength(-1, new UploadMetadata() { FileLength = 10, SegmentCount = 5, SegmentLength = 2 });
    });
    Assert.Throws<ArgumentOutOfRangeException>(() =>
    {
        UploadSegmentMetadata.CalculateSegmentLength(100, new UploadMetadata() { FileLength = 10, SegmentCount = 5, SegmentLength = 2 });
    });

    // Inconsistent metadata: negative file length, or a segment count/length
    // combination that cannot add up to the stated file length.
    Assert.Throws<ArgumentException>(() =>
    {
        UploadSegmentMetadata.CalculateSegmentLength(1, new UploadMetadata() { FileLength = -10, SegmentCount = 5, SegmentLength = 2 });
    });
    Assert.Throws<ArgumentException>(() =>
    {
        UploadSegmentMetadata.CalculateSegmentLength(1, new UploadMetadata() { FileLength = 100, SegmentCount = 2, SegmentLength = 2 });
    });
    Assert.Throws<ArgumentException>(() =>
    {
        UploadSegmentMetadata.CalculateSegmentLength(1, new UploadMetadata() { FileLength = 100, SegmentCount = 5, SegmentLength = 26 });
    });

    // Fixed 16MB file; vary the segment count from 1 up to the file length.
    const int fileLength = 16 * 1024 * 1024;
    for (int segmentCount = 1; segmentCount <= fileLength; segmentCount += 1024)
    {
        long typicalSegmentLength = UploadSegmentMetadata.CalculateSegmentLength(fileLength, segmentCount);
        var uploadMetadata = new UploadMetadata()
        {
            FileLength = fileLength,
            SegmentCount = segmentCount,
            SegmentLength = typicalSegmentLength
        };

        long firstSegmentLength = UploadSegmentMetadata.CalculateSegmentLength(0, uploadMetadata);
        long lastSegmentLength = UploadSegmentMetadata.CalculateSegmentLength(segmentCount - 1, uploadMetadata);

        Assert.Equal(typicalSegmentLength, firstSegmentLength);
        if (segmentCount == 1)
        {
            Assert.Equal(firstSegmentLength, lastSegmentLength);
        }

        // All but the last segment have the typical length; the last one
        // absorbs whatever remains.
        long reconstructedFileLength = typicalSegmentLength * (segmentCount - 1) + lastSegmentLength;
        Assert.Equal(fileLength, reconstructedFileLength);
    }
}
/// <summary>
/// Writes the upload request as a single JSON object: the secret hash,
/// serialized metadata, context fields taken from the last data point,
/// the serialized payload plus its SHA-512 hash, and the current UTC time.
/// </summary>
/// <param name="writer">JSON writer receiving the payload.</param>
/// <param name="context">Transport context (unused here).</param>
protected override void SerializeJson(JsonTextWriter writer, TransportContext context)
{
    var serializer = JsonSerializer.Create(App.JsonSettings);
    Log.Debug("Creating JSON payload for {0} data points", _query.Package.Count);

    var payloadItems = _query.Package.Select(p => UploadPayload.Create(p));
    var jsonPayloadItems = serializer.SerializeToString(payloadItems);

    // The last data point supplies the contextual fields for the whole batch.
    var lastPiece = _query.Package.Last();
    var numberOfPeople = lastPiece.NumberOfPeople;

    var metadata = UploadMetadata.Create();
    metadata.NumberOfPeople = numberOfPeople;
    var jsonMetadata = serializer.SerializeToString(metadata);

    // Emit the JSON payload.
    writer.WriteStartObject();
    writer.WritePropertyName("secret");
    writer.WriteValue(_query.SecretHash.ToBase64());
    writer.WritePropertyName("metadata");
    writer.WriteValue(jsonMetadata);
    writer.WritePropertyName("anchorage-type");
    writer.WriteValue(lastPiece.Anchorage);
    writer.WritePropertyName("vehicle-type");
    writer.WriteValue(lastPiece.Vehicle);
    writer.WritePropertyName("number-of-people");
    writer.WriteValue(numberOfPeople);
    writer.WritePropertyName("payload");
    writer.WriteValue(jsonPayloadItems);
    writer.WritePropertyName("payload-hash");
    writer.WriteValue(jsonPayloadItems.ToSha512Hash().ToBase64());
    writer.WritePropertyName("time");
    writer.WriteValue(DateTime.UtcNow);
    writer.WriteEndObject();
}
/// <summary>
/// Uploads stream to given folder and document name.
/// </summary>
/// <param name="uploadDocumentStreamRequest">Request parameters.</param>
/// <returns>Response for document details.</returns>
public async Task<UploadDocumentStreamResponse> UploadDocumentStreamAsync(UploadDocumentStreamRequest uploadDocumentStreamRequest)
{
    // Ask the service to initiate an upload; the response carries the
    // pre-signed URL the content must be PUT to.
    InitiateDocumentVersionUploadRequest initiateDocumentVersionUploadRequest = new InitiateDocumentVersionUploadRequest()
    {
        ParentFolderId = uploadDocumentStreamRequest.ParentFolderId,
        Name = uploadDocumentStreamRequest.DocumentName,
        ContentType = uploadDocumentStreamRequest.ContentType,
        ContentCreatedTimestamp = uploadDocumentStreamRequest.ContentCreatedTimestamp,
        ContentModifiedTimestamp = uploadDocumentStreamRequest.ContentModifiedTimestamp,
        DocumentSizeInBytes = uploadDocumentStreamRequest.DocumentSizeInBytes,
        Id = uploadDocumentStreamRequest.DocumentId,
        AuthenticationToken = authenticationToken
    };
    InitiateDocumentVersionUploadResponse response = await client.InitiateDocumentVersionUploadAsync(initiateDocumentVersionUploadRequest).ConfigureAwait(false);

    UploadMetadata uploadMetadata = response.UploadMetadata;
    String documentId = response.Metadata.Id;
    String versionId = response.Metadata.LatestVersionMetadata.Id;
    String uploadUrl = uploadMetadata.UploadUrl;

    AWSStreamContent content = new AWSStreamContent(uploadDocumentStreamRequest.Stream);
    try
    {
        // Request server-side encryption and the caller-supplied content type.
        IDictionary<string, string> requestHeaders = new Dictionary<string, string>();
        requestHeaders.Add("x-amz-server-side-encryption", "AES256");
        content.RemoveHttpContentHeader("Content-Type");
        content.AddHttpContentHeader("Content-Type", uploadDocumentStreamRequest.ContentType);

        await httpClient.PutRequestUriAsync(uploadUrl, content, requestHeaders).ConfigureAwait(false);

        // BUG FIX: the original blocked on task.Result inside this async
        // method, which risks deadlocks on a synchronization context and
        // wraps failures in AggregateException. Await the completion instead.
        String uploadedDocumentId = await CompleteUpload(documentId, versionId).ConfigureAwait(false);

        return new UploadDocumentStreamResponse(uploadDocumentStreamRequest)
        {
            DocumentId = uploadedDocumentId,
            VersionId = versionId
        };
    }
    finally
    {
        content.Dispose();
    }
}
/// <summary>
/// Builds single-segment upload metadata for the given file; the lone segment
/// uploads straight into the target stream path.
/// </summary>
/// <param name="filePath">Path of the input file.</param>
/// <param name="fileLength">Length of the input file, in bytes.</param>
/// <returns>Metadata describing a one-segment upload.</returns>
private UploadMetadata CreateMetadata(string filePath, long fileLength)
{
    var metadata = new UploadMetadata()
    {
        InputFilePath = filePath,
        FileLength = fileLength,
        TargetStreamPath = StreamPath,
        SegmentCount = 1,
        SegmentLength = UploadSegmentMetadata.CalculateSegmentLength(fileLength, 1),
        Segments = new UploadSegmentMetadata[1],
        IsBinary = true
    };

    // With a single segment, its destination is the target stream itself.
    var onlySegment = new UploadSegmentMetadata(0, metadata);
    onlySegment.Path = metadata.TargetStreamPath;
    metadata.Segments[0] = onlySegment;

    return metadata;
}
/// <summary>
/// Uploads stream to given folder and document name.
/// </summary>
/// <param name="uploadDocumentStreamRequest">Request parameters.</param>
/// <returns>Response for document details.</returns>
public async Task<UploadDocumentStreamResponse> UploadDocumentStreamAsync(UploadDocumentStreamRequest uploadDocumentStreamRequest)
{
    // Ask the service to initiate an upload; the response carries the
    // pre-signed URL the content must be PUT to.
    InitiateDocumentVersionUploadRequest initiateDocumentVersionUploadRequest = new InitiateDocumentVersionUploadRequest()
    {
        ParentFolderId = uploadDocumentStreamRequest.ParentFolderId,
        Name = uploadDocumentStreamRequest.DocumentName,
        ContentType = uploadDocumentStreamRequest.ContentType,
        ContentCreatedTimestamp = uploadDocumentStreamRequest.ContentCreatedTimestamp,
        ContentModifiedTimestamp = uploadDocumentStreamRequest.ContentModifiedTimestamp,
        DocumentSizeInBytes = uploadDocumentStreamRequest.DocumentSizeInBytes,
        Id = uploadDocumentStreamRequest.DocumentId,
        AuthenticationToken = authenticationToken
    };
    InitiateDocumentVersionUploadResponse response = await client.InitiateDocumentVersionUploadAsync(initiateDocumentVersionUploadRequest).ConfigureAwait(false);

    UploadMetadata uploadMetadata = response.UploadMetadata;
    String documentId = response.Metadata.Id;
    String versionId = response.Metadata.LatestVersionMetadata.Id;
    String uploadUrl = uploadMetadata.UploadUrl;

    // BUG FIX: the original mutated httpClient.DefaultRequestHeaders for each
    // call (not safe when the shared client serves concurrent requests), never
    // disposed the StreamContent, ignored the PUT status, and blocked on
    // task.Result.Result via ContinueWith. Use a per-request message, dispose
    // the content/response, check the status, and await throughout.
    using (StreamContent content = new StreamContent(uploadDocumentStreamRequest.Stream))
    using (HttpRequestMessage request = new HttpRequestMessage(HttpMethod.Put, uploadUrl))
    {
        content.Headers.Remove("Content-Type");
        content.Headers.Add("Content-Type", uploadDocumentStreamRequest.ContentType);
        request.Content = content;
        request.Headers.TryAddWithoutValidation("x-amz-server-side-encryption", "AES256");

        using (HttpResponseMessage putResponse = await httpClient.SendAsync(request).ConfigureAwait(false))
        {
            // Do not report the upload complete if the PUT itself failed.
            putResponse.EnsureSuccessStatusCode();
        }

        String uploadedDocumentId = await CompleteUpload(documentId, versionId).ConfigureAwait(false);

        return new UploadDocumentStreamResponse(uploadDocumentStreamRequest)
        {
            DocumentId = uploadedDocumentId,
            VersionId = versionId
        };
    }
}
/// <summary>
/// Seeds the progress counters from the given upload metadata: overall totals,
/// one <see cref="SegmentUploadProgress"/> per segment, and uploaded bytes for
/// segments already marked Complete.
/// </summary>
/// <param name="metadata">The metadata.</param>
private void Populate(UploadMetadata metadata)
{
    this.TotalFileLength = metadata.FileLength;
    this.TotalSegmentCount = metadata.SegmentCount;
    _segmentProgress = new SegmentUploadProgress[this.TotalSegmentCount];

    foreach (var segmentInfo in metadata.Segments)
    {
        // A completed segment counts fully toward the uploaded byte total;
        // anything else starts from zero uploaded bytes.
        bool isComplete = segmentInfo.Status == SegmentUploadStatus.Complete;
        long uploadedBytes = isComplete ? segmentInfo.Length : 0;
        if (isComplete)
        {
            this.UploadedByteCount += segmentInfo.Length;
        }

        _segmentProgress[segmentInfo.SegmentNumber] =
            new SegmentUploadProgress(segmentInfo.SegmentNumber, segmentInfo.Length, uploadedBytes, false);
    }
}
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="user">The AdWords user.</param>
/// <param name="conversionName">The conversion type name that you'd like to attribute this
/// upload to.</param>
/// <param name="externalUploadId">The external upload ID can be any number that you use to
/// keep track of your uploads.</param>
/// <param name="emailAddresses">The email addresses for creating user identifiers.</param>
/// <param name="advertiserUploadTime">The advertiser upload time. For times, use the format
/// yyyyMMdd HHmmss tz. For more details on formats, see:
/// https://developers.google.com/adwords/api/docs/appendix/codes-formats#date-and-time-formats
/// For time zones, see:
/// https://developers.google.com/adwords/api/docs/appendix/codes-formats#timezone-ids</param>
/// <param name="bridgeMapVersionId">The version ID of the bridge map.</param>
/// <param name="uploadType">The type of data upload.</param>
/// <param name="partnerId">The partner ID</param>
public void Run(AdWordsUser user, string conversionName, long externalUploadId,
    string[] emailAddresses, string advertiserUploadTime, string bridgeMapVersionId,
    OfflineDataUploadType uploadType, int partnerId)
{
    using (OfflineDataUploadService offlineDataUploadService =
        (OfflineDataUploadService)user.GetService(
            AdWordsService.v201802.OfflineDataUploadService))
    {
        offlineDataUploadService.RequestHeader.partialFailure = true;

        // Create the first offline data row for upload.
        // This transaction occurred 7 days ago with amount of 200 USD.
        // BUG FIX: DateTime is immutable and AddDays returns a new value; the
        // original discarded it, leaving the transaction time at "now".
        DateTime transactionTime1 = DateTime.Now.AddDays(-7);
        long transactionAmount1 = 200000000;
        string transactionCurrencyCode1 = "USD";
        UserIdentifier[] userIdentifierList1 = new UserIdentifier[]
        {
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.HASHED_EMAIL, emailAddresses[0]),
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.STATE, "New York")
        };
        OfflineData offlineData1 = CreateOfflineDataRow(transactionTime1, transactionAmount1,
            transactionCurrencyCode1, conversionName, userIdentifierList1);

        // Create the second offline data row for upload.
        // This transaction occurred 14 days ago with amount of 450 EUR.
        DateTime transactionTime2 = DateTime.Now.AddDays(-14);
        long transactionAmount2 = 450000000;
        string transactionCurrencyCode2 = "EUR";
        UserIdentifier[] userIdentifierList2 = new UserIdentifier[]
        {
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.HASHED_EMAIL, emailAddresses[1]),
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.STATE, "California")
        };
        OfflineData offlineData2 = CreateOfflineDataRow(transactionTime2, transactionAmount2,
            transactionCurrencyCode2, conversionName, userIdentifierList2);

        // Create offline data upload object.
        OfflineDataUpload offlineDataUpload = new OfflineDataUpload();
        offlineDataUpload.externalUploadId = externalUploadId;
        offlineDataUpload.offlineDataList = new OfflineData[] { offlineData1, offlineData2 };

        // Set the type and metadata of this upload.
        offlineDataUpload.uploadType = uploadType;
        StoreSalesUploadCommonMetadata storeSalesMetaData = null;
        switch (uploadType)
        {
            case OfflineDataUploadType.STORE_SALES_UPLOAD_FIRST_PARTY:
                storeSalesMetaData = new FirstPartyUploadMetadata()
                {
                    loyaltyRate = 1,
                    transactionUploadRate = 1
                };
                break;

            case OfflineDataUploadType.STORE_SALES_UPLOAD_THIRD_PARTY:
                storeSalesMetaData = new ThirdPartyUploadMetadata()
                {
                    loyaltyRate = 1.0,
                    transactionUploadRate = 1.0,
                    advertiserUploadTime = advertiserUploadTime,
                    validTransactionRate = 1.0,
                    partnerMatchRate = 1.0,
                    partnerUploadRate = 1.0,
                    bridgeMapVersionId = bridgeMapVersionId,
                    partnerId = partnerId
                };
                break;
        }
        UploadMetadata uploadMetadata = new UploadMetadata();
        uploadMetadata.Item = storeSalesMetaData;
        offlineDataUpload.uploadMetadata = uploadMetadata;

        // Create an offline data upload operation.
        OfflineDataUploadOperation offlineDataUploadOperation = new OfflineDataUploadOperation();
        offlineDataUploadOperation.@operator = Operator.ADD;
        offlineDataUploadOperation.operand = offlineDataUpload;

        // Keep the operations in an array, so it may be reused later for error processing.
        List<OfflineDataUploadOperation> operations = new List<OfflineDataUploadOperation>();
        operations.Add(offlineDataUploadOperation);

        try
        {
            // Upload offline data to the server.
            OfflineDataUploadReturnValue result = offlineDataUploadService.mutate(
                operations.ToArray());
            offlineDataUpload = result.value[0];

            // Print the upload ID and status.
            Console.WriteLine("Uploaded offline data with external upload ID {0}, " +
                "and upload status {1}.", offlineDataUpload.externalUploadId,
                offlineDataUpload.uploadStatus);

            // Print any partial failure errors from the response.
            if (result.partialFailureErrors != null)
            {
                foreach (ApiError apiError in result.partialFailureErrors)
                {
                    // Get the index of the failed operation from the error's field path elements.
                    int operationIndex = apiError.GetOperationIndex();
                    if (operationIndex != -1)
                    {
                        OfflineDataUpload failedOfflineDataUpload = operations[operationIndex].operand;
                        // Get the index of the entry in the offline data list from the error's
                        // field path elements.
                        int offlineDataListIndex = apiError.GetFieldPathIndex("offlineDataList");
                        Console.WriteLine("Offline data list entry {0} in operation {1} with external " +
                            "upload ID {2} and type '{3}' has triggered a failure for the following " +
                            "reason: '{4}'.", offlineDataListIndex, operationIndex,
                            failedOfflineDataUpload.externalUploadId,
                            failedOfflineDataUpload.uploadType, apiError.errorString);
                    }
                    else
                    {
                        Console.WriteLine("A failure has occurred for the following reason: {0}",
                            apiError.errorString);
                    }
                }
            }
        }
        catch (Exception e)
        {
            throw new System.ApplicationException("Failed upload offline data conversions.", e);
        }
    }
}
/// <summary>
/// Initializes a new instance of the <see cref="UploadProgress"/> class,
/// seeding all progress counters from the given metadata.
/// </summary>
/// <param name="metadata">The metadata.</param>
internal UploadProgress(UploadMetadata metadata)
{
    // All state derivation lives in Populate so it can be reused.
    Populate(metadata);
}
/// <summary>
/// Runs the code example.
/// </summary>
/// <param name="user">The AdWords user.</param>
/// <param name="conversionName">The conversion type name that you'd like to attribute this
/// upload to.</param>
/// <param name="externalUploadId">The external upload ID can be any number that you use to
/// keep track of your uploads.</param>
/// <param name="emailAddresses">The email addresses for creating user identifiers.</param>
/// <param name="advertiserUploadTime">The advertiser upload time. For times, use the format
/// yyyyMMdd HHmmss tz. For more details on formats, see:
/// https://developers.google.com/adwords/api/docs/appendix/codes-formats#date-and-time-formats
/// For time zones, see:
/// https://developers.google.com/adwords/api/docs/appendix/codes-formats#timezone-ids</param>
/// <param name="bridgeMapVersionId">The version ID of the bridge map.</param>
/// <param name="uploadType">The type of data upload.</param>
/// <param name="partnerId">The partner ID</param>
public void Run(AdWordsUser user, string conversionName, long externalUploadId,
    string[] emailAddresses, string advertiserUploadTime, string bridgeMapVersionId,
    OfflineDataUploadType uploadType, int partnerId)
{
    using (OfflineDataUploadService offlineDataUploadService =
        (OfflineDataUploadService)user.GetService(
            AdWordsService.v201710.OfflineDataUploadService))
    {
        // Create the first offline data row for upload.
        // This transaction occurred 7 days ago with amount of 200 USD.
        // BUG FIX: the original called AddDays on `new DateTime()` (i.e.
        // DateTime.MinValue, which throws for a negative offset) and discarded
        // the result, since DateTime is immutable. Base the time on Now and
        // keep the returned value.
        DateTime transactionTime1 = DateTime.Now.AddDays(-7);
        long transactionAmount1 = 200000000;
        string transactionCurrencyCode1 = "USD";
        UserIdentifier[] userIdentifierList1 = new UserIdentifier[]
        {
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.HASHED_EMAIL, emailAddresses[0]),
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.STATE, "New York")
        };
        OfflineData offlineData1 = CreateOfflineDataRow(transactionTime1, transactionAmount1,
            transactionCurrencyCode1, conversionName, userIdentifierList1);

        // Create the second offline data row for upload.
        // This transaction occurred 14 days ago with amount of 450 EUR.
        DateTime transactionTime2 = DateTime.Now.AddDays(-14);
        long transactionAmount2 = 450000000;
        string transactionCurrencyCode2 = "EUR";
        UserIdentifier[] userIdentifierList2 = new UserIdentifier[]
        {
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.HASHED_EMAIL, emailAddresses[1]),
            CreateUserIdentifier(OfflineDataUploadUserIdentifierType.STATE, "California")
        };
        OfflineData offlineData2 = CreateOfflineDataRow(transactionTime2, transactionAmount2,
            transactionCurrencyCode2, conversionName, userIdentifierList2);

        // Create offline data upload object.
        OfflineDataUpload offlineDataUpload = new OfflineDataUpload();
        offlineDataUpload.externalUploadId = externalUploadId;
        offlineDataUpload.offlineDataList = new OfflineData[] { offlineData1, offlineData2 };

        // Set the type and metadata of this upload.
        offlineDataUpload.uploadType = uploadType;
        StoreSalesUploadCommonMetadata storeSalesMetaData = null;
        switch (uploadType)
        {
            case OfflineDataUploadType.STORE_SALES_UPLOAD_FIRST_PARTY:
                storeSalesMetaData = new FirstPartyUploadMetadata()
                {
                    loyaltyRate = 1,
                    transactionUploadRate = 1
                };
                break;

            case OfflineDataUploadType.STORE_SALES_UPLOAD_THIRD_PARTY:
                storeSalesMetaData = new ThirdPartyUploadMetadata()
                {
                    loyaltyRate = 1.0,
                    transactionUploadRate = 1.0,
                    advertiserUploadTime = advertiserUploadTime,
                    validTransactionRate = 1.0,
                    partnerMatchRate = 1.0,
                    partnerUploadRate = 1.0,
                    bridgeMapVersionId = bridgeMapVersionId,
                    partnerId = partnerId
                };
                break;
        }
        UploadMetadata uploadMetadata = new UploadMetadata();
        uploadMetadata.Item = storeSalesMetaData;
        offlineDataUpload.uploadMetadata = uploadMetadata;

        // Create an offline data upload operation.
        OfflineDataUploadOperation offlineDataUploadOperation = new OfflineDataUploadOperation();
        offlineDataUploadOperation.@operator = Operator.ADD;
        offlineDataUploadOperation.operand = offlineDataUpload;

        try
        {
            // Upload offline data to the server.
            OfflineDataUploadReturnValue result = offlineDataUploadService.mutate(
                new OfflineDataUploadOperation[] { offlineDataUploadOperation });
            offlineDataUpload = result.value[0];

            // Print the upload ID and status.
            Console.WriteLine("Uploaded offline data with external upload ID {0}, " +
                "and upload status {1}.", offlineDataUpload.externalUploadId,
                offlineDataUpload.uploadStatus);

            // Print any partial data errors from the response. The order of the partial
            // data errors list is the same as the uploaded offline data list in the
            // request.
            if (offlineDataUpload.partialDataErrors != null)
            {
                for (int i = 0; i < offlineDataUpload.partialDataErrors.Length; i++)
                {
                    ApiError partialDataError = offlineDataUpload.partialDataErrors[i];
                    Console.WriteLine("Found a partial error for offline data {0} with error " +
                        "string: {1}.", i + 1, partialDataError.errorString);
                }
            }
        }
        catch (Exception e)
        {
            throw new System.ApplicationException("Failed upload offline data conversions.", e);
        }
    }
}
/// <summary>
/// Slices the expected byte contents of the given segment out of the small
/// test file, using the typical segment length to locate its start.
/// </summary>
/// <param name="segment">Segment whose expected bytes are wanted.</param>
/// <param name="metadata">Upload metadata supplying the typical segment length.</param>
/// <returns>The expected contents of the segment.</returns>
private byte[] GetExpectedContents(UploadSegmentMetadata segment, UploadMetadata metadata)
{
    var expected = new byte[segment.Length];
    long sourceOffset = segment.SegmentNumber * metadata.SegmentLength;
    Array.Copy(_smallFileContents, sourceOffset, expected, 0, segment.Length);
    return expected;
}
/// <summary>
/// Asserts that one segment was fully uploaded: its status is Complete, the
/// target stream exists in the front end, and the stream's length and contents
/// match the expected slice of the source file.
/// </summary>
/// <param name="segmentMetadata">Segment under test.</param>
/// <param name="metadata">Upload metadata the segment belongs to.</param>
/// <param name="frontEnd">Front end that should hold the segment stream.</param>
private void VerifyTargetStreamIsComplete(UploadSegmentMetadata segmentMetadata, UploadMetadata metadata, InMemoryFrontEnd frontEnd)
{
    Assert.Equal(SegmentUploadStatus.Complete, segmentMetadata.Status);
    Assert.True(
        frontEnd.StreamExists(segmentMetadata.Path),
        string.Format("Segment {0} was not uploaded", segmentMetadata.SegmentNumber));
    Assert.Equal(segmentMetadata.Length, frontEnd.GetStreamLength(segmentMetadata.Path));

    var expectedContents = GetExpectedContents(segmentMetadata, metadata);
    var actualContents = frontEnd.GetStreamContents(segmentMetadata.Path);
    AssertExtensions.AreEqual(expectedContents, actualContents,
        "Segment {0} has unexpected contents", segmentMetadata.SegmentNumber);
}