/// <summary>
/// Tests that a streamed upload with chunking enabled issues one HTTP request per
/// CHUNK_SIZE slice of buffered data, plus one final request for the postamble,
/// and that every non-final chunk is 256K-aligned.
/// </summary>
public void TestStreamedUploadWithChunking() {
    uploadChunkRecords.Clear();

    // Initialize the uploader for chunked upload.
    this.Init(TEST_USER, true, CHUNK_SIZE_ALIGN * 12);

    // Generate operations for upload.
    Operation[] operations = GetKeywordOperations(123);

    // Start the upload.
    BatchUploadProgress progress = this.BeginStreamUpload("http://www.example.com");

    // Split the upload into NUM_BATCHES_FOR_STREAMED_UPLOAD batches.
    const int NUM_OPERATIONS_TO_UPLOAD_PER_BATCH =
        NUM_KEYWORD_OPERATIONS / NUM_BATCHES_FOR_STREAMED_UPLOAD;
    long uploadRequestCount = 0;

    for (int i = 0; i < NUM_BATCHES_FOR_STREAMED_UPLOAD; i++) {
        Operation[] operationsToStream = new Operation[NUM_OPERATIONS_TO_UPLOAD_PER_BATCH];
        Array.Copy(operations, i * NUM_OPERATIONS_TO_UPLOAD_PER_BATCH, operationsToStream, 0,
            NUM_OPERATIONS_TO_UPLOAD_PER_BATCH);

        long oldProgress = progress.BytesUploaded;
        progress = this.StreamUpload(progress, operationsToStream);

        // Each CHUNK_SIZE slice of the newly buffered data (including a partial
        // trailing slice) becomes one upload request:
        // ceil(additionalDataCount / CHUNK_SIZE).
        long additionalDataCount = progress.BytesUploaded - oldProgress;
        uploadRequestCount += (additionalDataCount + CHUNK_SIZE - 1) / CHUNK_SIZE;
    }
    this.EndStreamUpload(progress);

    // EndStreamUpload issues one more request for the postamble.
    uploadRequestCount += 1;

    // There should be uploadRequestCount entries in uploadChunkRecords.
    Assert.That(uploadChunkRecords.Count == uploadRequestCount);

    // Verify the alignment of every request except the final (postamble) one.
    for (int i = 0; i < uploadRequestCount - 1; i++) {
        long start = uploadChunkRecords[i].StartOffset + uploadChunkRecords[i].Start;
        long end = uploadChunkRecords[i].StartOffset + uploadChunkRecords[i].End;
        long uploaded = end - start;

        // A chunk that spans exactly CHUNK_SIZE needs no further alignment check.
        // BUGFIX: the original called Assert.Pass() here, which throws NUnit's
        // SuccessException and ends the entire test at the first such chunk,
        // silently skipping the checks for all remaining chunks.
        if (uploaded == CHUNK_SIZE - 1) {
            continue;
        }

        // Uploaded size is always a multiple of 256K.
        Assert.That((uploaded + 1) % (256 * 1024) == 0,
            string.Format("Chunk {0} is not aligned with 256K.", i));
    }
}
/// <summary>
/// Uploads the operations to a specified URL in a streamed manner.
/// </summary>
/// <param name="uploadProgress">The upload progress tracker.</param>
/// <param name="operations">The list of operations.</param>
/// <returns>The updated progress tracker.</returns>
public BatchUploadProgress StreamUpload(BatchUploadProgress uploadProgress,
    IEnumerable<Operation> operations) {
    // Serialize the operations once, then delegate to the string-based overload.
    string postBody = GetPostBody(operations);
    return StreamUpload(uploadProgress, postBody);
}
/// <summary>
/// Tests that a streamed upload with chunking disabled issues exactly one HTTP
/// request per uploaded batch plus one final request for the postamble, and that
/// each request's offsets and sizes line up end-to-end.
/// </summary>
public void TestStreamedUploadNoChunking() {
    uploadChunkRecords.Clear();

    // Initialize the uploader with chunking disabled.
    this.Init(TEST_USER, false, 0);

    // Generate operations for upload.
    Operation[] operations = GetKeywordOperations(123);

    // Start the upload.
    BatchUploadProgress progress = this.BeginStreamUpload("http://www.example.com");

    // Split the upload into NUM_BATCHES_FOR_STREAMED_UPLOAD batches.
    int[] batchSizes = new int[NUM_BATCHES_FOR_STREAMED_UPLOAD];
    const int NUM_OPERATIONS_TO_UPLOAD_PER_BATCH =
        NUM_KEYWORD_OPERATIONS / NUM_BATCHES_FOR_STREAMED_UPLOAD;

    for (int i = 0; i < NUM_BATCHES_FOR_STREAMED_UPLOAD; i++) {
        Operation[] operationsToStream = new Operation[NUM_OPERATIONS_TO_UPLOAD_PER_BATCH];

        // BUGFIX: copy the batch into the array BEFORE measuring its serialized
        // size (the size was previously computed from an array of null
        // operations), and use the per-batch operation count for both the source
        // offset stride and the copy length (the original used
        // NUM_BATCHES_FOR_STREAMED_UPLOAD for both, which only fills the array
        // correctly when NUM_KEYWORD_OPERATIONS == NUM_BATCHES_FOR_STREAMED_UPLOAD^2).
        // This mirrors the copy in TestStreamedUploadWithChunking.
        Array.Copy(operations, i * NUM_OPERATIONS_TO_UPLOAD_PER_BATCH, operationsToStream, 0,
            NUM_OPERATIONS_TO_UPLOAD_PER_BATCH);

        // Each intermediate request is padded up to a CHUNK_SIZE_ALIGN boundary.
        // NOTE(review): when dataLength is already aligned this adds a full extra
        // CHUNK_SIZE_ALIGN of padding — confirm this matches the uploader's
        // padding behavior.
        int dataLength = Encoding.UTF8.GetBytes(GetPostBody(operationsToStream)).Length;
        int paddedLength = CHUNK_SIZE_ALIGN - (dataLength % CHUNK_SIZE_ALIGN);
        batchSizes[i] = dataLength + paddedLength;

        progress = this.StreamUpload(progress, operationsToStream);
    }
    this.EndStreamUpload(progress);

    // There should be NUM_BATCHES_FOR_STREAMED_UPLOAD + 1 requests: one per
    // batch, plus the final postamble request.
    Assert.That(uploadChunkRecords.Count == NUM_BATCHES_FOR_STREAMED_UPLOAD + 1);

    // StartOffset tests: each record starts exactly where the previous ended.
    Assert.AreEqual(0, uploadChunkRecords[0].StartOffset);
    for (int i = 1; i < NUM_BATCHES_FOR_STREAMED_UPLOAD + 1; i++) {
        Assert.AreEqual(uploadChunkRecords[i].StartOffset,
            uploadChunkRecords[i - 1].StartOffset +
            (uploadChunkRecords[i - 1].End - uploadChunkRecords[i - 1].Start) + 1);
    }

    // Start, End, totalUploadSize tests. (The original contained this loop twice,
    // verbatim; the duplicate was removed. The magic constant 10 is replaced by
    // NUM_BATCHES_FOR_STREAMED_UPLOAD, which the assertions above prove equal.)
    for (int i = 0; i < NUM_BATCHES_FOR_STREAMED_UPLOAD; i++) {
        Assert.AreEqual(0, uploadChunkRecords[i].Start);
        Assert.AreEqual(batchSizes[i] - 1, uploadChunkRecords[i].End);
        Assert.IsNull(uploadChunkRecords[i].TotalUploadSize);
    }

    // Last record: the postamble, which also reports the total upload size.
    Assert.AreEqual(0, uploadChunkRecords[NUM_BATCHES_FOR_STREAMED_UPLOAD].Start);
    Assert.AreEqual(POSTAMBLE.Length,
        uploadChunkRecords[NUM_BATCHES_FOR_STREAMED_UPLOAD].End + 1);
    Assert.AreEqual(
        uploadChunkRecords[NUM_BATCHES_FOR_STREAMED_UPLOAD].StartOffset + POSTAMBLE.Length,
        uploadChunkRecords[NUM_BATCHES_FOR_STREAMED_UPLOAD].TotalUploadSize);
}
/// <summary>
/// Runs the code example: creates a batch job, streams budget, campaign,
/// ad group, ad, and keyword operations to its upload URL in several
/// increments, marks the upload complete, waits for the job to finish, and
/// prints any processing errors and per-operation results.
/// </summary>
/// <param name="user">The AdWords user.</param>
public void Run(AdWordsUser user) {
    using (BatchJobService batchJobService = (BatchJobService) user.GetService(
        AdWordsService.v201806.BatchJobService)) {
        try {
            // Create a BatchJob.
            BatchJobOperation addOp = new BatchJobOperation() {
                @operator = Operator.ADD,
                operand = new BatchJob()
            };
            BatchJob batchJob = batchJobService.mutate(new BatchJobOperation[] { addOp })
                .value[0];

            // Get the upload URL from the new job.
            string uploadUrl = batchJob.uploadUrl.url;

            Console.WriteLine(
                "Created BatchJob with ID {0}, status '{1}' and upload URL {2}.",
                batchJob.id, batchJob.status, batchJob.uploadUrl.url);

            BatchJobUtilities batchJobUploadHelper = new BatchJobUtilities(user);

            // Create a resumable Upload URL to upload the operations, then begin
            // the streamed (incremental) upload session against it.
            string resumableUploadUrl = batchJobUploadHelper.GetResumableUploadUrl(uploadUrl);
            BatchUploadProgress uploadProgress =
                batchJobUploadHelper.BeginStreamUpload(resumableUploadUrl);

            // Create and add an operation to create a new budget.
            BudgetOperation budgetOperation = BuildBudgetOperation();
            uploadProgress = batchJobUploadHelper.StreamUpload(uploadProgress,
                new List<Operation>() {
                    budgetOperation
                });

            // Create and add operations to create new campaigns, funded by the
            // budget uploaded above.
            List<Operation> campaignOperations = new List<Operation>();
            campaignOperations.AddRange(
                BuildCampaignOperations(budgetOperation.operand.budgetId));
            uploadProgress = batchJobUploadHelper.StreamUpload(uploadProgress,
                campaignOperations);

            // Create and add operations to create new ad groups, one set per
            // campaign created above.
            List<Operation> adGroupOperations = new List<Operation>();
            foreach (CampaignOperation campaignOperation in campaignOperations) {
                adGroupOperations.AddRange(
                    BuildAdGroupOperations(campaignOperation.operand.id));
            }
            uploadProgress = batchJobUploadHelper.StreamUpload(uploadProgress,
                adGroupOperations);

            // Create and add operations to create new ad group ads (expanded text ads).
            List<Operation> adOperations = new List<Operation>();
            foreach (AdGroupOperation adGroupOperation in adGroupOperations) {
                adOperations.AddRange(
                    BuildAdGroupAdOperations(adGroupOperation.operand.id));
            }
            uploadProgress = batchJobUploadHelper.StreamUpload(uploadProgress, adOperations);

            // Create and add operations to create new ad group criteria (keywords).
            List<Operation> keywordOperations = new List<Operation>();
            foreach (AdGroupOperation adGroupOperation in adGroupOperations) {
                keywordOperations.AddRange(
                    BuildAdGroupCriterionOperations(adGroupOperation.operand.id));
            }
            uploadProgress = batchJobUploadHelper.StreamUpload(uploadProgress,
                keywordOperations);

            // Mark the upload as complete.
            batchJobUploadHelper.EndStreamUpload(uploadProgress);

            // Poll until the job leaves the pending state or the wait times out.
            // The delegate returns false to keep the default polling behavior;
            // it also captures the latest job snapshot into batchJob for the
            // error/result reporting below.
            bool isCompleted = batchJobUploadHelper.WaitForPendingJob(batchJob.id,
                TIME_TO_WAIT_FOR_COMPLETION,
                delegate(BatchJob waitBatchJob, long timeElapsed) {
                    Console.WriteLine("[{0} seconds]: Batch job ID {1} has status '{2}'.",
                        timeElapsed / 1000, waitBatchJob.id, waitBatchJob.status);
                    batchJob = waitBatchJob;
                    return (false);
                });

            if (!isCompleted) {
                throw new TimeoutException(
                    "Job is still in pending state after waiting for " +
                    TIME_TO_WAIT_FOR_COMPLETION + " seconds.");
            }

            // Report job-level processing errors, if any were recorded.
            if (batchJob.processingErrors != null) {
                foreach (BatchJobProcessingError processingError in batchJob
                    .processingErrors) {
                    Console.WriteLine(" Processing error: {0}, {1}, {2}, {3}, {4}",
                        processingError.ApiErrorType, processingError.trigger,
                        processingError.errorString, processingError.fieldPath,
                        processingError.reason);
                }
            }

            // Download and print the per-operation results, when available.
            if (batchJob.downloadUrl != null && batchJob.downloadUrl.url != null) {
                BatchJobMutateResponse mutateResponse =
                    batchJobUploadHelper.Download(batchJob.downloadUrl.url);
                Console.WriteLine("Downloaded results from {0}.", batchJob.downloadUrl.url);
                foreach (MutateResult mutateResult in mutateResponse.rval) {
                    // A result with no error list succeeded.
                    string outcome = mutateResult.errorList == null ?
                        "SUCCESS" : "FAILURE";
                    Console.WriteLine(" Operation [{0}] - {1}", mutateResult.index, outcome);
                }
            }
        } catch (Exception e) {
            throw new System.ApplicationException(
                "Failed to add campaigns using batch job.", e);
        }
    }
}
/// <summary>
/// Uploads the operations to a specified URL in a streamed manner.
/// </summary>
/// <param name="uploadProgress">The upload progress tracker.</param>
/// <param name="operations">The list of operations.</param>
/// <returns>The updated progress tracker.</returns>
/// <remarks>Use this method as a workaround for
/// https://bugzilla.xamarin.com/show_bug.cgi?id=40793 if you are using VB.NET on Mono.
/// Otherwise use <see cref="StreamUpload(BatchUploadProgress, IEnumerable{Operation})"/>
/// method.</remarks>
public BatchUploadProgress StreamUpload(BatchUploadProgress uploadProgress,
    Operation[] operations) {
    // Widen the array to a sequence and delegate to the IEnumerable overload.
    IEnumerable<Operation> operationSequence = operations;
    return StreamUpload(uploadProgress, operationSequence);
}