/// <summary>
/// Downloads every delivery file via a single batch operation, reporting the
/// batch's progress (scaled to 95%) as the service's progress.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress.
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) =>
    {
        this.ReportProgress(batch.Progress * 0.95);
    });

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        // Skip files with no source URL — consistent with the sibling retrievers,
        // and avoids WebRequest.Create throwing on an empty URI.
        if (String.IsNullOrWhiteSpace(file.SourceUrl))
            continue;

        WebRequest request = WebRequest.Create(file.SourceUrl);

        // Persist delivery state before each download starts so a crash
        // mid-batch leaves the delivery recoverable.
        this.Delivery.Save();

        DeliveryFileDownloadOperation download = file.Download(request);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Downloads all delivery files that have a source URL, logging each download
/// start and mirroring the batch's progress (scaled to 99%) as service progress.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Report the batch's progress as this service's progress; the final 1% is the save below.
    _batch.Progressed += new EventHandler((sender, e) => this.ReportProgress(_batch.Progress * 0.99));

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        if (!String.IsNullOrWhiteSpace(file.SourceUrl))
        {
            Log.Write(String.Format("Delivery file {0} starting download ({1}).", file.Name, file.FileID), LogMessageType.Information);
            DownloadFile(file);
        }
    }

    _batch.Start();
    _batch.Wait();

    // Add a retrieved history entry for the entire delivery
    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Batches downloads of all delivery files that carry a source URL and drives
/// the service's Progress property from the batch (capped at 95%).
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    BatchDownloadOperation downloadBatch = new BatchDownloadOperation();
    downloadBatch.Progressed += new EventHandler((sender, e) => this.Progress = downloadBatch.Progress * 0.95);

    foreach (DeliveryFile deliveryFile in this.Delivery.Files)
    {
        if (!String.IsNullOrWhiteSpace(deliveryFile.SourceUrl))
        {
            DeliveryFileDownloadOperation operation = deliveryFile.Download();
            operation.Ended += new EventHandler(download_Ended);
            downloadBatch.Add(operation);
        }
    }

    downloadBatch.Start();
    downloadBatch.Wait();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Retrieves SalesForce query results for every delivery file. A named mutex
/// serializes retrievals across processes.
/// </summary>
protected override ServiceOutcome DoPipelineWork()
{
    // Dispose the mutex — it wraps an OS handle that was previously leaked.
    using (var mutex = new Mutex(false, "SalesForceRetriever"))
    {
        var batch = new BatchDownloadOperation();
        try
        {
            mutex.WaitOne();

            var token = GetAuthenticationToken();

            foreach (var file in Delivery.Files)
            {
                // Inject the delivery's time period into the SOQL query template.
                var query = String.Format(file.Parameters["Query"].ToString(),
                    Delivery.TimePeriodStart.Year, Delivery.TimePeriodStart.Month, Delivery.TimePeriodStart.Day);
                file.SourceUrl = String.Format("{0}/services/data/v20.0/query?q={1}", token.InstanceUrl, query);

                var request = (HttpWebRequest)WebRequest.Create(file.SourceUrl);
                request.Headers.Add("Authorization: OAuth " + token.AccessToken);

                var fileDownloadOperation = file.Download(request);
                batch.Add(fileDownloadOperation);
            }

            batch.Start();
            batch.Wait();
            batch.EnsureSuccess();
        }
        finally
        {
            mutex.ReleaseMutex();
        }
    }

    Delivery.Save();
    return ServiceOutcome.Success;
}
/// <summary>
/// Creates AdCenter report requests and downloads them as a batch.
/// If the first download attempt fails, the requests are rebuilt and retried once.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    _batchDownloadOperation = new BatchDownloadOperation();
    _batchDownloadOperation.Progressed += new EventHandler(_batchDownloadOperation_Progressed);
    _batchDownloadOperation.Ended += new EventHandler(_batchDownloadOperation_Ended);

    _adCenterApi = new AdCenterApi(this);
    _filesInProgress = this.Delivery.Files.Count;

    // Submit the report requests; on failure rebuild them and retry exactly once.
    CreateRequests();
    if (!Download())
    {
        CreateRequests();
        Download();
    }

    _batchDownloadOperation.Start();
    _batchDownloadOperation.Wait();
    _batchDownloadOperation.EnsureSuccess();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Downloads Google Analytics report files after ensuring a fresh OAuth2 access
/// token. A named mutex serializes retrievals across processes.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // NOTE(review): the mutex name "GoogleAnalyticsRetriver" is misspelled but must stay
    // as-is — it identifies the cross-process lock shared by deployed instances.
    // Dispose the mutex — it wraps an OS handle that was previously leaked.
    using (Mutex mutex = new Mutex(false, "GoogleAnalyticsRetriver"))
    {
        BatchDownloadOperation batch = new BatchDownloadOperation();
        try
        {
            mutex.WaitOne();

            #region Authentication
            //get access token + refresh token from db (if exist)
            Auth2 oAuth2 = Auth2.Get(Delivery.Parameters["ClientID"].ToString());

            //if not exist
            if (string.IsNullOrEmpty(oAuth2.access_token) || (string.IsNullOrEmpty(oAuth2.refresh_token)))
                oAuth2 = GetAccessTokenParamsFromGoogleAnalytics();

            //check if access_token is not expired (refresh 5 minutes before expiry)
            if (oAuth2.updateTime.AddSeconds(oAuth2.expires_in - 300) < DateTime.Now)
                oAuth2 = RefreshToken(oAuth2.refresh_token);
            #endregion

            foreach (var file in Delivery.Files)
            {
                // SourceUrl is a format template; substitute the escaped access token.
                string urlEncoded = string.Format(file.SourceUrl, Uri.EscapeUriString(oAuth2.access_token));
                HttpWebRequest request = (HttpWebRequest)WebRequest.Create(urlEncoded);
                request.Headers.Add("Accept-Encoding", "gzip");
                request.UserAgent = "winrar(gzip)";

                FileDownloadOperation fileDownloadOperation = file.Download(request);
                batch.Add(fileDownloadOperation);
            }

            batch.Start();
            batch.Wait();
            batch.EnsureSuccess();
        }
        finally
        {
            mutex.ReleaseMutex();
        }
    }

    Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Downloads delivery files via SOAP POST requests built from each file's
/// parameters, batching them and reporting progress (scaled to 95%).
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) => this.ReportProgress(batch.Progress * 0.95));

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        if (String.IsNullOrWhiteSpace(file.SourceUrl))
            continue;

        // Build the SOAP POST request for this file.
        WebRequest request = WebRequest.Create(file.SourceUrl);
        request.ContentType = file.Parameters["Content-Type"].ToString();
        request.Method = "POST";

        byte[] body = Encoding.UTF8.GetBytes(file.Parameters["Body"].ToString());
        request.ContentLength = body.Length;
        using (var requestStream = request.GetRequestStream())
        {
            requestStream.Write(body, 0, body.Length);
        }

        //Headers
        request.Headers.Add("SOAPAction", file.Parameters["SOAPAction"].ToString());

        this.Delivery.Save();

        DeliveryFileDownloadOperation download = file.Download(request);
        download.Ended += new EventHandler(download_Ended);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    // Add a retrieved history entry for the entire delivery
    this.Delivery.Save();
    return ServiceOutcome.Success;
}
/// <summary>
/// Recursively follows SalesForce "nextRecordsUrl" pagination links found in the
/// given files' JSON contents, downloading every continuation page and adding the
/// resulting files to the delivery.
/// </summary>
/// <param name="fetchFrom">Files whose downloaded contents may contain a nextRecordsUrl link.</param>
/// <param name="offset">Page offset. NOTE(review): passed through the recursion unchanged —
/// confirm whether it was meant to advance per page (it was previously only used to build
/// an unused local file name).</param>
/// <returns>The continuation files fetched at this level of recursion.</returns>
private List<DeliveryFile> FetchNext(List<DeliveryFile> fetchFrom, int offset)
{
    BatchDownloadOperation nextBatch = new BatchDownloadOperation();
    List<DeliveryFile> nextRecordsFiles = new List<DeliveryFile>();

    foreach (DeliveryFile ReportFile in fetchFrom)
    {
        // Mark the current file as batched.
        ReportFile.Parameters.Add("Batch", true);

        // Look for a pagination link in the file's downloaded JSON.
        JsonDynamicReader reportReader = new JsonDynamicReader(ReportFile.OpenContents(compression: FileCompression.None), "$.nextRecordsUrl");
        if (reportReader.Read())
        {
            string nextRecordPath = reportReader.Current.nextRecordsUrl;

            DeliveryFile nextRecordFile = new DeliveryFile();
            nextRecordFile.SourceUrl = ((Token)(ReportFile.Parameters["Token"])).instance_url + nextRecordPath;

            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(nextRecordFile.SourceUrl);
            request.Headers.Add("Authorization: OAuth " + ((Token)(ReportFile.Parameters["Token"])).access_token);

            FileDownloadOperation fileDownloadOperation = nextRecordFile.Download(request);
            nextBatch.Add(fileDownloadOperation);
            nextRecordsFiles.Add(nextRecordFile);
        }
    }

    if (nextRecordsFiles.Count > 0)
    {
        nextBatch.Start();
        nextBatch.Wait();
        nextBatch.EnsureSuccess();

        // Recurse: pages just downloaded may themselves link to further pages.
        foreach (DeliveryFile file in FetchNext(nextRecordsFiles, offset))
        {
            this.Delivery.Files.Add(file);
        }
    }

    return nextRecordsFiles;
}
/// <summary>
/// Builds a SOAP POST request per delivery file (skipping files without a source
/// URL), downloads them as one batch, and saves the delivery.
/// </summary>
protected override ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    var downloadBatch = new BatchDownloadOperation();
    downloadBatch.Progressed += (sender, e) => Progress = downloadBatch.Progress * 0.95;

    foreach (var deliveryFile in Delivery.Files)
    {
        if (String.IsNullOrWhiteSpace(deliveryFile.SourceUrl))
            continue;

        var request = WebRequest.Create(deliveryFile.SourceUrl);
        request.ContentType = deliveryFile.Parameters["Content-Type"].ToString();
        request.Method = "POST";

        byte[] payload = Encoding.UTF8.GetBytes(deliveryFile.Parameters["Body"].ToString());
        request.ContentLength = payload.Length;
        using (var requestStream = request.GetRequestStream())
        {
            requestStream.Write(payload, 0, payload.Length);
        }

        //Headers
        request.Headers.Add("SOAPAction", deliveryFile.Parameters["SOAPAction"].ToString());

        // TODO: shirat - to remove? why saving delivery between files?
        Delivery.Save();

        var download = deliveryFile.Download(request);
        download.Ended += download_Ended;
        downloadBatch.Add(download);
    }

    downloadBatch.Start();
    downloadBatch.Wait();

    Delivery.Save();
    return ServiceOutcome.Success;
}
/// <summary>
/// Downloads every delivery file with a plain GET request as a single batch,
/// reporting the batch's progress (scaled to 95%) as the service's progress.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) =>
    {
        this.ReportProgress(batch.Progress * 0.95);
    });

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        // Plain GET — this retriever needs no POST body or SOAP headers.
        WebRequest request = WebRequest.Create(file.SourceUrl);

        // Persist delivery state before each download starts.
        this.Delivery.Save();

        DeliveryFileDownloadOperation download = file.Download(request);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Downloads all delivery files as one batch and saves the delivery; succeeds
/// immediately when the delivery has no files.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Nothing to do when the delivery carries no files.
    if (Delivery.Files == null || Delivery.Files.Count == 0)
        return Core.Services.ServiceOutcome.Success;

    _batchDownloadOperation = new BatchDownloadOperation();
    _batchDownloadOperation.Progressed += new EventHandler(_batchDownloadOperation_Progressed);
    _filesInProgress = this.Delivery.Files.Count;

    foreach (DeliveryFile file in this.Delivery.Files)
        DownloadFile(file);

    _batchDownloadOperation.Start();
    _batchDownloadOperation.Wait();
    _batchDownloadOperation.EnsureSuccess();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Facebook retriever: first downloads a 1-row "length" file per query to learn the
/// total row count, then generates paged "data" files (500 rows each) and downloads
/// them as a second batch. Registers the generated files on the delivery, grouped
/// by type under the "FilesByType" delivery parameter.
/// </summary>
protected override ServiceOutcome DoPipelineWork()
{
    _baseAddress = new Uri(this.Instance.Configuration.Options[FacebookConfigurationOptions.BaseServiceAddress]);

    //Get Access token
    _accessToken = this.Instance.Configuration.Options[FacebookConfigurationOptions.Auth_AccessToken];
    _appSecretProof = GetAppSecretProof();

    BatchDownloadOperation countBatch = new BatchDownloadOperation();

    // Drop every non-"Length" file from the delivery; only count files survive this phase.
    var toremove = from f in Delivery.Files where !f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length) select f.Name;
    foreach (string item in toremove.ToList())
    {
        Delivery.Files.Remove(item);
    }

    // Download each length file with limit=1 — only the row counter is needed.
    foreach (DeliveryFile file in Delivery.Files)
    {
        if (file.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length))
        {
            FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString() + "limit=1"));
            countBatch.Add(fileDownloadOperation);
        }
    }
    countBatch.Progressed += new EventHandler(counted_Batch_Progressed);
    countBatch.Start();
    countBatch.Wait();
    countBatch.EnsureSuccess();

    // Generate one data file per 500-row page, grouped by file type.
    List<DeliveryFile> files = new List<DeliveryFile>();
    Dictionary<Consts.FileTypes, List<string>> filesByType = new Dictionary<Consts.FileTypes, List<string>>();
    Delivery.Parameters.Add("FilesByType", filesByType);
    foreach (DeliveryFile file in Delivery.Files.Where(f => f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)))
    {
        using (StreamReader reader = new StreamReader(file.OpenContents()))
        {
            int offset = 0;
            int limit = 500; // Facebook caps responses at 500 records per request.
            string json = reader.ReadToEnd();
            MyType t = JsonConvert.DeserializeObject<MyType>(json);
            while (offset < t.count)
            {
                DeliveryFile f = new DeliveryFile();
                // The length file's Name is a format template taking the page offset.
                f.Name = string.Format(file.Name, offset);
                f.Parameters.Add(Consts.DeliveryFileParameters.Url, string.Format("{0}&limit={1}&offset={2}", file.Parameters[Consts.DeliveryFileParameters.Url], limit, offset));
                f.Parameters.Add(Consts.DeliveryFileParameters.FileSubType, (long)Consts.FileSubType.Data);
                f.Parameters.Add(Consts.DeliveryFileParameters.FileType, Enum.Parse(typeof(Consts.FileTypes), file.Parameters[Consts.DeliveryFileParameters.FileType].ToString()));
                files.Add(f);
                if (!filesByType.ContainsKey((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]))
                {
                    filesByType.Add((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType], new List<string>());
                }
                filesByType[(Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]].Add(f.Name);
                offset += limit;
            }
            offset = 0;
        }
    }

    // Register the generated page files on the delivery and persist before downloading.
    foreach (var file in files)
    {
        this.Delivery.Files.Add(file);
    }
    this.Delivery.Save();

    // Download all data pages as a second batch.
    BatchDownloadOperation batch = new BatchDownloadOperation();
    foreach (DeliveryFile file in files.Where(fi => fi.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Data)))
    {
        FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
        batch.Add(fileDownloadOperation);
    }
    //batch.Progressed += new EventHandler(batch_Progressed);
    batch.Start();
    batch.Wait();
    batch.EnsureSuccess();
    this.Delivery.Save();
    return (ServiceOutcome.Success);
}
/// <summary>
/// Runs a download batch to completion and throws if any operation in it failed.
/// </summary>
/// <param name="batch">The batch of download operations to execute.</param>
private void RunBatch(BatchDownloadOperation batch)
{
    batch.Start();
    batch.Wait();
    batch.EnsureSuccess();
}
/// <summary>
/// Google Adwords (v201406) retriever: per client ID, builds an AWQL query for each
/// delivery file, validates the first query against the API, and downloads the
/// gzipped CSV reports one at a time (MaxConcurrent = 1).
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    _batchDownloadOperation = new BatchDownloadOperation() { MaxConcurrent = 1 };
    _batchDownloadOperation.Progressed += new EventHandler(_batchDownloadOperation_Progressed);
    _filesInProgress = this.Delivery.Files.Count;
    bool includeZeroImpression = Boolean.Parse(this.Delivery.Parameters["includeZeroImpression"].ToString());

    //Sets Date Range and time period
    _dateRange = GA201406.ReportDefinitionDateRangeType.CUSTOM_DATE;
    string startDate = this.TimePeriod.Start.ToDateTime().ToString("yyyyMMdd");
    string endDate = this.TimePeriod.End.ToDateTime().ToString("yyyyMMdd");
    _waitHandle = new AutoResetEvent(false);

    foreach (string clientId in (string[])this.Delivery.Parameters["AdwordsClientIDs"])
    {
        //Get all files on specific client
        var files = from f in this.Delivery.Files where f.Parameters["AdwordsClientID"].ToString() == clientId select f;

        #region Adwords User
        //==================================================================================
        Dictionary<string, string> headers = new Dictionary<string, string>()
        {
            { "DeveloperToken", this.Delivery.Parameters["DeveloperToken"].ToString() },
            { "UserAgent", FileManager.UserAgentString },
            { "EnableGzipCompression", "true" },
            { "ClientCustomerId", clientId },
            { "Email", this.Delivery.Parameters["MccEmail"].ToString() }
        };
        AdWordsUser user = new AdWordsUser(headers);
        (user.Config as AdWordsAppConfig).AuthorizationMethod = AdWordsAuthorizationMethod.OAuth2;
        AdwordsUtill.GetOAuthDetailsFromDB(this.Delivery.Parameters["OAuth2ClientId"].ToString(), user);
        //==================================================================================
        #endregion Adwords User

        bool firstCheck = true;
        List<string> awqls = new List<string>();
        foreach (DeliveryFile file in files)
        {
            //Validate Google report type (this identical check was previously duplicated back-to-back)
            GA201406.ReportDefinitionReportType reportType;
            if (Enum.IsDefined(typeof(GA201406.ReportDefinitionReportType), file.Parameters["ReportType"].ToString()))
            {
                reportType = (GA201406.ReportDefinitionReportType)Enum.Parse(typeof(GA201406.ReportDefinitionReportType), file.Parameters["ReportType"].ToString(), true);
            }
            else
            {
                throw new Exception("Google Adwords Report Type Error ! Could not find Enum value for report type");
            }

            //Creating AWQL
            StringBuilder sb = new StringBuilder();
            sb.Append("SELECT ");
            foreach (string item in GoogleStaticReportFields.REPORTS_FIELDS[reportType][file.Parameters["ReportFieldsType"].ToString()])
            {
                sb.Append(item);
                sb.Append(",");
            }
            sb.Remove(sb.Length - 1, 1); // removing last ","
            sb.Append(" FROM " + reportType.ToString());

            // NOTE(review): Contains("") tests for an empty-string field name, which looks
            // unintentional — confirm whether "Impressions" was meant here.
            if (!includeZeroImpression && GoogleStaticReportFields.REPORTS_FIELDS[reportType][file.Parameters["ReportFieldsType"].ToString()].Contains(""))
            {
                sb.Append(" WHERE Impressions > 0");
            }

            if (file.Name.Equals(GoogleStaticReportsNamesUtill._reportNames[GA201406.ReportDefinitionReportType.PLACEHOLDER_FEED_ITEM_REPORT])) //Site link file
            {
                if (sb.ToString().Contains("WHERE"))
                {
                    sb.Append("AND ClickType IN [SITELINKS] AND PlaceholderType IN [1] ");
                }
                else
                {
                    sb.Append(" WHERE ClickType IN [SITELINKS] AND PlaceholderType IN [1] ");
                }
            }
            sb.Append(string.Format(" DURING {0},{1}", startDate, endDate));

            AdWordsAppConfig config = (AdWordsAppConfig)user.Config;
            string QUERY_REPORT_URL_FORMAT = "{0}/api/adwords/reportdownload/{1}?" + "__fmt={2}";
            string reportVersion = "v201406";
            string format = GA201406.DownloadFormat.GZIPPED_CSV.ToString();
            file.SourceUrl = string.Format(QUERY_REPORT_URL_FORMAT, config.AdWordsApiServer, reportVersion, format);

            string query = sb.ToString();
            string postData = string.Format("__rdquery={0}", HttpUtility.UrlEncode(query));
            awqls.Add(query);

            //Validate Report — only the first file per client is validated.
            if (firstCheck)
            {
                string error = string.Empty;
                if (!ValidateReport(file, user, postData, out error))
                {
                    //CHEKING FOR INVALID AUTHTOKEN
                    if (error.Contains(GA201406.AuthenticationErrorReason.GOOGLE_ACCOUNT_COOKIE_INVALID.ToString()))
                    {
                        //RENEWING AUTHTOKEN
                        throw new Exception("GOOGLE_ACCOUNT_COOKIE_INVALID, RENEWING AUTHTOKEN is not supported on version V201406");
                        //(user.Config as AdWordsAppConfig).AuthToken = AdwordsUtill.GetAuthToken(user, generateNew: true);
                    }
                    else
                    {
                        throw new Exception("Google Adwords API Error: " + error);
                    }
                }
                firstCheck = !firstCheck;
            }

            //If Validate - Success
            DownloadFile(file, user, postData);
        }
    }

    _batchDownloadOperation.Start();
    _batchDownloadOperation.Wait();
    _batchDownloadOperation.EnsureSuccess();
    //INCASE OF GENERAL EXCEPTION OPEN DELIVERY FILE HAS HTML AND VIEW INNER ERROR
    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// SalesForce retriever: refreshes the OAuth token if missing or expired, expands
/// TimePeriod placeholders inside each file's SOQL query, and downloads all query
/// results as a batch. A named mutex serializes retrievals across processes.
/// </summary>
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // NOTE(review): the mutex name "SalesForceRetriver" is misspelled but kept — it
    // identifies the cross-process lock used by deployed instances.
    // Dispose the mutex — it wraps an OS handle that was previously leaked.
    using (Mutex mutex = new Mutex(false, "SalesForceRetriver"))
    {
        BatchDownloadOperation batch = new BatchDownloadOperation();
        try
        {
            mutex.WaitOne();

            #region Authentication
            //get access token + refresh token from db (if exist)
            Token tokenResponse = Token.Get(Delivery.Parameters["SalesForceClientID"].ToString(), this.Delivery.Account.ID);

            //if not exist
            if (string.IsNullOrEmpty(tokenResponse.access_token) || (string.IsNullOrEmpty(tokenResponse.refresh_token)))
                tokenResponse = GetAccessTokenParamsFromSalesForce();

            //check if access_token is not expired
            if (tokenResponse.UpdateTime.Add((TimeSpan.Parse(AppSettings.Get(tokenResponse, "TimeOut")))) < DateTime.Now)
                tokenResponse = RefreshToken(tokenResponse.refresh_token);
            #endregion

            foreach (var file in Delivery.Files)
            {
                string query = file.Parameters["Query"].ToString();

                // Expand TimePeriod.EqualToCalendarUnits(<field>) into SOQL calendar predicates.
                MatchCollection calendarMatches = Regex.Matches(query, @"TimePeriod.EqualToCalendarUnits\(([A-Z\.a-z_]+)\)", RegexOptions.IgnoreCase);
                foreach (Match calendarMatch in calendarMatches)
                {
                    string dataParamName = calendarMatch.Groups[1].Value;
                    query = query.Replace(
                        string.Format("TimePeriod.EqualToCalendarUnits({0})", dataParamName),
                        string.Format(" CALENDAR_YEAR({0})={1} AND CALENDAR_MONTH({0})={2} AND DAY_IN_MONTH({0}) = {3} ",
                            dataParamName, Delivery.TimePeriodStart.Year, Delivery.TimePeriodStart.Month, Delivery.TimePeriodStart.Day));
                }

                // Expand TimePeriod.EqualToString(<field>) into a same-day datetime range predicate.
                MatchCollection timeMatches = Regex.Matches(query, @"TimePeriod.EqualToString\(([A-Z\.a-z_]+)\)", RegexOptions.IgnoreCase);
                foreach (Match timeMatch in timeMatches)
                {
                    string dataParamName = timeMatch.Groups[1].Value;
                    string sfTimePeriodStartFormat = string.Format("{0}T00:00:00.00Z", Delivery.TimePeriodStart.ToString("yyyy-MM-dd"));
                    string sfTimePeriodEndFormat = string.Format("{0}T23:59:59.59Z", Delivery.TimePeriodStart.ToString("yyyy-MM-dd"));
                    query = query.Replace(
                        string.Format("TimePeriod.EqualToString({0})", dataParamName),
                        string.Format("{0}>{1} AND {0}<{2} ", dataParamName, sfTimePeriodStartFormat, sfTimePeriodEndFormat));
                }

                file.Parameters.Add("Token", tokenResponse);
                file.SourceUrl = string.Format("{0}/services/data/v20.0/query?q={1}", tokenResponse.instance_url, query);

                HttpWebRequest request = (HttpWebRequest)WebRequest.Create(file.SourceUrl);
                request.Headers.Add("Authorization: OAuth " + tokenResponse.access_token);

                FileDownloadOperation fileDownloadOperation = file.Download(request);
                batch.Add(fileDownloadOperation);
            }

            batch.Start();
            batch.Wait();
            batch.EnsureSuccess();

            // Multi-page responses are not followed; re-enable when pagination is needed:
            // FetchNext(this.Delivery.Files, 1);
        }
        finally
        {
            mutex.ReleaseMutex();
        }
    }

    Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
/// <summary>
/// Facebook retriever: downloads a 1-row "length" file per query to learn total
/// row counts, generates one "data" file per ROW_LIMIT-sized page, registers them
/// on the delivery (grouped by type under "FilesByType"), and downloads all pages
/// as a second batch.
/// </summary>
protected override ServiceOutcome DoPipelineWork()
{
    //Get Access token
    if (!Configuration.Parameters.ContainsKey(FacebookConfigurationOptions.AccessToken))
    {
        throw new Exception(String.Format("Missing Configuration Param: {0}", FacebookConfigurationOptions.AccessToken));
    }
    _accessToken = Configuration.Parameters.Get<string>(FacebookConfigurationOptions.AccessToken);

    #region Get file legth
    // download legth files to get no. of rows in each file (FB limit up to 500 records per file)
    // (regular file which contains rows counter at hte end of the file)
    var countBatch = new BatchDownloadOperation();

    // Drop every non-"Length" file; only count files take part in this phase.
    var toRemove = from f in Delivery.Files where !f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length) select f.Name;
    foreach (var item in toRemove.ToList())
    {
        Delivery.Files.Remove(item);
    }
    foreach (var file in Delivery.Files)
    {
        if (file.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length))
        {
            // limit=1 — only the row counter in the response is needed.
            FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url] + "limit=1"));
            countBatch.Add(fileDownloadOperation);
        }
    }
    countBatch.Progressed += batch_Progressed;
    countBatch.Start();
    countBatch.Wait();
    countBatch.EnsureSuccess();
    #endregion

    // download data
    var files = new List<DeliveryFile>();
    var filesByType = new Dictionary<Consts.FileTypes, List<string>>();
    Delivery.Parameters.Add("FilesByType", filesByType);
    foreach (var file in Delivery.Files.Where(f => f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)))
    {
        using (var reader = new StreamReader(file.OpenContents()))
        {
            var offset = 0;
            var json = reader.ReadToEnd();
            var t = JsonConvert.DeserializeObject<FileMetadata>(json);
            // Create one data file per page of ROW_LIMIT rows.
            while (offset < t.Count)
            {
                // The length file's Name is a format template taking the page offset.
                var f = new DeliveryFile { Name = string.Format(file.Name, offset) };
                f.Parameters.Add(Consts.DeliveryFileParameters.Url, string.Format("{0}&limit={1}&offset={2}",
                                                                                  file.Parameters[Consts.DeliveryFileParameters.Url], ROW_LIMIT, offset));
                f.Parameters.Add(Consts.DeliveryFileParameters.FileSubType, (long)Consts.FileSubType.Data);
                f.Parameters.Add(Consts.DeliveryFileParameters.FileType, Enum.Parse(typeof(Consts.FileTypes), file.Parameters[Consts.DeliveryFileParameters.FileType].ToString()));
                files.Add(f);
                if (!filesByType.ContainsKey((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]))
                {
                    filesByType.Add((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType], new List<string>());
                }
                filesByType[(Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]].Add(f.Name);
                offset += ROW_LIMIT;
            }
        }
    }

    var batch = new BatchDownloadOperation();

    // Register the generated page files on the delivery and persist before downloading.
    foreach (var file in files)
    {
        Delivery.Files.Add(file);
    }
    Delivery.Save();
    foreach (var file in files.Where(fi => fi.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Data)))
    {
        FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
        batch.Add(fileDownloadOperation);
    }
    batch.Progressed += batch_Progressed;
    batch.Start();
    batch.Wait();
    batch.EnsureSuccess();
    Delivery.Save();
    return (ServiceOutcome.Success);
}
/// <summary>
/// Google Adwords (v201302) retriever: per client ID, builds an AWQL query for each
/// delivery file, validates the first query (renewing the auth token once if the
/// cookie is invalid), and downloads the gzipped CSV reports one at a time.
/// </summary>
protected override ServiceOutcome DoPipelineWork()
{
    _batchDownloadOperation = new BatchDownloadOperation { MaxConcurrent = 1 };
    _batchDownloadOperation.Progressed += _batchDownloadOperation_Progressed;
    var includeZeroImpression = Boolean.Parse(Delivery.Parameters["includeZeroImpression"].ToString());

    // time period
    var startDate = Delivery.TimePeriodDefinition.Start.ToDateTime().ToString("yyyyMMdd");
    var endDate = Delivery.TimePeriodDefinition.End.ToDateTime().ToString("yyyyMMdd");

    foreach (var clientId in (string[])Delivery.Parameters["AdwordsClientIDs"])
    {
        //Get all files on specific client
        var files = Delivery.Files.Where(x => x.Parameters["AdwordsClientID"].ToString() == clientId);

        //Setting Adwords User
        var headers = new Dictionary<string, string>
        {
            { "DeveloperToken", Delivery.Parameters["DeveloperToken"].ToString() },
            { "UserAgent", FileManager.UserAgentString },
            { "EnableGzipCompression", "true" },
            { "ClientCustomerId", clientId },
            { "Email", Delivery.Parameters["MccEmail"].ToString() }
        };
        var user = new AdWordsUser(headers);

        // AuthToken
        var config = user.Config as AdWordsAppConfig;
        if (config == null)
        {
            throw new Exception("Failed to convert AdwordUser.Config to AdWordsAppConfig");
        }
        config.AuthToken = AdwordsUtill.GetAuthToken(user);

        var firstCheck = true;
        var awqls = new List<string>();
        foreach (var file in files)
        {
            // report type — fail fast on an unrecognized report type name.
            if (!Enum.IsDefined(typeof(GA201302.ReportDefinitionReportType), file.Parameters["ReportType"].ToString()))
            {
                throw new ConfigurationErrorsException(String.Format("Unknown Google Adwords Report Type '{0}'", file.Parameters["ReportType"]));
            }

            // Creating AWQL
            var sb = new StringBuilder();
            sb.AppendFormat("SELECT {0} FROM {1}", file.Parameters["ReportFields"], file.Parameters["ReportType"]);
            if (!includeZeroImpression)
            {
                sb.Append(" WHERE Impressions > 0");
            }
            sb.AppendFormat(" DURING {0},{1}", startDate, endDate);

            var format = GA201302.DownloadFormat.GZIPPED_CSV.ToString();
            file.SourceUrl = string.Format(QUERY_REPORT_URL_FORMAT, config.AdWordsApiServer, REPORT_VERSION, format);
            var query = sb.ToString();
            var postData = string.Format("__rdquery={0}", HttpUtility.UrlEncode(query));
            awqls.Add(query);

            //Validate Report — only the first file per client is validated.
            if (firstCheck)
            {
                string error;
                if (!ValidateReport(file, user, postData, out error))
                {
                    //CHEKING FOR INVALID AUTHTOKEN
                    if (error.Contains(GA201302.AuthenticationErrorReason.GOOGLE_ACCOUNT_COOKIE_INVALID.ToString()))
                    {
                        //RENEWING AUTHTOKEN
                        config.AuthToken = AdwordsUtill.GetAuthToken(user, generateNew: true);
                    }
                    else
                    {
                        throw new Exception(String.Format("Google Adwords API Error: {0}", error));
                    }
                }
                firstCheck = false;
            }
            //If Validate - Success
            DownloadFile(file, user, postData);
        }
    }

    Progress = 0.2;
    _batchDownloadOperation.Start();
    _batchDownloadOperation.Wait();
    _batchDownloadOperation.EnsureSuccess();
    //INCASE OF GENERAL EXCEPTION OPEN DELIVERY FILE HAS HTML AND VIEW INNER ERROR
    Progress = 0.9;
    Delivery.Save();
    return (ServiceOutcome.Success);
}