protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) =>
    {
        this.Progress = batch.Progress * 0.95;
    });

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        if (String.IsNullOrWhiteSpace(file.SourceUrl))
            continue;

        DeliveryFileDownloadOperation download = file.Download();
        download.Ended += new EventHandler(download_Ended);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
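The `download_Ended` handler wired above is referenced but not shown in any of these snippets. A minimal sketch of what it might look like, assuming the `Log.Write` API visible in the snippets below; the two-argument overload and the logged message are assumptions, not the framework's confirmed implementation:

// Hypothetical handler: logs each finished download.
// The cast and the Log.Write overload are assumptions; the framework's actual
// event args may carry richer status information.
void download_Ended(object sender, EventArgs e)
{
    var download = sender as DeliveryFileDownloadOperation;
    if (download != null)
        Log.Write("Delivery file download ended", LogMessageType.Information);
}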
protected override ServiceOutcome DoPipelineWork()
{
    var mutex = new Mutex(false, "SalesForceRetriever");
    var batch = new BatchDownloadOperation();
    try
    {
        mutex.WaitOne();
        var token = GetAuthenticationToken();

        // A valid token exists; build an authenticated query request per file
        foreach (var file in Delivery.Files)
        {
            // The "Query" parameter is a SOQL template; fill in the delivery's time period
            var query = String.Format(file.Parameters["Query"].ToString(),
                Delivery.TimePeriodStart.Year, Delivery.TimePeriodStart.Month, Delivery.TimePeriodStart.Day);
            file.SourceUrl = String.Format("{0}/services/data/v20.0/query?q={1}", token.InstanceUrl, query);

            var request = (HttpWebRequest)WebRequest.Create(file.SourceUrl);
            request.Headers.Add("Authorization: OAuth " + token.AccessToken);

            var fileDownloadOperation = file.Download(request);
            batch.Add(fileDownloadOperation);
        }

        batch.Start();
        batch.Wait();
        batch.EnsureSuccess();
    }
    finally
    {
        mutex.ReleaseMutex();
    }
    Delivery.Save();
    return ServiceOutcome.Success;
}
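For illustration, a "Query" template of the shape sketched below would expand as follows for a delivery starting 2014-03-05. The template text itself is hypothetical; only the {0}/{1}/{2} placeholders are implied by the String.Format call above. Note that the snippet embeds the query in the URL without URL-encoding it, which only works for queries free of reserved characters:

// Hypothetical template stored in file.Parameters["Query"]:
//   SELECT Id, Amount FROM Opportunity
//     WHERE CALENDAR_YEAR(CreatedDate)={0} AND CALENDAR_MONTH(CreatedDate)={1} AND DAY_IN_MONTH(CreatedDate)={2}
// After String.Format with TimePeriodStart = 2014-03-05:
//   SELECT Id, Amount FROM Opportunity
//     WHERE CALENDAR_YEAR(CreatedDate)=2014 AND CALENDAR_MONTH(CreatedDate)=3 AND DAY_IN_MONTH(CreatedDate)=5
// The result is then placed, unencoded, into:
//   {InstanceUrl}/services/data/v20.0/query?q={query}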
private bool Download()
{
    bool result = true;
    foreach (DeliveryFile file in this.Delivery.Files)
    {
        try
        {
            HttpWebRequest request = (HttpWebRequest)WebRequest.Create(file.SourceUrl);
            _batchDownloadOperation.Add(file.Download(request));
        }
        catch (WebException webEx)
        {
            // Log the exception itself; InnerException is often null for WebException
            Log.Write("web alert", webEx, LogMessageType.Warning);
            result = false;

            // On failure, clear all source URLs so the delivery is not partially downloaded
            foreach (DeliveryFile deliveryFile in this.Delivery.Files)
            {
                deliveryFile.SourceUrl = string.Empty;
            }
            break;
        }
    }
    return result;
}
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) =>
    {
        this.ReportProgress(batch.Progress * 0.95);
    });

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        WebRequest request = WebRequest.Create(file.SourceUrl);
        this.Delivery.Save();

        DeliveryFileDownloadOperation download = file.Download(request);
        //download.Ended += new EventHandler(download_Ended);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
private void DownloadFile(DeliveryFile file, AdWordsUser user, string postData)
{
    WebRequest request = CreateAdwordsReportRequest(file, user, postData);
    FileDownloadOperation operation = file.Download(request);
    operation.RequestBody = postData;
    _batchDownloadOperation.Add(operation);
}
private FileInfo AddDataFilesToBatch(BatchDownloadOperation batch, DeliveryFile file)
{
    // Only data files (as opposed to length/metadata files) are added to the batch
    if (file.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Data))
    {
        FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
        batch.Add(fileDownloadOperation);
        return fileDownloadOperation.FileInfo;
    }
    return null;
}
protected override Core.Services.ServiceOutcome DoPipelineWork() { Mutex mutex = new Mutex(false, "GoogleAnalyticsRetriver"); BatchDownloadOperation batch = new BatchDownloadOperation(); try { mutex.WaitOne(); #region Authentication //get access token + refresh token from db (if exist) Auth2 oAuth2 = Auth2.Get(Delivery.Parameters["ClientID"].ToString()); //if not exist if (string.IsNullOrEmpty(oAuth2.access_token) || (string.IsNullOrEmpty(oAuth2.refresh_token))) { oAuth2 = GetAccessTokenParamsFromGoogleAnalytics(); } //check if access_token is not expired if (oAuth2.updateTime.AddSeconds(oAuth2.expires_in - 300) < DateTime.Now) { oAuth2 = RefreshToken(oAuth2.refresh_token); } #endregion // exist foreach (var file in Delivery.Files) { string urlEncoded = string.Format(file.SourceUrl, Uri.EscapeUriString(oAuth2.access_token)); HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create(urlEncoded); request.Headers.Add("Accept-Encoding", "gzip"); request.UserAgent = "winrar(gzip)"; FileDownloadOperation fileDownloadOperation = file.Download(request); batch.Add(fileDownloadOperation); } batch.Start(); batch.Wait(); batch.EnsureSuccess(); } finally { mutex.ReleaseMutex(); } Delivery.Save(); return(Core.Services.ServiceOutcome.Success); }
private void DownloadFile(DeliveryFile file)
{
    // WebRequest.Create returns a scheme-specific subclass (FtpWebRequest for ftp:// URLs)
    WebRequest request = WebRequest.Create(file.SourceUrl);

    /* FTP */
    if (request is FtpWebRequest)
    {
        FtpWebRequest ftpRequest = (FtpWebRequest)request;
        ftpRequest.UseBinary = true;
        ftpRequest.Credentials = new NetworkCredential
        (
            this.Delivery.Parameters["UserID"].ToString(),
            this.Delivery.Parameters["Password"].ToString()
        );
        ftpRequest.Method = WebRequestMethods.Ftp.DownloadFile;
        ftpRequest.UsePassive = true;
        _batch.Add(file.Download(request, Convert.ToInt64(file.Parameters["Size"])));
    }
    /* OTHER */
    else
    {
        _batch.Add(file.Download(request));
    }
}
private void DownloadFile(DeliveryFile file)
{
    FtpWebRequest request = (FtpWebRequest)WebRequest.Create(file.SourceUrl);
    request.UseBinary = true;
    request.Credentials = new NetworkCredential
    (
        this.Delivery.Parameters["UserID"].ToString(),
        this.Delivery.Parameters["Password"].ToString()
    );
    request.Method = WebRequestMethods.Ftp.DownloadFile;
    request.UsePassive = true;

    // The expected file size is passed along (presumably so the batch can report progress)
    _batchDownloadOperation.Add(file.Download(request, Convert.ToInt64(file.Parameters["Size"])));
}
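A plausible call site for this helper, modeled on the batch pattern used throughout these snippets; the surrounding service method is assumed, not shown in the source:

// Sketch of an assumed caller: queue every delivery file, then run the batch.
foreach (DeliveryFile file in this.Delivery.Files)
{
    DownloadFile(file);
}
_batchDownloadOperation.Start();
_batchDownloadOperation.Wait();
_batchDownloadOperation.EnsureSuccess();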
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) =>
    {
        this.ReportProgress(batch.Progress * 0.95);
    });

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        if (String.IsNullOrWhiteSpace(file.SourceUrl))
            continue;

        WebRequest request = WebRequest.Create(file.SourceUrl);
        request.ContentType = file.Parameters["Content-Type"].ToString();
        request.Method = "POST";

        // Headers must be set before the request body is written; once the request
        // has been submitted, HttpWebRequest no longer sends header changes
        request.Headers.Add("SOAPAction", file.Parameters["SOAPAction"].ToString());

        byte[] bytes = Encoding.UTF8.GetBytes(file.Parameters["Body"].ToString());
        request.ContentLength = bytes.Length;
        using (var stream = request.GetRequestStream())
        {
            stream.Write(bytes, 0, bytes.Length);
        }

        this.Delivery.Save();

        DeliveryFileDownloadOperation download = file.Download(request);
        download.Ended += new EventHandler(download_Ended);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    // Add a retrieved history entry for the entire delivery
    this.Delivery.Save();
    return ServiceOutcome.Success;
}
private List <DeliveryFile> FetchNext(List <DeliveryFile> fetchFrom, int offset) { BatchDownloadOperation nextBatch = new BatchDownloadOperation(); List <DeliveryFile> nextRecordsFiles = new List <DeliveryFile>(); foreach (DeliveryFile ReportFile in fetchFrom) { //setting cuurent file has batched and batching next file ReportFile.Parameters.Add("Batch", true); string fileName = ReportFile.Name + "-" + offset; JsonDynamicReader reportReader = new JsonDynamicReader(ReportFile.OpenContents(compression: FileCompression.None), "$.nextRecordsUrl"); string nextRecordPath; if (reportReader.Read()) { nextRecordPath = reportReader.Current.nextRecordsUrl; DeliveryFile nextRecordFile = new DeliveryFile(); nextRecordFile.SourceUrl = ((Token)(ReportFile.Parameters["Token"])).instance_url + nextRecordPath; HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create(nextRecordFile.SourceUrl); request.Headers.Add("Authorization: OAuth " + ((Token)(ReportFile.Parameters["Token"])).access_token); //check if response contains more than one file FileDownloadOperation fileDownloadOperation = nextRecordFile.Download(request); nextBatch.Add(fileDownloadOperation); nextRecordsFiles.Add(nextRecordFile); } } if (nextRecordsFiles.Count > 0) { nextBatch.Start(); nextBatch.Wait(); nextBatch.EnsureSuccess(); foreach (DeliveryFile file in FetchNext(nextRecordsFiles, offset)) { this.Delivery.Files.Add(file); } } return(nextRecordsFiles); }
protected override ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    var batch = new BatchDownloadOperation();
    batch.Progressed += (sender, e) => { Progress = batch.Progress * 0.95; };

    foreach (var file in Delivery.Files)
    {
        if (String.IsNullOrWhiteSpace(file.SourceUrl))
            continue;

        var request = WebRequest.Create(file.SourceUrl);
        request.ContentType = file.Parameters["Content-Type"].ToString();
        request.Method = "POST";

        // Headers must be set before the body is written, or they are never sent
        request.Headers.Add("SOAPAction", file.Parameters["SOAPAction"].ToString());

        byte[] bytes = Encoding.UTF8.GetBytes(file.Parameters["Body"].ToString());
        request.ContentLength = bytes.Length;
        using (var stream = request.GetRequestStream())
        {
            stream.Write(bytes, 0, bytes.Length);
        }

        // TODO: shirat - to remove? why saving delivery between files?
        Delivery.Save();

        var download = file.Download(request);
        download.Ended += download_Ended;
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    Delivery.Save();
    return ServiceOutcome.Success;
}
protected override Core.Services.ServiceOutcome DoPipelineWork()
{
    // Create a batch and use its progress as the service's progress
    BatchDownloadOperation batch = new BatchDownloadOperation();
    batch.Progressed += new EventHandler((sender, e) =>
    {
        this.ReportProgress(batch.Progress * 0.95);
    });

    foreach (DeliveryFile file in this.Delivery.Files)
    {
        WebRequest request = WebRequest.Create(file.SourceUrl);

        // Disabled POST/SOAP variant kept for reference:
        //request.ContentType = file.Parameters["Content-Type"].ToString();
        //request.Method = "POST";
        //byte[] bytes = Encoding.UTF8.GetBytes(string.Empty);
        //request.ContentLength = bytes.Length;
        //using (var stream = request.GetRequestStream())
        //{
        //    stream.Write(bytes, 0, bytes.Length);
        //}
        ////Headers
        //request.Headers.Add("SOAPAction", file.Parameters["SOAPAction"].ToString());

        this.Delivery.Save();

        DeliveryFileDownloadOperation download = file.Download(request);
        //download.Ended += new EventHandler(download_Ended);
        batch.Add(download);
    }

    batch.Start();
    batch.Wait();

    this.Delivery.Save();
    return Core.Services.ServiceOutcome.Success;
}
private void Next(IEnumerable<DeliveryFile> nextfiles)
{
    if (!nextfiles.Any())
        return;

    var newFiles = new List<DeliveryFile>();
    foreach (DeliveryFile file in nextfiles)
    {
        counter++;
        CreateNextFiles(file, counter, newFiles);
    }

    if (!newFiles.Any())
        return;

    foreach (var file in newFiles)
    {
        this.Delivery.Files.Add(file);
    }
    this.Delivery.Save();

    BatchDownloadOperation batch = new BatchDownloadOperation();
    foreach (DeliveryFile file in newFiles.Where(fi => fi.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.New)))
    {
        // Store the sub-type as a long, matching the (long) comparisons used everywhere else;
        // the original assigned the raw enum value here, which those Equals checks never match
        file.Parameters[Consts.DeliveryFileParameters.FileSubType] = (long)Consts.FileSubType.Data;
        FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
        batch.Add(fileDownloadOperation);
    }
    RunBatch(batch);
    Next(newFiles);
}
protected override ServiceOutcome DoPipelineWork()
{
    _baseAddress = new Uri(this.Instance.Configuration.Options[FacebookConfigurationOptions.BaseServiceAddress]);

    // Get access token
    _accessToken = this.Instance.Configuration.Options[FacebookConfigurationOptions.Auth_AccessToken];
    _appSecretProof = GetAppSecretProof();

    BatchDownloadOperation countBatch = new BatchDownloadOperation();

    // Keep only the "length" files; the data files are regenerated from them later
    var toRemove = from f in Delivery.Files
                   where !f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)
                   select f.Name;
    foreach (string item in toRemove.ToList())
    {
        Delivery.Files.Remove(item);
    }

    //files = new List<DeliveryFile>();
    Delivery.Parameters.Add("FilesByType", filesByType);

    FileDownloadOperation fileDownloadOperation;
    foreach (DeliveryFile file in Delivery.Files)
    {
        if (file.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length))
        {
            fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
            countBatch.Add(fileDownloadOperation);
        }

        var fileTypeStr = Convert.ToString(file.Parameters[Consts.DeliveryFileParameters.FileType]);
        Consts.FileTypes fileType = (Consts.FileTypes)Enum.Parse(typeof(Consts.FileTypes), fileTypeStr);
        if (!filesByType.ContainsKey(fileType))
        {
            filesByType.Add(fileType, new List<string>());
        }
        filesByType[fileType].Add(file.Name);
    }

    countBatch.Progressed += new EventHandler(counted_Batch_Progressed);
    RunBatch(countBatch);

    var nextfiles = Delivery.Files.Where(f => f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length));
    Next(nextfiles);

    this.Delivery.Save();

    ////batch.Progressed += new EventHandler(batch_Progressed);
    //RunBatch(batch);
    //this.Delivery.Save();
    return ServiceOutcome.Success;
}
protected override Core.Services.ServiceOutcome DoPipelineWork() { Mutex mutex = new Mutex(false, "SalesForceRetriver"); BatchDownloadOperation batch = new BatchDownloadOperation(); try { mutex.WaitOne(); #region Authentication //get access token + refresh token from db (if exist) Token tokenResponse = Token.Get(Delivery.Parameters["SalesForceClientID"].ToString(), this.Delivery.Account.ID); //if not exist if (string.IsNullOrEmpty(tokenResponse.access_token) || (string.IsNullOrEmpty(tokenResponse.refresh_token))) { tokenResponse = GetAccessTokenParamsFromSalesForce(); } //check if access_token is not expired if (tokenResponse.UpdateTime.Add((TimeSpan.Parse(AppSettings.Get(tokenResponse, "TimeOut")))) < DateTime.Now) { tokenResponse = RefreshToken(tokenResponse.refresh_token); } #endregion // exist foreach (var file in Delivery.Files) { string query = file.Parameters["Query"].ToString(); //Regex for Calendar units. MatchCollection calendarMatches = Regex.Matches(query, @"TimePeriod.EqualToCalendarUnits\(([A-Z\.a-z_]+)\)", RegexOptions.IgnoreCase); if (calendarMatches.Count > 0) { foreach (Match calendarMatch in calendarMatches) { string dataParamName = calendarMatch.Groups[1].Value; query = query.Replace(string.Format("TimePeriod.EqualToCalendarUnits({0})", dataParamName), string.Format(" CALENDAR_YEAR({0})={1} AND CALENDAR_MONTH({0})={2} AND DAY_IN_MONTH({0}) = {3} ", dataParamName, Delivery.TimePeriodStart.Year, Delivery.TimePeriodStart.Month, Delivery.TimePeriodStart.Day)); } } //Regex for TimePeriodStringFormat units. MatchCollection timeMatches = Regex.Matches(query, @"TimePeriod.EqualToString\(([A-Z\.a-z_]+)\)", RegexOptions.IgnoreCase); if (timeMatches.Count > 0) { foreach (Match calendarMatch in timeMatches) { string dataParamName = calendarMatch.Groups[1].Value; string sfTimePeriodStartFormat = string.Format("{0}T00:00:00.00Z", Delivery.TimePeriodStart.ToString("yyyy-MM-dd")); string sfTimePeriodEndFormat = string.Format("{0}T23:59:59.59Z", Delivery.TimePeriodStart.ToString("yyyy-MM-dd")); query = query.Replace(string.Format("TimePeriod.EqualToString({0})", dataParamName), string.Format("{0}>{1} AND {0}<{2} ", dataParamName, sfTimePeriodStartFormat, sfTimePeriodEndFormat)); } } file.Parameters.Add("Token", tokenResponse); file.SourceUrl = string.Format("{0}/services/data/v20.0/query?q={1}", tokenResponse.instance_url, query); HttpWebRequest request = (HttpWebRequest)HttpWebRequest.Create(file.SourceUrl); request.Headers.Add("Authorization: OAuth " + tokenResponse.access_token); //check if response contains more than one file FileDownloadOperation fileDownloadOperation = file.Download(request); batch.Add(fileDownloadOperation); } batch.Start(); batch.Wait(); batch.EnsureSuccess(); //supporting more than one file per query int offset = 1; // FetchNext(this.Delivery.Files, offset); } finally { mutex.ReleaseMutex(); } Delivery.Save(); return(Core.Services.ServiceOutcome.Success); }
protected override ServiceOutcome DoPipelineWork()
{
    // Get access token
    if (!Configuration.Parameters.ContainsKey(FacebookConfigurationOptions.AccessToken))
    {
        throw new Exception(String.Format("Missing Configuration Param: {0}", FacebookConfigurationOptions.AccessToken));
    }
    _accessToken = Configuration.Parameters.Get<string>(FacebookConfigurationOptions.AccessToken);

    #region Get file length
    // Download length files to get the number of rows in each file (FB limits a file to 500 records);
    // a length file is a regular file that carries a row counter at the end
    var countBatch = new BatchDownloadOperation();
    var toRemove = from f in Delivery.Files
                   where !f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)
                   select f.Name;
    foreach (var item in toRemove.ToList())
    {
        Delivery.Files.Remove(item);
    }

    foreach (var file in Delivery.Files)
    {
        if (file.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length))
        {
            FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url] + "limit=1"));
            countBatch.Add(fileDownloadOperation);
        }
    }
    countBatch.Progressed += batch_Progressed;
    countBatch.Start();
    countBatch.Wait();
    countBatch.EnsureSuccess();
    #endregion

    // Download data: split each length file into data files of ROW_LIMIT rows each
    var files = new List<DeliveryFile>();
    var filesByType = new Dictionary<Consts.FileTypes, List<string>>();
    Delivery.Parameters.Add("FilesByType", filesByType);

    foreach (var file in Delivery.Files.Where(f => f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)))
    {
        using (var reader = new StreamReader(file.OpenContents()))
        {
            var offset = 0;
            var json = reader.ReadToEnd();
            var t = JsonConvert.DeserializeObject<FileMetadata>(json);
            while (offset < t.Count)
            {
                var f = new DeliveryFile { Name = string.Format(file.Name, offset) };
                f.Parameters.Add(Consts.DeliveryFileParameters.Url, string.Format("{0}&limit={1}&offset={2}", file.Parameters[Consts.DeliveryFileParameters.Url], ROW_LIMIT, offset));
                f.Parameters.Add(Consts.DeliveryFileParameters.FileSubType, (long)Consts.FileSubType.Data);
                f.Parameters.Add(Consts.DeliveryFileParameters.FileType, Enum.Parse(typeof(Consts.FileTypes), file.Parameters[Consts.DeliveryFileParameters.FileType].ToString()));
                files.Add(f);

                if (!filesByType.ContainsKey((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]))
                {
                    filesByType.Add((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType], new List<string>());
                }
                filesByType[(Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]].Add(f.Name);

                offset += ROW_LIMIT;
            }
        }
    }

    var batch = new BatchDownloadOperation();
    foreach (var file in files)
    {
        Delivery.Files.Add(file);
    }
    Delivery.Save();

    foreach (var file in files.Where(fi => fi.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Data)))
    {
        FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
        batch.Add(fileDownloadOperation);
    }
    batch.Progressed += batch_Progressed;
    batch.Start();
    batch.Wait();
    batch.EnsureSuccess();

    Delivery.Save();
    return ServiceOutcome.Success;
}
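As a concrete trace of the splitting loop above, assume a length file reporting 1,200 rows and a ROW_LIMIT of 500 (the 500-record limit is stated in the comment; ROW_LIMIT's actual value is not shown in this snippet). Three data files are generated:

// offset = 0    -> {url}&limit=500&offset=0      (rows    1..500)
// offset = 500  -> {url}&limit=500&offset=500    (rows  501..1000)
// offset = 1000 -> {url}&limit=500&offset=1000   (rows 1001..1200)
// offset = 1500 -> loop exits (1500 >= 1200)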
protected override ServiceOutcome DoPipelineWork()
{
    _baseAddress = new Uri(this.Instance.Configuration.Options[FacebookConfigurationOptions.BaseServiceAddress]);

    // Get access token
    _accessToken = this.Instance.Configuration.Options[FacebookConfigurationOptions.Auth_AccessToken];
    _appSecretProof = GetAppSecretProof();

    // Download the "length" files first (limit=1) to learn each file's row count
    BatchDownloadOperation countBatch = new BatchDownloadOperation();
    var toRemove = from f in Delivery.Files
                   where !f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)
                   select f.Name;
    foreach (string item in toRemove.ToList())
    {
        Delivery.Files.Remove(item);
    }

    foreach (DeliveryFile file in Delivery.Files)
    {
        if (file.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length))
        {
            FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString() + "limit=1"));
            countBatch.Add(fileDownloadOperation);
        }
    }
    countBatch.Progressed += new EventHandler(counted_Batch_Progressed);
    countBatch.Start();
    countBatch.Wait();
    countBatch.EnsureSuccess();

    // Split each length file into data files of up to 500 rows
    List<DeliveryFile> files = new List<DeliveryFile>();
    Dictionary<Consts.FileTypes, List<string>> filesByType = new Dictionary<Consts.FileTypes, List<string>>();
    Delivery.Parameters.Add("FilesByType", filesByType);

    foreach (DeliveryFile file in Delivery.Files.Where(f => f.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Length)))
    {
        using (StreamReader reader = new StreamReader(file.OpenContents()))
        {
            int offset = 0;
            int limit = 500;
            string json = reader.ReadToEnd();
            MyType t = JsonConvert.DeserializeObject<MyType>(json);
            while (offset < t.count)
            {
                DeliveryFile f = new DeliveryFile();
                f.Name = string.Format(file.Name, offset);
                f.Parameters.Add(Consts.DeliveryFileParameters.Url, string.Format("{0}&limit={1}&offset={2}", file.Parameters[Consts.DeliveryFileParameters.Url], limit, offset));
                f.Parameters.Add(Consts.DeliveryFileParameters.FileSubType, (long)Consts.FileSubType.Data);
                f.Parameters.Add(Consts.DeliveryFileParameters.FileType, Enum.Parse(typeof(Consts.FileTypes), file.Parameters[Consts.DeliveryFileParameters.FileType].ToString()));
                files.Add(f);

                if (!filesByType.ContainsKey((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]))
                {
                    filesByType.Add((Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType], new List<string>());
                }
                filesByType[(Consts.FileTypes)f.Parameters[Consts.DeliveryFileParameters.FileType]].Add(f.Name);

                offset += limit;
            }
        }
    }

    foreach (var file in files)
    {
        this.Delivery.Files.Add(file);
    }
    this.Delivery.Save();

    BatchDownloadOperation batch = new BatchDownloadOperation();
    foreach (DeliveryFile file in files.Where(fi => fi.Parameters[Consts.DeliveryFileParameters.FileSubType].Equals((long)Consts.FileSubType.Data)))
    {
        FileDownloadOperation fileDownloadOperation = file.Download(CreateRequest(file.Parameters[Consts.DeliveryFileParameters.Url].ToString()));
        batch.Add(fileDownloadOperation);
    }
    //batch.Progressed += new EventHandler(batch_Progressed);
    batch.Start();
    batch.Wait();
    batch.EnsureSuccess();

    this.Delivery.Save();
    return ServiceOutcome.Success;
}