/// <summary>
/// Loads the agent profile, optionally overlaying server-side configuration fetched
/// from the cloud, and merges the result into <paramref name="source"/>.
/// </summary>
/// <param name="source">The locally loaded profile to merge into (also the return value).</param>
/// <param name="conn">Cloud connection used to fetch the server profile.</param>
/// <returns>The merged profile (same instance as <paramref name="source"/>).</returns>
/// <exception cref="Exception">Rethrown when fetching the server profile fails (cancellation is swallowed).</exception>
public async Task<Profile> LoadProfile(Profile source, LifeImageCloudConnection conn)
{
    var taskInfo = "";
    Profile newProfile = null;

    try
    {
        // Only consult the cloud when startup explicitly asks for the server profile.
        if (source.startupParams.getServerProfile == true)
        {
            newProfile = await GetAgentConfigurationFromCloud(conn, Profile.rowVersion, Profile._overrideVersionAndModifiedDate);
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"Task was canceled.");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, $"{taskInfo} Problem getting server profile. Remote Configuration Not Possible!!");
        // BUGFIX: was "throw e;", which resets the stack trace; "throw;" preserves it.
        throw;
    }

    // Merge even when newProfile is null; the merger handles the no-op case.
    _profileMerger.MergeProfile(source, newProfile);
    return source;
}
/// <summary>
/// Merges a freshly fetched study list into the connection's current study list.
/// Replaces the whole list when the current one is empty; otherwise adds only
/// studies whose uid is not already present.
/// </summary>
/// <param name="newStudies">Studies just retrieved from the cloud.</param>
/// <param name="Connection">Connection whose <c>studies</c> list is updated in place.</param>
public void MergeStudies(RootObject newStudies, LifeImageCloudConnection Connection)
{
    // BUGFIX: previously locked on Connection.studies, but the first branch below
    // both tests that field for null (lock(null) throws ArgumentNullException) and
    // replaces it, so concurrent callers could end up locking different objects.
    // Lock on the stable Connection reference instead.
    // NOTE(review): a private lock object on the connection would be preferable —
    // confirm no external code locks on Connection.
    lock (Connection)
    {
        if (Connection.studies == null || Connection.studies.ImagingStudy == null || Connection.studies.ImagingStudy.Count == 0)
        {
            _logger.Log(LogLevel.Information, $"Replacing studies");
            Connection.studies = newStudies;
        }
        else
        {
            //take the new studies from cloud and merge with existing
            foreach (var study in newStudies.ImagingStudy)
            {
                if (!Connection.studies.ImagingStudy.Exists(e => e.uid == study.uid))
                {
                    _logger.Log(LogLevel.Information, $"Adding {study.uid}");
                    Connection.studies.ImagingStudy.Add(study);
                }
                else
                {
                    _logger.Log(LogLevel.Information, $"Study already exists: {study.uid}");
                }
            }
        }
    }
}
/// <summary>
/// Keep-alive pass: pings the cloud when a login is not already pending and logs
/// a warning when the ping fails. All exceptions are contained here.
/// </summary>
/// <param name="Connection">Cloud connection to ping.</param>
/// <param name="httpManager">Tracks whether a (re)login is needed.</param>
public async Task KeepAlive(LifeImageCloudConnection Connection, IHttpManager httpManager)
{
    var taskInfo = $"connection: {Connection.name}";

    try
    {
        _logger.Log(LogLevel.Debug, $"{taskInfo} Entering KeepAlive");

        // Skip the ping entirely while a login is pending.
        if (httpManager.loginNeeded == false)
        {
            var pingSucceeded = await _cloudPingService.Ping(Connection, httpManager);
            if (!pingSucceeded)
            {
                _logger.Log(LogLevel.Warning, $"{taskInfo} ping failed");
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
    }
}
/// <summary>
/// Periodic download entry point; delegates the study iteration to DownloadImpl
/// and unregisters itself from the task manager on the way out.
/// </summary>
/// <param name="taskID">Task manager id for log correlation.</param>
/// <param name="Connection">Connection whose studies are downloaded.</param>
/// <param name="httpManager">Login-state tracker passed through to the download.</param>
public async Task Download(int taskID, LifeImageCloudConnection Connection, IHttpManager httpManager)
{
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    var stopKey = $"{Connection.name}.Download";

    _logger.Log(LogLevel.Debug, $"{taskInfo} Entering Download");

    try
    {
        _logger.Log(LogLevel.Debug, $"{taskInfo} Processing downloadStudy");

        // Reentrancy: kickoff launches this on an interval, so new work must only
        // start when existing work is not already at capacity. DownloadImpl also
        // guards against "Collection was modified" during enumeration.
        await DownloadImpl(Connection, httpManager, taskInfo);
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, $"{taskInfo} Exiting Download");
    }
    finally
    {
        _taskManager.Stop(stopKey);
    }
}
/// <summary>
/// Builds a JSON summary (one ImagingStudy with per-series instance counts) of the
/// cached items associated with <paramref name="routedItem"/>.
/// </summary>
/// <param name="Connection">Connection, used for logging context only.</param>
/// <param name="routedItem">Item whose id keys into the connection cache.</param>
/// <param name="taskID">Task id for log correlation.</param>
/// <returns>Serialized RootObject JSON describing the cached study.</returns>
public string GetCachedItemMetaData(LifeImageCloudConnection Connection, RoutedItem routedItem, long taskID)
{
    Throw.IfNull(Connection);
    var taskInfo = $"conn: {Connection.name} taskID: {taskID}";

    RootObject rootObject = new RootObject { ImagingStudy = new List<ImagingStudy>() };
    ImagingStudy study = new ImagingStudy { accession = new Accession(), patient = new Patient(), series = new List<Series>() };

    var cacheItem = LifeImageCloudConnection.cache[routedItem.id].ToList();

    // Group the cached items by study/series/patient/accession and count instances per group.
    // BUGFIX: materialize with ToList() — the original deferred query was enumerated
    // three times (foreach, Sum, Count), re-running the GroupBy each time.
    var query = cacheItem
        .GroupBy(item => new { item.Study, item.Series, item.PatientID, item.AccessionNumber })
        .Select(grp => new
        {
            Study = grp.Key.Study,
            Series = grp.Key.Series,
            PatientID = grp.Key.PatientID,
            Accession = grp.Key.AccessionNumber,
            Instances = grp.Count()
        })
        .ToList();

    foreach (var result in query)
    {
        _logger.Log(LogLevel.Debug,
            $"{taskInfo} Study:{result.Study} Series:{result.Series} PatientID:{result.PatientID} Accession:{result.Accession} Instances:{result.Instances}");

        var series = new Series { number = result.Instances, uid = result.Series };
        study.series.Add(series);
    }

    study.numberOfInstances = query.Sum(x => x.Instances);
    study.numberOfSeries = query.Count;
    rootObject.ImagingStudy.Add(study);

    var json = JsonSerializer.Serialize(rootObject);
    _logger.Log(LogLevel.Debug, $"{taskInfo} Json:{json}");
    return json;
}
/// <summary>
/// Long-running loop that ages the response cache: every KickOffInterval it walks
/// all cache entries and runs ProcessRoutedItem on each cached item. Runs until
/// the shared cancellation token fires.
/// </summary>
/// <param name="Connection">Connection, used for log context and the stop key.</param>
/// <param name="manager">Cache manager handed through to ProcessRoutedItem.</param>
/// <param name="taskID">Task id for log correlation.</param>
public async Task ExpireCache(LifeImageCloudConnection Connection, IConnectionRoutedCacheManager manager, int taskID)
{
    Throw.IfNull(manager);
    var taskInfo = $"task: {taskID} connection: {Connection.name}";

    try
    {
        while (!_taskManager.cts.IsCancellationRequested)
        {
            await Task.Delay(_profileStorage.Current.KickOffInterval, _taskManager.cts.Token);

            //age the response cache before asking for more
            // ToArray() snapshots both levels so concurrent mutation of the cache
            // does not invalidate the enumeration.
            foreach (var cacheItem in LifeImageCloudConnection.cache.ToArray())
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} Cache entry id: {cacheItem.Key}");

                foreach (var item in cacheItem.Value.ToArray())
                {
                    ProcessRoutedItem(item, Connection, manager, taskInfo);
                }
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (System.InvalidOperationException) //for the Collection was Modified, we can wait
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Waiting for requests to complete before getting new requests");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
    }
    finally
    {
        // Stop can itself throw; never let the finally block propagate.
        try
        {
            _taskManager.Stop($"{Connection.name}.ExpireCache");
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// Issues a GET against the cloud ping endpoint. Returns true only for HTTP 200;
/// any failure flags the connection as needing a fresh login and returns false.
/// </summary>
/// <param name="Connection">Connection supplying the base URL.</param>
/// <param name="httpManager">Receives loginNeeded = true when the ping fails.</param>
/// <returns>True when the ping returned 200 OK; false otherwise.</returns>
public async Task<bool> Ping(LifeImageCloudConnection Connection, IHttpManager httpManager)
{
    Throw.IfNull(Connection);
    Throw.IfNull(httpManager);

    var taskInfo = $"connection: {Connection.name}";
    var httpClient = _liteHttpClient.GetClient(Connection);

    try
    {
        if (httpClient != null)
        {
            try
            {
                var result = await httpClient.GetAsync(Connection.URL + CloudAgentConstants.PingUrl);
                if (result.StatusCode == HttpStatusCode.OK)
                {
                    return true;
                }
            }
            catch (Exception e)
            {
                _logger.LogFullException(e, $"{taskInfo} ping failed:");
                _liteHttpClient.DumpHttpClientDetails();
            }

            // Reached on any non-200 status or request exception.
            _logger.Log(LogLevel.Warning, $"{taskInfo} ping failed: setting loginNeeded = true");
        }

        httpManager.loginNeeded = true;
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
    }

    return false;
}
/// <summary>
/// Upload pass: waits for the ToCloud signal (or the kickoff interval), sends pending
/// items to the cloud, and — when a route exists from this connection — spawns a
/// GetRequests task to pull work back down.
/// </summary>
/// <param name="taskID">Task id for log correlation.</param>
/// <param name="Connection">Connection to upload for.</param>
/// <param name="manager">Supplies the ToCloudSignal semaphore.</param>
/// <param name="cache">Routed-item cache passed through to downstream services.</param>
/// <param name="httpManager">Login-state tracker passed through.</param>
public async Task Upload(int taskID, LifeImageCloudConnection Connection, ILifeImageCloudConnectionManager manager, IConnectionRoutedCacheManager cache, IHttpManager httpManager)
{
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    _logger.Log(LogLevel.Debug, $"{taskInfo} Entering Upload");

    try
    {
        // Wait for a signal or the kickoff interval — either way we attempt a send,
        // so the WaitAsync timeout result is intentionally ignored (the original
        // captured it in an unused local).
        await manager.ToCloudSignal.WaitAsync(_profileStorage.Current.KickOffInterval, _taskManager.cts.Token)
            .ConfigureAwait(false);

        await _sendToCloudService.SendToCloud(taskID, Connection, cache, httpManager);

        if (_rulesManager.DoesRouteDestinationExistForSource(Connection.name))
        {
            if (_taskManager.CanStart($"{Connection.name}.GetRequests"))
            {
                var newTaskID = _taskManager.NewTaskID();
                // BUGFIX: GetRequests previously received the parent taskID even though the
                // task is registered under newTaskID; pass newTaskID so logs correlate.
                Task task = new Task(
                    new Action(async () => await _cloudAgentTaskLoader.GetRequests(newTaskID, Connection, cache, httpManager)),
                    _taskManager.cts.Token);
                await _taskManager.Start(newTaskID, task, $"{Connection.name}.GetRequests", isLongRunning: false);
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (OperationCanceledException)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Wait Operation Canceled. Exiting Upload");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _logger.Log(LogLevel.Critical, $"{taskInfo} Exiting Upload");
    }
    finally
    {
        _taskManager.Stop($"{Connection.name}.Upload");
    }
}
/// <summary>
/// Logs out of the cloud: GETs the /logout endpoint, then clears the session id and
/// sync token. Exceptions are logged and swallowed.
/// </summary>
/// <param name="taskID">Task id for log correlation.</param>
/// <param name="connection">Connection to log out of (also stored on this instance).</param>
/// <param name="_manager">Holds the jSessionID that gets cleared.</param>
public async Task logout(int taskID, LifeImageCloudConnection connection, IHttpManager _manager)
{
    Connection = connection;
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    var httpClient = _liteHttpClient.GetClient(connection);

    try
    {
        //set the URL
        string logoutURL = Connection.URL + "/logout";
        _logger.Log(LogLevel.Debug, $"{taskInfo} logoutURL: {logoutURL}");

        var cookies = _liteHttpClient.GetCookies(logoutURL);
        _logger.LogCookies(cookies, taskInfo);

        // issue the GET
        var task = httpClient.GetAsync(logoutURL, _taskManager.cts.Token);
        var response = await task;

        // output the result
        _logger.LogHttpResponseAndHeaders(response, taskInfo);
        _logger.Log(LogLevel.Debug, $"{taskInfo} await response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");

        //clear out the jSessionID and syncTokens
        _manager.jSessionID = null;
        Connection.syncToken = null;

        // NOTE(review): disposes a client obtained from _liteHttpClient — presumably
        // intentional after logout, but confirm the factory does not pool this instance.
        httpClient.Dispose();
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (Exception e)
    {
        // CONSISTENCY FIX: include taskInfo like every other method in this file.
        _logger.LogFullException(e, taskInfo);
    }
}
/// <summary>
/// Iterates the connection's studies and hands each to ProcessImageStudy.
/// No-op when the study list has not been populated yet.
/// </summary>
/// <param name="Connection">Connection whose studies are processed.</param>
/// <param name="httpManager">Login-state tracker passed through.</param>
/// <param name="taskInfo">Pre-built log prefix from the caller.</param>
private async Task DownloadImpl(LifeImageCloudConnection Connection, IHttpManager httpManager, string taskInfo)
{
    var studies = Connection.studies;

    // Nothing fetched yet — nothing to do.
    if (studies == null || studies.ImagingStudy == null)
    {
        return;
    }

    var studyCount = studies.ImagingStudy.Count;
    var suffix = _profileStorage.Current.duplicatesDetectionDownload
        ? " (before duplicates elimination)."
        : ".";
    _logger.Log(LogLevel.Information, $"{taskInfo}: {studyCount} studies to download{suffix}");

    foreach (var imagingStudy in studies.ImagingStudy)
    {
        await ProcessImageStudy(Connection, imagingStudy, httpManager, taskInfo);
    }
}
/// <summary>
/// POSTs an upload-close for the routed item's study to the cloud, sending the cached
/// item metadata as the body. On 200 OK the item is dequeued normally; on 500/400 it
/// is dequeued to the error folder; 401 flags a relogin. Items with no Study are
/// dropped from the cache immediately.
/// </summary>
/// <param name="Connection">Cloud connection (base URL, toCloud queue).</param>
/// <param name="routedItem">Item whose study is being closed.</param>
/// <param name="cacheManager">Cache to remove the item from when done/failed.</param>
/// <param name="httpManager">Receives loginNeeded on 401.</param>
/// <param name="taskID">Task id for log correlation.</param>
public async Task PostCompletion(LifeImageCloudConnection Connection, RoutedItem routedItem, IConnectionRoutedCacheManager cacheManager, IHttpManager httpManager, long taskID)
{
    Throw.IfNull(Connection);
    Throw.IfNull(routedItem);
    Throw.IfNull(cacheManager);
    Throw.IfNull(httpManager);

    var stopWatch = new Stopwatch();
    stopWatch.Start();
    var taskInfo = $"task: {taskID} connection: {Connection.name} id: {routedItem.id} ";
    HttpResponseMessage response = null;
    var httpClient = _liteHttpClient.GetClient(Connection);

    try
    {
        // Without a Study uid there is nothing to close — drop it from the cache.
        if (routedItem.Study == null || routedItem.Study == "")
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} meta: {routedItem.RoutedItemMetaFile} cannot close routedItem.Study: {routedItem.Study} because null or blank.");
            cacheManager.RemoveCachedItem(routedItem);
            return;
        }

        //POST /api/agent/v1/study/{studyInstanceUid}/upload-close
        string studyCloseURL = Connection.URL + CloudAgentConstants.GetUploadCloseUrl(routedItem.Study);
        _logger.Log(LogLevel.Debug, $"{taskInfo} studyCloseURL: {studyCloseURL}");

        // Metadata is best-effort: a failure to build it is logged and an empty body is sent.
        var metadata = "";
        try
        {
            metadata = _cloudConnectionCacheAccessor.GetCachedItemMetaData(Connection, routedItem, taskID);
        }
        catch (Exception e)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Unable to produce metadata for {routedItem.id} {routedItem.RoutedItemMetaFile}: {e.Message} {e.StackTrace}");
        }

        using (HttpContent httpContent = new StringContent(metadata))
        {
            var cookies = _liteHttpClient.GetCookies(studyCloseURL);
            _logger.LogCookies(cookies, taskInfo);

            response = await httpClient.PostAsync(studyCloseURL, httpContent, _taskManager.cts.Token);

            // output the result
            _logger.LogHttpResponseAndHeaders(response, taskInfo);

            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                httpManager.loginNeeded = true;
            }

            //BOUR-995 we don't want to dequeue unless completed or failed
            if (response.StatusCode == HttpStatusCode.OK)
            {
                _routedItemManager.Init(routedItem);
                _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), error: false);
                cacheManager.RemoveCachedItem(routedItem);
            }

            //BOUR-995 we don't want to dequeue unless completed or failed
            if ((response.StatusCode == HttpStatusCode.InternalServerError) || response.StatusCode == HttpStatusCode.BadRequest)
            {
                _logger.Log(LogLevel.Warning, $"{taskInfo} {response.StatusCode} {response.ReasonPhrase}. Dequeuing to error folder");
                _liteHttpClient.DumpHttpClientDetails();

                _routedItemManager.Init(routedItem);
                _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), error: true);
                cacheManager.RemoveCachedItem(routedItem);
            }
        }

        stopWatch.Stop();
        _logger.Log(LogLevel.Information, $"{taskInfo} elapsed: {stopWatch.Elapsed}");
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }
        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        // Unexpected failure: log, dump client state, and drop the item from the cache.
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
        cacheManager.RemoveCachedItem(routedItem);
    }
    finally
    {
        // Stop/Dispose can throw; never let the finally block propagate.
        try
        {
            _taskManager.Stop($"{Connection.name}.PostCompletion");
            if (response != null)
            {
                response.Dispose();
            }
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// POSTs the routed item's cloud task results (base64-encoded JSON) back to the
/// agent-task-results endpoint, with the item's status as a query parameter.
/// On 200/500/400 AND a terminal status (COMPLETED/FAILED) the item is dequeued
/// (to the error folder for 500/400); 401 flags a relogin.
/// </summary>
/// <param name="Connection">Cloud connection (base URL, toCloud queue).</param>
/// <param name="routedItem">Item whose results are posted.</param>
/// <param name="cacheManager">Cache to remove the item from when terminal.</param>
/// <param name="httpManager">Receives loginNeeded on 401.</param>
/// <param name="taskID">Task id for log correlation.</param>
public async Task PostResponse(LifeImageCloudConnection Connection, RoutedItem routedItem, IConnectionRoutedCacheManager cacheManager, IHttpManager httpManager, long taskID)
{
    // CONSISTENCY FIX: guard arguments like sibling PostCompletion does.
    Throw.IfNull(Connection);
    Throw.IfNull(routedItem);
    Throw.IfNull(cacheManager);
    Throw.IfNull(httpManager);

    var stopWatch = new Stopwatch();
    stopWatch.Start();
    var taskInfo = $"task: {taskID} connection: {Connection.name} id: {routedItem.id} ";

    {
        _logger.Log(LogLevel.Debug, $"{taskInfo} request: {routedItem.request}");
        foreach (var results in routedItem.response)
        {
            _logger.Log(LogLevel.Debug, $"{taskInfo} response: {results}");
        }
    }

    HttpResponseMessage response = null;

    try
    {
        string json = JsonSerializer.Serialize(routedItem.cloudTaskResults);
        _logger.Log(LogLevel.Debug, $"{taskInfo} posting {json}");
        string base64Results = Convert.ToBase64String(Encoding.ASCII.GetBytes(json));

        //POST /api/agent/v1/agent-task-results/{id}?status={NEW|PENDING|COMPLETED|FAILED}
        string agentTasksURL = Connection.URL + CloudAgentConstants.GetAgentTaskResultUrl(routedItem.id);
        agentTasksURL += $"?status={routedItem.status}";
        _logger.Log(LogLevel.Debug, $"{taskInfo} agentTasksURL: {agentTasksURL}");

        var httpClient = _liteHttpClient.GetClient(Connection);

        using (HttpContent httpContent = new StringContent(base64Results))
        {
            var cookies = _liteHttpClient.GetCookies(agentTasksURL);
            _logger.LogCookies(cookies, taskInfo);

            response = await httpClient.PostAsync(agentTasksURL, httpContent, _taskManager.cts.Token);

            // output the result
            _logger.LogHttpResponseAndHeaders(response, taskInfo);

            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                httpManager.loginNeeded = true;
            }

            //BOUR-995 we don't want to dequeue unless completed or failed
            if (response.StatusCode == HttpStatusCode.OK &&
                (routedItem.status == RoutedItem.Status.COMPLETED || routedItem.status == RoutedItem.Status.FAILED))
            {
                _routedItemManager.Init(routedItem);
                _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), error: false);
                cacheManager.RemoveCachedItem(routedItem);
            }

            //BOUR-995 we don't want to dequeue unless completed or failed
            if ((response.StatusCode == HttpStatusCode.InternalServerError || response.StatusCode == HttpStatusCode.BadRequest) &&
                (routedItem.status == RoutedItem.Status.COMPLETED || routedItem.status == RoutedItem.Status.FAILED))
            {
                _logger.Log(LogLevel.Warning, $"{taskInfo} {response.StatusCode} {response.ReasonPhrase}. Dequeuing to error folder");
                _liteHttpClient.DumpHttpClientDetails();

                _routedItemManager.Init(routedItem);
                _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), error: true);
                cacheManager.RemoveCachedItem(routedItem);
            }
        }

        stopWatch.Stop();
        _logger.Log(LogLevel.Information, $"{taskInfo} elapsed: {stopWatch.Elapsed}");
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }
        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
    }
    finally
    {
        // Stop/Dispose can throw; never let the finally block propagate.
        try
        {
            _taskManager.Stop($"{Connection.name}.PostResponse");
            if (response != null)
            {
                response.Dispose();
            }
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// Uploads a batch of files to the cloud STOW endpoint as a single multipart/related
/// POST (optionally gzip-compressed). Adds X-Li-Destination share headers, optionally
/// computes compression statistics via a temp .gz file, and dequeues the batch only
/// when the server answers 200/202.
/// </summary>
/// <param name="batch">Files to send; must be non-empty (First() is taken).</param>
/// <param name="taskID">Task id for log correlation.</param>
/// <param name="Connection">Cloud connection (URL, share destinations, toCloud queue).</param>
/// <param name="httpManager">Receives loginNeeded on 401.</param>
public async Task stowAsMultiPart(List<RoutedItem> batch, int taskID, LifeImageCloudConnection Connection, IHttpManager httpManager)
{
    Throw.IfNull(Connection);
    Throw.IfNull(httpManager);
    var taskInfo = $"task: {taskID} connection: {Connection.name}";

    StreamContent streamContent = null;
    MultipartContent content = null;
    HttpResponseMessage response = null;
    string testFile = null;
    var firstRecord = batch.First();
    var httpClient = _liteHttpClient.GetClient(Connection);

    try
    {
        var stopWatch = new Stopwatch();
        stopWatch.Start();

        //set the URL
        //string stowURL = Connection.URL + "/api/agent/v1/stow/studies";
        string stowURL = Connection.URL + CloudAgentConstants.StowStudies;
        _logger.Log(LogLevel.Debug, $"{taskInfo} stowURL: {stowURL}");

        // generate guid for boundary...boundaries cannot be accidentally found in the content
        var boundary = Guid.NewGuid();
        _logger.Log(LogLevel.Debug, $"{taskInfo} boundary: {boundary}");

        // create the content
        content = new MultipartContent("related", boundary.ToString());

        //add the sharing headers
        // Share destinations come from the first record's toConnections entry for this connection.
        List<string> shareHeader = new List<string>();
        if (Connection.shareDestinations != null)
        {
            foreach (var connectionSet in firstRecord.toConnections.FindAll(e => e.connectionName.Equals(Connection.name)))
            {
                if (connectionSet.shareDestinations != null)
                {
                    foreach (var shareDestination in connectionSet.shareDestinations)
                    {
                        shareHeader.Add(shareDestination.boxUuid);
                        _logger.Log(LogLevel.Debug, $"{taskInfo} sharing to: {shareDestination.boxId} {shareDestination.boxName} {shareDestination.groupId} {shareDestination.groupName} {shareDestination.organizationName} {shareDestination.publishableBoxType}");
                    }
                }
            }
        }

        content.Headers.Add("X-Li-Destination", shareHeader);

        long fileSize = 0;
        var dir = _profileStorage.Current.tempPath + Path.DirectorySeparatorChar + Connection.name + Path.DirectorySeparatorChar + Constants.Dirs.ToCloud;
        Directory.CreateDirectory(dir);
        // Temp .gz file used only for compression-ratio statistics.
        testFile = dir + Path.DirectorySeparatorChar + Guid.NewGuid() + ".gz";

        using (FileStream compressedFileStream = File.Create(testFile))
        {
            using GZipStream compressionStream = new GZipStream(compressedFileStream, CompressionMode.Compress);

            foreach (var routedItem in batch)
            {
                if (File.Exists(routedItem.sourceFileName))
                {
                    // NOTE: routedItem.stream stays open here — StreamContent reads it when the POST executes.
                    routedItem.stream = File.OpenRead(routedItem.sourceFileName);
                    if (Connection.CalcCompressionStats)
                    {
                        routedItem.stream.CopyTo(compressionStream);
                    }
                    fileSize += routedItem.length;
                    streamContent = new StreamContent(routedItem.stream);
                    streamContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
                    {
                        FileName = routedItem.sourceFileName
                    };
                    content.Add(streamContent);
                    //shb 2017-11-01 streamcontent (uncompressed) is inside content (can be compressed below) server is complaining so will comment out
                    //streamContent.Headers.Add("Content-Transfer-Encoding", "gzip");
                    //shb 2017-11-10 shb added content-type header to solve
                    //controller.DicomRSControllerBase: Content-Encoding header has value null !!!
                    //controller.StowRSController: Unable to process part 1 no content-type parameter received
                    streamContent.Headers.Add("content-type", "application/dicom");
                }
                else
                {
                    _logger.Log(LogLevel.Error, $"{taskInfo} {routedItem.sourceFileName} no longer exists. Increase tempFileRetentionHours for heavy transfer backlogs that may take hours!!");
                }
            }
        }

        if (Connection.CalcCompressionStats)
        {
            FileInfo info = new FileInfo(testFile);
            _logger.Log(LogLevel.Information, $"{taskInfo} orgSize: {fileSize} compressedSize: {info.Length} reduction: {(fileSize == 0 ? 0 : (fileSize * 1.0 - info.Length) / (fileSize) * 100)}%");
        }

        // issue the POST — gzip-wrapped when the first record asks for compression.
        Task<HttpResponseMessage> task;

        if (firstRecord.Compress == true)
        {
            var compressedContent = new CompressedContent(content, "gzip");
            _logger.Log(LogLevel.Debug, $"{taskInfo} compressedContent.Headers {compressedContent.Headers} ");
            var cookies = _liteHttpClient.GetCookies(stowURL);
            _logger.LogCookies(cookies, taskInfo);

            task = httpClient.PostAsync(stowURL, compressedContent, _taskManager.cts.Token);
        }
        else
        {
            _logger.Log(LogLevel.Debug, $"{taskInfo} will send content.Headers {content.Headers}");
            var cookies = _liteHttpClient.GetCookies(stowURL);
            _logger.LogCookies(cookies, taskInfo);

            task = httpClient.PostAsync(stowURL, content, _taskManager.cts.Token);
        }

        response = await task;

        // output the result
        _logger.LogHttpResponseAndHeaders(response, taskInfo);
        _logger.Log(LogLevel.Debug, $"{taskInfo} response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");

        stopWatch.Stop();
        _logger.Log(LogLevel.Information, $"{taskInfo} elapsed: {stopWatch.Elapsed} size: {fileSize} rate: {(float)fileSize / stopWatch.Elapsed.TotalMilliseconds * 1000 / 1000000} MB/s");

        if (!(response.StatusCode == HttpStatusCode.OK || response.StatusCode == HttpStatusCode.Accepted))
        {
            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                httpManager.loginNeeded = true;
            }

            _logger.Log(LogLevel.Warning, $"stow of {firstRecord.sourceFileName} and others in batch failed with {response.StatusCode}");
            _liteHttpClient.DumpHttpClientDetails();
        }
        else
        {
            //dequeue the work, we're done!
            // Dispose the HTTP artifacts before dequeuing; the finally block's
            // re-dispose of the same references is tolerated by these types.
            if (streamContent != null)
            {
                streamContent.Dispose();
            }
            if (response != null)
            {
                response.Dispose();
            }
            if (content != null)
            {
                content.Dispose();
            }

            foreach (var ri in batch)
            {
                _routedItemManager.Init(ri);
                _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud));
            }
        }

        //delete the compression test file
        File.Delete(testFile);
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }
        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (FileNotFoundException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} {e.Message} {e.StackTrace}");
    }
    catch (IOException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} {e.Message} {e.StackTrace}");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
    }
    finally
    {
        // Best-effort cleanup of everything that may still be live.
        try
        {
            if (streamContent != null)
            {
                streamContent.Dispose();
            }
            if (response != null)
            {
                response.Dispose();
            }
            if (content != null)
            {
                content.Dispose();
            }
            File.Delete(testFile);
            _taskManager.Stop($"{Connection.name}.Stow");
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// Downloads a study (or one series/instance within it) via WADO as a multipart
/// stream, splits the parts into files under a temp directory, and enqueues each
/// file as a RoutedItem for the rules engine. Marks the series/instance download
/// complete on success.
/// </summary>
/// <param name="connection">Cloud connection (also stored on this instance).</param>
/// <param name="taskID">Task id for log correlation.</param>
/// <param name="study">Study providing the base WADO url and metadata.</param>
/// <param name="httpManager">Receives loginNeeded on 401.</param>
/// <param name="series">Optional: narrow the download to this series.</param>
/// <param name="instance">Optional: narrow the download to this instance.</param>
/// <param name="compress">Currently unused; kept for interface compatibility.</param>
public async Task wadoAsFileStream(
    LifeImageCloudConnection connection, int taskID, ImagingStudy study, IHttpManager httpManager,
    Series series = null, Instance instance = null, bool compress = true)
{
    Connection = connection;
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    var stopWatch = new Stopwatch();
    stopWatch.Start();

    string url = $"{study.url}";
    string dir = _profileStorage.Current.tempPath + Path.DirectorySeparatorChar + Connection.name + Path.DirectorySeparatorChar + Constants.Dirs.ToRules + Path.DirectorySeparatorChar + Guid.NewGuid();
    long fileSize = 0;
    HttpResponseMessage response = null;
    MultipartFileStreamProvider streamProvider = null;
    MultipartFileStreamProvider contents = null;
    var httpClient = _liteHttpClient.GetClient(connection);

    try
    {
        _logger.Log(LogLevel.Debug, $"{taskInfo} study url: {url} attempt {study.attempts}");

        if (series != null)
        {
            // BUGFIX: was "Connection.URL += ..." which permanently corrupted the
            // connection's base URL; the request (and the log below) use the local url.
            url += $"/series/{series.uid.Substring(8)}";
            _logger.Log(LogLevel.Debug, $"{taskInfo} seriesURL: {url} attempt {series.attempts}");
        }

        if (instance != null)
        {
            instance.downloadStarted = DateTime.Now;
            instance.attempts++;
            // BUGFIX: same as above — append to the local url, not Connection.URL.
            url += $"/instances/{instance.uid}";
            _logger.Log(LogLevel.Debug, $"{taskInfo} instanceURL: {url} attempt {instance.attempts}");
        }

        var cookies = _liteHttpClient.GetCookies(url);
        _logger.LogCookies(cookies, taskInfo);

        // issue the GET
        var task = httpClient.GetAsync(url, HttpCompletionOption.ResponseHeadersRead, _taskManager.cts.Token);

        try
        {
            response = await task;
        }
        catch (System.Threading.Tasks.TaskCanceledException)
        {
            _logger.Log(LogLevel.Information, $"{taskInfo} Task Canceled");
        }

        // BUGFIX: when the GET was canceled, response is null and the original code
        // fell into a NullReferenceException (silently absorbed by the catch below).
        if (response == null)
        {
            return;
        }

        // output the result
        _logger.LogHttpResponseAndHeaders(response, taskInfo);

        if (response.StatusCode != HttpStatusCode.OK)
        {
            if (response.StatusCode == HttpStatusCode.Unauthorized)
            {
                httpManager.loginNeeded = true;
            }
            _liteHttpClient.DumpHttpClientDetails();
            return;
        }

        // 2018-05-09 shb need to get header from Cloud to tell us how big it is
        if (!_util.IsDiskAvailable(dir, _profileStorage.Current, 16000000000)) //just using 16GB as a catch all
        {
            _logger.Log(LogLevel.Debug, $"{taskInfo} Insufficient disk to write {url} to {dir} guessing it could be 16GB");
            return;
        }

        _logger.Log(LogLevel.Debug, $"{taskInfo} download dir will be {dir}");
        Directory.CreateDirectory(dir);
        streamProvider = new MultipartFileStreamProvider(dir);
        contents = await response.Content.ReadAsMultipartAsync(streamProvider, _taskManager.cts.Token);

        int index = 0;
        _logger.Log(LogLevel.Debug, $"{taskInfo} Splitting {contents.FileData.Count} files into RoutedItems.");

        foreach (var part in contents.FileData)
        {
            try
            {
                index++;
                fileSize += new System.IO.FileInfo(part.LocalFileName).Length;
                _logger.Log(LogLevel.Debug, $"{taskInfo} downloaded file: {part.LocalFileName}");

                RoutedItem routedItem = new RoutedItem(fromConnection: Connection.name, sourceFileName: part.LocalFileName, taskID: taskID, fileIndex: index, fileCount: contents.FileData.Count)
                {
                    type = RoutedItem.Type.DICOM,
                    Study = study.uid,
                    AccessionNumber = study.accession?.value,
                    PatientID = study.patient?.display,
                    // NOTE(review): NREs when called with an instance but no series —
                    // confirm all callers pass series (kept as-is to preserve behavior).
                    Series = series.uid
                };

                _logger.Log(LogLevel.Debug, $"{taskInfo} Enqueuing RoutedItem {routedItem.sourceFileName}");
                _routedItemManager.Init(routedItem);
                _routedItemManager.Enqueue(Connection, Connection.toRules, nameof(Connection.toRules));
            }
            catch (Exception e)
            {
                _logger.LogFullException(e, taskInfo);
            }
        }

        //2018-04-27 shb moved completion marking outside of part loop to avoid duplicate entries in markSeriesComplete
        //also added duplicate check.
        if (series != null)
        {
            series.downloadCompleted = DateTime.Now;
            lock (Connection.markSeriesComplete)
            {
                if (!Connection.markSeriesComplete.Contains(series))
                {
                    Connection.markSeriesComplete.Add(series);
                }
            }
        }
        else if (instance != null)
        {
            //means this came from studies calls so we need to mark this download as complete
            instance.downloadCompleted = DateTime.Now;
            lock (Connection.markDownloadsComplete)
            {
                Connection.markDownloadsComplete.Add(new string[] { url, "download-complete" });
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (NullReferenceException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }
        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
    }
    finally
    {
        if (response != null)
        {
            response.Dispose();
        }
        _taskManager.Stop($"{Connection.name}.Wado");
    }

    stopWatch.Stop();
    _logger.Log(LogLevel.Information, $"{taskInfo} elapsed: {stopWatch.Elapsed} size: {fileSize} rate: {(float)fileSize / stopWatch.Elapsed.TotalMilliseconds * 1000 / 1000000} MB/s");
}
/// <summary>
/// Walks a study's series and, for each series that is incomplete, not recently
/// attempted, not over the attempt limit, and not already in the task list, starts
/// a Wado download task for it.
/// </summary>
/// <param name="taskID">Task id for log correlation.</param>
/// <param name="imagingStudy">Study whose series are considered for download.</param>
/// <param name="connection">Cloud connection (retry delay, max attempts).</param>
/// <param name="httpManager">Login-state tracker passed through to the download.</param>
public async Task DownloadStudy(int taskID, ImagingStudy imagingStudy, LifeImageCloudConnection connection, IHttpManager httpManager)
{
    var Connection = connection;
    var stopWatch = new Stopwatch();
    var taskInfo = $"task: {taskID} connection: {Connection.name}";

    try
    {
        stopWatch.Start();
        _logger.Log(LogLevel.Debug, $"{taskInfo} downloading study: {imagingStudy.uid} downloadStarted: {imagingStudy.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} downloadCompleted: {imagingStudy.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} attempts: {imagingStudy.attempts}");

        // BUGFIX: the original iterated "imagingStudy?.series", which still throws a
        // NullReferenceException when series is null (foreach over a null sequence).
        if (imagingStudy.series == null)
        {
            _logger.Log(LogLevel.Debug, $"{taskInfo} study: {imagingStudy.uid} has no series. Skipping.");
            return;
        }

        foreach (var series in imagingStudy.series)
        {
            _logger.Log(LogLevel.Debug, $"{taskInfo} checking series: {series.uid} downloadStarted: {series.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} downloadCompleted: {series.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} attempts: {series.attempts}");

            if (series.downloadCompleted == DateTime.MinValue) //not completed
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} not completed.");

                if (series.downloadStarted < DateTime.Now.AddMinutes(-Connection.retryDelayMinutes)) //not attempted lately
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} not attempted lately.");

                    // NOTE(review): this checks the whole study for any series over the
                    // attempt limit, not just the current one — confirm that is intended.
                    if (imagingStudy.series?.FindAll(e => e.attempts > Connection.maxAttempts).Count == 0) //not exceeded max attempts
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} not exceeded max attempts.");

                        var url = $"{imagingStudy.url}/series/{series.uid.Substring(8)}";

                        //if the tasklist already contains this series don't add it again
                        //equal is determined by the reference field only
                        //so in this case it is the imagingStudy.url
                        if (await _taskManager.CountByReference(url) == 0)
                        {
                            _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} not in task list.");
                            _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} selected for download downloadStarted: {series.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} downloadCompleted: {series.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} attempts: {series.attempts}");

                            series.downloadStarted = DateTime.Now;
                            series.attempts++;

                            var newTaskID = _taskManager.NewTaskID();
                            Task task = new Task(
                                new Action(async () => await _studiesDownloadManager.wadoAsFileStream(connection: connection, newTaskID, httpManager: httpManager, study: imagingStudy, series: series)),
                                _taskManager.cts.Token);
                            await _taskManager.Start(newTaskID, task, $"{Connection.name}.Wado", url, isLongRunning: false);
                        }
                        else
                        {
                            _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} in task list. Skipping.");
                        }
                    }
                    else
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} exceeded max attempts. Skipping.");
                    }
                }
                else
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} attempted lately. Skipping.");
                }
            }
            else
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} series: {series.uid} completed. Skipping.");
            }
        }

        stopWatch.Stop();
        _logger.Log(LogLevel.Information, $"{taskInfo} method level elapsed: {stopWatch.Elapsed} study: {imagingStudy.uid}");
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
    }
    finally
    {
        _taskManager.Stop($"{Connection.name}.downloadStudy");
    }
}
/// <summary>
/// Fetches the list of publishable share destinations from cloud and stores it on
/// <c>Connection.shareDestinations</c>. Sets <c>httpManager.loginNeeded</c> on 401; on any
/// non-OK response the previously cached list is left untouched.
/// </summary>
/// <param name="taskID">Identifier of the calling task, used for log correlation.</param>
/// <param name="Connection">Cloud connection to query; must not be null.</param>
/// <param name="httpManager">Login-state holder; must not be null.</param>
public async Task GetShareDestinations(int taskID, LifeImageCloudConnection Connection, IHttpManager httpManager)
{
    Throw.IfNull(Connection);
    Throw.IfNull(httpManager);

    var taskInfo = $"task: {taskID} connection: {Connection.name}";

    try
    {
        _logger.Log(LogLevel.Debug, $"{taskInfo} Processing getShareDestinations");

        var httpClient = _liteHttpClient.GetClient(Connection);

        try
        {
            //set the URL
            //string shareURL = Connection.URL + "/api/box/v3/listAllPublishable";
            string shareURL = Connection.URL + CloudAgentConstants.GetShareDestinationUrl;
            _logger.Log(LogLevel.Debug, $"{taskInfo} shareURL: {shareURL}");

            var cookies = _liteHttpClient.GetCookies(shareURL);
            _logger.LogCookies(cookies, taskInfo);

            // issue the GET
            var response = await httpClient.GetAsync(shareURL);

            // output the result
            _logger.LogHttpResponseAndHeaders(response, taskInfo);
            _logger.Log(LogLevel.Debug, $"{taskInfo} await response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");

            if (response.StatusCode != HttpStatusCode.OK)
            {
                if (response.StatusCode == HttpStatusCode.Unauthorized)
                {
                    httpManager.loginNeeded = true;
                }

                _logger.Log(LogLevel.Warning, $"{taskInfo} Problem getting share destinations. {response.StatusCode}");
                _liteHttpClient.DumpHttpClientDetails();

                // BUGFIX: the original fell through and deserialized the error payload, which
                // overwrites Connection.shareDestinations with null. Keep the previous list.
                return;
            }

            // convert from stream to JSON
            var serializer = new DataContractJsonSerializer(typeof(List<ShareDestinations>));
            Connection.shareDestinations = serializer.ReadObject(await response.Content.ReadAsStreamAsync()) as List<ShareDestinations>;
        }
        catch (TaskCanceledException)
        {
            _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
        }
        catch (HttpRequestException e)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
            if (e.InnerException != null)
            {
                _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
            }

            _liteHttpClient.DumpHttpClientDetails();
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
            _liteHttpClient.DumpHttpClientDetails();
        }
    }
    finally
    {
        _taskManager.Stop($"{Connection.name}.getShareDestinations");
    }
}
/// <summary>
/// Drains <c>Connection.toCloud</c>: items over maxAttempts are dequeued to errors;
/// RPC / HL7 / completion items are dispatched to their own cloud tasks; DICOM/file items are
/// grouped by share-destination set, batched up to <c>minStowBatchSize</c> (items past their
/// first attempt are sent individually), and uploaded via STOW multipart.
/// </summary>
/// <param name="taskID">Identifier of the calling task, used for log correlation.</param>
/// <param name="Connection">Cloud connection owning the toCloud queue and policy settings.</param>
/// <param name="cacheManager">Routed-cache manager handed to response/completion posts.</param>
/// <param name="httpManager">Login-state holder; batch sending stops once a login is needed.</param>
public async Task SendToCloud(int taskID, LifeImageCloudConnection Connection, IConnectionRoutedCacheManager cacheManager, IHttpManager httpManager)
{
    //2018-02-02 shb RoutedItem does not necessarily have an open stream at this point any more
    var taskInfo = $"task: {taskID} connection: {Connection.name}";

    _logger.Log(LogLevel.Information, $"{taskInfo} toCloud: {(Connection.toCloud == null ? 0 : Connection.toCloud.Count)} suggested items to send.");

    Dictionary<string, List<RoutedItem>> shareSet = new Dictionary<string, List<RoutedItem>>(); //I need a set for each sharing dest set
    List<List<RoutedItem>> sizeSet = new List<List<RoutedItem>>(); //I need a set for each minStowBatchSize

    try
    {
        // BUGFIX: the original used Task.WaitAll, which synchronously blocks a thread-pool
        // thread inside an async method; await Task.WhenAll waits for the same in-flight
        // uploads without blocking.
        await Task.WhenAll(_taskManager.FindByType($"{Connection.name}.putHL7"));
        await Task.WhenAll(_taskManager.FindByType($"{Connection.name}.PostResponse"));
        await Task.WhenAll(_taskManager.FindByType($"{Connection.name}.Stow"));

        int retryDelayed = 0;
        List<RoutedItem> toCloudTemp = new List<RoutedItem>();

        if (Connection.toCloud.Count <= 0)
        {
            return;
        }

        // snapshot eligible items under the lock to avoid "Collection was modified" during dispatch
        lock (Connection.toCloud)
        {
            foreach (var routedItem in Connection.toCloud)
            {
                if (_profileStorage.Current.duplicatesDetectionUpload && routedItem.type == RoutedItem.Type.DICOM && routedItem.sourceFileName != null)
                {
                    _duplicatesDetectionService.DuplicatesPurge();
                    lock (routedItem)
                    {
                        if (!_duplicatesDetectionService.DuplicatesReference(routedItem.fromConnection, routedItem.sourceFileName))
                        {
                            continue;
                        }
                    }
                }

                if (routedItem.lastAttempt == null || (routedItem.lastAttempt != null && routedItem.lastAttempt < DateTime.Now.AddMinutes(-Connection.retryDelayMinutes)))
                {
                    //not attempted lately
                    routedItem.attempts++;
                    routedItem.lastAttempt = DateTime.Now;

                    if (routedItem.attempts > 1)
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} type: {routedItem.type} id: {routedItem.id} file: {routedItem.sourceFileName} meta: {routedItem.RoutedItemMetaFile} second attempt.");
                    }

                    toCloudTemp.Add(routedItem);
                }
                else
                {
                    retryDelayed++;
                }
            }
        }

        foreach (var routedItem in toCloudTemp)
        {
            //remove the toCloud item if it has exceeded maxAttempts
            if (routedItem.attempts > Connection.maxAttempts)
            {
                // AMG LITE-1090 - put a break on then execution (add continue statement) and add routedItem status to the message.
                _logger.Log(LogLevel.Error, $"{taskInfo} type: {routedItem.type} id: {routedItem.id} status: {routedItem.status} file: {routedItem.sourceFileName} meta: {routedItem.RoutedItemMetaFile} has exceeded maxAttempts of {Connection.maxAttempts}. Will move to errors and not try again (removed from send queue).");
                _routedItemManager.Init(routedItem);
                _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), error: true);
                continue;
            }
            else
            {
                _logger.Log(LogLevel.Information, $"{taskInfo} type: {routedItem.type} id: {routedItem.id} file: {routedItem.sourceFileName} meta: {routedItem.RoutedItemMetaFile} attempt: {routedItem.attempts}");
            }

            switch (routedItem.type)
            {
                case RoutedItem.Type.RPC:
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} PostResponse ID: {routedItem.id}");
                    var newTaskID = _taskManager.NewTaskID();
                    Task task = new Task(new Action(async () => await _postResponseCloudService.PostResponse(Connection, routedItem, cacheManager, httpManager, newTaskID)), _taskManager.cts.Token);
                    await _taskManager.Start(newTaskID, task, $"{Connection.name}.PostResponse", $"{Connection.name}.PostResponse {routedItem.id}", isLongRunning: false);
                }
                break;

                case RoutedItem.Type.HL7:
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} putHL7 file: {routedItem.sourceFileName}");
                    var newTaskID = _taskManager.NewTaskID();
                    Task task = new Task(new Action(async () => await _sendFromCloudToHl7Service.putHL7(routedItem, newTaskID, Connection, httpManager)), _taskManager.cts.Token);
                    await _taskManager.Start(newTaskID, task, $"{Connection.name}.putHL7", $"{Connection.name}.putHL7 {routedItem.sourceFileName}", isLongRunning: false);
                }
                break;

                case RoutedItem.Type.COMPLETION:
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} Completion ID: {routedItem.id} type: {routedItem.type} ");
                    var newTaskID = _taskManager.NewTaskID();
                    Task task = new Task(new Action(async () => await _postCompletionCloudService.PostCompletion(Connection, routedItem, cacheManager, httpManager, newTaskID)), _taskManager.cts.Token);
                    await _taskManager.Start(newTaskID, task, $"{Connection.name}.PostCompletion", $"{Connection.name}.PostCompletion {routedItem.id}", isLongRunning: false);
                }
                break;

                case RoutedItem.Type.DICOM:
                case RoutedItem.Type.FILE:
                    //check if dicom, if not dicomize since cloud only does dicom via stow.
                    if (File.Exists(routedItem.sourceFileName) && !_dicomUtil.IsDICOM(routedItem))
                    {
                        _dicomUtil.Dicomize(routedItem);
                    }

                    //inspect the sharing headers and batch by set
                    string shareString = "";
                    if (Connection.shareDestinations != null)
                    {
                        foreach (var connectionSet in routedItem.toConnections.FindAll(e => e.connectionName.Equals(Connection.name)))
                        {
                            if (connectionSet.shareDestinations != null)
                            {
                                foreach (var shareDestination in connectionSet.shareDestinations)
                                {
                                    shareString += shareDestination.boxUuid;
                                }
                            }
                        }
                    }

                    // IDIOM: TryGetValue replaces the original ContainsKey + GetValueOrDefault double lookup
                    if (shareSet.TryGetValue(shareString, out var shareList))
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} Adding {routedItem.sourceFileName} to shareString: {shareString}");
                        shareList.Add(routedItem);
                    }
                    else
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} Adding {routedItem.sourceFileName} to shareString: {shareString}");
                        shareSet.Add(shareString, new List<RoutedItem> { routedItem });
                    }

                    break;

                default:
                    _logger.Log(LogLevel.Critical, $"{taskInfo} meta: {routedItem.RoutedItemMetaFile} Unsupported type: {routedItem.type}");
                    break;
            }
        }

        //Now that each key in the Dictionary is to a single set of sharing destinations, let's break it up further by minStowBatchSize
        //What we want is a big enough upload to solve the small file problem, but small enough so the upload makes forward progress.
        //If this is not the first attempt, then disable batching and send individually.
        foreach (var share in shareSet.Values)
        {
            var batch = new List<RoutedItem>();
            long bytes = 0;

            foreach (var element in share)
            {
                try
                {
                    element.length = new FileInfo(element.sourceFileName).Length;
                }
                catch (FileNotFoundException e)
                {
                    _logger.Log(LogLevel.Critical, $"{taskInfo} id: {element.id} meta:{element.RoutedItemMetaFile} source:{element.sourceFileName} type:{element.type} {e.Message} {e.StackTrace}");
                    _routedItemManager.Init(element);
                    _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), true);
                    continue;
                }
                catch (IOException e)
                {
                    _logger.Log(LogLevel.Critical, $"{taskInfo} id: {element.id} meta:{element.RoutedItemMetaFile} source:{element.sourceFileName} type:{element.type} {e.Message} {e.StackTrace}");
                    //condition may be transient like file in use so skip for the moment
                    continue;
                }
                catch (Exception e)
                {
                    _logger.Log(LogLevel.Critical, $"{taskInfo} id: {element.id} meta:{element.RoutedItemMetaFile} source:{element.sourceFileName} type:{element.type} {e.Message} {e.StackTrace}");
                    if (e.InnerException != null)
                    {
                        _logger.Log(LogLevel.Critical, $"Inner Exception: {e.InnerException}");
                    }

                    //condition may be transient like file so skip for the moment
                    continue;
                }

                //If this is not the first attempt, then disable batching and send individually.
                if (element.length < Connection.minStowBatchSize && bytes < Connection.minStowBatchSize && element.attempts == 1)
                {
                    bytes += element.length;
                    _logger.Log(LogLevel.Debug, $"{taskInfo} Adding {element.sourceFileName} to batch...");
                    batch.Add(element);
                }
                else
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} Batch is full with count: {batch.Count} size: {bytes} attempts: {element.attempts} {(element.attempts > 1 ? "items are sent individually after 1st attempt!" : "")}");
                    sizeSet.Add(batch);
                    batch = new List<RoutedItem>();
                    bytes = element.length;
                    batch.Add(element);
                }
            }

            if (!sizeSet.Contains(batch) && batch.Count > 0)
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} Add final batch to set with count: {batch.Count} size: {bytes}");
                sizeSet.Add(batch);
            }
        }

        int tempcount = 0;
        foreach (var batch in sizeSet)
        {
            tempcount += batch.Count;
        }

        _logger.Log(LogLevel.Information, $"{taskInfo} {sizeSet.Count} batches to send, selected: {tempcount}/{toCloudTemp.Count} retry delayed: {retryDelayed}");

        foreach (var batch in sizeSet)
        {
            if (httpManager.loginNeeded)
            {
                break;
            }

            if (batch.Count > 0)
            {
                var newTaskID = _taskManager.NewTaskID();
                Task task = new Task(new Action(async () => await _stowAsMultiPartCloudService.stowAsMultiPart(batch, newTaskID, Connection, httpManager)), _taskManager.cts.Token);
                await _taskManager.Start(newTaskID, task, $"{Connection.name}.Stow", $"{Connection.name}.Stow batch {batch.Count}", isLongRunning: false);
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
    }
}
/// <summary>
/// Validates the profile, serializes it to base64 JSON, and PUTs it to the cloud
/// agent-configuration endpoint. On success, captures the server-assigned row version so later
/// writes respect optimistic concurrency. A null connection is tolerated (early-startup path).
/// </summary>
/// <param name="profile">Profile to upload; validation errors are attached before sending.</param>
/// <param name="conn">Target cloud connection; may be null during startup replacement.</param>
public async Task PutConfigurationToCloud(Profile profile, LifeImageCloudConnection conn)
{
    var taskInfo = $"PutProfile";

    if (conn == null)
    {
        //this can be called early in startup during replacement strategy
        await Task.CompletedTask;
        return;
    }

    var httpClient = _liteHttpClient.GetClient(conn);

    try
    {
        //set the URL
        //string profileURL = conn.URL + $"/api/agent/v1/agent-configuration?version={Profile.rowVersion}";
        string profileURL = conn.URL + CloudAgentConstants.AgentConfigurationUrl + $"version={Profile.rowVersion}";
        _logger.Log(LogLevel.Debug, $"{taskInfo} putProfileURL: {profileURL}");

        // validate and put any errors in the profile so it can be returned to the server
        profile.errors = _profileValidator.FullValidate(profile, profile.ToString());

        string json = profile.ToString();
        byte[] toBytes = Encoding.ASCII.GetBytes(json);
        string json64 = Convert.ToBase64String(toBytes);

        using HttpContent httpContent = new StringContent(json64);

        var cookies = _liteHttpClient.GetCookies(profileURL);
        _logger.LogCookies(cookies, taskInfo);

        // BUGFIX: the original blocked on .Result inside an async method (deadlock and
        // thread-pool starvation risk); await the PUT instead.
        HttpResponseMessage response = await httpClient.PutAsync(profileURL, httpContent);

        // output the result
        _logger.LogHttpResponseAndHeaders(response, taskInfo);

        if (response.StatusCode == HttpStatusCode.OK)
        {
            string resp = await response.Content.ReadAsStringAsync();
            _logger.Log(LogLevel.Debug, $"{taskInfo} Profile successfully uploaded to cloud.");

            // Read out server version in case null was passed in, essentially allow the agent to
            // write once without a version (just in case its needed say to load a local version
            // to the server) then the version needs to be respected. In practice profile should
            // be read first but that's not currently required.
            Dictionary<string, string> map = JsonHelper.DeserializeFromMap(resp);
            map.TryGetValue("version", out Profile.rowVersion);
            _logger.Log(LogLevel.Debug, $"{taskInfo} Profile version from server: {Profile.rowVersion}.");
        }
        else
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} {response.StatusCode}");
            _liteHttpClient.DumpHttpClientDetails();
        }
    }
    catch (System.Runtime.Serialization.SerializationException e)
    {
        _logger.LogFullException(e, taskInfo);
    }
    catch (InvalidOperationException e)
    {
        if (e.InnerException != null && e.InnerException.Message != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Unable to upload profile to cloud because profile is being iterated. Will try again: {e.Message} {e.StackTrace} Inner Exception: {e.InnerException.Message}");
            if (e.InnerException.StackTrace != null)
            {
                _logger.Log(LogLevel.Warning, $"{taskInfo} e.InnerException.StackTrace: {e.InnerException.StackTrace}");
            }
        }
        else
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Unable to upload profile to cloud because profile is being iterated. Will try again: {e.Message} {e.StackTrace}");
        }
    }
    catch (System.Threading.Tasks.TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task Canceled");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"Inner Exception: {e.InnerException}");
        }

        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e);
        _liteHttpClient.DumpHttpClientDetails();
    }
}
/// <summary>
/// Registers this agent with cloud as an admin using the supplied credentials (and optionally an
/// organization code and service name). Returns the parsed registration result, or null on any
/// failure other than a serialization error (which is rethrown with the raw response body).
/// </summary>
/// <param name="Connection">Cloud connection supplying the base URL.</param>
/// <param name="username">Admin username posted to the setup endpoint.</param>
/// <param name="password">Admin password posted to the setup endpoint.</param>
/// <param name="org">Optional organization code; only sent together with <paramref name="serviceName"/>.</param>
/// <param name="serviceName">Optional service name; only sent together with <paramref name="org"/>.</param>
/// <returns>The <see cref="RegisterAsAdmin"/> payload, or null.</returns>
/// <exception cref="System.Runtime.Serialization.SerializationException">Response body was not valid registration JSON.</exception>
public async Task<RegisterAsAdmin> register(LifeImageCloudConnection Connection, string username, string password, string org = null, string serviceName = null)
{
    var taskInfo = $"username: {username}";
    var httpClient = _liteHttpClient.GetClient(Connection);
    HttpResponseMessage response = null;

    try
    {
        //set the URL
        var uri = CloudAgentConstants.RegisterUrl; // $"/api/admin/v1/agents/setup"
        string registerURL = Connection.URL + uri;
        _logger.Log(LogLevel.Debug, $"registerURL: {registerURL}");

        //set the form parameters
        FormUrlEncodedContent registerParams = null;
        if (org == null || serviceName == null)
        {
            registerParams = new FormUrlEncodedContent(new[]
            {
                new KeyValuePair<string, string>("username", username),
                new KeyValuePair<string, string>("password", password)
            });
        }
        else
        {
            registerParams = new FormUrlEncodedContent(new[]
            {
                new KeyValuePair<string, string>("username", username),
                new KeyValuePair<string, string>("password", password),
                new KeyValuePair<string, string>("organizationCode", org),
                new KeyValuePair<string, string>("serviceName", serviceName)
            });
        }

        // issue the POST
        response = await httpClient.PostAsync(registerURL, registerParams, _taskManager.cts.Token);

        // output the result
        _logger.Log(LogLevel.Debug, $"result: {response.Version} {response.StatusCode} {response.ReasonPhrase}");
        foreach (var header in response.Headers)
        {
            foreach (var value in header.Value)
            {
                _logger.Log(LogLevel.Debug, $"response.Headers: {header.Key} {value}");
            }
        }

        // BUGFIX: the original blocked on .Result inside an async method; await instead.
        _logger.Log(LogLevel.Debug, $"await response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");

        // convert from stream to JSON
        var serializer = new DataContractJsonSerializer(typeof(RegisterAsAdmin));
        var obj = await response.Content.ReadAsStreamAsync();
        var registerAsAdmin = serializer.ReadObject(obj) as RegisterAsAdmin;

        // set the tenantID
        if (registerAsAdmin != null)
        {
            return registerAsAdmin;
        }
    }
    catch (System.Threading.Tasks.TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task Canceled");
    }
    catch (System.Runtime.Serialization.SerializationException e)
    {
        // surface the raw (non-JSON) server response to the caller; await instead of .Result
        throw new System.Runtime.Serialization.SerializationException(await response.Content.ReadAsStringAsync(), e);
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }

        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
    }

    return null;
}
// register to get a tenantID, can only be done one time
/// <summary>
/// Registers the agent with cloud using the connection's organization and subscription codes and
/// stores the returned tenant ID on the connection. Sets <c>httpManager.loginNeeded</c> on 401.
/// </summary>
/// <param name="taskID">Identifier of the calling task, used for log correlation.</param>
/// <param name="Connection">Cloud connection supplying codes and receiving the tenant ID.</param>
/// <param name="httpManager">Login-state holder updated on 401.</param>
public async Task register(int taskID, LifeImageCloudConnection Connection, IHttpManager httpManager)
{
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    var httpClient = _liteHttpClient.GetClient(Connection);

    try
    {
        //set the URL
        string registerURL = Connection.URL + CloudAgentConstants.RegisterAsOrgUrl;
        _logger.Log(LogLevel.Debug, $"{taskInfo} registerURL: {registerURL}");

        //set the form parameters
        var registerParams = new FormUrlEncodedContent(new[]
        {
            new KeyValuePair<string, string>("organizationCode", Connection.organizationCode),
            new KeyValuePair<string, string>("subscriptionCode", Connection.subscriptionCode)
        });

        // issue the POST
        var response = await httpClient.PostAsync(registerURL, registerParams, _taskManager.cts.Token);

        // output the result
        _logger.LogHttpResponseAndHeaders(response, taskInfo);

        if (response.StatusCode == HttpStatusCode.Unauthorized)
        {
            httpManager.loginNeeded = true;
        }

        _logger.Log(LogLevel.Debug, $"{taskInfo} await response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");

        // convert from stream to JSON
        var serializer = new DataContractJsonSerializer(typeof(Register));
        var register = serializer.ReadObject(await response.Content.ReadAsStreamAsync()) as Register;

        // set the tenantID
        // BUGFIX: the "as" cast yields null when the response is not a Register payload; the
        // original dereferenced it unconditionally and threw NullReferenceException.
        if (register?.tenantId != null)
        {
            Connection.tenantID = register.tenantId;
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }

        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
    }
}
/// <summary>
/// Uploads an HL7 file referenced by <paramref name="routedItem"/> to cloud as a multipart
/// "related" POST (optionally gzip-compressed), then dequeues the item from
/// <c>Connection.toCloud</c>. A missing source file dequeues the item to errors immediately.
/// </summary>
/// <param name="routedItem">Item carrying the source file path, share routing, and Compress flag.</param>
/// <param name="taskID">Identifier of the calling task, used for log correlation.</param>
/// <param name="connection">Cloud connection supplying the URL and share destinations.</param>
/// <param name="httpManager">Login-state holder updated on 401.</param>
public async Task putHL7(RoutedItem routedItem, int taskID, LifeImageCloudConnection connection, IHttpManager httpManager)
{
    var Connection = connection;
    var httpClient = _liteHttpClient.GetClient(connection);
    var taskInfo = $"task: {taskID} connection: {Connection.name}";

    // declared outside try so the finally block can dispose whatever was created before a failure
    MultipartContent content = null;
    StreamContent streamContent = null;
    HttpResponseMessage response = null;

    try
    {
        if (!File.Exists(routedItem.sourceFileName))
        {
            // source file vanished: mark the item failed and remove it from the send queue
            routedItem.Error = "File Not Found";
            _routedItemManager.Init(routedItem);
            _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud), error: true);
            return;
        }

        var stopWatch = new Stopwatch();
        stopWatch.Start();

        //set theConnection.URL http://localhost:8080/universal-inbox/api/agent/v1/hl7-upload
        //string putHL7URL = Connection.URL + "/api/agent/v1/hl7-upload?connectionName=" + routedItem.fromConnection;
        string putHL7URL = Connection.URL + CloudAgentConstants.GetPutHl7Url(routedItem.fromConnection);
        _logger.Log(LogLevel.Debug, $"{taskInfo} putHL7URL: {putHL7URL}");

        //generate guid for boundary...boundaries cannot be accidentally found in the content
        var boundary = Guid.NewGuid();
        _logger.Log(LogLevel.Debug, $"{taskInfo} boundary: {boundary}");

        // create the content
        content = new MultipartContent("related", boundary.ToString());

        //add the sharing headers
        // collect the box UUIDs of every share destination configured for this connection
        List<string> shareHeader = new List<string>();
        if (Connection.shareDestinations != null)
        {
            foreach (var connectionSet in routedItem.toConnections.FindAll(e => e.connectionName.Equals(Connection.name)))
            {
                if (connectionSet.shareDestinations != null)
                {
                    foreach (var shareDestination in connectionSet.shareDestinations)
                    {
                        shareHeader.Add(shareDestination.boxUuid);
                    }
                }
            }
        }

        content.Headers.Add("X-Li-Destination", shareHeader);

        // //var fileSize = routedItem.stream.Length;
        var fileSize = new FileInfo(routedItem.sourceFileName).Length;

        //var streamContent = new StreamContent(routedItem.stream);
        streamContent = new StreamContent(File.OpenRead(routedItem.sourceFileName));
        streamContent.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
        {
            // FileName = filename
            FileName = routedItem.sourceFileName
        };
        streamContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream");
        //streamContent.Headers.Add("Content-Transfer-Encoding", "gzip");

        content.Add(streamContent);

        // issue the POST
        Task<HttpResponseMessage> task;

        var cookies = _liteHttpClient.GetCookies(putHL7URL);
        _logger.LogCookies(cookies, taskInfo);

        if (routedItem.Compress == true)
        {
            // wrap the multipart body in a gzip-encoding decorator when compression is requested
            task = httpClient.PostAsync(putHL7URL, new CompressedContent(content, "gzip"), _taskManager.cts.Token);
        }
        else
        {
            task = httpClient.PostAsync(putHL7URL, content, _taskManager.cts.Token);
        }

        response = await task;

        // output the result
        _logger.LogHttpResponseAndHeaders(response, taskInfo);

        if (response.StatusCode == HttpStatusCode.Unauthorized)
        {
            httpManager.loginNeeded = true;
        }

        _logger.Log(LogLevel.Debug, $"{taskInfo} response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");

        // convert from stream to JSON
        //var serializer = new DataContractJsonSerializer(typeof(LoginJSON));
        //var loginJSON = serializer.ReadObject(await response.Content.ReadAsStreamAsync()) as LoginJSON;

        stopWatch.Stop();
        _logger.Log(LogLevel.Information, $"{taskInfo} elapsed: {stopWatch.Elapsed} size: {fileSize} rate: {(float)fileSize / stopWatch.Elapsed.TotalMilliseconds * 1000 / 1000000} MB/s");

        //dequeue the work, we're done!
        // NOTE(review): disposal happens here, BEFORE Dequeue — presumably so the file stream is
        // closed before Dequeue touches/removes the source file; the finally block disposes
        // again, which appears to be a harmless no-op on already-disposed content. Confirm
        // before reordering.
        if (streamContent != null) { streamContent.Dispose(); }
        if (response != null) { response.Dispose(); }
        if (content != null) { content.Dispose(); }

        _routedItemManager.Init(routedItem);
        _routedItemManager.Dequeue(Connection, Connection.toCloud, nameof(Connection.toCloud));
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }

        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        _liteHttpClient.DumpHttpClientDetails();
    }
    finally
    {
        try
        {
            _taskManager.Stop($"{Connection.name}.putHL7");

            // safety net: dispose anything still live if an exception skipped the in-try disposal
            if (streamContent != null) { streamContent.Dispose(); }
            if (response != null) { response.Dispose(); }
            if (content != null) { content.Dispose(); }
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// Gets the cloud requests. Polls the agent-tasks endpoint on the kickoff interval until
/// cancellation; each returned task is base64-decoded, wrapped in an RPC RoutedItem, primed into
/// the response cache per the rules engine, enqueued to rules, and acknowledged as PENDING.
/// </summary>
/// <param name="taskID"></param>
/// <param name="Connection"></param>
/// <param name="cache"></param>
/// <param name="httpManager"></param>
/// <returns></returns>
public async Task GetRequests(int taskID, LifeImageCloudConnection Connection, IConnectionRoutedCacheManager cache, IHttpManager httpManager)
{
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    HttpResponseMessage response = null;
    var httpClient = _liteHttpClient.GetClient(Connection);

    try
    {
        while (!_taskManager.cts.IsCancellationRequested)
        {
            await Task.Delay(_profileStorage.Current.KickOffInterval, _taskManager.cts.Token);

            //BOUR-1022 shb I think the code that checks for dictionary entry before enqueuing is enough to prevent duplicate behavior
            //skip if response cache is not empty
            if (LifeImageCloudConnectionManager.cache.Count > 0)
            {
                int requestCount = 0;

                //check to see if there are any requests
                foreach (var cacheItem in LifeImageCloudConnectionManager.cache.ToArray())
                {
                    _logger.Log(LogLevel.Debug, $"{taskInfo} Cache entry id: {cacheItem.Key}");
                    foreach (var item in cacheItem.Value.ToArray())
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} fromConnection: {item.fromConnection} id: {item.id} started: {item.startTime} complete: {item.resultsTime} status: {item.status}");
                        if (item.type == RoutedItem.Type.RPC)
                        {
                            requestCount++;
                        }
                    }
                }

                if (requestCount > 0)
                {
                    //BOUR-1060 relax condition and rely on dictionary logic below
                    _logger.Log(LogLevel.Warning, $"{taskInfo} response cache has {requestCount} request items.");
                }
            }

            //set the URL
            string agentTasksURL = Connection.URL + CloudAgentConstants.GetAgentTasksUrl;
            _logger.Log(LogLevel.Debug, $"{taskInfo} agentTasksURL: {agentTasksURL}");

            var cookies = _liteHttpClient.GetCookies(agentTasksURL);
            _logger.LogCookies(cookies, taskInfo);

            // issue the GET
            response = await httpClient.GetAsync(agentTasksURL);

            // output the result
            _logger.LogHttpResponseAndHeaders(response, taskInfo);

            // BUGFIX: the original tested "== HttpStatusCode.OK" here, so the failure handling ran
            // on success, the warning fired on every good poll, and the nested Unauthorized check
            // was unreachable. Test for != OK and retry on the next interval instead of parsing an
            // error payload.
            if (response.StatusCode != HttpStatusCode.OK)
            {
                if (response.StatusCode == HttpStatusCode.Unauthorized)
                {
                    httpManager.loginNeeded = true;
                }

                _logger.Log(LogLevel.Warning, $"{taskInfo} Problem getting agent tasks. {agentTasksURL} {response.StatusCode}");
                _liteHttpClient.DumpHttpClientDetails();
                continue;
            }

            // convert from stream to JSON
            string results = await response.Content.ReadAsStringAsync();
            var objResults = JsonSerializer.Deserialize<Dictionary<string, List<Dictionary<string, string>>>>(results);

            foreach (var key in objResults)
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} key: {key.Key}");
                var list = key.Value;
                foreach (var item in list)
                {
                    foreach (var subkey in item)
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} subkey.Key: {subkey.Key} subKey.Value: {subkey.Value} ");
                    }
                }
            }

            if (objResults.Count > 0)
            {
                // 2018-09-19 shb unwrap encoded task data and then send to rules
                var agentRequestList = objResults["modelMapList"];
                foreach (var agentRequest in agentRequestList)
                {
                    byte[] data = Convert.FromBase64String(agentRequest["task"]);
                    string agentRequestAsString = Encoding.UTF8.GetString(data);
                    agentRequest["task"] = agentRequestAsString;

                    string id = agentRequest["id"];
                    string request = agentRequest["task"];
                    string requestType = agentRequest["task_type"];
                    string connection = null;
                    agentRequest.TryGetValue("connection", out connection);

                    RoutedItem ri = new RoutedItem(fromConnection: Connection.name, id: id, request: request, requestType: requestType)
                    {
                        type = RoutedItem.Type.RPC,
                        status = RoutedItem.Status.PENDING,
                        startTime = DateTime.Now,
                        TaskID = taskID
                    };

                    // "*" means the request is not targeted at a specific connection
                    if (connection != null && connection != "*")
                    {
                        ConnectionSet connSet = new ConnectionSet { connectionName = connection };
                        ri.toConnections.Add(connSet);
                    }

                    LifeImageCloudConnectionManager.cache.TryGetValue(ri.id, out List<RoutedItem> cacheItem);
                    if (cacheItem == null)
                    {
                        //determine which connections will need to reply and prime the response cache
                        _rulesManager.Init(_profileStorage.Current.rules);
                        var connsets = _rulesManager.Eval(ri);

                        foreach (var connset in connsets)
                        {
                            _routedItemManager.Init(ri);
                            var prime = (RoutedItem)_routedItemManager.Clone();
                            prime.startTime = DateTime.Now; //clock starts ticking now
                            prime.status = RoutedItem.Status.PENDING;
                            prime.fromConnection = _profileStorage.Current.connections.Find(e => e.name == connset.connectionName).name;
                            _logger.Log(LogLevel.Debug, $"{taskInfo} Priming Response cache id: {id} conn: {prime.fromConnection} ");
                            cache.Route(prime);
                        }

                        //enqueue the request
                        _logger.Log(LogLevel.Debug, $"{taskInfo} Enqueuing id: {id} requestType: {requestType} subKey.Value: {request} ");
                        _routedItemManager.Init(ri);
                        _routedItemManager.Enqueue(Connection, Connection.toRules, nameof(Connection.toRules));

                        //BOUR-995 let cloud know we got the request with a status of PENDING
                        await _postResponseCloudService.PostResponse(Connection, ri, cache, httpManager, taskID);
                    }
                    else
                    {
                        _logger.Log(LogLevel.Debug, $"{taskInfo} Exists id: {id} requestType: {requestType} subKey.Value: {request} ");
                        foreach (var item in cacheItem)
                        {
                            _logger.Log(LogLevel.Debug, $"{taskInfo} fromConnection: {item.fromConnection} started: {item.startTime} complete: {item.resultsTime} status: {item.status}");
                            foreach (var ctr in item.cloudTaskResults)
                            {
                                foreach (var result in ctr.results)
                                {
                                    _logger.Log(LogLevel.Debug, $"{taskInfo} Exists id: {item.id} results: {result}");
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (System.InvalidOperationException) //for the Collection was Modified, we can wait
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Waiting for requests to complete before getting new requests");
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
    }
    finally
    {
        try
        {
            _taskManager.Stop($"{Connection.name}.GetRequests");
            if (response != null)
            {
                response.Dispose();
            }
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// markDownloadComplete is used to remove an item that was in the /studies call.
/// Pass 1: applies per-series completion info queued in Connection.markSeriesComplete onto the
/// cached study list. Pass 2: for each study whose series are all complete (or any series has
/// exceeded maxAttempts), queues a "download-complete"/"download-fail" POST back to cloud and,
/// on OK/NotFound, removes the study from the local cache. Finally re-fetches /studies.
/// Never throws: all failures are logged and the task is stopped in the finally block.
/// </summary>
public async Task markDownloadComplete(int taskID, LifeImageCloudConnection connection, IHttpManager httpManager)
{
    Connection = connection; // also stored on the instance field for use by helpers
    var httpClient = _liteHttpClient.GetClient(connection);
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    HttpResponseMessage response = null;
    try
    {
        //process the series that have completed
        _logger.Log(LogLevel.Debug, $"{taskInfo} Processing Series Completion");
        try
        {
            //loop through the studies and if a study has all of its series downloaded, then we can remove it and tell cloud we downloaded it
            if (Connection.studies != null && Connection.studies.ImagingStudy != null)
            {
                // ToList() snapshot: the study list is mutated (RemoveAll) later in this method.
                foreach (var study in Connection.studies.ImagingStudy.ToList())
                {
                    var remaining = new List<Series>();
                    bool seriesFail = false;
                    if (study.series != null)
                    {
                        // downloadCompleted == DateTime.MinValue is the sentinel for "not yet downloaded"
                        remaining = study.series?.FindAll(e => e.downloadCompleted == DateTime.MinValue);
                        //var studyFail = study.attempts > maxAttempts; we aren't doing study LogLevel.
                        seriesFail = study.series?.FindAll(e => e.attempts > Connection.maxAttempts).Count > 0;
                        //var instanceFail = ins
                        //the study object contains a list of series but the series object
                        //does not contain a list of instances. So no marking and clearing at instance level yet.
                    }
                    // NOTE(review): if study.series == null, remaining stays an empty list, so the
                    // study falls into the "complete" branch below and study.series is then
                    // dereferenced — presumably series is never null here; confirm upstream.
                    if (remaining.Count == 0)
                    {
                        foreach (var series in study.series)
                        {
                            _logger.Log(LogLevel.Debug, $"{taskInfo} study: {study.uid} series: {series.uid} started: {series.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} completed: {series.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} duration: {series.downloadCompleted - series.downloadStarted} attempts: {series.attempts}");
                        }
                        study.downloadCompleted = study.series.Max(e => e.downloadCompleted);
                        // NOTE(review): downloadStarted is a non-nullable DateTime, so the
                        // "!= null" filter is always true (CS0472) — this is effectively Min over
                        // all series; probably meant "!= DateTime.MinValue". Confirm before changing.
                        study.downloadStarted = study.series.FindAll(e => e.downloadStarted != null)
                            .Min(e => e.downloadStarted);
                        _logger.Log(LogLevel.Information, $"{taskInfo} study download (complete): {study.uid} started: {study.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} completed: {study.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} duration: {study.downloadCompleted - study.downloadStarted} attempts: {study.attempts}");
                        // queue the "download-complete" notification for the POST loop below
                        Connection.markDownloadsComplete.Add(new string[] { study.url, "download-complete" });
                    }
                    if (seriesFail)
                    {
                        foreach (var series in study.series)
                        {
                            _logger.Log(LogLevel.Debug, $"{taskInfo} study: {study.uid} series: {series.uid} started: {series.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} completed: {series.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} duration: {series.downloadCompleted - series.downloadStarted} attempts: {series.attempts}");
                        }
                        study.downloadCompleted = study.series.Max(e => e.downloadCompleted);
                        // NOTE(review): same always-true "!= null" filter as above (CS0472).
                        study.downloadStarted = study.series.FindAll(e => e.downloadStarted != null).Min(e => e.downloadStarted);
                        _logger.Log(LogLevel.Information, $"{taskInfo} study download (failed): {study.uid} started: {study.downloadStarted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} completed: {study.downloadCompleted.ToString("yyyy-MM-dd HH:mm:ss.ffff")} duration: {study.downloadCompleted - study.downloadStarted} attempts: {study.attempts}");
                        _logger.Log(LogLevel.Information, $"{taskInfo} Failing study: {study.url}");
                        // queue the "download-fail" notification for the POST loop below
                        Connection.markDownloadsComplete.Add(new string[] { study.url, "download-fail" });
                    }
                }
            }
            // Pass 1: copy queued per-series completion data onto the cached study/series objects.
            // ToArray() snapshot: markSeriesComplete is cleared after the loop.
            foreach (var seriesObj in Connection.markSeriesComplete.ToArray())
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} new Series Complete: {seriesObj.uid}");
                if (Connection.studies != null && Connection.studies.ImagingStudy != null)
                {
                    foreach (var study in Connection.studies?.ImagingStudy)
                    {
                        foreach (var series in study.series)
                        {
                            if (series.uid == seriesObj.uid)
                            {
                                // NOTE(review): downloadCompleted is a non-nullable DateTime, so
                                // "!= null" is always true (CS0472) and the else branch below is
                                // unreachable. Intent was likely "== DateTime.MinValue" (not yet
                                // complete) — confirm against version history before fixing.
                                if (series.downloadCompleted != null)
                                {
                                    _logger.Log(LogLevel.Debug, $"{taskInfo} writing timestamps markSeriesComplete: {series.uid}");
                                    series.downloadCompleted = seriesObj.downloadCompleted;
                                    series.downloadStarted = seriesObj.downloadStarted;
                                    series.attempts = seriesObj.attempts;
                                }
                                else
                                {
                                    // dead code today (see note above)
                                    _logger.Log(LogLevel.Debug, $"{taskInfo} series already marked as complete: {series.uid}");
                                    series.downloadCompleted = DateTime.Now;
                                    series.attempts = seriesObj.attempts;
                                }
                            }
                        }
                    }
                }
            }
            Connection.markSeriesComplete.Clear();
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
        // Pass 2: POST each queued completion/failure notification to cloud.
        // ToList() snapshot: successful entries are removed from markDownloadsComplete inside the loop.
        foreach (var markinfo in Connection.markDownloadsComplete.ToList())
        {
            try
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} marking: {markinfo[0]} {markinfo[1]}");
                var stopWatch = new Stopwatch();
                stopWatch.Start();
                // markinfo[0] = study URL, markinfo[1] = "download-complete" | "download-fail"
                string markDownloadCompleteURL = markinfo[0] + "/" + markinfo[1];
                _logger.Log(LogLevel.Debug, $"{taskInfo} markDownloadCompleteURL: {markDownloadCompleteURL}");
                //set the form parameters (server expects a POST body; content itself is ignored)
                var nothingParams = new FormUrlEncodedContent(new[]
                {
                    new KeyValuePair<string, string>("nothing", "nothing"),
                });
                var cookies = _liteHttpClient.GetCookies(markDownloadCompleteURL);
                _logger.LogCookies(cookies, taskInfo);
                // issue the POST
                var task = httpClient.PostAsync(markDownloadCompleteURL, nothingParams, _taskManager.cts.Token);
                response = await task;
                // output the result
                _logger.LogHttpResponseAndHeaders(response, taskInfo);
                if (response.StatusCode != HttpStatusCode.OK)
                {
                    _logger.Log(LogLevel.Warning, $"{taskInfo} {response.StatusCode} {markDownloadCompleteURL}");
                    _liteHttpClient.DumpHttpClientDetails();
                }
                if (response.StatusCode == HttpStatusCode.Unauthorized)
                {
                    // session expired — trigger a re-login on the next cycle
                    httpManager.loginNeeded = true;
                }
                // NotFound is treated like success: the study is already gone server-side,
                // so we still purge it locally and drop the queued notification.
                if (response.StatusCode == HttpStatusCode.OK || response.StatusCode == HttpStatusCode.NotFound)
                {
                    _logger.Log(LogLevel.Information, $"{taskInfo} {response.StatusCode} {markDownloadCompleteURL}");
                    lock (Connection.studies)
                    {
                        Connection.studies.ImagingStudy.RemoveAll(e => e.url == markinfo[0]);
                    }
                    lock (Connection.markDownloadsComplete)
                    {
                        Connection.markDownloadsComplete.Remove(markinfo);
                    }
                }
                _logger.Log(LogLevel.Debug, $"{taskInfo} response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");
                stopWatch.Stop();
                _logger.Log(LogLevel.Information, $"{taskInfo} elapsed: {stopWatch.Elapsed}");
            }
            catch (TaskCanceledException)
            {
                _logger.Log(LogLevel.Information, $"{taskInfo} Task Canceled");
            }
            catch (HttpRequestException e)
            {
                _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
                if (e.InnerException != null)
                {
                    _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
                }
                _liteHttpClient.DumpHttpClientDetails();
            }
            catch (Exception e)
            {
                _logger.LogFullException(e, $"{taskInfo} markDownloadComplete failed");
                _liteHttpClient.DumpHttpClientDetails();
            }
        }
        // refresh the study list now that completed studies were purged
        _logger.Log(LogLevel.Debug, $"{taskInfo} Processing getStudies");
        await _studyManager.getStudies(taskID, connection, httpManager);
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    finally
    {
        try
        {
            _taskManager.Stop($"{Connection.name}.markDownloadComplete");
            if (response != null)
            {
                response.Dispose();
            }
        }
        catch (Exception e)
        {
            _logger.LogFullException(e, taskInfo);
        }
    }
}
/// <summary>
/// Authenticates this agent against the cloud's /login/authenticate endpoint with form-posted
/// credentials, captures the X-Li-Synctoken header and JSESSIONID cookie for later calls, and
/// returns the HTTP status code as a string (or null on cancellation/transport failure).
/// After maxAttempts consecutive Unauthorized responses the whole engine is shut down.
/// </summary>
public async Task<string> login(int taskID, LifeImageCloudConnection connection, IHttpManager _manager)
{
    Connection = connection;
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    var httpClient = _liteHttpClient.GetClient(connection);
    try
    {
        Connection.loginAttempts++;
        //set the URL
        string loginURL = Connection.URL + "/login/authenticate";
        _logger.Log(LogLevel.Debug, $"{taskInfo} loginURL: {loginURL}");
        //set the form parameters
        var loginParams = new FormUrlEncodedContent(new[]
        {
            new KeyValuePair<string, string>("j_username", Connection.username),
            new KeyValuePair<string, string>("j_password", GetUnprotectedPassword(connection)),
            new KeyValuePair<string, string>("OrganizationCode", Connection.organizationCode),
            new KeyValuePair<string, string>("ServiceName", Connection.serviceName),
            new KeyValuePair<string, string>("applTenantId", Connection.tenantID)
        });
        // issue the POST
        HttpResponseMessage response = null;
        try
        {
            var cookies = _liteHttpClient.GetCookies(loginURL);
            _logger.LogCookies(cookies, taskInfo);
            var task = httpClient.PostAsync(loginURL, loginParams, _taskManager.cts.Token);
            //ServicePointManager.FindServicePoint(new Uri(loginURL)).ConnectionLeaseTimeout = 0; //(int)TimeSpan.FromMinutes(1).TotalMilliseconds;
            response = await task;
        }
        catch (System.Threading.Tasks.TaskCanceledException)
        {
            _logger.Log(LogLevel.Information, $"{taskInfo} Task Canceled");
            return null;
        }
        catch (HttpRequestException e)
        {
            // NOTE(review): this handler's body was corrupted by credential redaction in the
            // checked-in source ("..."******"..."); reconstructed as log + diagnostics + bail
            // out. Confirm against version control.
            _logger.LogFullException(e, $"{taskInfo} HttpRequestException: Unable to login");
            _liteHttpClient.DumpHttpClientDetails();
            return null;
        }
        // output the result
        _logger.Log(LogLevel.Debug, $"{taskInfo} await response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");
        if (response.StatusCode == HttpStatusCode.OK)
        {
            Connection.loginAttempts = 0; // reset the lockout counter on success
            _logger.Log(LogLevel.Debug, $"{taskInfo} User Successfully logged in!");
            _manager.loginNeeded = false;
        }
        else
        {
            _liteHttpClient.DumpHttpClientDetails();
            _manager.loginNeeded = true;
            // hard stop after repeated auth failures so we don't hammer the server forever
            if (response.StatusCode == HttpStatusCode.Unauthorized && Connection.loginAttempts == Connection.maxAttempts)
            {
                Console.WriteLine("Exceeded max login attempts. Shutting down");
                LiteEngine.shutdown(this, null);
                Environment.Exit(0);
            }
        }
        // grab the X-Li-Synctoken
        if (response.Headers.TryGetValues("X-Li-Synctoken", out IEnumerable<string> syncTokens))
        {
            foreach (var token in syncTokens)
            {
                Connection.syncToken = token;
                httpClient.DefaultRequestHeaders.Remove("X-Li-Synctoken"); //in case we have to login again without recreating httpClient
                httpClient.DefaultRequestHeaders.Add("X-Li-Synctoken", Connection.syncToken);
                break; // only the first token is used
            }
        }
        // get the session cookie
        var newcookies = _liteHttpClient.GetCookies(loginURL);
        foreach (var cookie in newcookies)
        {
            var cookiestr = cookie.ToString();
            if (cookiestr.Contains("JSESSIONID"))
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} Cookie: {cookiestr}");
                _manager.jSessionID = cookiestr;
            }
        }
        _logger.Log(LogLevel.Debug, $"{taskInfo} Login successful: {_manager.jSessionID}");
        // convert from stream to JSON
        //BUG JSON is invalid as of 4/11/2016
        //var serializer = new DataContractJsonSerializer(typeof(LoginJSON));
        //var loginJSON = serializer.ReadObject(await response.Content.ReadAsStreamAsync()) as LoginJSON;
        return response.StatusCode.ToString();
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
        return null;
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        return null;
    }
}
/// <summary>
/// Downloads the agent configuration (profile) from cloud. When a rowVersion is supplied and
/// no override is requested, it is passed as ?version= so the server can answer 304 Not
/// Modified. The payload arrives as a base64-encoded "configFile" entry in a JSON map.
/// Returns the deserialized Profile, or null when nothing new is available or on any error.
/// </summary>
public async Task<Profile> GetAgentConfigurationFromCloud(LifeImageCloudConnection conn, string rowVersion, bool _overrideVersionAndModifiedDate)
{
    var taskInfo = $"{conn.name}:";
    string json = "";
    var httpClient = _liteHttpClient.GetClient(conn);
    try
    {
        //set the URL
        string profileURL = conn.URL;
        // FIX: was a bitwise '|'; logical short-circuit OR is what is intended here.
        if (rowVersion == null || _overrideVersionAndModifiedDate == true)
        {
            profileURL += CloudAgentConstants.AgentConfigurationUrl;
        }
        else
        {
            profileURL += $"{CloudAgentConstants.AgentConfigurationUrl}?version={rowVersion}";
        }
        _logger.Log(LogLevel.Debug, $"{taskInfo} getProfileURL: {profileURL}");
        var cookies = _liteHttpClient.GetCookies(profileURL);
        _logger.LogCookies(cookies, taskInfo);
        // issue the GET
        HttpResponseMessage httpResponse = await httpClient.GetAsync(profileURL);
        if (httpResponse.StatusCode == HttpStatusCode.NotModified)
        {
            // server-side profile unchanged since rowVersion — nothing to merge
            return null;
        }
        string response = await httpResponse.Content.ReadAsStringAsync();
        _logger.Log(LogLevel.Debug, $"{taskInfo} response size: {response.Length}");
        if (httpResponse.StatusCode == HttpStatusCode.OK && response != null && response.Length > 0)
        {
            // Cloud returns results in a map "configFile" -> value
            Dictionary<string, string> map = JsonHelper.DeserializeFromMap(response);
            // FIX: guard the lookup — Convert.FromBase64String(null) would throw
            // ArgumentNullException when the map has no "configFile" entry.
            if (map.TryGetValue("configFile", out string json64) && !string.IsNullOrEmpty(json64))
            {
                // Convert back from base 64 (needed because json was getting munged)
                byte[] jsonBytes = Convert.FromBase64String(json64);
                json = System.Text.Encoding.Default.GetString(jsonBytes);
                _logger.Log(LogLevel.Debug, $"{taskInfo} Profile successfully downloaded from cloud.");
                _logger.Log(LogLevel.Debug, $"{taskInfo} Raw JSON: \n {json}");
                map.TryGetValue("version", out rowVersion);
            }
            else
            {
                _logger.Log(LogLevel.Debug, $"{taskInfo} No profile update available from cloud.");
            }
        }
        else
        {
            _logger.Log(LogLevel.Debug, $"{taskInfo} No profile update available from cloud.");
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task Canceled");
    }
    catch (HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} Exception: {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"Inner Exception: {e.InnerException}");
        }
        _liteHttpClient.DumpHttpClientDetails();
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, $"{taskInfo} {e.Message}");
        _liteHttpClient.DumpHttpClientDetails();
        //throw e; //solves a perpetual state of unauthorized, now solved by inspection of response code in other liCloud calls
    }
    if (json == "")
    {
        return null;
    }
    return _jsonHelper.DeserializeObject(json);
}
/// <summary>
/// Decides whether a single imaging study is eligible for download and, if so, spawns a
/// downloadStudy task for it. Returns true when a download task was started, false when the
/// study was skipped (duplicate, already in flight, already complete, retried too recently,
/// or over the per-series attempt limit).
/// </summary>
private async Task<bool> ProcessImageStudy(LifeImageCloudConnection Connection, ImagingStudy imagingStudy, IHttpManager httpManager, string taskInfo)
{
    string duplicatesDirName = Connection.name;
    if (_profileStorage.Current.duplicatesDetectionDownload)
    {
        _duplicatesDetectionService.DuplicatesPurge();
        lock (imagingStudy)
        {
            // skip the study when the duplicates service says we've already seen this uid
            if (!_duplicatesDetectionService.DuplicatesReference1(duplicatesDirName, imagingStudy.uid))
            {
                //studies.ImagingStudy.Remove(imagingStudy);
                return (false);
            }
        }
    }
    _logger.Log(LogLevel.Information, $"{taskInfo} checking study: {imagingStudy.uid} downloadStarted:{imagingStudy.downloadStarted:yyyy-MM-dd HH:mm:ss.ffff} downloadCompleted:{imagingStudy.downloadCompleted:yyyy-MM-dd HH:mm:ss.ffff} attempts: {imagingStudy.attempts} seriesOverMaxAttempts:{imagingStudy.series?.FindAll(e => e.attempts > Connection.maxAttempts).Count}");
    // Gate 1: skip if a task is already running for this study uid.
    // (The trailing "//not in task" style comments on these gates name the condition required
    // to PROCEED, not the condition being tested.)
    if (await _taskManager.CountByReference(imagingStudy.uid) != 0) //not in task
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} study: {imagingStudy.uid} in current tasks. Skipping.");
        return (false);
    }
    _logger.Log(LogLevel.Debug, $"{taskInfo} study: {imagingStudy.uid} not in current tasks.");
    // Gate 2: skip if already downloaded (MinValue is the "never completed" sentinel).
    if (imagingStudy.downloadCompleted != DateTime.MinValue) //not completed
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} study: {imagingStudy.uid} completed. Skipping.");
        return (false);
    }
    _logger.Log(LogLevel.Debug, $"{taskInfo} study: {imagingStudy.uid} not completed.");
    // Gate 3: skip if a download was attempted within the last retryDelayMinutes.
    if (imagingStudy.downloadStarted >= DateTime.Now.AddMinutes(-Connection.retryDelayMinutes)) //not attempted lately
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} study: {imagingStudy.uid} attempted lately. Skipping.");
        return (false);
    }
    _logger.Log(LogLevel.Debug, $"{taskInfo} study: {imagingStudy.uid} not attempted lately.");
    // Gate 4: skip if any series has exceeded maxAttempts.
    // NOTE(review): when imagingStudy.series is null the '?.Count' expression is null, and the
    // lifted comparison 'null != 0' evaluates to TRUE — a series-less study is skipped here
    // under the "exceeded max attempts" message. Presumably intentional-ish (nothing to
    // download), but the log is misleading; confirm before changing.
    if ((imagingStudy.series?.FindAll(e => e.attempts > Connection.maxAttempts).Count) != 0) //not exceeded max attempts
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} study: {imagingStudy.uid} has exceeded max attempts. Skipping.");
        return (false);
    }
    _logger.Log(LogLevel.Debug, $"{taskInfo} study: {imagingStudy.uid} has not exceeded max attempts.");
    _logger.Log(LogLevel.Information, $"{taskInfo} study: {imagingStudy.uid} attempts: {imagingStudy.attempts} selected for download.");
    // mark the attempt before the task starts so Gate 3 throttles concurrent reschedules
    imagingStudy.downloadStarted = DateTime.Now;
    imagingStudy.attempts++;
    var newTaskID = _taskManager.NewTaskID();
    // NOTE(review): wrapping 'async () => await ...' in new Action(...) produces an async-void
    // delegate — exceptions inside DownloadStudy cannot be observed by this Task. Pattern is
    // used as-is by the task manager; confirm before restructuring.
    Task task = new Task(new Action(async () => await _studyManager.DownloadStudy(newTaskID, imagingStudy, Connection, httpManager)), _taskManager.cts.Token);
    await _taskManager.Start(newTaskID, task, $"{Connection.name}.downloadStudy", $"{imagingStudy.uid}", isLongRunning: false);
    return (true);
}
/// <summary>
/// Ages one cached routed item. Items older than their type-specific purge window are closed
/// out: an RPC item is failed and routed back with a synthesized error response; any other
/// type is routed back as a COMPLETION clone. Younger items that have still outlived the
/// response-cache expiry are simply evicted from the cache.
/// </summary>
private void ProcessRoutedItem(RoutedItem item, LifeImageCloudConnection Connection, IConnectionRoutedCacheManager manager, string taskInfo)
{
    _logger.Log(LogLevel.Debug, $"{taskInfo} fromConnection: {item.fromConnection} id: {item.id} type: {item.type} started: {item.startTime} complete: {item.resultsTime} status: {item.status}");

    if (item.type == RoutedItem.Type.DICOM && item.sourceFileName == null)
    {
        _logger.Log(LogLevel.Critical, $"{taskInfo} DICOM type requires sourceFileName!! fromConnection: {item.fromConnection} id: {item.id} type: {item.type} started: {item.startTime} complete: {item.resultsTime} status: {item.status}");
    }

    // RPC requests age out after MaxRequestAgeMinutes; everything else after StudyCloseInterval.
    bool isRpc = item.type == RoutedItem.Type.RPC;
    var ageLimitMinutes = isRpc ? Connection.MaxRequestAgeMinutes : Connection.StudyCloseInterval;
    var purgetime = DateTime.Now.AddMinutes(-ageLimitMinutes);

    if (item.startTime < purgetime)
    {
        if (item.startTime == DateTime.MinValue)
        {
            _logger.Log(LogLevel.Error, $"{taskInfo} id: {item.id} has unassigned start time");
        }

        if (isRpc)
        {
            _logger.Log(LogLevel.Information, $"{taskInfo} id: {item.id} did not complete, purging cache based on Type: {item.type} MaxRequestAgeMinutes:{Connection.MaxRequestAgeMinutes} with calculated purgetime:{purgetime} and closing out");
            // fail the request and hand the caller a synthesized error payload
            item.status = RoutedItem.Status.FAILED;
            var returnTagData = new Dictionary<string, string>
            {
                { "StatusCode", "-1" },
                {
                    "StatusDescription",
                    $"Error: request: {item.id} Connection: {item.fromConnection} startTime: {item.startTime} did not complete based on maxRequestAgeMinutes: {Connection.MaxRequestAgeMinutes}. Closing out request."
                }
            };
            var status = new Dictionary<string, Dictionary<string, string>>
            {
                { "response", returnTagData }
            };
            item.response.Add(JsonSerializer.Serialize(status));
            manager.Route(item); //can't await inside a lock
        }
        else
        {
            _logger.Log(LogLevel.Information, $"{taskInfo} id: {item.id} purging cache based on Type: {item.type} StudyCloseInterval:{Connection.StudyCloseInterval} with calculated purgetime:{purgetime} and closing out");
            // route a COMPLETION clone rather than the original item
            _routedItemManager.Init(item);
            var completionItem = (RoutedItem)_routedItemManager.Clone();
            completionItem.status = RoutedItem.Status.COMPLETED;
            completionItem.type = RoutedItem.Type.COMPLETION;
            //a completion can be for a file/study that was previously routed so the file reference is old and now meaningless.
            completionItem.sourceFileName = null;
            manager.Route(completionItem);
        }
        //send just one of the items for each request id, not each cached item for the id.
        return;
    }

    // not old enough to close out — but evict from the response cache once past expiry
    var responseCacheExpiry = DateTime.Now.AddMinutes(-Connection.ResponseCacheExpiryMinutes);
    if (item.startTime < responseCacheExpiry)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} id: {item.id} did not complete, purging cache based on Type: {item.type} ResponseCacheExpiryMinutes:{Connection.ResponseCacheExpiryMinutes} with calculated responseCacheExpiry:{responseCacheExpiry}");
        manager.RemoveCachedItem(item);
    }
}
/// <summary>
/// Polls cloud for the list of studies needing download and merges the result into
/// Connection.studies via MergeStudies. All failures (cancellation, serialization, transport)
/// are logged and swallowed so the polling loop can simply retry on its next interval.
/// </summary>
public async Task getStudies(int taskID, LifeImageCloudConnection connection, IHttpManager httpManager)
{
    var Connection = connection;
    var taskInfo = $"task: {taskID} connection: {Connection.name}";
    var httpClient = _liteHttpClient.GetClient(connection);
    try
    {
        //set the URL
        //string studiesURL = Connection.URL + "/api/agent/v1/studies?state=NEEDS_DOWNLOADING&lifeImageSummary=true"; //add summary
        string studiesURL = Connection.URL + CloudAgentConstants.GetStudies; //add summary
        _logger.Log(LogLevel.Debug, $"{taskInfo} studiesURL: {studiesURL}");
        var cookies = _liteHttpClient.GetCookies(studiesURL);
        _logger.LogCookies(cookies, taskInfo);
        // issue the GET
        // FIX: pass the shared cancellation token (consistent with the file's other HTTP calls)
        // and dispose the response, which was previously leaked.
        using (var response = await httpClient.GetAsync(studiesURL, _taskManager.cts.Token))
        {
            // output the result
            _logger.LogHttpResponseAndHeaders(response, taskInfo);
            _logger.Log(LogLevel.Debug, $"{taskInfo} response.Content.ReadAsStringAsync(): {await response.Content.ReadAsStringAsync()}");
            if (response.StatusCode != HttpStatusCode.OK)
            {
                if (response.StatusCode == HttpStatusCode.Unauthorized)
                {
                    httpManager.loginNeeded = true;
                }
                _logger.Log(LogLevel.Warning, $"{taskInfo} {response.StatusCode} {response.ReasonPhrase}");
                _liteHttpClient.DumpHttpClientDetails();
            }

            //2018-02-06 shb convert from stream to JSON and clean up any non UTF-8 that appears like it did
            // when receiving "contains invalid UTF8 bytes" exception
            // (on a non-OK status the body won't parse; the SerializationException is caught below)
            var serializer = new DataContractJsonSerializer(typeof(RootObject));
            using (var streamReader = new StreamReader(await response.Content.ReadAsStreamAsync(), Encoding.UTF8))
            using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(streamReader.ReadToEnd())))
            {
                var newStudies = serializer.ReadObject(stream) as RootObject;
                MergeStudies(newStudies, Connection);
            }

            if (Connection.studies != null && Connection.studies.ImagingStudy != null)
            {
                _logger.Log(LogLevel.Information, $"{taskInfo} studies.ImagingStudy.Count: {Connection.studies.ImagingStudy.Count}");
                foreach (var imagingStudy in Connection.studies.ImagingStudy)
                {
                    _logger.Log(LogLevel.Information, $"{taskInfo} ImagingStudy.uid: {imagingStudy.uid} series:{imagingStudy.numberOfSeries} instances:{imagingStudy.numberOfInstances}");
                }
            }
        }
    }
    catch (TaskCanceledException)
    {
        _logger.Log(LogLevel.Information, $"{taskInfo} Task was canceled.");
    }
    catch (System.Runtime.Serialization.SerializationException e)
    {
        //eat it for now
        _logger.Log(LogLevel.Warning, $"{taskInfo} {e.Message} {e.StackTrace}");
    }
    catch (System.Net.Http.HttpRequestException e)
    {
        _logger.Log(LogLevel.Warning, $"{taskInfo} {e.Message} {e.StackTrace}");
        if (e.InnerException != null)
        {
            _logger.Log(LogLevel.Warning, $"{taskInfo} Inner Exception: {e.InnerException}");
        }
    }
    catch (Exception e)
    {
        _logger.LogFullException(e, taskInfo);
        //throw e;
    }
}