/// <summary>
/// Combines the statuses of all jobs listed in <paramref name="hashes"/> into a single
/// per-region result for the results page.
/// </summary>
/// <param name="hashes">URL-encoded, comma-separated list of job hashes.</param>
/// <param name="isPointSet">True when the region is a set of points rather than a grid.</param>
/// <returns>
/// Failed when every job failed; Pending (with the smallest pending count) when every job is
/// pending; Succeeded / PartiallySucceeded (with the hash list) when every job has finished;
/// otherwise InProgress with an aggregate progress percentage.
/// </returns>
private RegionResultModel BuildRegionResultModel(string hashes, bool isPointSet)
{
    var jobManager = WebApiApplication.GetSharedJobManager(HttpContext);
    JobStatus[] stats = HttpUtility.UrlDecode(hashes).Split(',').Select(h => jobManager.GetStatus(h)).ToArray();

    if (stats.All(s => s.State == JobOrPartState.Failed))
    {
        return new RegionResultModel(RegionResultStatus.Failed, null, isPointSet);
    }
    if (stats.All(s => s.State == JobOrPartState.Pending))
    {
        // Report the most optimistic queue position among the pending jobs.
        return new RegionResultModel(RegionResultStatus.Pending, stats.Min(s => s.PendingCount), isPointSet);
    }
    if (stats.All(s => s.State == JobOrPartState.Completed))
    {
        return new RegionResultModel(RegionResultStatus.Succeeded, hashes, isPointSet);
    }
    if (stats.All(s => s.State == JobOrPartState.Completed || s.State == JobOrPartState.Failed))
    {
        return new RegionResultModel(RegionResultStatus.PartiallySucceeded, hashes, isPointSet);
    }

    // Mixed states: average progress over all jobs. Completed jobs count as 100%,
    // jobs that are neither completed nor in progress count as 0%. The in-progress,
    // completed and "other" jobs partition stats, so the denominator is stats.Length.
    var inProgress = stats.Where(s => s.State == JobOrPartState.InProgress).ToArray();
    var completedCount = stats.Count(s => s.State == JobOrPartState.Completed);
    return new RegionResultModel(
        RegionResultStatus.InProgress,
        (inProgress.Sum(s => s.ProgressPercent) + 100 * completedCount) / stats.Length,
        isPointSet);
}
// GET api/Status?hash=blobHash
//
// Returns the textual status of the job identified by the given hash.
public string Get(string hash)
{
    var manager = WebApiApplication.GetSharedJobManager(Request);
    string statusText = manager.GetStatus(hash).ToString();
    FrontendTrace.TraceVerbose("{0}: Reporting status {1}", hash, statusText);
    return statusText;
}
/// <summary>
/// Merges the result data sets of the given jobs into <paramref name="dst"/> and commits it.
/// </summary>
/// <param name="dst">Destination data set receiving the merged table.</param>
/// <param name="requests">Pairs of (job hash, requested variable names) identifying the sources.</param>
private void MergeCSV(DataSet dst, Tuple <string, string[]>[] requests)
{
    var jobManager = WebApiApplication.GetSharedJobManager(Request);

    // Open the sources inside the try block so that every data set that was successfully
    // opened gets disposed even when a later DataSet.Open call throws. (Previously all
    // sources were opened before entering the try, leaking them on partial failure.)
    var dsr = new Tuple <DataSet, string[]>[requests.Length];
    try
    {
        for (int i = 0; i < requests.Length; i++)
        {
            dsr[i] = new Tuple <DataSet, string[]>(DataSet.Open(jobManager.ResultDataSetUri(requests[i].Item1, false)), requests[i].Item2);
        }
        var config = WebApiApplication.GetFetchConfiguration(DateTime.MaxValue);
        TableExportHelper.MergeTable(config, dst, dsr);
        dst.Commit();
    }
    finally
    {
        // Dispose only the entries that were actually opened (null = Open never ran/failed).
        foreach (var r in dsr)
        {
            if (r != null)
            {
                r.Item1.Dispose();
            }
        }
    }
}
// Merges the result data sets of the jobs identified by the given hashes into dst and commits it.
// Each source contributes its values, uncertainty and (optionally) provenance variables, renamed
// with the source's environment-variable-name prefix; axis variables (lat/lon bounds and
// hours/days/years) are copied once, from the first source only.
// NOTE(review): assumes every source shares the axes of the first one and always contains
// "hours", "days" and "years" variables — confirm against the result data set format.
private void MergeCSV(DataSet dst, string[] hashes)
{
    for (var i = 0; i < hashes.Length; i++)
    {
        // Open the result data set produced by the job with this hash.
        using (var src = DataSet.Open(WebApiApplication.GetSharedJobManager(Request).ResultDataSetUri(hashes[i], false)))
        {
            // Environment variable name for this result; used as a prefix for all copied variables.
            var name = src.Metadata[RequestDataSetFormat.EnvironmentVariableNameKey].ToString();
            if (src.Metadata.ContainsKey(RequestDataSetFormat.DataSourceNameKey))
            {
                // Record both the data source names and their IDs from the current configuration.
                string[] dataSources = (string[])src.Metadata[RequestDataSetFormat.DataSourceNameKey];
                var config = WebApiApplication.GetFetchConfiguration(DateTime.MaxValue);
                dst.Metadata[name + "_dataSourceNames"] = dataSources;
                dst.Metadata[name + "_dataSourceIDs"] = dataSources.Select(ds => config.DataSources.Where(dsd => dsd.Name == ds).First().ID).ToArray();
            }
            // A one-dimensional values variable indicates a point set; otherwise a grid.
            bool isPointSet = src.Variables[RequestDataSetFormat.ValuesVariableName].Dimensions.Count() == 1;
            string visMethod = isPointSet ? "Points" : "Colormap";
            if (i == 0)
            {
                // Copy the shared axis variables from the first source only.
                if (src.Variables.Contains("lat"))
                {
                    dst.AddVariableByValue(src.Variables["lat"]);
                }
                if (src.Variables.Contains("latmax"))
                {
                    dst.AddVariableByValue(src.Variables["latmax"]);
                }
                if (src.Variables.Contains("latmin"))
                {
                    dst.AddVariableByValue(src.Variables["latmin"]);
                }
                if (src.Variables.Contains("lon"))
                {
                    dst.AddVariableByValue(src.Variables["lon"]);
                }
                if (src.Variables.Contains("lonmax"))
                {
                    dst.AddVariableByValue(src.Variables["lonmax"]);
                }
                if (src.Variables.Contains("lonmin"))
                {
                    dst.AddVariableByValue(src.Variables["lonmin"]);
                }
                dst.AddVariableByValue(src.Variables["hours"]);
                dst.AddVariableByValue(src.Variables["days"]);
                dst.AddVariableByValue(src.Variables["years"]);
                dst.Metadata["VisualHints"] = name + "_" + RequestDataSetFormat.ValuesVariableName + "Style: " + visMethod;
            }
            // Copy the values variable under a prefixed name, attaching a per-variable style hint.
            var valuesVar = src[RequestDataSetFormat.ValuesVariableName];
            dst.AddVariable <double>(name + "_" + RequestDataSetFormat.ValuesVariableName, valuesVar.GetData(), valuesVar.Dimensions.Select(d => d.Name).ToArray()).Metadata["VisualHints"] = "Style: " + visMethod;
            // Copy the uncertainty variable.
            var sdVar = src[RequestDataSetFormat.UncertaintyVariableName];
            dst.AddVariable <double>(name + "_" + RequestDataSetFormat.UncertaintyVariableName, sdVar.GetData(), sdVar.Dimensions.Select(d => d.Name).ToArray());
            // Copy the provenance variable when the source provides one.
            if (src.Variables.Contains(RequestDataSetFormat.ProvenanceVariableName))
            {
                var provVar = src[RequestDataSetFormat.ProvenanceVariableName];
                dst.AddVariable <ushort>(name + "_" + RequestDataSetFormat.ProvenanceVariableName, provVar.GetData(), provVar.Dimensions.Select(d => d.Name).ToArray());
            }
        }
    }
    dst.Commit();
}
// GET: /form?v=variables
// POST: /form (by 'Download results', 'View results', 'Download request' or 'Upload request')
//
// Single entry point for the request form. GET renders the form prefilled from the query
// string; POST dispatches on which submit button was pressed: upload a request file, download
// the request as text, redirect to the web client for viewing, or (default) submit the jobs
// and redirect to the results page with the job hashes in the query string.
public ActionResult Form()
{
    var method = HttpContext.Request.HttpMethod;
    var config = WebApiApplication.GetExtendedFetchConfiguration(DateTime.MaxValue);
    if (method == "GET")
    {
        return (View("Form", new RequestFormModel(config, Request.QueryString, false)));
    }
    else if (method == "POST")
    {
        if (Request.Form["uploadRequest"] != null)
        {
            // 'Upload request' button: rebuild the form model from the uploaded request file.
            if (Request.Files == null || Request.Files.Count < 1)
            {
                var model = new RequestFormModel(config);
                model.RequestUploadErrors = "No file with request is specified";
                return (View("Form", model));
            }
            else
            {
                return (View("Form", new RequestFormModel(config, Request.Files[0].InputStream)));
            }
        }
        else
        {
            // Parse and validate the posted form values; re-render the form on errors.
            var model = new RequestFormModel(config, Request.Form, true);
            if (model.HasErrors)
            {
                return (View("Form", model));
            }
            if (Request.Form["downloadRequest"] != null)
            {
                // 'Download request': return the request description as a plain-text file.
                return (File(
                            Encoding.UTF8.GetBytes(model.GetRequestText()),
                            "text/plain",
                            "request.txt"));
            }
            else if (Request.Form["view"] != null)
            {
                // 'View results': pass the request to the client page via the URL fragment.
                return (Redirect("v1/FetchClimate2.html#" + model.GetClientUrlParameters()));
            }
            // Download data: submit one job per point-set / grid request and build the
            // results-page query string from the returned job hashes.
            int minPtsPerPartition = FrontendSettings.Current.MinPtsPerPartition;
            int maxPtsPerPartition = FrontendSettings.Current.MaxPtsPerPartition;
            double jobRegistrationPermitedTime = FrontendSettings.Current.AllowedJobRegistrationSpan;
            int totalWorkers = RoleEnvironment.Roles["FetchWorker"].Instances.Count;
            string query = "";
            var jobManager = WebApiApplication.GetSharedJobManager(HttpContext);
            if (model.Points.Count > 0)
            {
                // All point-request hashes are joined with commas into the "p" parameter.
                string points = "";
                foreach (var fr in model.GetRequestsForPoints())
                {
                    var jobStatus = jobManager.Submit(fr, fr.GetSHAHash(), jobRegistrationPermitedTime, minPtsPerPartition, maxPtsPerPartition, totalWorkers);
                    if (points.Length > 0)
                    {
                        points += ",";
                    }
                    points += jobStatus.Hash;
                }
                query += "?p=" + HttpUtility.UrlEncode(points);
            }
            // Each grid gets its own numbered parameter: g1, g2, ...
            int index = 1;
            foreach (var g in model.Grids)
            {
                string hashes = "";
                foreach (var fr in model.GetRequestsForGrid(g))
                {
                    var jobStatus = jobManager.Submit(fr, fr.GetSHAHash(), jobRegistrationPermitedTime, minPtsPerPartition, maxPtsPerPartition, totalWorkers);
                    if (hashes.Length > 0)
                    {
                        hashes += ",";
                    }
                    hashes += jobStatus.Hash;
                }
                if (query.Length > 0)
                {
                    query += "&";
                }
                else
                {
                    query += "?";
                }
                query = String.Concat(query, "g", index++, "=", HttpUtility.UrlEncode(hashes));
            }
            return (Redirect("results" + query));
        }
    }
    else
    {
        throw new Exception("Method is not allowed");
    }
}
// POST api/Compute
//
// Accepts a serializable fetch request, registers it as a job and returns the job status
// string. If the job finishes within a short waiting window the final status is returned
// immediately; otherwise the caller is expected to poll GET api/Status with the hash.
// On any error a fault/failed-status string is returned instead of an HTTP error.
public string Post(Microsoft.Research.Science.FetchClimate2.Serializable.FetchRequest request)
{
    try
    {
        // An unset (default-constructed) timestamp is replaced with DateTime.MaxValue
        // before being snapped to an exact configuration timestamp below.
        if (request.ReproducibilityTimestamp == new DateTime())
        {
            request.ReproducibilityTimestamp = DateTime.MaxValue;
        }
        var exactTS = WebApiApplication.GetExactConfigurationTimestamp(request.ReproducibilityTimestamp);
        if (exactTS == DateTime.MinValue)
        {
            // DateTime.MinValue signals that no configuration exists for this timestamp.
            FrontendTrace.TraceError("Cannot process request. Timestamp {0} is too early for current service configuration", request.ReproducibilityTimestamp);
            return (String.Format(Constants.FaultReply, "Timestamp is too early for this service"));
        }
        else
        {
            FrontendTrace.TraceVerbose("Request received: timestamp = {0}, exact timestamp = {1}", request.ReproducibilityTimestamp, exactTS);
            request.ReproducibilityTimestamp = exactTS;
        }
        var jobManager = WebApiApplication.GetSharedJobManager(Request);
        // Mark jobs untouched for longer than the threshold as failed before submitting.
        jobManager.MarkOutdatedAsFailed(permitedTouchTimeTreshold);
        var fetchRequest = request.ConvertFromSerializable();
        string errorMsg;
        if (!fetchRequest.Domain.IsContentValid(out errorMsg)) //checking request content
        {
            return (string.Format(Constants.FaultReply, errorMsg));
        }
        string hash = fetchRequest.GetSHAHash();
        FrontendTrace.TraceInfo("{0}: Hash is computed for request", hash);
        var jobStatus = jobManager.Submit(fetchRequest, hash, jobRegistrationPermitedTime, minPtsPerPartition, maxPtsPerPartition, RoleEnvironment.Roles["FetchWorker"].Instances.Count);
        if (jobStatus.State == JobOrPartState.Pending || jobStatus.State == JobOrPartState.InProgress)
        {
            // Waiting for some time before response in case job manage to complete:
            // poll every jobStatusCheckIntervalMilisec until waitingFastResultPeriodSec
            // seconds have elapsed (the 0.001 factor converts milliseconds to seconds).
            FrontendTrace.TraceVerbose("{0}:Waiting for request completion", hash);
            for (int i = 0; i * jobStatusCheckIntervalMilisec * 0.001 < waitingFastResultPeriodSec; ++i)
            {
                Thread.Sleep(jobStatusCheckIntervalMilisec);
                jobStatus = jobManager.GetStatus(hash);
                if ((jobStatus.State == JobOrPartState.Completed) || (jobStatus.State == JobOrPartState.Failed))
                {
                    FrontendTrace.TraceVerbose("{0}:Request result is ready in less than {1} seconds, reporting {2} status", hash, waitingFastResultPeriodSec, jobStatus.ToString());
                    return (jobStatus.ToString());
                }
            }
            FrontendTrace.TraceVerbose("{0}:Request result is not ready in {1} seconds, reporting {2} status", hash, waitingFastResultPeriodSec, jobStatus.ToString());
            return (jobStatus.ToString());
        }
        else
        {
            return (jobStatus.ToString());
        }
    }
    catch (Exception exc)
    {
        // Failures are reported to the client as a failed job status string.
        FrontendTrace.TraceError("Request is processing error: {0}", exc.ToString());
        return (JobStatus.GetFailedStatus(exc.ToString()).ToString());
    }
}