/// <summary>
/// Wraps the client-side job details in a <c>PassthroughResponse</c> and serializes it.
/// </summary>
/// <param name="jobDetails">Job details returned by the Hadoop client.</param>
/// <returns>The serialized passthrough response.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when the job's status code cannot be mapped onto the RDFE
/// <c>JobStatusCode</c> enumeration.
/// </exception>
public string SerializeJobDetails(Hadoop.Client.JobDetails jobDetails)
        {
            var result = new PassthroughResponse();

            // Attach an error envelope whenever the job reports an error code or a
            // non-Accepted HTTP status; otherwise result.Error remains null.
            if (jobDetails.ErrorCode.IsNotNullOrEmpty() || jobDetails.HttpStatusCode != HttpStatusCode.Accepted)
            {
                result.Error = new PassthroughErrorResponse
                {
                    StatusCode = jobDetails.HttpStatusCode,
                    ErrorId = jobDetails.ErrorCode
                };
            }

            // Copy the client job details onto the RDFE model type.
            var details = new Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobDetails()
            {
                ErrorOutputPath = jobDetails.ErrorOutputPath,
                ExitCode = jobDetails.ExitCode,
                LogicalOutputPath = jobDetails.LogicalOutputPath,
                Name = jobDetails.Name,
                PhysicalOutputPath = new Uri(jobDetails.PhysicalOutputPath),
                Query = jobDetails.Query,
                // The RDFE model stores the submission time as a tick-count string.
                SubmissionTime = jobDetails.SubmissionTime.Ticks.ToString()
            };

            // BUG FIX: the original code validated this conversion with a test-framework
            // Assert.IsTrue; in production (or with asserts compiled out) a failed parse
            // would silently leave statusCode at its default value. Fail loudly instead.
            Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobStatusCode statusCode;
            if (!Enum.TryParse(jobDetails.StatusCode.ToString(), out statusCode))
            {
                throw new InvalidOperationException(
                    string.Format("Unable to convert job status '{0}' to a known JobStatusCode.", jobDetails.StatusCode));
            }
            details.StatusCode = statusCode;
            result.Data = details;
            return this.SerializeJobDetails(result);
        }
        // This code is tightly coupled to Templeton. If parsing fails, we capture the full json
        // payload and the error, then log them upstream. The constants stay local because
        // (a) this is the only place they are used and (b) there are a lot of them; move them
        // only if they ever become shared. For a sample response see the large comment at the
        // end of this file.

        /// <summary>
        /// Parses a Templeton job-status JSON payload into a <c>JobDetails</c> instance.
        /// </summary>
        /// <param name="content">HTTP response content whose body is the Templeton JSON payload.</param>
        /// <returns>The job details extracted from the payload.</returns>
        /// <exception cref="HttpParseException">
        /// Thrown when any part of the payload is missing or malformed; the message embeds the
        /// original parse error plus up to 4000 characters of the raw JSON for upstream logging.
        /// </exception>
        internal async Task<JobDetails> GetJobDetailsFromServerResponse(HttpContent content)
        {
            // Top-level sections of the Templeton response.
            const string userArgsSection = "userargs";
            const string defineSection = "define";
            const string statusSection = "status";

            // Field names within those sections.
            const string jobNameKey = "hdInsightJobName=";
            const string statusDirectory = "statusdir";
            const string exitCodeValue = "exitValue";
            const string startTimeValue = "startTime";
            const string jobStatusValue = "runState";
            const string hiveQueryValue = "execute";

            // File names appended to the job's status directory.
            const string outputFile = "/stdout";
            const string errorFile = "/stderr";

            Contract.AssertArgNotNull(content, "content");

            JObject result = null;
            try
            {
                result = await content.ReadAsAsync<JObject>();
                Contract.Assert(result != null);

                var outputAsvPath = (string)result[userArgsSection][statusDirectory];
                var outputFolderUri = GetOutputFolderUri(outputAsvPath);

                // The job name travels as a "hdInsightJobName=<name>" entry in the defines list.
                var defines = result[userArgsSection][defineSection].ToArray();
                var jobNameItem = (string)defines.First(s => ((string)s).Contains(jobNameKey));
                // BUG FIX: the previous Split('=')[1] truncated job names that themselves
                // contain '='. Take everything after the key instead.
                var jobName = jobNameItem.Substring(
                    jobNameItem.IndexOf(jobNameKey, StringComparison.Ordinal) + jobNameKey.Length);

                var details = new JobDetails
                    {
                        ExitCode = (int)result[exitCodeValue],
                        SubmissionTime = result[statusSection][startTimeValue].ToString(),
                        Name = jobName,
                        StatusCode = (JobStatusCode)Enum.Parse(typeof(JobStatusCode), result[statusSection][jobStatusValue].ToString()),
                        PhysicalOutputPath = new Uri(outputFolderUri + outputFile),
                        LogicalOutputPath = outputAsvPath + outputFile,
                        ErrorOutputPath = outputFolderUri + errorFile,
                        Query = (string)result[userArgsSection][hiveQueryValue],
                    };
                return details;
            }
            catch (Exception ex)
            {
                var rawJson = string.Empty;
                if (result != null)
                {
                    rawJson = result.ToString();
                    if (rawJson.Length > 4000)
                    {
                        // Truncate the response if it is larger than 4000 chars, to keep
                        // oversized payloads out of the logs.
                        rawJson = rawJson.Substring(0, 4000);
                    }
                }

                // NOTE(review): the inner exception is folded into the message rather than
                // chained; HttpParseException offers no (message, inner) constructor here.
                throw new HttpParseException(string.Format(JobSubmissionConstants.UnableToParseJobDetailsLogMessage, ex.Message, rawJson));
            }
        }
 // NOTE(review): this method is a logic-identical duplicate of the SerializeJobDetails
 // overload defined earlier in this file; two members with the same signature in one type
 // will not compile, so one copy should be deleted (or this chunk belongs to a different
 // type — verify before removing).
 /// <summary>
 /// Wraps the client-side job details in a <c>PassthroughResponse</c> and serializes it.
 /// </summary>
 /// <param name="jobDetails">Job details returned by the Hadoop client.</param>
 /// <returns>The serialized passthrough response.</returns>
 public string SerializeJobDetails(Hadoop.Client.JobDetails jobDetails)
 {
     var result = new PassthroughResponse();
     // Attach an error envelope whenever the job reports an error code or a
     // non-Accepted HTTP status; otherwise result.Error remains null.
     if (jobDetails.ErrorCode.IsNotNullOrEmpty() || jobDetails.HttpStatusCode != HttpStatusCode.Accepted)
     {
         result.Error = new PassthroughErrorResponse { StatusCode = jobDetails.HttpStatusCode, ErrorId = jobDetails.ErrorCode };
     }
     // Copy the client job details onto the RDFE model type.
     var details = new Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobDetails()
     {
         ErrorOutputPath = jobDetails.ErrorOutputPath,
         ExitCode = jobDetails.ExitCode,
         LogicalOutputPath = jobDetails.LogicalOutputPath,
         Name = jobDetails.Name,
         PhysicalOutputPath = new Uri(jobDetails.PhysicalOutputPath),
         Query = jobDetails.Query,
         // The RDFE model stores the submission time as a tick-count string.
         SubmissionTime = jobDetails.SubmissionTime.Ticks.ToString()
     };
     Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobStatusCode statusCode;
     // NOTE(review): test-framework Assert in production code — if the parse fails outside
     // a test run, statusCode silently keeps its default value; consider an explicit check.
     Assert.IsTrue(Microsoft.ClusterServices.RDFEProvider.ResourceExtensions.JobSubmission.Models.JobStatusCode.TryParse(jobDetails.StatusCode.ToString(), out statusCode));
     details.StatusCode = statusCode;
     result.Data = details;
     return this.SerializeJobDetails(result);
 }