/// <summary>
/// Converts a <see cref="LivyBatchResult"/> returned by the Livy service into a
/// <see cref="SparkJobSyncResult"/>, attaching UI/log links when a Spark UI url is present.
/// </summary>
/// <param name="batchResult">Deserialized Livy batch response.</param>
/// <returns>The synchronized job status derived from the batch result.</returns>
/// <exception cref="GeneralException">Thrown when the batch result cannot be translated.</exception>
public static SparkJobSyncResult ParseLivyBatchResult(LivyBatchResult batchResult)
{
    var sparkUiUrl = batchResult.AppInfo?.GetOrDefault("sparkUiUrl", null);

    // Links are only available when the service reported a Spark UI url.
    Dictionary<string, string> links = null;
    if (sparkUiUrl != null)
    {
        links = new Dictionary<string, string>()
        {
            { "App UI", sparkUiUrl },
            { "Logs", sparkUiUrl.Replace("/proxy/", "/cluster/app/") }
        };
    }

    var result = new SparkJobSyncResult();
    try
    {
        // Collapse the log lines (if any) into a single newline-separated note.
        string note = null;
        if (batchResult.Log != null)
        {
            note = string.Join("\n", batchResult.Log);
        }

        result.JobId = batchResult.Id.ToString();
        result.JobState = ParseLivyBatchState(batchResult.State);
        result.ClientCache = JObject.FromObject(batchResult);
        result.Links = links;
        result.Note = note;
    }
    catch (Exception ex)
    {
        throw new GeneralException($"Couldn't parse response from Livy service:'{batchResult}', message:'{ex.Message}'");
    }

    return result;
}
/// <summary>
/// Converts a <see cref="DatabricksJobResult"/> returned by the Databricks service into a
/// <see cref="SparkJobSyncResult"/>.
/// </summary>
/// <param name="jobResult">Deserialized Databricks job response.</param>
/// <returns>The synchronized job status derived from the job result.</returns>
/// <exception cref="GeneralException">Thrown when the job result cannot be translated.</exception>
public static SparkJobSyncResult ParseDatabricksJobResult(DatabricksJobResult jobResult)
{
    var result = new SparkJobSyncResult();
    try
    {
        result.JobId = jobResult.JobId.ToString();
        // "life_cycle_state" carries the coarse run state; "state_message" is a human-readable note.
        result.JobState = ParseDatabricksJobState(jobResult.State.GetOrDefault("life_cycle_state", null));
        result.ClientCache = JObject.FromObject(jobResult);
        result.Note = jobResult.State.GetOrDefault("state_message", null);
    }
    catch (Exception ex)
    {
        throw new GeneralException($"Couldn't parse response from Databricks service:'{jobResult}', message:'{ex.Message}'");
    }

    return result;
}
/// <summary>
/// Verifies that a successful Databricks HTTP response is parsed into the expected
/// <see cref="SparkJobSyncResult"/> (job id, running state, and state message).
/// </summary>
public void TestParseJobInfoFromDatabricksHttpResult()
{
    // Arrange: a successful HTTP result whose content is a running job payload.
    var input = new DatabricksHttpResult
    {
        Content = "{\"job_id\":40,\"run_id\":49,\"number_in_job\":1,\"original_attempt_run_id\":49,\"state\":{\"life_cycle_state\":\"RUNNING\",\"state_message\":\"In run\"}}",
        IsSuccess = true,
        StatusCode = System.Net.HttpStatusCode.OK
    };
    var expected = new SparkJobSyncResult
    {
        ClientCache = JToken.Parse("{\"job_id\": 40,\"run_id\": 49,\"state\": {\"life_cycle_state\": \"RUNNING\",\"state_message\": \"In run\"}}"),
        JobId = "40",
        JobState = JobState.Running,
        Links = null,
        Note = "In run"
    };

    // Act
    var actual = DatabricksClient.ParseJobInfoFromDatabricksHttpResult(input);

    // Assert
    Assert.AreEqual(expected, actual, "ParseJobInfoFromDatabricksHttpResult() failed");
}
/// <summary>
/// Cancels the Databricks run identified by <paramref name="jobClientData"/>, deletes the job
/// definition, then polls the service until the run is no longer reported as running
/// (at most 5 polls) and returns the last observed status.
/// </summary>
/// <param name="jobClientData">Client cache for the job; must deserialize to <see cref="DatabricksJobResult"/>.</param>
/// <returns>The job's status as observed after the stop request.</returns>
public async Task<SparkJobSyncResult> StopJob(JToken jobClientData)
{
    var clientData = JsonConvert.DeserializeObject<DatabricksJobResult>(jobClientData.ToString());

    await CallDatabricksService(HttpMethod.Post, "jobs/runs/cancel", $@"{{""run_id"":{clientData.RunId}}}");
    await CallDatabricksService(HttpMethod.Post, "jobs/delete", $@"{{""job_id"":{clientData.JobId}}}");

    // Fetch status of job after it has been stopped.
    var result = new SparkJobSyncResult();
    var numRetry = 0;
    do
    {
        var jobStatus = await CallDatabricksService(HttpMethod.Get, $"jobs/runs/get?run_id={clientData.RunId}");
        result = ParseJobInfoFromDatabricksHttpResult(jobStatus);
        numRetry++;

        // While the job is still terminating, re-check its state up to 5 times.
        // (Original condition "numRetry <= 5" polled 6 times, contradicting the stated
        // maximum of 5; "< 5" matches the intent.)
        if (result.JobState == JobState.Running && numRetry < 5)
        {
            // Brief pause between polls so we don't hammer the service while
            // termination is still in progress.
            await Task.Delay(TimeSpan.FromSeconds(1));
        }
    } while (result.JobState == JobState.Running && numRetry < 5);

    return result;
}
/// <summary>
/// Verifies that a <see cref="DatabricksJobResult"/> with a RUNNING life-cycle state is parsed
/// into the expected <see cref="SparkJobSyncResult"/>.
/// </summary>
public void TestParseDatabricksJobResult()
{
    // Arrange: a running job result with a state message.
    var input = new DatabricksJobResult
    {
        JobId = 40,
        RunId = 49,
        State = new System.Collections.Generic.Dictionary<string, string>()
        {
            { "life_cycle_state", "RUNNING" },
            { "state_message", "In run" }
        }
    };
    var expected = new SparkJobSyncResult
    {
        ClientCache = JToken.Parse("{\"job_id\": 40,\"run_id\": 49,\"state\": {\"life_cycle_state\": \"RUNNING\",\"state_message\": \"In run\"}}"),
        JobId = "40",
        JobState = JobState.Running,
        Links = null,
        Note = "In run"
    };

    // Act
    var actual = DatabricksClient.ParseDatabricksJobResult(input);

    // Assert
    Assert.AreEqual(expected, actual, "ParseDatabricksJobResult() failed");
}