/// <summary>
/// Sample demonstrating how to cancel a Spark batch job by its id.
/// </summary>
public void CancelSparkBatchJob()
{
    // Placeholder id; a real caller would use the id of a previously submitted job.
    int jobId = 0;

    #region Snippet:DeleteSparkBatchJob
    Response operation = batchClient.CancelSparkBatchJob(jobId);
    #endregion
}
/// <summary>
/// Cancels the Spark batch job with the given id, optionally blocking
/// until the job's execution has finished.
/// </summary>
/// <param name="batchId">Identifier of the Spark batch job to cancel.</param>
/// <param name="waitForCompletion">When true, polls the job until it completes.</param>
public void CancelSparkBatchJob(int batchId, bool waitForCompletion)
{
    _sparkBatchClient.CancelSparkBatchJob(batchId);

    // Fire-and-forget unless the caller asked us to block.
    if (!waitForCompletion)
    {
        return;
    }

    var job = GetSparkBatchJob(batchId);
    PollSparkBatchJobExecution(job);
}
/// <summary>
/// Synchronous end-to-end sample: create a client, submit a Spark batch job,
/// list jobs, retrieve the submitted job, then cancel it.
/// </summary>
public void SubmitSparkJobSync()
{
    // Environment variable with the Synapse workspace endpoint.
    string endpoint = TestEnvironment.EndpointUrl;
    // Environment variable with the Synapse Spark pool name.
    string sparkPoolName = TestEnvironment.SparkPoolName;
    // Environment variable with the ADLS Gen2 storage account associated with the Synapse workspace.
    string storageAccount = TestEnvironment.StorageAccountName;
    // Environment variable with the file system of ADLS Gen2 storage account associated with the Synapse workspace.
    string fileSystem = TestEnvironment.StorageFileSystemName;

    #region Snippet:CreateSparkBatchClient
    SparkBatchClient client = new SparkBatchClient(new Uri(endpoint), sparkPoolName, new DefaultAzureCredential());
    #endregion

    #region Snippet:SubmitSparkBatchJob
    string name = $"batch-{Guid.NewGuid()}";
    string file = string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/wordcount.zip", fileSystem, storageAccount);
    SparkBatchJobOptions request = new SparkBatchJobOptions(name, file)
    {
        ClassName = "WordCount",
        // Input text and output folder passed to the WordCount job.
        Arguments =
        {
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/shakespeare.txt", fileSystem, storageAccount),
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/result/", fileSystem, storageAccount),
        },
        DriverMemory = "28g",
        DriverCores = 4,
        ExecutorMemory = "28g",
        ExecutorCores = 4,
        ExecutorCount = 2
    };

    SparkBatchJob jobCreated = client.CreateSparkBatchJob(request);
    #endregion

    #region Snippet:ListSparkBatchJobs
    Response<SparkBatchJobCollection> jobs = client.GetSparkBatchJobs();
    foreach (SparkBatchJob job in jobs.Value.Sessions)
    {
        Console.WriteLine(job.Name);
    }
    #endregion

    #region Snippet:GetSparkBatchJob
    SparkBatchJob retrievedJob = client.GetSparkBatchJob(jobCreated.Id);
    Debug.WriteLine($"Job is returned with name {retrievedJob.Name} and state {retrievedJob.State}");
    #endregion

    #region Snippet:DeleteSparkBatchJob
    Response operation = client.CancelSparkBatchJob(jobCreated.Id);
    #endregion
}
/// <summary>
/// Sample demonstrating the Spark batch client: create the client, submit a
/// batch job, list jobs, then cancel one. The /*@@*/ and //@@ markers drive
/// snippet extraction: //@@ lines are shown in docs but not compiled, /*@@*/
/// lines are compiled but hidden from docs.
/// </summary>
public void SparkSample()
{
    #region Snippet:CreateBatchClient
    // Replace the string below with your actual endpoint url.
    string endpoint = "<my-endpoint-url>";
    /*@@*/ endpoint = TestEnvironment.EndpointUrl;
    string sparkPoolName = TestEnvironment.SparkPoolName;

    SparkBatchClient client = new SparkBatchClient(endpoint: new Uri(endpoint), sparkPoolName: sparkPoolName, credential: new DefaultAzureCredential());
    #endregion

    // Environment variable with the storage account associated with the Synapse workspace endpoint.
    string storageAccount = TestEnvironment.StorageAccountName;
    // Environment variable with the file system of the storage account.
    string fileSystem = TestEnvironment.StorageFileSystemName;

    #region Snippet:CreateBatchJob
    string name = $"batchSample";
    string file = string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/java/wordcount/wordcount.jar", fileSystem, storageAccount);
    SparkBatchJobOptions options = new SparkBatchJobOptions(name: name, file: file)
    {
        ClassName = "WordCount",
        // Input text and output folder passed to the WordCount job.
        Arguments =
        {
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/java/wordcount/shakespeare.txt", fileSystem, storageAccount),
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/java/wordcount/result/", fileSystem, storageAccount),
        },
        DriverMemory = "28g",
        DriverCores = 4,
        ExecutorMemory = "28g",
        ExecutorCores = 4,
        ExecutorCount = 2
    };

    SparkBatchJob jobCreated = client.CreateSparkBatchJob(options);
    #endregion

    #region Snippet:ListSparkBatchJobs
    Response<SparkBatchJobCollection> jobs = client.GetSparkBatchJobs();
    foreach (SparkBatchJob job in jobs.Value.Sessions)
    {
        Console.WriteLine(job.Name);
    }
    #endregion

    #region Snippet:DeleteSparkBatchJob
    /*@@*/ int jobId = jobs.Value.Sessions.First().Id;
    // Replace the integer below with your actual job ID.
    // Fixed: the doc-visible line previously declared `string jobId = 0;`,
    // which is type-incorrect and would not compile for readers of the snippet.
    //@@ int jobId = 0;

    Response operation = client.CancelSparkBatchJob(jobId);
    #endregion
}
/// <summary>
/// Synchronous sample: create a client, submit a Spark batch job, retrieve it,
/// then cancel it.
/// </summary>
public void SubmitSparkJobSync()
{
    // Environment variable with the Synapse workspace endpoint.
    string workspaceUrl = TestEnvironment.WorkspaceUrl;
    // Environment variable with the Synapse Spark pool name.
    string sparkPoolName = TestEnvironment.SparkPoolName;
    // Environment variable with the ADLS Gen2 storage account associated with the Synapse workspace.
    string storageAccount = TestEnvironment.StorageAccountName;
    // Environment variable with the file system of ADLS Gen2 storage account associated with the Synapse workspace.
    string fileSystem = TestEnvironment.StorageFileSystemName;

    #region Snippet:SparkBatchSample1SparkBatchClient
    SparkBatchClient client = new SparkBatchClient(new Uri(workspaceUrl), sparkPoolName, new DefaultAzureCredential());
    #endregion

    #region Snippet:SparkBatchSample1SubmitSparkJob
    string name = $"batch-{Guid.NewGuid()}";
    string file = string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/java/wordcount/wordcount.jar", fileSystem, storageAccount);
    SparkBatchJobOptions request = new SparkBatchJobOptions(name, file)
    {
        ClassName = "WordCount",
        // Use the collection-initializer form: SparkBatchJobOptions.Arguments is a
        // get-only IList<string>, so assigning `new List<string>` does not compile.
        // This also matches the other samples in this file.
        Arguments =
        {
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/java/wordcount/shakespeare.txt", fileSystem, storageAccount),
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/java/wordcount/result/", fileSystem, storageAccount),
        },
        DriverMemory = "28g",
        DriverCores = 4,
        ExecutorMemory = "28g",
        ExecutorCores = 4,
        ExecutorCount = 2
    };

    SparkBatchJob jobCreated = client.CreateSparkBatchJob(request);
    #endregion

    #region Snippet:SparkBatchSample1GetSparkJob
    SparkBatchJob job = client.GetSparkBatchJob(jobCreated.Id);
    Debug.WriteLine($"Job is returned with name {job.Name} and state {job.State}");
    #endregion

    #region Snippet:SparkBatchSample1CancelSparkJob
    Response operation = client.CancelSparkBatchJob(jobCreated.Id);
    #endregion
}
/// <summary>
/// Synchronous sample using the long-running-operation API: start a Spark batch
/// job, poll it to completion, list jobs, retrieve the job, then cancel it.
/// </summary>
public void SubmitSparkJobSync()
{
    #region Snippet:CreateSparkBatchClient
#if SNIPPET
    // Replace the strings below with the spark, endpoint, and file system information
    string sparkPoolName = "<my-spark-pool-name>";
    string endpoint = "<my-endpoint-url>";
    string storageAccount = "<my-storage-account-name>";
    string fileSystem = "<my-storage-filesystem-name>";
#else
    string sparkPoolName = TestEnvironment.SparkPoolName;
    string endpoint = TestEnvironment.EndpointUrl;
    string storageAccount = TestEnvironment.StorageAccountName;
    string fileSystem = TestEnvironment.StorageFileSystemName;
#endif

    SparkBatchClient client = new SparkBatchClient(new Uri(endpoint), sparkPoolName, new DefaultAzureCredential());
    #endregion

    #region Snippet:SubmitSparkBatchJob
    string name = $"batch-{Guid.NewGuid()}";
    string file = string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/wordcount.zip", fileSystem, storageAccount);
    SparkBatchJobOptions request = new SparkBatchJobOptions(name, file)
    {
        ClassName = "WordCount",
        // Input text and output folder passed to the WordCount job.
        Arguments =
        {
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/shakespeare.txt", fileSystem, storageAccount),
            string.Format("abfss://{0}@{1}.dfs.core.windows.net/samples/net/wordcount/result/", fileSystem, storageAccount),
        },
        DriverMemory = "28g",
        DriverCores = 4,
        ExecutorMemory = "28g",
        ExecutorCores = 4,
        ExecutorCount = 2
    };

    // Start the job as a long-running operation and poll until it reaches a
    // terminal state, pausing between status refreshes.
    SparkBatchOperation createOperation = client.StartCreateSparkBatchJob(request);
    while (!createOperation.HasCompleted)
    {
        System.Threading.Thread.Sleep(2000);
        createOperation.UpdateStatus();
    }
    SparkBatchJob jobCreated = createOperation.Value;
    #endregion

    #region Snippet:ListSparkBatchJobs
    Response<SparkBatchJobCollection> jobs = client.GetSparkBatchJobs();
    foreach (SparkBatchJob job in jobs.Value.Sessions)
    {
        Console.WriteLine(job.Name);
    }
    #endregion

    #region Snippet:GetSparkBatchJob
    SparkBatchJob retrievedJob = client.GetSparkBatchJob(jobCreated.Id);
    Debug.WriteLine($"Job is returned with name {retrievedJob.Name} and state {retrievedJob.State}");
    #endregion

    #region Snippet:CancelSparkBatchJob
    Response operation = client.CancelSparkBatchJob(jobCreated.Id);
    #endregion
}