// Example #1
        /// <summary>
        /// Submits a Spark batch job to the given workspace/pool and, when requested,
        /// polls the submission until it completes.
        /// </summary>
        /// <param name='workspaceName'>Name of the workspace to operate on.</param>
        /// <param name='sparkPoolName'>Name of the Spark pool.</param>
        /// <param name='livyRequest'>Livy-compatible batch job request payload.</param>
        /// <param name='waitForCompletion'>When true, blocks until the submission finishes polling.</param>
        public ExtendedLivyBatchResponse SubmitSparkBatchJob(string workspaceName, string sparkPoolName, ExtendedLivyBatchRequest livyRequest, bool waitForCompletion)
        {
            // Always ask the service for the detailed (extended) Livy response.
            var submitted = _synapseClient.SparkBatch.Create(workspaceName, sparkPoolName, livyRequest, detailed: true);

            return waitForCompletion
                ? PollSparkBatchJobSubmission(workspaceName, sparkPoolName, submitted)
                : submitted;
        }
        /// <summary>
        /// Create new spark batch job.
        /// </summary>
        /// <remarks>
        /// AutoRest-style generated HTTP plumbing: validates inputs, builds the
        /// request URL and headers, serializes the payload, sends the request,
        /// and deserializes the response. Keep edits mechanical — the statement
        /// order (tracing, credentials, disposal) is intentional.
        /// </remarks>
        /// <param name='workspaceName'>
        /// The name of the workspace to execute operations on.
        /// </param>
        /// <param name='sparkPoolName'>
        /// Name of the spark pool. "ondemand" targets the ondemand pool.
        /// </param>
        /// <param name='livyRequest'>
        /// Livy compatible batch job request payload.
        /// </param>
        /// <param name='detailed'>
        /// Optional query param specifying whether detailed response is returned
        /// beyond plain livy.
        /// </param>
        /// <param name='customHeaders'>
        /// Headers that will be added to request.
        /// </param>
        /// <param name='cancellationToken'>
        /// The cancellation token.
        /// </param>
        /// <exception cref="CloudException">
        /// Thrown when the operation returned an invalid status code
        /// </exception>
        /// <exception cref="SerializationException">
        /// Thrown when unable to deserialize the response
        /// </exception>
        /// <exception cref="ValidationException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <exception cref="System.ArgumentNullException">
        /// Thrown when a required parameter is null
        /// </exception>
        /// <return>
        /// A response object containing the response body and response headers.
        /// </return>
        public async Task <AzureOperationResponse <ExtendedLivyBatchResponse> > CreateWithHttpMessagesAsync(string workspaceName, string sparkPoolName, ExtendedLivyBatchRequest livyRequest, bool?detailed = default(bool?), Dictionary <string, List <string> > customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
        {
            // Fail fast on missing required parameters and client configuration.
            if (workspaceName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "workspaceName");
            }
            if (Client.SynapseDnsSuffix == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.SynapseDnsSuffix");
            }
            if (Client.LivyApiVersion == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.LivyApiVersion");
            }
            if (sparkPoolName == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "sparkPoolName");
            }
            if (livyRequest == null)
            {
                throw new ValidationException(ValidationRules.CannotBeNull, "livyRequest");
            }
            // Tracing: record the call and its parameters when client tracing is enabled.
            bool   _shouldTrace  = ServiceClientTracing.IsEnabled;
            string _invocationId = null;

            if (_shouldTrace)
            {
                _invocationId = ServiceClientTracing.NextInvocationId.ToString();
                Dictionary <string, object> tracingParameters = new Dictionary <string, object>();
                tracingParameters.Add("workspaceName", workspaceName);
                tracingParameters.Add("sparkPoolName", sparkPoolName);
                tracingParameters.Add("detailed", detailed);
                tracingParameters.Add("livyRequest", livyRequest);
                tracingParameters.Add("cancellationToken", cancellationToken);
                ServiceClientTracing.Enter(_invocationId, this, "Create", tracingParameters);
            }
            // Construct URL
            var _baseUrl = Client.BaseUri;
            var _url     = _baseUrl + (_baseUrl.EndsWith("/") ? "" : "/") + "livyApi/versions/{livyApiVersion}/sparkPools/{sparkPoolName}/batches";

            // NOTE(review): "{workspaceName}" and "{SynapseDnsSuffix}" do not occur in the
            // path template above — presumably they are placeholders inside Client.BaseUri
            // (a parameterized host template); TODO confirm against the client definition.
            // NOTE(review): workspaceName is substituted without URI escaping, unlike
            // sparkPoolName below — verify that workspace names can never need escaping.
            _url = _url.Replace("{workspaceName}", workspaceName);
            _url = _url.Replace("{SynapseDnsSuffix}", Client.SynapseDnsSuffix);
            _url = _url.Replace("{livyApiVersion}", Client.LivyApiVersion);
            _url = _url.Replace("{sparkPoolName}", System.Uri.EscapeDataString(sparkPoolName));
            List <string> _queryParameters = new List <string>();

            if (detailed != null)
            {
                // Serialize the bool through the client's JSON settings, then strip the quotes.
                _queryParameters.Add(string.Format("detailed={0}", System.Uri.EscapeDataString(Rest.Serialization.SafeJsonConvert.SerializeObject(detailed, Client.SerializationSettings).Trim('"'))));
            }
            if (_queryParameters.Count > 0)
            {
                _url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
            }
            // Create HTTP transport objects
            var _httpRequest = new HttpRequestMessage();
            HttpResponseMessage _httpResponse = null;

            _httpRequest.Method     = new HttpMethod("POST");
            _httpRequest.RequestUri = new System.Uri(_url);
            // Set Headers: client request id and accept-language, then caller overrides.
            if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
            {
                _httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
            }
            if (Client.AcceptLanguage != null)
            {
                if (_httpRequest.Headers.Contains("accept-language"))
                {
                    _httpRequest.Headers.Remove("accept-language");
                }
                _httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
            }


            // Custom headers replace (not append to) any header with the same key.
            if (customHeaders != null)
            {
                foreach (var _header in customHeaders)
                {
                    if (_httpRequest.Headers.Contains(_header.Key))
                    {
                        _httpRequest.Headers.Remove(_header.Key);
                    }
                    _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
                }
            }

            // Serialize Request
            string _requestContent = null;

            if (livyRequest != null)
            {
                _requestContent      = Rest.Serialization.SafeJsonConvert.SerializeObject(livyRequest, Client.SerializationSettings);
                _httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
                _httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
            }
            // Set Credentials: lets the credential object sign/augment the request.
            if (Client.Credentials != null)
            {
                cancellationToken.ThrowIfCancellationRequested();
                await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
            }
            // Send Request
            if (_shouldTrace)
            {
                ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
            }
            cancellationToken.ThrowIfCancellationRequested();
            _httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);

            if (_shouldTrace)
            {
                ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
            }
            HttpStatusCode _statusCode = _httpResponse.StatusCode;

            cancellationToken.ThrowIfCancellationRequested();
            string _responseContent = null;

            // Anything other than 200 is surfaced as a CloudException; the request and
            // response are disposed here because they will not reach the caller.
            if ((int)_statusCode != 200)
            {
                var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
                try
                {
                    _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);

                    CloudError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject <CloudError>(_responseContent, Client.DeserializationSettings);
                    if (_errorBody != null)
                    {
                        ex      = new CloudException(_errorBody.Message);
                        ex.Body = _errorBody;
                    }
                }
                catch (JsonException)
                {
                    // Ignore the exception: a non-JSON error body still yields the generic CloudException.
                }
                ex.Request  = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
                ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
                if (_httpResponse.Headers.Contains("x-ms-request-id"))
                {
                    ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
                }
                if (_shouldTrace)
                {
                    ServiceClientTracing.Error(_invocationId, ex);
                }
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw ex;
            }
            // Create Result: on success the live request/response objects are handed to
            // the caller inside the result, so they are NOT disposed here.
            var _result = new AzureOperationResponse <ExtendedLivyBatchResponse>();

            _result.Request  = _httpRequest;
            _result.Response = _httpResponse;
            if (_httpResponse.Headers.Contains("x-ms-request-id"))
            {
                _result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
            }
            // Deserialize Response
            if ((int)_statusCode == 200)
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);

                try
                {
                    _result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject <ExtendedLivyBatchResponse>(_responseContent, Client.DeserializationSettings);
                }
                catch (JsonException ex)
                {
                    // Deserialization failure: clean up transport objects before rethrowing.
                    _httpRequest.Dispose();
                    if (_httpResponse != null)
                    {
                        _httpResponse.Dispose();
                    }
                    throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
                }
            }
            if (_shouldTrace)
            {
                ServiceClientTracing.Exit(_invocationId, _result);
            }
            return(_result);
        }
// Example #3
        /// <summary>
        /// Builds a Livy batch request from the cmdlet parameters and submits it
        /// without waiting for completion, writing the resulting job to the pipeline.
        /// </summary>
        public override void ExecuteCmdlet()
        {
            this.Language = LanguageType.Parse(this.Language);

            // Spark and SparkDotNet jobs require an entry-point class name.
            bool missingMainClass = string.IsNullOrEmpty(this.MainClassName);
            if (missingMainClass && (this.Language == LanguageType.SparkDotNet || this.Language == LanguageType.Spark))
            {
                throw new SynapseException(Resources.MissingMainClassName);
            }

            // When a pool object is supplied, derive workspace/pool names from its resource id.
            if (this.IsParameterBound(c => c.SparkPoolObject))
            {
                var poolId = new ResourceIdentifier(this.SparkPoolObject.Id);
                string parentResource = poolId.ParentResource;
                this.WorkspaceName = parentResource.Substring(parentResource.LastIndexOf('/') + 1);
                this.SparkPoolName = poolId.ResourceName;
            }

            // Normalize all user-supplied URLs (main file, arguments, reference files).
            this.MainDefinitionFile = Utils.NormalizeUrl(this.MainDefinitionFile);
            if (this.CommandLineArgument != null)
            {
                for (int index = 0; index < this.CommandLineArgument.Length; index++)
                {
                    this.CommandLineArgument[index] = Utils.NormalizeUrl(this.CommandLineArgument[index]);
                }
            }

            if (this.ReferenceFile != null)
            {
                for (int index = 0; index < this.ReferenceFile.Length; index++)
                {
                    this.ReferenceFile[index] = Utils.NormalizeUrl(this.ReferenceFile[index]);
                }
            }

            // Split reference files into jar and non-jar lists.
            Utils.CategorizedFiles(this.ReferenceFile, out IList<string> jars, out IList<string> files);

            bool isSparkDotNet = this.Language == LanguageType.SparkDotNet;
            var nodeSize = SynapseConstants.ComputeNodeSizes[this.ExecutorSize];
            var batchRequest = new ExtendedLivyBatchRequest
            {
                Name = this.Name,
                // .NET jobs launch through a fixed wrapper jar/class; the user's file and
                // class are passed along as the leading program arguments instead.
                File = isSparkDotNet ? SynapseConstants.SparkDotNetJarFile : this.MainDefinitionFile,
                ClassName = isSparkDotNet
                    ? SynapseConstants.SparkDotNetClassName
                    : (this.Language == LanguageType.PySpark ? null : this.MainClassName),
                Args = isSparkDotNet
                    ? new List<string> { this.MainDefinitionFile, this.MainClassName }
                        .Concat(this.CommandLineArgument ?? new string[0])
                        .ToArray()
                    : this.CommandLineArgument,
                Jars = jars,
                Files = files,
                Archives = isSparkDotNet
                    ? new List<string> { $"{this.MainDefinitionFile}#{SynapseConstants.SparkDotNetUdfsFolderName}" }
                    : null,
                Conf = this.Configuration?.ToDictionary(),
                ExecutorMemory = nodeSize.Memory + "g",
                ExecutorCores = nodeSize.Cores,
                DriverMemory = nodeSize.Memory + "g",
                DriverCores = nodeSize.Cores,
                NumExecutors = this.ExecutorCount
            };

            // Ensure the relative path of UDFs is add to "--conf".
            if (isSparkDotNet)
            {
                if (batchRequest.Conf == null)
                {
                    batchRequest.Conf = new Dictionary<string, string>();
                }

                string udfsRelativePath = "./" + SynapseConstants.SparkDotNetUdfsFolderName;
                batchRequest.Conf.TryGetValue(SynapseConstants.SparkDotNetAssemblySearchPathsKey, out string existingValue);
                List<string> searchPaths = existingValue == null
                    ? new List<string>()
                    : existingValue.Split(',').Select(p => p.Trim()).Where(p => !string.IsNullOrEmpty(p)).ToList();
                if (!searchPaths.Contains(udfsRelativePath))
                {
                    searchPaths.Add(udfsRelativePath);
                }

                batchRequest.Conf[SynapseConstants.SparkDotNetAssemblySearchPathsKey] = string.Join(",", searchPaths);
            }

            var jobInformation = SynapseAnalyticsClient.SubmitSparkBatchJob(this.WorkspaceName, this.SparkPoolName, batchRequest, waitForCompletion: false);
            WriteObject(new PSSynapseSparkJob(jobInformation));
        }
// Example #4
        /// <summary>
        /// Builds a Livy batch request from the cmdlet parameters and submits it
        /// without waiting for completion, writing the resulting job to the pipeline.
        /// </summary>
        public override void ExecuteCmdlet()
        {
            this.Language = LanguageType.Parse(this.Language);

            // Spark and SparkDotNet jobs require an entry-point class name.
            bool missingMainClass = string.IsNullOrEmpty(this.MainClassName);
            if (missingMainClass && (this.Language == LanguageType.SparkDotNet || this.Language == LanguageType.Spark))
            {
                throw new SynapseException(Resources.MissingMainClassName);
            }

            // When a pool object is supplied, derive workspace/pool names from its resource id.
            if (this.IsParameterBound(c => c.SparkPoolObject))
            {
                var poolId = new ResourceIdentifier(this.SparkPoolObject.Id);
                string parentResource = poolId.ParentResource;
                this.WorkspaceName = parentResource.Substring(parentResource.LastIndexOf('/') + 1);
                this.SparkPoolName = poolId.ResourceName;
            }

            // Normalize all user-supplied URLs (main file, arguments, reference files).
            this.MainDefinitionFile = Utils.NormalizeUrl(this.MainDefinitionFile);
            if (this.CommandLineArguments != null)
            {
                for (int index = 0; index < this.CommandLineArguments.Length; index++)
                {
                    this.CommandLineArguments[index] = Utils.NormalizeUrl(this.CommandLineArguments[index]);
                }
            }

            if (this.ReferenceFiles != null)
            {
                for (int index = 0; index < this.ReferenceFiles.Length; index++)
                {
                    this.ReferenceFiles[index] = Utils.NormalizeUrl(this.ReferenceFiles[index]);
                }
            }

            // Split reference files into jar and non-jar lists.
            Utils.CategorizedFiles(this.ReferenceFiles, out IList<string> jars, out IList<string> files);

            bool isSparkDotNet = this.Language == LanguageType.SparkDotNet;
            var nodeSize = SynapseConstants.ComputeNodeSizes[this.ExecutorSize];
            var batchRequest = new ExtendedLivyBatchRequest
            {
                Name = this.Name,
                // .NET jobs launch through a fixed wrapper jar/class; the user's file and
                // class are passed along as the leading program arguments instead.
                File = isSparkDotNet ? SynapseConstants.SparkDotNetJarFile : this.MainDefinitionFile,
                ClassName = isSparkDotNet
                    ? SynapseConstants.SparkDotNetClassName
                    : (this.Language == LanguageType.PySpark ? null : this.MainClassName),
                Args = isSparkDotNet
                    ? new List<string> { this.MainDefinitionFile, this.MainClassName }
                        .Concat(this.CommandLineArguments ?? new string[0])
                        .ToArray()
                    : this.CommandLineArguments,
                Jars = jars,
                Files = files,
                Archives = isSparkDotNet
                    ? new List<string> { $"{this.MainDefinitionFile}#{SynapseConstants.SparkDotNetUdfsFolderName}" }
                    : null,
                Conf = this.Configuration?.ToDictionary(),
                ExecutorMemory = nodeSize.Memory + "g",
                ExecutorCores = nodeSize.Cores,
                DriverMemory = nodeSize.Memory + "g",
                DriverCores = nodeSize.Cores,
                NumExecutors = this.ExecutorCount
            };

            var jobInformation = SynapseAnalyticsClient.SubmitSparkBatchJob(this.WorkspaceName, this.SparkPoolName, batchRequest, waitForCompletion: false);
            WriteObject(new PSSynapseSparkJob(jobInformation));
        }
// Example #5
 /// <summary>
 /// Create new spark batch job.
 /// </summary>
 /// <param name='operations'>
 /// The operations group for this extension method.
 /// </param>
 /// <param name='workspaceName'>
 /// The name of the workspace to execute operations on.
 /// </param>
 /// <param name='sparkPoolName'>
 /// Name of the spark pool. "ondemand" targets the ondemand pool.
 /// </param>
 /// <param name='livyRequest'>
 /// Livy compatible batch job request payload.
 /// </param>
 /// <param name='detailed'>
 /// Optional query param specifying whether detailed response is returned
 /// beyond plain livy.
 /// </param>
 /// <param name='cancellationToken'>
 /// The cancellation token.
 /// </param>
 /// <returns>The response body of the create operation.</returns>
 public static async Task <ExtendedLivyBatchResponse> CreateAsync(this ISparkBatchOperations operations, string workspaceName, string sparkPoolName, ExtendedLivyBatchRequest livyRequest, bool?detailed = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
 {
     // Dispose the full HTTP operation response after extracting its body.
     var operationResponse = await operations.CreateWithHttpMessagesAsync(workspaceName, sparkPoolName, livyRequest, detailed, null, cancellationToken).ConfigureAwait(false);
     using (operationResponse)
     {
         return operationResponse.Body;
     }
 }
// Example #6
 /// <summary>
 /// Create new spark batch job.
 /// </summary>
 /// <param name='operations'>
 /// The operations group for this extension method.
 /// </param>
 /// <param name='workspaceName'>
 /// The name of the workspace to execute operations on.
 /// </param>
 /// <param name='sparkPoolName'>
 /// Name of the spark pool. "ondemand" targets the ondemand pool.
 /// </param>
 /// <param name='livyRequest'>
 /// Livy compatible batch job request payload.
 /// </param>
 /// <param name='detailed'>
 /// Optional query param specifying whether detailed response is returned
 /// beyond plain livy.
 /// </param>
 /// <returns>The response body of the create operation.</returns>
 public static ExtendedLivyBatchResponse Create(this ISparkBatchOperations operations, string workspaceName, string sparkPoolName, ExtendedLivyBatchRequest livyRequest, bool?detailed = default(bool?))
 {
     // NOTE(review): sync-over-async — blocking on GetResult() can deadlock under a
     // synchronization context and ties up a thread-pool thread; prefer CreateAsync
     // where the caller can await. Kept for compatibility with the generated API shape.
     var pending = operations.CreateAsync(workspaceName, sparkPoolName, livyRequest, detailed);
     return pending.GetAwaiter().GetResult();
 }