/// <summary> Initializes a new instance of <see cref="SparkJobDefinition"/> from deserialized values. </summary>
/// <param name="description"> The description of the Spark job definition. </param>
/// <param name="targetBigDataPool"> The big data pool the job runs against. </param>
/// <param name="requiredSparkVersion"> The Spark version required by the job. </param>
/// <param name="language"> The language of the Spark application. </param>
/// <param name="jobProperties"> The properties of the Spark job. </param>
/// <param name="additionalProperties"> Additional properties not matched to a known member. </param>
internal SparkJobDefinition(string description, BigDataPoolReference targetBigDataPool, string requiredSparkVersion, string language, SparkJobProperties jobProperties, IDictionary<string, object> additionalProperties)
{
    Description = description;
    TargetBigDataPool = targetBigDataPool;
    RequiredSparkVersion = requiredSparkVersion;
    Language = language;
    JobProperties = jobProperties;
    AdditionalProperties = additionalProperties;
}
/// <summary> Deserializes a <see cref="SparkJobDefinition"/> from the given JSON element. </summary>
/// <param name="element"> The JSON object to read known properties from; unrecognized properties are collected as additional properties. </param>
internal static SparkJobDefinition DeserializeSparkJobDefinition(JsonElement element)
{
    Optional<string> description = default;
    BigDataPoolReference targetBigDataPool = default;
    Optional<string> requiredSparkVersion = default;
    Optional<string> language = default;
    SparkJobProperties jobProperties = default;
    Optional<SparkJobDefinitionFolder> folder = default;
    IDictionary<string, object> additionalProperties = default;
    // Unrecognized members are captured here and surfaced via AdditionalProperties.
    Dictionary<string, object> extras = new Dictionary<string, object>();
    foreach (var prop in element.EnumerateObject())
    {
        if (prop.NameEquals("description"))
        {
            description = prop.Value.GetString();
            continue;
        }
        if (prop.NameEquals("targetBigDataPool"))
        {
            targetBigDataPool = BigDataPoolReference.DeserializeBigDataPoolReference(prop.Value);
            continue;
        }
        if (prop.NameEquals("requiredSparkVersion"))
        {
            requiredSparkVersion = prop.Value.GetString();
            continue;
        }
        if (prop.NameEquals("language"))
        {
            language = prop.Value.GetString();
            continue;
        }
        if (prop.NameEquals("jobProperties"))
        {
            jobProperties = SparkJobProperties.DeserializeSparkJobProperties(prop.Value);
            continue;
        }
        if (prop.NameEquals("folder"))
        {
            // An explicit JSON null is preserved as a null folder (distinct from "absent").
            if (prop.Value.ValueKind == JsonValueKind.Null)
            {
                folder = null;
                continue;
            }
            folder = SparkJobDefinitionFolder.DeserializeSparkJobDefinitionFolder(prop.Value);
            continue;
        }
        extras.Add(prop.Name, prop.Value.GetObject());
    }
    additionalProperties = extras;
    // NOTE(review): this calls a 7-argument constructor taking `folder`, which is not the
    // 6-argument constructor visible in this file — presumably an overload declared elsewhere; verify.
    return new SparkJobDefinition(description.Value, targetBigDataPool, requiredSparkVersion.Value, language.Value, jobProperties, folder.Value, additionalProperties);
}
/// <summary> Initializes a new instance of <see cref="SparkJobDefinition"/>. </summary>
/// <param name="targetBigDataPool"> The big data pool the job runs against. </param>
/// <param name="jobProperties"> The properties of the Spark job. </param>
/// <exception cref="ArgumentNullException"> <paramref name="targetBigDataPool"/> or <paramref name="jobProperties"/> is null. </exception>
public SparkJobDefinition(BigDataPoolReference targetBigDataPool, SparkJobProperties jobProperties)
{
    TargetBigDataPool = targetBigDataPool ?? throw new ArgumentNullException(nameof(targetBigDataPool));
    JobProperties = jobProperties ?? throw new ArgumentNullException(nameof(jobProperties));
    AdditionalProperties = new ChangeTrackingDictionary<string, object>();
}
/// <summary> Deserializes a <see cref="SparkJobDefinition"/> from the given JSON element. </summary>
/// <param name="element"> The JSON object to read known properties from; unrecognized properties are collected as additional properties. </param>
internal static SparkJobDefinition DeserializeSparkJobDefinition(JsonElement element)
{
    string description = default;
    BigDataPoolReference targetBigDataPool = default;
    string requiredSparkVersion = default;
    string language = default;
    SparkJobProperties jobProperties = default;
    IDictionary<string, object> additionalProperties = default;
    // Allocated lazily: stays null (and so does AdditionalProperties) when every
    // property in the payload is a recognized member.
    Dictionary<string, object> extras = default;
    foreach (var prop in element.EnumerateObject())
    {
        if (prop.NameEquals("description"))
        {
            // JSON null leaves the field at its default rather than calling GetString().
            if (prop.Value.ValueKind != JsonValueKind.Null)
            {
                description = prop.Value.GetString();
            }
            continue;
        }
        if (prop.NameEquals("targetBigDataPool"))
        {
            targetBigDataPool = BigDataPoolReference.DeserializeBigDataPoolReference(prop.Value);
            continue;
        }
        if (prop.NameEquals("requiredSparkVersion"))
        {
            if (prop.Value.ValueKind != JsonValueKind.Null)
            {
                requiredSparkVersion = prop.Value.GetString();
            }
            continue;
        }
        if (prop.NameEquals("language"))
        {
            if (prop.Value.ValueKind != JsonValueKind.Null)
            {
                language = prop.Value.GetString();
            }
            continue;
        }
        if (prop.NameEquals("jobProperties"))
        {
            jobProperties = SparkJobProperties.DeserializeSparkJobProperties(prop.Value);
            continue;
        }
        // Unrecognized member: preserve it, keeping explicit JSON nulls as null values.
        extras ??= new Dictionary<string, object>();
        if (prop.Value.ValueKind == JsonValueKind.Null)
        {
            extras.Add(prop.Name, null);
        }
        else
        {
            extras.Add(prop.Name, prop.Value.GetObject());
        }
    }
    additionalProperties = extras;
    return new SparkJobDefinition(description, targetBigDataPool, requiredSparkVersion, language, jobProperties, additionalProperties);
}