internal BigDataPoolResourceInfo(string id, string name, string type, IDictionary<string, string> tags, string location, string provisioningState, AutoScaleProperties autoScale, DateTimeOffset? creationDate, AutoPauseProperties autoPause, bool? isComputeIsolationEnabled, string sparkEventsFolder, int? nodeCount, LibraryRequirements libraryRequirements, LibraryRequirements sparkConfigProperties, string sparkVersion, string defaultSparkLogFolder, NodeSize? nodeSize, NodeSizeFamily? nodeSizeFamily) : base(id, name, type, tags, location)
{
    ProvisioningState = provisioningState;
    AutoScale = autoScale;
    CreationDate = creationDate;
    AutoPause = autoPause;
    IsComputeIsolationEnabled = isComputeIsolationEnabled;
    SparkEventsFolder = sparkEventsFolder;
    NodeCount = nodeCount;
    LibraryRequirements = libraryRequirements;
    SparkConfigProperties = sparkConfigProperties;
    SparkVersion = sparkVersion;
    DefaultSparkLogFolder = defaultSparkLogFolder;
    NodeSize = nodeSize;
    NodeSizeFamily = nodeSizeFamily;
}
internal BigDataPoolResourceInfo(string id, string name, string type, IDictionary<string, string> tags, string location, string provisioningState, AutoScaleProperties autoScale, DateTimeOffset? creationDate, AutoPauseProperties autoPause, bool? isComputeIsolationEnabled, bool? sessionLevelPackagesEnabled, int? cacheSize, DynamicExecutorAllocation dynamicExecutorAllocation, string sparkEventsFolder, int? nodeCount, LibraryRequirements libraryRequirements, IList<LibraryInfo> customLibraries, LibraryRequirements sparkConfigProperties, string sparkVersion, string defaultSparkLogFolder, NodeSize? nodeSize, NodeSizeFamily? nodeSizeFamily, DateTimeOffset? lastSucceededTimestamp) : base(id, name, type, tags, location)
{
    ProvisioningState = provisioningState;
    AutoScale = autoScale;
    CreationDate = creationDate;
    AutoPause = autoPause;
    IsComputeIsolationEnabled = isComputeIsolationEnabled;
    SessionLevelPackagesEnabled = sessionLevelPackagesEnabled;
    CacheSize = cacheSize;
    DynamicExecutorAllocation = dynamicExecutorAllocation;
    SparkEventsFolder = sparkEventsFolder;
    NodeCount = nodeCount;
    LibraryRequirements = libraryRequirements;
    CustomLibraries = customLibraries;
    SparkConfigProperties = sparkConfigProperties;
    SparkVersion = sparkVersion;
    DefaultSparkLogFolder = defaultSparkLogFolder;
    NodeSize = nodeSize;
    NodeSizeFamily = nodeSizeFamily;
    LastSucceededTimestamp = lastSucceededTimestamp;
}
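// Illustration only (not taken from the service): a payload shaped roughly like the
// following is what DeserializeBigDataPoolResourceInfo below maps onto the first
// constructor overload. The property names are the ones the deserializer checks for;
// the values are invented.
//
// {
//   "id": "/subscriptions/.../bigDataPools/samplepool",
//   "name": "samplepool",
//   "type": "Microsoft.Synapse/workspaces/bigDataPools",
//   "location": "westus2",
//   "tags": { "env": "dev" },
//   "properties": {
//     "provisioningState": "Succeeded",
//     "creationDate": "2021-01-01T00:00:00.0000000Z",
//     "isComputeIsolationEnabled": false,
//     "sparkEventsFolder": "/events",
//     "nodeCount": 4,
//     "sparkVersion": "2.4",
//     "defaultSparkLogFolder": "/logs",
//     "nodeSize": "Medium",
//     "nodeSizeFamily": "MemoryOptimized"
//   }
// }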
// Deserializes the Big Data pool (Spark pool) resource payload returned by the service.
internal static BigDataPoolResourceInfo DeserializeBigDataPoolResourceInfo(JsonElement element)
{
    Optional<IDictionary<string, string>> tags = default;
    string location = default;
    Optional<string> id = default;
    Optional<string> name = default;
    Optional<string> type = default;
    Optional<string> provisioningState = default;
    Optional<AutoScaleProperties> autoScale = default;
    Optional<DateTimeOffset> creationDate = default;
    Optional<AutoPauseProperties> autoPause = default;
    Optional<bool> isComputeIsolationEnabled = default;
    Optional<string> sparkEventsFolder = default;
    Optional<int> nodeCount = default;
    Optional<LibraryRequirements> libraryRequirements = default;
    Optional<LibraryRequirements> sparkConfigProperties = default;
    Optional<string> sparkVersion = default;
    Optional<string> defaultSparkLogFolder = default;
    Optional<NodeSize> nodeSize = default;
    Optional<NodeSizeFamily> nodeSizeFamily = default;
    foreach (var property in element.EnumerateObject())
    {
        if (property.NameEquals("tags"))
        {
            if (property.Value.ValueKind == JsonValueKind.Null)
            {
                property.ThrowNonNullablePropertyIsNull();
                continue;
            }
            Dictionary<string, string> dictionary = new Dictionary<string, string>();
            foreach (var property0 in property.Value.EnumerateObject())
            {
                dictionary.Add(property0.Name, property0.Value.GetString());
            }
            tags = dictionary;
            continue;
        }
        if (property.NameEquals("location"))
        {
            location = property.Value.GetString();
            continue;
        }
        if (property.NameEquals("id"))
        {
            id = property.Value.GetString();
            continue;
        }
        if (property.NameEquals("name"))
        {
            name = property.Value.GetString();
            continue;
        }
        if (property.NameEquals("type"))
        {
            type = property.Value.GetString();
            continue;
        }
        if (property.NameEquals("properties"))
        {
            if (property.Value.ValueKind == JsonValueKind.Null)
            {
                property.ThrowNonNullablePropertyIsNull();
                continue;
            }
            // Pool-specific settings are nested under the top-level "properties" object.
            foreach (var property0 in property.Value.EnumerateObject())
            {
                if (property0.NameEquals("provisioningState"))
                {
                    provisioningState = property0.Value.GetString();
                    continue;
                }
                if (property0.NameEquals("autoScale"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    autoScale = AutoScaleProperties.DeserializeAutoScaleProperties(property0.Value);
                    continue;
                }
                if (property0.NameEquals("creationDate"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    creationDate = property0.Value.GetDateTimeOffset("O");
                    continue;
                }
                if (property0.NameEquals("autoPause"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    autoPause = AutoPauseProperties.DeserializeAutoPauseProperties(property0.Value);
                    continue;
                }
                if (property0.NameEquals("isComputeIsolationEnabled"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    isComputeIsolationEnabled = property0.Value.GetBoolean();
                    continue;
                }
                if (property0.NameEquals("sparkEventsFolder"))
                {
                    sparkEventsFolder = property0.Value.GetString();
                    continue;
                }
                if (property0.NameEquals("nodeCount"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    nodeCount = property0.Value.GetInt32();
                    continue;
                }
                if (property0.NameEquals("libraryRequirements"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    libraryRequirements = LibraryRequirements.DeserializeLibraryRequirements(property0.Value);
                    continue;
                }
                if (property0.NameEquals("sparkConfigProperties"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    sparkConfigProperties = LibraryRequirements.DeserializeLibraryRequirements(property0.Value);
                    continue;
                }
                if (property0.NameEquals("sparkVersion"))
                {
                    sparkVersion = property0.Value.GetString();
                    continue;
                }
                if (property0.NameEquals("defaultSparkLogFolder"))
                {
                    defaultSparkLogFolder = property0.Value.GetString();
                    continue;
                }
                if (property0.NameEquals("nodeSize"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    nodeSize = new NodeSize(property0.Value.GetString());
                    continue;
                }
                if (property0.NameEquals("nodeSizeFamily"))
                {
                    if (property0.Value.ValueKind == JsonValueKind.Null)
                    {
                        property0.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    nodeSizeFamily = new NodeSizeFamily(property0.Value.GetString());
                    continue;
                }
            }
            continue;
        }
    }
    return new BigDataPoolResourceInfo(id.Value, name.Value, type.Value, Optional.ToDictionary(tags), location, provisioningState.Value, autoScale.Value, Optional.ToNullable(creationDate), autoPause.Value, Optional.ToNullable(isComputeIsolationEnabled), sparkEventsFolder.Value, Optional.ToNullable(nodeCount), libraryRequirements.Value, sparkConfigProperties.Value, sparkVersion.Value, defaultSparkLogFolder.Value, Optional.ToNullable(nodeSize), Optional.ToNullable(nodeSizeFamily));
}
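// A minimal sketch (not part of the generated surface) of driving the deserializer above
// from within this assembly; the method name and the JSON source are illustrative
// assumptions. External callers would normally receive this model from the generated
// client rather than calling the internal deserializer directly.
internal static BigDataPoolResourceInfo ExampleFromJson(string json)
{
    // Parse the raw payload and hand the root element to the generated deserializer.
    using var document = System.Text.Json.JsonDocument.Parse(json);
    return DeserializeBigDataPoolResourceInfo(document.RootElement);
}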