/// <summary>
/// Resolves the array type mapping for a column whose PostgreSQL type is an array.
/// Looks up the element type's CLR mapping first; if no mapping exists for the
/// element type, logs a warning and returns null so the column is skipped.
/// </summary>
NpgsqlArrayTypeMapping GetArrayTypeMapping(ColumnModel column)
{
    Debug.Assert(column.Npgsql().ElementDataType != null);

    var elementClrType = TypeMapper.FindMapping(column.Npgsql().ElementDataType)?.ClrType;

    // No CLR type for the element means we cannot scaffold the array at all.
    if (elementClrType == null)
    {
        Logger.LogWarning($"Could not find type mapping for array column '{column.Name}' with data type '{column.DataType}' - array of unknown element type {column.Npgsql().ElementDataType}. Skipping column.");
        return null;
    }

    // Map the CLR element type to its array form and find the corresponding mapping.
    return (NpgsqlArrayTypeMapping)TypeMapper.FindMapping(elementClrType.MakeArrayType());
}
/// <summary>
/// Resolves the relational type mapping for a scaffolded column, dispatching on
/// the PostgreSQL type kind. Base types defer to the default implementation and
/// arrays get dedicated handling; every other kind (range, enum, composite, or
/// anything unrecognized) is logged and skipped by returning null.
/// </summary>
protected override RelationalTypeMapping GetTypeMapping(ColumnModel column)
{
    Check.NotNull(column, nameof(column));

    var postgresType = column.Npgsql().PostgresTypeType;

    if (postgresType == PostgresTypeType.Base)
        return base.GetTypeMapping(column);

    if (postgresType == PostgresTypeType.Array)
        return GetArrayTypeMapping(column);

    // Range, Enum, Composite and any unknown kinds are not scaffoldable here.
    Logger.LogWarning($"Can't scaffold PostgreSQL {postgresType} for column '{column.Name}' of type '{column.DataType}'");
    return null;
}
/// <summary>
/// Executes <c>GetColumnsQuery</c> against the open connection and populates each
/// table's column list, parsing PostgreSQL type modifiers (length / precision /
/// scale) and detecting serial columns from their default expressions.
/// </summary>
private void GetColumns()
{
    using (var command = new NpgsqlCommand(GetColumnsQuery, _connection))
    using (var reader = command.ExecuteReader())
    {
        while (reader.Read())
        {
            var schemaName = reader.GetString(0);
            var tableName = reader.GetString(1);

            // Skip columns belonging to tables excluded by the user's selection set.
            if (!_tableSelectionSet.Allows(schemaName, tableName))
            {
                continue;
            }

            var columnName = reader.GetString(2);
            var dataType = reader.GetString(3);
            // The catalog's attribute number is 1-based; the model Ordinal is 0-based.
            var ordinal = reader.GetInt32(4) - 1;
            var typeModifier = reader.GetInt32(5);
            var isNullable = reader.GetBoolean(6);
            int? maxLength = null;
            int? precision = null;
            int? scale = null;
            var defaultValue = reader.IsDBNull(7) ? null : reader.GetString(7);

            // atttypmod packs type-specific size information; -1 means "no modifier".
            if (typeModifier != -1)
            {
                switch (dataType)
                {
                case "bpchar":
                case "char":
                case "varchar":
                    // For character types the modifier is the declared length
                    // plus a 4-byte header.
                    maxLength = typeModifier - 4;
                    break;
                case "numeric":
                case "decimal":
                    // Precision lives in the high 16 bits, scale in the low 16 bits.
                    // See http://stackoverflow.com/questions/3350148/where-are-numeric-precision-and-scale-for-a-field-found-in-the-pg-catalog-tables
                    precision = ((typeModifier - 4) >> 16) & 65535;
                    scale = (typeModifier - 4) & 65535;
                    break;
                }
            }

            var table = _tables[TableKey(tableName, schemaName)];
            var column = new ColumnModel
            {
                Table = table,
                Name = columnName,
                DataType = dataType,
                Ordinal = ordinal,
                IsNullable = isNullable,
                MaxLength = maxLength,
                Precision = precision,
                Scale = scale,
                DefaultValue = defaultValue
            };

            if (defaultValue != null)
            {
                // Any default expression is surfaced as a value generated on add.
                column.ValueGenerated = ValueGenerated.OnAdd;

                // Somewhat hacky... We identify serial columns by examining their default expression,
                // and reverse-engineer these as ValueGenerated.OnAdd. The two patterns cover
                // unquoted and quoted sequence names.
                if (defaultValue == $"nextval('{tableName}_{columnName}_seq'::regclass)" ||
                    defaultValue == $"nextval('\"{tableName}_{columnName}_seq\"'::regclass)")
                {
                    // TODO: Scaffold as serial, bigserial, not int...
                    // But in normal code-first I don't have to set the column type...!
                    // TODO: Think about composite keys. Do serial magic only for non-composite.
                    column.Npgsql().IsSerial = true;
                    column.DefaultValue = null;
                }
            }

            table.Columns.Add(column);
            _tableColumns.Add(ColumnKey(table, column.Name), column);
        }
    }
}
/// <summary>
/// Executes <c>GetColumnsQuery</c> and populates each table's column list.
/// Dropped columns are recorded as null placeholders so that constraint
/// definitions, which reference columns by position, still line up; they are
/// removed before the model is returned. Also classifies each column by its
/// PostgreSQL type kind (base / array / range / enum) and detects serial
/// columns from their default expressions.
/// </summary>
void GetColumns()
{
    using (var command = new NpgsqlCommand(GetColumnsQuery, _connection))
    using (var reader = command.ExecuteReader())
    {
        while (reader.Read())
        {
            var schemaName = reader.GetString(0);
            var tableName = reader.GetString(1);

            // Skip columns belonging to tables excluded by the user's selection set.
            if (!_tableSelectionSet.Allows(schemaName, tableName))
            {
                continue;
            }

            var table = _tables[TableKey(tableName, schemaName)];

            // We need to know about dropped columns because constraints take them into
            // account when referencing columns. We'll get rid of them before returning the model.
            var isDropped = reader.GetBoolean(2);
            if (isDropped)
            {
                table.Columns.Add(null);
                continue;
            }

            var columnName = reader.GetString(3);
            var dataType = reader.GetString(4);
            // The catalog's attribute number is 1-based; the model Ordinal is 0-based.
            var ordinal = reader.GetInt32(5) - 1;
            var typeModifier = reader.GetInt32(6);
            // Single-character type classification (presumably pg_type.typtype /
            // typcategory-style codes; see the switch below).
            var typeChar = reader.GetChar(7);
            // Element data type is only present for array columns.
            var elemDataType = reader.IsDBNull(8) ? null : reader.GetString(8);
            var isNullable = reader.GetBoolean(9);
            int? maxLength = null;
            int? precision = null;
            int? scale = null;
            var defaultValue = reader.IsDBNull(10) ? null : reader.GetString(10);

            // atttypmod packs type-specific size information; -1 means "no modifier".
            if (typeModifier != -1)
            {
                switch (dataType)
                {
                case "bpchar":
                case "char":
                case "varchar":
                    // For character types the modifier is the declared length
                    // plus a 4-byte header.
                    maxLength = typeModifier - 4;
                    break;
                case "numeric":
                case "decimal":
                    // Precision lives in the high 16 bits, scale in the low 16 bits.
                    // See http://stackoverflow.com/questions/3350148/where-are-numeric-precision-and-scale-for-a-field-found-in-the-pg-catalog-tables
                    precision = ((typeModifier - 4) >> 16) & 65535;
                    scale = (typeModifier - 4) & 65535;
                    break;
                }
            }

            var column = new ColumnModel
            {
                Table = table,
                Name = columnName,
                DataType = dataType,
                Ordinal = ordinal,
                IsNullable = isNullable,
                MaxLength = maxLength,
                Precision = precision,
                Scale = scale,
                DefaultValue = defaultValue
            };

            if (defaultValue != null)
            {
                // Any default expression is surfaced as a value generated on add.
                column.ValueGenerated = ValueGenerated.OnAdd;

                // Somewhat hacky... We identify serial columns by examining their default expression,
                // and reverse-engineer these as ValueGenerated.OnAdd. The two patterns cover
                // unquoted and quoted sequence names.
                if (defaultValue == $"nextval('{tableName}_{columnName}_seq'::regclass)" ||
                    defaultValue == $"nextval('\"{tableName}_{columnName}_seq\"'::regclass)")
                {
                    // TODO: Scaffold as serial, bigserial, not int...
                    // But in normal code-first I don't have to set the column type...!
                    // TODO: Think about composite keys. Do serial magic only for non-composite.
                    column.Npgsql().IsSerial = true;
                    column.DefaultValue = null;
                }
            }

            switch (typeChar)
            {
            case 'b':
                // Base (regular), is the default
                break;
            case 'a':
                column.Npgsql().PostgresTypeType = PostgresTypeType.Array;
                column.Npgsql().ElementDataType = elemDataType;
                break;
            case 'r':
                column.Npgsql().PostgresTypeType = PostgresTypeType.Range;
                break;
            case 'e':
                column.Npgsql().PostgresTypeType = PostgresTypeType.Enum;
                break;
            default:
                // NOTE(review): unlike dropped columns above, skipping here does NOT
                // add a null placeholder, so later columns' positions in
                // table.Columns shift relative to their catalog ordinals — verify
                // constraint resolution still works for tables containing a column
                // with an unknown type char.
                Logger.LogWarning($"Can't scaffold column '{columnName}' of type '{dataType}': unknown type char '{typeChar}'");
                continue;
            }

            table.Columns.Add(column);
            _tableColumns.Add(ColumnKey(table, column.Name), column);
        }
    }
}