} // DecodeRawBinaryDataUsingTypeIDs

/// <summary>
/// Given IDs of record types registered with Kinetica, decode binary
/// data into distinct records (objects).
/// </summary>
/// <typeparam name="T">The type of the records.</typeparam>
/// <param name="type_ids">The IDs for each of the lists of records.</param>
/// <param name="lists_records_binary">The binary encoded data to be decoded, in a 2D list.</param>
/// <param name="record_lists">The decoded objects/records in a 2D list.</param>
public void DecodeRawBinaryDataUsingTypeIDs<T>(IList<string> type_ids,
                                               IList<IList<byte[]>> lists_records_binary,
                                               IList<IList<T>> record_lists) where T : new()
{
    // Make sure that the numbers of type IDs and record lists are the same
    if (type_ids.Count != lists_records_binary.Count)
    {
        throw new KineticaException("Unequal numbers of type IDs and binary encoded data objects provided.");
    }

    // Decode all the records
    for (int i = 0; i < lists_records_binary.Count; ++i)
    {
        // Per list, use the respective type ID to create the appropriate KineticaType
        KineticaType ktype = KineticaType.fromTypeID(this, type_ids[i]);

        // Get the binary encoded data for this list
        IList<byte[]> records_binary = lists_records_binary[i];

        // Create a container to put the decoded records in
        IList<T> records = new List<T>();

        // The inner list actually contains the binary data
        foreach (var bin_record in records_binary)
        {
            // Using the KineticaType object, decode the record
            T obj = AvroDecode<T>(bin_record, ktype);
            records.Add(obj);
        }

        // Add the records to the outgoing list
        record_lists.Add(records);
    }
} // DecodeRawBinaryDataUsingTypeIDs
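// A minimal usage sketch (hypothetical: `db` is an existing Kinetica connection,
// `MyRecord` is a record class known to the server, and `typeIds` / `rawBinaryLists`
// were taken from a raw server response):
//
//     IList<IList<MyRecord>> decoded = new List<IList<MyRecord>>();
//     db.DecodeRawBinaryDataUsingTypeIDs<MyRecord>(typeIds,          // IList<string>, one ID per inner list
//                                                  rawBinaryLists,   // IList<IList<byte[]>>
//                                                  decoded);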
} // end AvroEncode

/// <summary>
/// Make a copy of an object as an Avro GenericRecord.
/// </summary>
/// <param name="obj">Original object.</param>
/// <param name="ktype">An associated KineticaType object that
/// describes the original object.</param>
/// <returns>GenericRecord object which is a copy of the specified object.</returns>
private Avro.Generic.GenericRecord MakeGenericRecord(object obj, KineticaType ktype)
{
    // Get the schema
    var schema = KineticaData.SchemaFromType(obj.GetType(), ktype);

    // Create a new GenericRecord for this schema
    var recordToSend = new Avro.Generic.GenericRecord(schema);

    // Copy each field from obj to recordToSend
    foreach (var field in schema.Fields)
    {
        var property = obj.GetType()
                          .GetProperties()
                          .FirstOrDefault(prop => prop.Name.ToLowerInvariant() == field.Name.ToLowerInvariant());
        if (property == null)
        {
            continue;
        }
        recordToSend.Add(field.Name, property.GetValue(obj, null));
    }

    // Return the newly created object
    return recordToSend;
}
} // DecodeRawBinaryDataUsingSchemaString

/// <summary>
/// Given a list of schema strings, decode binary data into distinct
/// records (objects).
/// </summary>
/// <typeparam name="T">The type of the records.</typeparam>
/// <param name="schema_strings">The schemas for the records.</param>
/// <param name="lists_records_binary">The binary encoded data to be decoded (the data is
/// in a 2D list).</param>
/// <param name="record_lists">The decoded objects/records in a 2D list.</param>
public void DecodeRawBinaryDataUsingSchemaString<T>(IList<string> schema_strings,
                                                    IList<IList<byte[]>> lists_records_binary,
                                                    IList<IList<T>> record_lists) where T : new()
{
    // Check that the list of schemas and the list of binary encoded data match in length
    if (schema_strings.Count != lists_records_binary.Count)
    {
        throw new KineticaException("List of schemas and list of binary encoded data do not match in count.");
    }

    // Using the KineticaType object, decode all the records from Avro binary encoding
    for (int i = 0; i < schema_strings.Count; ++i)
    {
        // Create a KineticaType object based on the schema string
        KineticaType ktype = new KineticaType("", schema_strings[i], null);

        // Get the binary encoded data for this list
        IList<byte[]> records_binary = lists_records_binary[i];

        // Create a container to put the decoded records in
        IList<T> records = new List<T>();

        // The inner list actually contains the binary data
        foreach (var bin_record in records_binary)
        {
            T obj = AvroDecode<T>(bin_record, ktype);
            records.Add(obj);
        }

        // Add the records to the outgoing list
        record_lists.Add(records);
    }
} // DecodeRawBinaryDataUsingSchemaString
} // lookupKineticaType()

/// <summary>
/// Encode specified object using Avro
/// </summary>
/// <param name="obj">Object to encode</param>
/// <returns>Byte array of binary Avro-encoded data</returns>
internal byte[] AvroEncode(object obj)
{
    // Create a stream that will allow us to view the underlying memory
    using (var ms = new MemoryStream())
    {
        // Write the object to the memory stream
        // If obj is an ISpecificRecord, this is more efficient
        if (obj is Avro.Specific.ISpecificRecord)
        {
            var schema = (obj as Avro.Specific.ISpecificRecord).Schema;
            Avro.Specific.SpecificDefaultWriter writer = new Avro.Specific.SpecificDefaultWriter(schema);
            writer.Write(schema, obj, new BinaryEncoder(ms));
        }
        else // Not an ISpecificRecord - this way is less efficient
        {
            // Get the KineticaType associated with the object to be encoded
            Type obj_type = obj.GetType();
            KineticaType ktype = lookupKineticaType(obj_type);
            if (ktype == null)
            {
                throw new KineticaException("No known KineticaType associated with the given object. " +
                                            "Need a known KineticaType to encode the object.");
            }

            // Make a copy of the object to send as a GenericRecord, then write that to the memory stream
            var schema = KineticaData.SchemaFromType(obj.GetType(), ktype);
            var recordToSend = MakeGenericRecord(obj, ktype);
            var writer = new Avro.Generic.DefaultWriter(schema);
            writer.Write(schema, recordToSend, new BinaryEncoder(ms));
        }

        // Get the memory from the stream
        return ms.ToArray();
    }
} // end AvroEncode
} // end SetKineticaSourceClassToTypeMapping

/// <summary>
/// Given a KineticaType object for a certain record type, decode binary data into distinct
/// records (objects).
/// </summary>
/// <typeparam name="T">The type of the records.</typeparam>
/// <param name="record_type">The type for the records.</param>
/// <param name="records_binary">The binary encoded data to be decoded.</param>
/// <param name="records">The decoded objects/records.</param>
public void DecodeRawBinaryDataUsingRecordType<T>(KineticaType record_type,
                                                  IList<byte[]> records_binary,
                                                  IList<T> records) where T : new()
{
    // Using the KineticaType object, decode all the records from Avro binary encoding
    foreach (var bin_record in records_binary)
    {
        T obj = AvroDecode<T>(bin_record, record_type);
        records.Add(obj);
    }
} // DecodeRawBinaryDataUsingRecordType
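// Usage sketch (hypothetical table, class, and variable names; `rawBinary` is an
// IList<byte[]> of Avro-encoded records obtained from the server):
//
//     KineticaType recordType = KineticaType.fromTable(db, "my_table");
//     IList<MyRecord> records = new List<MyRecord>();
//     db.DecodeRawBinaryDataUsingRecordType<MyRecord>(recordType, rawBinary, records);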
/// <summary>
/// Retrieve a KineticaType object by the type label.
/// </summary>
/// <param name="typeName">The label/name of the type.</param>
/// <returns>The KineticaType registered under the given label, or null if none is found.</returns>
private KineticaType GetType(string typeName)
{
    KineticaType type = null;
    string typeId;
    if (typeNameLookup.TryGetValue(typeName, out typeId))
    {
        knownTypes.TryGetValue(typeId, out type);
    }
    return type;
}
} // DecodeRawBinaryDataUsingRecordType

/// <summary>
/// Given a schema string for a certain record type, decode binary data into distinct
/// records (objects).
/// </summary>
/// <typeparam name="T">The type of the records.</typeparam>
/// <param name="schema_string">The schema for the records.</param>
/// <param name="records_binary">The binary encoded data to be decoded.</param>
/// <param name="records">The decoded objects/records.</param>
public void DecodeRawBinaryDataUsingSchemaString<T>(string schema_string,
                                                    IList<byte[]> records_binary,
                                                    IList<T> records) where T : new()
{
    // Create a KineticaType object based on the schema string
    KineticaType ktype = new KineticaType("", schema_string, null);

    // Using the KineticaType object, decode all the records from Avro binary encoding
    foreach (var bin_record in records_binary)
    {
        T obj = AvroDecode<T>(bin_record, ktype);
        records.Add(obj);
    }
} // DecodeRawBinaryDataUsingSchemaString
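// Usage sketch (hypothetical; the schema string would normally be returned by the
// server alongside the binary payload):
//
//     IList<MyRecord> records = new List<MyRecord>();
//     db.DecodeRawBinaryDataUsingSchemaString<MyRecord>(schemaString, rawBinary, records);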
/// <summary>
/// Decode binary Avro data into an object.
/// </summary>
/// <typeparam name="T">Type of expected object.</typeparam>
/// <param name="bytes">Binary Avro data.</param>
/// <param name="ktype">An optional KineticaType object to help in decoding the object.</param>
/// <returns>New object.</returns>
private T AvroDecode<T>(byte[] bytes, KineticaType ktype = null) where T : new()
{
    // Get the schema
    var schema = KineticaData.SchemaFromType(typeof(T), ktype);

    // Create a stream to read the binary data
    using (var ms = new MemoryStream(bytes))
    {
        // Create a new object to return
        T obj = new T();

        if (obj is Avro.Specific.ISpecificRecord)
        {
            var reader = new Avro.Specific.SpecificDefaultReader(schema, schema);
            reader.Read(obj, new BinaryDecoder(ms));
        }
        else
        {
            // Not ISpecificRecord, so first read into a new GenericRecord
            var reader = new Avro.Generic.DefaultReader(schema, schema);
            Avro.Generic.GenericRecord recordToReceive = new Avro.Generic.GenericRecord(schema);
            reader.Read(recordToReceive, new BinaryDecoder(ms));

            // Now, copy all the fields from the GenericRecord to obj
            foreach (var field in schema.Fields)
            {
                var property = obj.GetType()
                                  .GetProperties()
                                  .FirstOrDefault(prop => prop.Name.ToLowerInvariant() == field.Name.ToLowerInvariant());
                if (property == null)
                {
                    continue;
                }

                object val;
                // Try to get the field value out of the GenericRecord
                if (recordToReceive.TryGetValue(field.Name, out val))
                {
                    // If successful, write the value to the matching property of obj
                    property.SetValue(obj, val);
                }
            } // end foreach
        } // end if-else

        // Return the new object
        return obj;
    } // end using
} // end AvroDecode<T>
} // DecodeRawBinaryDataUsingSchemaString

/// <summary>
/// Given IDs of record types registered with Kinetica, decode binary
/// data into distinct records (objects).
/// </summary>
/// <typeparam name="T">The type of the records.</typeparam>
/// <param name="type_ids">The IDs for each of the records' types.</param>
/// <param name="records_binary">The binary encoded data to be decoded.</param>
/// <param name="records">The decoded objects/records.</param>
public void DecodeRawBinaryDataUsingTypeIDs<T>(IList<string> type_ids,
                                               IList<byte[]> records_binary,
                                               IList<T> records) where T : new()
{
    // Make sure that the numbers of type IDs and records are the same
    if (type_ids.Count != records_binary.Count)
    {
        throw new KineticaException("Unequal numbers of type IDs and binary encoded data objects provided.");
    }

    // Decode all the records
    for (int i = 0; i < records_binary.Count; ++i)
    {
        // Per record, use the respective type ID to create the appropriate KineticaType
        KineticaType ktype = KineticaType.fromTypeID(this, type_ids[i]);

        // Using the KineticaType object, decode the record
        T obj = AvroDecode<T>(records_binary[i], ktype);
        records.Add(obj);
    }
} // DecodeRawBinaryDataUsingTypeIDs
/// <summary>
/// Given a table name, add its record type to enable proper encoding of records
/// for insertion or updates.
/// </summary>
/// <param name="table_name">Name of the table.</param>
/// <param name="obj_type">The type associated with the table.</param>
public void AddTableType(string table_name, Type obj_type)
{
    try
    {
        // Get the type from the table
        KineticaType ktype = KineticaType.fromTable(this, table_name);
        if (ktype.getTypeID() == null)
        {
            throw new KineticaException($"Could not get type ID for table '{table_name}'");
        }
        this.knownTypes.TryAdd(ktype.getTypeID(), ktype);

        // Save a mapping of the object to the KineticaType
        if (obj_type != null)
        {
            this.SetKineticaSourceClassToTypeMapping(obj_type, ktype);
        }
    }
    catch (KineticaException ex)
    {
        throw new KineticaException("Error creating type from table", ex);
    }
} // end AddTableType
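// Usage sketch (hypothetical table and class names): registering the table's type
// up front lets later encode/decode calls look up the KineticaType by object type.
//
//     db.AddTableType("my_table", typeof(MyRecord));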
/// <summary>
/// Create an Avro Schema from a System.Type and a KineticaType.
/// </summary>
/// <param name="t">System.Type to be processed.</param>
/// <param name="ktype">KineticaType to be processed.</param>
/// <returns>The Avro RecordSchema generated for the type.</returns>
public static RecordSchema SchemaFromType(System.Type t, KineticaType ktype = null)
{
    string jsonType = AvroType(t, ktype);
    return Avro.Schema.Parse(jsonType) as RecordSchema;
}
/// <summary>
/// Constructor from Kinetica Type
/// </summary>
/// <param name="type">Type received from Kinetica Server</param>
public KineticaData(KineticaType type)
{
    m_schema = Avro.Schema.Parse(type.getSchemaString()) as RecordSchema;
    m_properties = this.GetType().GetProperties();
}
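// Sketch of a user record class (hypothetical): record classes typically derive
// from KineticaData so that schema and property information is available for
// encoding and decoding; nullable columns map to nullable properties.
//
//     public class MyRecord : KineticaData
//     {
//         public string name { get; set; }
//         public double? value { get; set; }   // nullable column
//     }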
} // end fromClass()

/// <summary>
/// Create a KineticaType object from the properties of a record class and Kinetica column properties.
/// It ignores any properties inherited from base classes, and also ignores any member fields of
/// the class.
/// </summary>
/// <param name="recordClass">A class type.</param>
/// <param name="label">Any label for the type.</param>
/// <param name="properties">Properties for the columns.</param>
/// <returns>The KineticaType created from the class.</returns>
public static KineticaType fromClass(Type recordClass,
                                     string label,
                                     IDictionary<string, IList<string>> properties = null)
{
    // Get the properties in declaration order, skipping properties inherited from base
    // classes (i.e. do not include any inherited members), and public instance properties only
    System.Reflection.PropertyInfo[] type_properties =
        recordClass.GetProperties(System.Reflection.BindingFlags.DeclaredOnly |
                                  System.Reflection.BindingFlags.Instance |
                                  System.Reflection.BindingFlags.Public);
    Array.Sort(type_properties,
               delegate(System.Reflection.PropertyInfo p1, System.Reflection.PropertyInfo p2)
               { return p1.MetadataToken.CompareTo(p2.MetadataToken); });

    // Need to have a list of columns
    List<Column> columns = new List<Column>();
    List<string> column_names = new List<string>();

    // Per property, check that it is one of: int, long, float, double, string, bytes
    foreach (var property in type_properties)
    {
        string column_name = "";
        Column.ColumnType column_type = Column.ColumnType.DEFAULT;
        IList<string> column_properties = null;
        bool is_column_nullable = false;

        // Get the column name
        column_name = property.Name;

        Type prop_type = property.PropertyType;

        // Check if the field is nullable (declared as T? or Nullable<T>)
        if (property.PropertyType.IsGenericType &&
            (property.PropertyType.GetGenericTypeDefinition() == typeof(Nullable<>)))
        {
            // The field is a nullable field
            is_column_nullable = true;

            // Change the property type to be the underlying type
            prop_type = Nullable.GetUnderlyingType(prop_type);
        }

        // Check the column data type (must be one of int, long, float, double, string, and bytes)
        if (prop_type == typeof(System.String))
        {
            column_type = Column.ColumnType.STRING;
        }
        else if (prop_type == typeof(System.Int32))
        {
            column_type = Column.ColumnType.INT;
        }
        else if (prop_type == typeof(System.Int64))
        {
            column_type = Column.ColumnType.LONG;
        }
        else if (prop_type == typeof(float))
        {
            column_type = Column.ColumnType.FLOAT;
        }
        else if (prop_type == typeof(double))
        {
            column_type = Column.ColumnType.DOUBLE;
        }
        else if (prop_type == typeof(byte))
        {
            column_type = Column.ColumnType.BYTES;
        }
        else
        {
            throw new KineticaException("Unsupported data type for " + prop_type.Name + ": " + prop_type +
                                        " (must be one of int, long, float, double, string, and byte)");
        }

        // Extract the given column's properties, if any
        if (properties != null)
        {
            // This column has properties given
            properties.TryGetValue(column_name, out column_properties);
        }

        // Keep a list of the column names for checking the properties
        column_names.Add(column_name);

        // Create the column
        Column column = new Column(column_name, column_type, column_properties);
        if (is_column_nullable)
        {
            // Set the appropriate nullable flag for the column
            column.setIsNullable(true);
        }

        // Save the column
        columns.Add(column);
    } // end looping over all members of the class type

    // Check for extraneous properties
    if (properties != null)
    {
        IEnumerable<string> property_keys = properties.Keys;
        var unknown_columns = property_keys.Where(e => !column_names.Contains(e));

        // Check if any property is provided for wrong/non-existing columns
        if (unknown_columns.Any())
        {
            throw new KineticaException("Properties specified for unknown columns.");
        }
    }

    // Create the Kinetica type
    KineticaType kType = new KineticaType(label, columns, properties);

    // Save the class information in the type
    kType.saveSourceType(recordClass);

    return kType;
} // end fromClass()
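// Usage sketch (hypothetical; "char16" and "primary_key" are examples of
// server-side column properties and may not match your schema):
//
//     var columnProps = new Dictionary<string, IList<string>>
//     {
//         { "name", new List<string> { "char16", "primary_key" } }
//     };
//     KineticaType ktype = KineticaType.fromClass(typeof(MyRecord), "my_label", columnProps);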
/// <summary>
/// Create a KineticaIngestor object with the given parameters.
/// </summary>
/// <param name="kdb">The Kinetica connection object.</param>
/// <param name="table_name">Name of the table to insert records into.</param>
/// <param name="batch_size">The size of the batches of records queued per worker (must be at least one).</param>
/// <param name="ktype">The KineticaType associated with the records to be inserted.</param>
/// <param name="options">Optional parameters for the record insertion.</param>
/// <param name="workers">Optional list of worker node URLs; if not given, it is obtained from Kinetica.</param>
public KineticaIngestor(Kinetica kdb, string table_name, int batch_size, KineticaType ktype,
                        Dictionary<string, string> options = null, Utils.WorkerList workers = null)
{
    this.kineticaDB = kdb;
    this.table_name = table_name;
    this.ktype = ktype;

    // Validate and save the batch size
    if (batch_size < 1)
    {
        throw new KineticaException($"Batch size must be at least one; given {batch_size}.");
    }
    this.batch_size = batch_size;

    // Save the options (make it read-only if it exists)
    if (options != null)
    {
        this.options = options;
        //this.options = options.ToImmutableDictionary<string, string>();
    }
    else
    {
        this.options = null;
    }

    // Set up the primary and shard key builders
    // -----------------------------------------
    this.primary_key_builder = new Utils.RecordKeyBuilder<T>(true, this.ktype);
    this.shard_key_builder = new Utils.RecordKeyBuilder<T>(false, this.ktype);

    // Based on the Java implementation
    if (this.primary_key_builder.hasKey())
    {
        // There is a primary key for the given T
        // Now check if there is a distinct shard key
        if (!this.shard_key_builder.hasKey() ||
            this.shard_key_builder.hasSameKey(this.primary_key_builder))
        {
            this.shard_key_builder = this.primary_key_builder; // no distinct shard key
        }
    }
    else // there is no primary key for the given T
    {
        this.primary_key_builder = null;

        // Check if there is a shard key for T
        if (!this.shard_key_builder.hasKey())
        {
            this.shard_key_builder = null;
        }
    } // done setting up the key builders

    // Set up the worker queues
    // -------------------------
    // Do we update records if there are matching primary keys in the
    // database already?
    bool update_on_existing_pk = ((options != null) &&
                                  options.ContainsKey(InsertRecordsRequest<T>.Options.UPDATE_ON_EXISTING_PK) &&
                                  options[InsertRecordsRequest<T>.Options.UPDATE_ON_EXISTING_PK].Equals(InsertRecordsRequest<T>.Options.TRUE));

    // Do T type records have a primary key?
    bool has_primary_key = (this.primary_key_builder != null);

    this.worker_queues = new List<Utils.WorkerQueue<T>>();

    try
    {
        // If no workers are given, try to get them from Kinetica
        if ((workers == null) || (workers.Count == 0))
        {
            workers = new Utils.WorkerList(kdb);
        }

        // If we end up with multiple workers, either given by the
        // user or obtained from Kinetica, then use those
        if ((workers != null) && (workers.Count > 0))
        {
            // Add a worker queue per worker
            foreach (System.Uri worker_url in workers)
            {
                string insert_records_worker_url_str = (worker_url.ToString() + "insert/records");
                System.Uri url = new System.Uri(insert_records_worker_url_str);
                Utils.WorkerQueue<T> worker_queue = new Utils.WorkerQueue<T>(url, batch_size,
                                                                             has_primary_key,
                                                                             update_on_existing_pk);
                this.worker_queues.Add(worker_queue);
            }

            // Get the worker rank information from Kinetica
            this.routing_table = kdb.adminShowShards().rank;

            // Check that enough worker URLs are specified
            for (int i = 0; i < routing_table.Count; ++i)
            {
                if (this.routing_table[i] > this.worker_queues.Count)
                {
                    throw new KineticaException("Not enough worker URLs specified.");
                }
            }
        }
        else // multi-head ingest is NOT turned on; use the regular Kinetica IP address
        {
            string insert_records_url_str = (kdb.URL.ToString() + "insert/records");
            System.Uri url = new System.Uri(insert_records_url_str);
            Utils.WorkerQueue<T> worker_queue = new Utils.WorkerQueue<T>(url, batch_size,
                                                                         has_primary_key,
                                                                         update_on_existing_pk);
            this.worker_queues.Add(worker_queue);
            this.routing_table = null;
        }
    }
    catch (Exception ex)
    {
        throw new KineticaException(ex.ToString());
    }

    // Create the random number generator
    this.random = new Random((int)DateTime.Now.Ticks);
} // end constructor KineticaIngestor
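// Construction sketch (hypothetical; KineticaIngestor<T> is generic over the record
// type, and the batch size of 1000 is just an illustration):
//
//     KineticaType ktype = KineticaType.fromTable(db, "my_table");
//     var ingestor = new KineticaIngestor<MyRecord>(db, "my_table", 1000, ktype);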
} // end AddTableType

/// <summary>
/// Saves an object class type to a KineticaType association. If the class type already exists
/// in the map, replaces the old KineticaType value.
/// </summary>
/// <param name="objectType">The type of the object.</param>
/// <param name="kineticaType">The associated KineticaType object.</param>
public void SetKineticaSourceClassToTypeMapping(Type objectType, KineticaType kineticaType)
{
    // Use the indexer so that an existing mapping is replaced (as documented)
    // rather than having Add() throw on a duplicate key
    this.kineticaTypeLookup[objectType] = kineticaType;
    return;
} // end SetKineticaSourceClassToTypeMapping
/// <summary>
/// Create a JSON type-string from a System.Type.
/// </summary>
/// <param name="t">System.Type to be evaluated.</param>
/// <param name="ktype">A KineticaType object that describes the whole type
/// to which <paramref name="t"/> belongs.</param>
/// <returns>JSON-formatted string.</returns>
private static string AvroType(System.Type t, KineticaType ktype)
{
    if (t == null)
    {
        throw new KineticaException("Null type passed to AvroType()");
    }

    switch (t.Name)
    {
        case "Boolean": return "\"boolean\"";
        case "Int32": return "\"int\"";
        case "Int64": return "\"long\"";
        case "Double": return "\"double\"";
        case "Single": return "\"float\"";
        case "Byte[]": return "\"bytes\"";
        case "String": return "\"string\"";
        case "String[]": return $"{{ \"type\":\"array\", \"items\":\"string\"}}";
        case "String[][]": return $"{{ \"type\":\"array\", \"items\":{{ \"type\":\"array\", \"items\":\"string\"}}}}";

        // For a nullable object, return the avro type of the underlying type (e.g. double)
        case "Nullable`1":
            return AvroType(Nullable.GetUnderlyingType(t), ktype);

        case "List`1":
        case "IList`1":
            if (t.IsGenericType)
            {
                var genericParams = t.GenericTypeArguments;
                if (1 == genericParams.Length)
                {
                    return $"{{ \"type\":\"array\", \"items\":{AvroType(genericParams[0], ktype)}}}";
                }
            }
            break;

        case "Dictionary`2":
        case "IDictionary`2":
            if (t.IsGenericType)
            {
                var genericParams = t.GenericTypeArguments;
                if (2 == genericParams.Length)
                {
                    return $"{{ \"type\":\"map\", \"values\":{AvroType(genericParams[1], ktype)}}}";
                }
            }
            break;

        // Ignore the "Schema" property inherited from KineticaData
        case "Schema":
            break;

        // Ignore the "RecordSchema" property inherited from KineticaRecord
        case "RecordSchema":
            break;

        // If the type is any other object, treat it as a sub-record in Avro
        default:
            if (t.IsSubclassOf(typeof(Object)))
            {
                string fields = "";

                // Create the avro string for each property of the class
                PropertyInfo[] type_properties = t.GetProperties(BindingFlags.DeclaredOnly |
                                                                 BindingFlags.Instance |
                                                                 BindingFlags.Public);
                Array.Sort(type_properties,
                           delegate(PropertyInfo p1, PropertyInfo p2)
                           { return p1.MetadataToken.CompareTo(p2.MetadataToken); });

                foreach (var prop in type_properties)
                {
                    bool is_nullable = false;
                    var prop_type = prop.PropertyType;
                    if (prop_type.IsGenericType &&
                        prop_type.GetGenericTypeDefinition() == typeof(Nullable<>))
                    {
                        // The property is nullable based on reflection
                        is_nullable = true;
                    }
                    else if ((ktype != null) && ktype.getColumn(prop.Name).isNullable())
                    {
                        // The property is nullable based on information saved in the associated KineticaType
                        is_nullable = true;
                    }

                    // Get the avro type string for the property type
                    string avroType = AvroType(prop_type, ktype);
                    if (!String.IsNullOrWhiteSpace(avroType))
                    {
                        if (is_nullable)
                        {
                            // The field is nullable
                            fields += $"{{\"name\":\"{prop.Name}\",\"type\":[{avroType},\"null\"]}},";
                        }
                        else
                        {
                            // It's a regular field
                            fields += $"{{\"name\":\"{prop.Name}\",\"type\":{avroType}}},";
                        }
                    }
                }

                // Trim the trailing comma from the fields
                char[] comma = { ',' };
                fields = fields.TrimEnd(comma);

                // Put together the avro fields with the name to create a record type
                return $"{{\"type\":\"record\",\"name\":\"{t.Name}\",\"fields\":[{fields}]}}";
            }

            System.Diagnostics.Debug.WriteLine($"Unknown type: {t.Name}");
            break;
    }

    return "";
} // end AvroType
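// Illustration (hypothetical class): for a simple record class such as
//
//     public class Point { public double x { get; set; } public double y { get; set; } }
//
// the logic above would produce a record schema along the lines of:
//
//     {"type":"record","name":"Point","fields":[{"name":"x","type":"double"},{"name":"y","type":"double"}]}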
} // end ContentsToString

/// <summary>
/// Decodes binary encoded data of a dynamically created table returned by the server.
/// </summary>
/// <param name="dynamic_table_schema_string">The schema string for the dynamically created table.</param>
/// <param name="encoded_data">The binary encoded data.</param>
/// <returns>A list of KineticaRecord objects with the decoded data.</returns>
public static IList<KineticaRecord> DecodeDynamicTableRecords(string dynamic_table_schema_string, byte[] encoded_data)
{
    // Get a record schema from the schema string
    Schema dynamic_table_schema;
    try
    {
        dynamic_table_schema = Avro.Schema.Parse(dynamic_table_schema_string);
    }
    catch (Exception ex)
    {
        throw new KineticaException(ex.ToString());
    }

    // The container for the decoded data (put into distinct records)
    IList<KineticaRecord> records = new List<KineticaRecord>();

    // Decode the dynamic table schema to extract the column names and types.
    // Then, decode each individual record.
    using (var ms = new MemoryStream(encoded_data))
    {
        // Read the table schema into a new GenericRecord
        // ----------------------------------------------
        var reader = new Avro.Generic.DefaultReader(dynamic_table_schema, dynamic_table_schema);
        BinaryDecoder decoder = new BinaryDecoder(ms);
        Avro.Generic.GenericRecord obj =
            (Avro.Generic.GenericRecord)reader.Read(null, dynamic_table_schema, dynamic_table_schema, decoder);

        // Extract the column names from the encoded data
        object column_headers_0 = new object();
        Object[] column_headers = null;
        if (obj.TryGetValue("column_headers", out column_headers_0)) // try to get the data out
        {
            column_headers = (Object[])column_headers_0;
        }

        // Extract the column types from the encoded data
        object column_types_0 = new object();
        Object[] column_types = null;
        if (obj.TryGetValue("column_datatypes", out column_types_0)) // try to get the data out
        {
            column_types = (Object[])column_types_0;
        }

        // Find out how many columns are returned
        int num_columns = column_headers.Length;

        // Extract the column data from the encoded data (ignore the headers and types)
        // and create a list with only the record data
        Object[][] encoded_column_data = new Object[num_columns][];
        for (int i = 0; i < num_columns; ++i)
        {
            // Get the column name (e.g. the first column is titled "column_1")
            string column_name = $"column_{i+1}";

            // Get the column data out
            object column_data_0 = new object();
            Object[] column_data = null;
            if (obj.TryGetValue(column_name, out column_data_0)) // try to get the data out
            {
                column_data = (Object[])column_data_0;
            }

            // Save this column's data in the 2D array declared above
            encoded_column_data[i] = column_data;
        } // done separating the column data from the headers and types

        // Find out how many values per column are returned
        int num_records = encoded_column_data[0].Length;

        // Make sure that all the column data are of the same length
        foreach (Object[] l in encoded_column_data)
        {
            if (l.Length != num_records)
            {
                throw new KineticaException("Dynamic table has uneven column data lengths");
            }
        }

        // Based on the column headers and types, create a KineticaType
        KineticaType dynamic_record_type =
            KineticaType.fromDynamicSchema(dynamic_table_schema_string, column_headers, column_types);

        // Using the dynamic record type, create a RecordSchema object
        // (off of which we'll create the records)
        Avro.RecordSchema record_schema = (Avro.RecordSchema)dynamic_record_type.getSchema();

        // Create the records by decoding the binary encoded data
        // (column-major data into row-major data)
        for (int record_idx = 0; record_idx < num_records; ++record_idx)
        {
            // Create a GenericRecord object based on the KineticaType
            KineticaRecord record = new KineticaRecord(record_schema);

            // Go through each column, decode the next value, and put it into the record
            for (int column_idx = 0; column_idx < num_columns; ++column_idx)
            {
                // Get the value to be put
                var val = encoded_column_data[column_idx][record_idx];

                // Get the field of the record into which the value needs to be saved
                var field = record_schema.Fields[column_idx];

                // Set the value
                record.Add(field.Name, val);
            } // end inner for loop

            // Save the record
            records.Add(record);
        } // end outer for loop
    } // end decoding block

    return records;
} // end DecodeDynamicTableRecords
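// Usage sketch (hypothetical variable names; the schema string and binary payload
// come from a server response for a dynamically generated table, e.g. an
// aggregation or a by-column record retrieval):
//
//     IList<KineticaRecord> records =
//         KineticaRecord.DecodeDynamicTableRecords(responseSchemaString, responseBinaryData);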
/// <summary>
/// Create a RecordRetriever object with the given parameters.
/// </summary>
/// <param name="kdb">The Kinetica connection object.</param>
/// <param name="table_name">Name of the table to retrieve records from.</param>
/// <param name="ktype">The KineticaType associated with the table's records.</param>
/// <param name="workers">Optional list of worker node URLs; if not given, it is obtained from Kinetica.</param>
public RecordRetriever(Kinetica kdb, string table_name, KineticaType ktype,
                       Utils.WorkerList workers = null)
{
    this.kineticaDB = kdb;
    this.table_name = table_name;
    this.ktype = ktype;

    // Set up the shard key builder
    // ----------------------------
    this.shard_key_builder = new Utils.RecordKeyBuilder<T>(false, this.ktype);

    // Check if there is a shard key for T
    if (!this.shard_key_builder.hasKey())
    {
        this.shard_key_builder = null;
    }

    // Set up the worker queues
    // -------------------------
    this.worker_queues = new List<Utils.WorkerQueue<T>>();

    try
    {
        // If no workers are given, try to get them from Kinetica
        if ((workers == null) || (workers.Count == 0))
        {
            workers = new Utils.WorkerList(kdb);
        }

        // If we end up with multiple workers, either given by the
        // user or obtained from Kinetica, then use those
        if ((workers != null) && (workers.Count > 0))
        {
            // Add a worker queue per worker
            foreach (System.Uri worker_url in workers)
            {
                string get_records_worker_url_str = (worker_url.ToString() + "get/records");
                System.Uri url = new System.Uri(get_records_worker_url_str);
                Utils.WorkerQueue<T> worker_queue = new Utils.WorkerQueue<T>(url);
                this.worker_queues.Add(worker_queue);
            }

            // Get the worker rank information from Kinetica
            this.routing_table = kdb.adminShowShards().rank;

            // Check that enough worker URLs are specified
            for (int i = 0; i < routing_table.Count; ++i)
            {
                if (this.routing_table[i] > this.worker_queues.Count)
                {
                    throw new KineticaException("Not enough worker URLs specified.");
                }
            }
        }
        else // multi-head retrieval is NOT turned on; use the regular Kinetica IP address
        {
            string get_records_url_str = (kdb.URL.ToString() + "get/records");
            System.Uri url = new System.Uri(get_records_url_str);
            Utils.WorkerQueue<T> worker_queue = new Utils.WorkerQueue<T>(url);
            this.worker_queues.Add(worker_queue);
            this.routing_table = null;
        }
    }
    catch (Exception ex)
    {
        throw new KineticaException(ex.ToString());
    }

    // Create the random number generator
    this.random = new Random((int)DateTime.Now.Ticks);
} // end constructor RecordRetriever
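// Construction sketch (hypothetical; RecordRetriever<T> is generic over the record type):
//
//     KineticaType ktype = KineticaType.fromTable(db, "my_table");
//     var retriever = new RecordRetriever<MyRecord>(db, "my_table", ktype);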