private void AddValue(object classInstance, DataColumn dc)
{
    // Resolve the property whose name matches the column's field and read its value
    // from the instance; stays null when the class declares no such property.
    PropertyInfo property = _classType.GetTypeInfo().GetDeclaredProperty(dc.Field.Name);
    object value = property?.GetValue(classInstance);

    // Scalar field: append the value (possibly null) directly.
    if (!dc.Field.IsArray)
    {
        dc.Add(value);
        return;
    }

    // Array field: append each element one repetition level deeper.
    // Non-enumerable or null values are skipped, matching prior behavior.
    if (value is IEnumerable elements)
    {
        dc.IncrementLevel();
        foreach (object element in elements)
        {
            dc.Add(element);
        }
        dc.DecrementLevel();
    }
}
protected object WriteReadSingle(DataField field, object value)
{
    using (var ms = new MemoryStream())
    {
        // Serialize the single value, uncompressed, into the in-memory stream.
        using (var writer = new ParquetWriter3(new Schema(field), ms))
        {
            writer.CompressionMethod = CompressionMethod.None;
            using (ParquetRowGroupWriter rowGroup = writer.CreateRowGroup(1))
            {
                var dataColumn = new DataColumn(field);
                dataColumn.Add(value);
                rowGroup.Write(dataColumn);
            }
        }

        // Rewind and read back; the first row group holds the value.
        ms.Position = 0;
        using (var reader = new ParquetReader3(ms))
        {
            foreach (ParquetRowGroupReader rowGroupReader in reader)
            {
                DataColumn column = rowGroupReader.ReadColumn(field);
                return column.DefinedData.OfType<object>().FirstOrDefault();
            }

            // No row groups present — nothing to return.
            return null;
        }
    }
}
public void Simple_repeated_field_write_read()
{
    //v2: round-trip a row containing a scalar and a repeated string field
    var ds = new DataSet(
        new DataField<int>("id"),
        new DataField<IEnumerable<string>>("items"));
    ds.Add(1, new[] { "one", "two" });

    DataSet roundTripped = ds.WriteRead();

    Assert.Equal(1, roundTripped[0][0]);
    Assert.Equal(new[] { "one", "two" }, roundTripped[0][1]);

    //v3
    // arrange: build a column holding two lists of two elements each
    var field = new DataField<IEnumerable<int>>("items");
    var column = new DataColumn(field);
    column.IncrementLevel();
    column.Add(1);
    column.Add(2);
    column.DecrementLevel();
    column.IncrementLevel();
    column.Add(3);
    column.Add(4);
    column.DecrementLevel();

    // act
    DataColumn result = WriteReadSingleColumn(field, 2, column);

    // assert: flat data survives, levels mark list boundaries
    Assert.Equal(new int[] { 1, 2, 3, 4 }, result.DefinedData);
    Assert.Equal(new int[] { 1, 1, 1, 1 }, result.DefinitionLevels);
    Assert.Equal(new int[] { 0, 1, 0, 1 }, result.RepetitionLevels);
}
protected object WriteReadSingle(DataField field, object value, CompressionMethod compressionMethod = CompressionMethod.None)
{
    //for sanity, use disconnected streams
    byte[] buffer;

    using (var writeStream = new MemoryStream())
    {
        // Serialize the single value with the requested compression.
        using (var writer = new ParquetWriter3(new Schema(field), writeStream))
        {
            writer.CompressionMethod = compressionMethod;
            using (ParquetRowGroupWriter rowGroup = writer.CreateRowGroup(1))
            {
                var dataColumn = new DataColumn(field);
                dataColumn.Add(value);
                rowGroup.Write(dataColumn);
            }
        }

        buffer = writeStream.ToArray();
    }

    // Read back from a fresh stream over the captured bytes (position starts at 0).
    using (var readStream = new MemoryStream(buffer))
    using (var reader = new ParquetReader3(readStream))
    using (ParquetRowGroupReader rowGroupReader = reader.OpenRowGroupReader(0))
    {
        DataColumn column = rowGroupReader.ReadColumn(field);
        return column.DefinedData.OfType<object>().FirstOrDefault();
    }
}
protected object WriteReadSingle(
    DataField field,
    object value,
    CompressionMethod compressionMethod = CompressionMethod.None,
    bool flushToDisk = false)
{
    using (var ms = new MemoryStream())
    {
        // Serialize the single value with the requested compression.
        using (var writer = new ParquetWriter3(new Schema(field), ms))
        {
            writer.CompressionMethod = compressionMethod;
            using (ParquetRowGroupWriter rowGroup = writer.CreateRowGroup(1))
            {
                var dataColumn = new DataColumn(field);
                dataColumn.Add(value);
                rowGroup.Write(dataColumn);
            }
        }

        // Optionally persist the stream for external inspection.
        if (flushToDisk)
        {
            FlushTempFile(ms);
        }

        // Rewind and read the value back out of row group 0.
        ms.Position = 0;
        using (var reader = new ParquetReader3(ms))
        using (ParquetRowGroupReader rowGroupReader = reader.OpenRowGroupReader(0))
        {
            DataColumn column = rowGroupReader.ReadColumn(field);
            return column.DefinedData.OfType<object>().FirstOrDefault();
        }
    }
}
private bool AddAttribute(string attributeName, Type attributeType)
{
    // Reject a duplicate attribute name.
    if (_data.ContainsKey(attributeName))
    {
        return false;
    }

    var attribute = new Attribute(attributeName, attributeType);
    var dataColumn = new DataColumn(attribute);

    // Backfill existing rows with the type's default value so all columns
    // keep the same row count.
    if (Count > 0)
    {
        var defaultValue = GetDefaultValue(attributeType);
        for (int i = 0; i < Count; i++)
        {
            dataColumn.Add(defaultValue);
        }
    }

    _data[attributeName] = dataColumn;
    return true;
}