/// Dumps ORC metadata for every file in <c>files</c> as JSON on stdout:
/// file version info, schema, per-stripe and whole-file column statistics,
/// stripe/stream/encoding layout, and (for the columns listed in
/// <c>rowIndexCols</c>) row-group indexes plus bloom filter indexes.
/// Multiple input files are wrapped in a JSON array; a single file is emitted
/// as one bare JSON object. <c>prettyPrint</c> currently has no effect beyond
/// choosing the same raw writer output (see the disabled #if false block below).
/// <c>printTimeZone</c> adds each stripe writer's timezone when available.
public static void printJsonMetaData(List <string> files, Configuration conf, List <int> rowIndexCols, bool prettyPrint, bool printTimeZone) {
    JsonWriter writer = new JsonWriter();
    bool multiFile = files.Count > 1;
    // Array wrapper only when dumping more than one file.
    if (multiFile) {
        writer.array();
    } else {
        writer.newObject();
    }
    foreach (string filename in files) {
        if (multiFile) {
            writer.newObject();
        }
        writer.key("fileName").value(Path.GetFileName(filename));
        Reader reader = OrcFile.createReader(filename, OrcFile.readerOptions(conf));
        writer.key("fileVersion").value(OrcFile.VersionHelper.getName(reader.getFileVersion()));
        writer.key("writerVersion").value(reader.getWriterVersion().ToString());
        // RecordReaderImpl is needed (beyond the plain Reader) to read stripe
        // footers and row indexes directly.
        using (RecordReaderImpl rows = (RecordReaderImpl)reader.rows()) {
            writer.key("numberOfRows").value(reader.getNumberOfRows());
            writer.key("compression").value(reader.getCompression().ToString());
            if (reader.getCompression() != CompressionKind.NONE) {
                writer.key("compressionBufferSize").value(reader.getCompressionSize());
            }
            writer.key("schemaString").value(reader.getObjectInspector().getTypeName());
            writer.key("schema").array();
            writeSchema(writer, reader.getTypes());
            writer.endArray();

            // Per-stripe column statistics: one object per stripe, each holding
            // one object per column (stripe numbers are 1-based in the output).
            writer.key("stripeStatistics").array();
            List <StripeStatistics> stripeStatistics = reader.getStripeStatistics();
            for (int n = 0; n < stripeStatistics.Count; n++) {
                writer.newObject();
                writer.key("stripeNumber").value(n + 1);
                StripeStatistics ss = stripeStatistics[n];
                writer.key("columnStatistics").array();
                for (int i = 0; i < ss.getColumnStatistics().Length; i++) {
                    writer.newObject();
                    writer.key("columnId").value(i);
                    writeColumnStatistics(writer, ss.getColumnStatistics()[i]);
                    writer.endObject();
                }
                writer.endArray();
                writer.endObject();
            }
            writer.endArray();

            // Whole-file column statistics; colCount is reused below to size the
            // sarg-column mask for row index reads.
            ColumnStatistics[] stats = reader.getStatistics();
            int colCount = stats.Length;
            writer.key("fileStatistics").array();
            for (int i = 0; i < stats.Length; ++i) {
                writer.newObject();
                writer.key("columnId").value(i);
                writeColumnStatistics(writer, stats[i]);
                writer.endObject();
            }
            writer.endArray();

            // Physical layout: for each stripe, its streams (with computed start
            // offsets), column encodings, and optionally row-group indexes.
            writer.key("stripes").array();
            int stripeIx = -1;
            foreach (StripeInformation stripe in reader.getStripes()) {
                ++stripeIx;
                long stripeStart = stripe.getOffset();
                OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
                writer.newObject(); // start of stripe information
                writer.key("stripeNumber").value(stripeIx + 1);
                writer.key("stripeInformation");
                writeStripeInformation(writer, stripe);
                if (printTimeZone) {
                    writer.key("writerTimezone").value(
                        footer.HasWriterTimezone ? footer.WriterTimezone : FileDump.UNKNOWN);
                }
                // Stream offsets are not stored in the footer; they are derived by
                // accumulating stream lengths from the stripe start.
                long sectionStart = stripeStart;
                writer.key("streams").array();
                foreach (OrcProto.Stream section in footer.StreamsList) {
                    writer.newObject();
                    string kind = section.HasKind ? section.Kind.ToString() : FileDump.UNKNOWN;
                    writer.key("columnId").value(section.Column);
                    writer.key("section").value(kind);
                    writer.key("startOffset").value(sectionStart);
                    writer.key("length").value(section.Length);
                    sectionStart += (long)section.Length;
                    writer.endObject();
                }
                writer.endArray();
                writer.key("encodings").array();
                for (int i = 0; i < footer.ColumnsCount; ++i) {
                    writer.newObject();
                    OrcProto.ColumnEncoding encoding = footer.ColumnsList[i];
                    writer.key("columnId").value(i);
                    writer.key("kind").value(encoding.Kind.ToString());
                    // Dictionary encodings carry an extra size field worth surfacing.
                    if (encoding.Kind == OrcProto.ColumnEncoding.Types.Kind.DICTIONARY ||
                        encoding.Kind == OrcProto.ColumnEncoding.Types.Kind.DICTIONARY_V2) {
                        writer.key("dictionarySize").value(encoding.DictionarySize);
                    }
                    writer.endObject();
                }
                writer.endArray();
                if (rowIndexCols != null && rowIndexCols.Count != 0) {
                    // include the columns that are specified, only if the columns are included, bloom filter
                    // will be read
                    bool[] sargColumns = new bool[colCount];
                    foreach (int colIdx in rowIndexCols) {
                        sargColumns[colIdx] = true;
                    }
                    RecordReaderImpl.Index indices = rows.readRowIndex(stripeIx, null, sargColumns);
                    writer.key("indexes").array();
                    foreach (int col in rowIndexCols) {
                        writer.newObject();
                        writer.key("columnId").value(col);
                        writeRowGroupIndexes(writer, col, indices.getRowGroupIndex());
                        writeBloomFilterIndexes(writer, col, indices.getBloomFilterIndex());
                        writer.endObject();
                    }
                    writer.endArray();
                }
                writer.endObject(); // end of stripe information
            }
            writer.endArray();

            long fileLen = new FileInfo(filename).Length;
            long paddedBytes = FileDump.getTotalPaddingSize(reader);
            // empty ORC file is ~45 bytes. Assumption here is file length always >0
            double percentPadding = ((double)paddedBytes / (double)fileLen) * 100;
            writer.key("fileLength").value(fileLen);
            writer.key("paddingLength").value(paddedBytes);
            writer.key("paddingRatio").value(percentPadding);
            // NOTE(review): redundant with the enclosing using-block's disposal —
            // kept as-is since close() semantics of RecordReaderImpl are not visible here.
            rows.close();
        }
        writer.endObject();
    }
    if (multiFile) {
        writer.endArray();
    }
    if (prettyPrint) {
#if false
        // Disabled Java-era pretty-printing path (org.json re-parse); retained
        // for reference but not compiled.
        string prettyJson;
        if (multiFile) {
            JSONArray jsonArray = new JSONArray(writer.toString());
            prettyJson = jsonArray.toString(2);
        } else {
            JSONObject jsonObject = new JSONObject(writer.toString());
            prettyJson = jsonObject.toString(2);
        }
#else
        string prettyJson = writer.ToString();
#endif
        System.Console.WriteLine(prettyJson);
    } else {
        System.Console.WriteLine(writer.ToString());
    }
}
/// Reads the row-group indexes for one stripe and, for columns flagged in
/// <c>sargColumns</c>, the bloom filter index stored immediately after the
/// row index stream (the two reads are combined into a single IO).
/// <c>footer</c>, <c>indexes</c> and <c>bloomFilterIndices</c> may be null and
/// are then read/allocated here; <c>included</c> null means "all columns".
/// Already-populated slots of <c>indexes</c> are left untouched.
/// Returns an Index wrapping the (possibly caller-supplied) arrays.
public RecordReaderImpl.Index readRowIndex(StripeInformation stripe, OrcProto.StripeFooter footer,
    bool[] included, OrcProto.RowIndex[] indexes, bool[] sargColumns,
    OrcProto.BloomFilterIndex[] bloomFilterIndices) {
    if (footer == null) {
        footer = readStripeFooter(stripe);
    }
    if (indexes == null) {
        indexes = new OrcProto.RowIndex[typeCount];
    }
    if (bloomFilterIndices == null) {
        bloomFilterIndices = new OrcProto.BloomFilterIndex[typeCount];
    }
    long offset = stripe.getOffset();
    IList<OrcProto.Stream> streams = footer.StreamsList;
    for (int i = 0; i < streams.Count; i++) {
        OrcProto.Stream stream = streams[i];
        OrcProto.Stream nextStream = null;
        if (i < streams.Count - 1) {
            nextStream = streams[i + 1];
        }
        int col = (int)stream.Column;
        int len = (int)stream.Length;
        // row index stream and bloom filter are interlaced, check if the sarg column contains bloom
        // filter and combine the io to read row index and bloom filters for that column together
        if (stream.HasKind && (stream.Kind == OrcProto.Stream.Types.Kind.ROW_INDEX)) {
            bool readBloomFilter = false;
            // FIX: nextStream is null when the ROW_INDEX stream is the last stream in the
            // footer; the original dereferenced nextStream.Kind unconditionally and would
            // throw NullReferenceException for a sarg column in that position. Also mirror
            // the HasKind guard used for `stream` above before trusting the proto field.
            if (sargColumns != null && sargColumns[col] &&
                nextStream != null && nextStream.HasKind &&
                nextStream.Kind == OrcProto.Stream.Types.Kind.BLOOM_FILTER) {
                len += (int)nextStream.Length;
                i += 1; // consume the bloom filter stream as part of this read
                readBloomFilter = true;
            }
            if ((included == null || included[col]) && indexes[col] == null) {
                byte[] buffer = new byte[len];
                file.readFully(offset, buffer, 0, buffer.Length);
                ByteBuffer bb = ByteBuffer.wrap(buffer);
                indexes[col] = OrcProto.RowIndex.ParseFrom(InStream.create(null, "index",
                    new List<DiskRange> { new RecordReaderImpl.BufferChunk(bb, 0) },
                    (long)stream.Length, codec, bufferSize));
                if (readBloomFilter) {
                    // The bloom filter bytes start right after the row index bytes
                    // inside the combined buffer.
                    bb.position((int)stream.Length);
                    bloomFilterIndices[col] = OrcProto.BloomFilterIndex.ParseFrom(InStream.create(
                        null, "bloom_filter",
                        new List<DiskRange> { new RecordReaderImpl.BufferChunk(bb, 0) },
                        (long)nextStream.Length, codec, bufferSize));
                }
            }
        }
        offset += len;
    }
    RecordReaderImpl.Index index = new RecordReaderImpl.Index(indexes, bloomFilterIndices);
    return index;
}
/// Reads the row-group indexes for one stripe; for each column flagged in
/// <c>sargColumns</c>, the bloom filter stream that immediately follows the
/// row index stream is fetched in the same IO and parsed as well.
/// Null <c>footer</c>/<c>indexes</c>/<c>bloomFilterIndices</c> are read or
/// allocated on demand (sized by <c>typeCount</c>); a null <c>included</c>
/// mask means every column is wanted. Existing entries in <c>indexes</c> are
/// preserved. Returns an Index over the populated arrays.
public RecordReaderImpl.Index readRowIndex(StripeInformation stripe, OrcProto.StripeFooter footer,
    bool[] included, OrcProto.RowIndex[] indexes, bool[] sargColumns,
    OrcProto.BloomFilterIndex[] bloomFilterIndices) {
    if (footer == null) {
        footer = readStripeFooter(stripe);
    }
    if (indexes == null) {
        indexes = new OrcProto.RowIndex[typeCount];
    }
    if (bloomFilterIndices == null) {
        bloomFilterIndices = new OrcProto.BloomFilterIndex[typeCount];
    }
    long offset = stripe.getOffset();
    IList<OrcProto.Stream> streams = footer.StreamsList;
    for (int i = 0; i < streams.Count; i++) {
        OrcProto.Stream stream = streams[i];
        OrcProto.Stream nextStream = null;
        if (i < streams.Count - 1) {
            nextStream = streams[i + 1];
        }
        int col = (int)stream.Column;
        int len = (int)stream.Length;
        // row index stream and bloom filter are interlaced, check if the sarg column contains bloom
        // filter and combine the io to read row index and bloom filters for that column together
        if (stream.HasKind && (stream.Kind == OrcProto.Stream.Types.Kind.ROW_INDEX)) {
            bool readBloomFilter = false;
            // FIX: guard nextStream against null — it is null when this ROW_INDEX stream
            // is the final stream in the footer, and the original code would then throw
            // NullReferenceException for a sarg column. The HasKind check matches the
            // guard already applied to `stream`.
            if (sargColumns != null && sargColumns[col] &&
                nextStream != null && nextStream.HasKind &&
                nextStream.Kind == OrcProto.Stream.Types.Kind.BLOOM_FILTER) {
                len += (int)nextStream.Length;
                i += 1; // skip the bloom filter stream; it is consumed with this read
                readBloomFilter = true;
            }
            if ((included == null || included[col]) && indexes[col] == null) {
                byte[] buffer = new byte[len];
                file.readFully(offset, buffer, 0, buffer.Length);
                ByteBuffer bb = ByteBuffer.wrap(buffer);
                indexes[col] = OrcProto.RowIndex.ParseFrom(InStream.create(null, "index",
                    new List<DiskRange> { new RecordReaderImpl.BufferChunk(bb, 0) },
                    (long)stream.Length, codec, bufferSize));
                if (readBloomFilter) {
                    // Bloom filter bytes begin right after the row index portion of the
                    // combined buffer.
                    bb.position((int)stream.Length);
                    bloomFilterIndices[col] = OrcProto.BloomFilterIndex.ParseFrom(InStream.create(
                        null, "bloom_filter",
                        new List<DiskRange> { new RecordReaderImpl.BufferChunk(bb, 0) },
                        (long)nextStream.Length, codec, bufferSize));
                }
            }
        }
        offset += len;
    }
    RecordReaderImpl.Index index = new RecordReaderImpl.Index(indexes, bloomFilterIndices);
    return index;
}