不管是 readAllDataStreams 还是 readPartialDataStreams,最后都要经过 createStreams。
createStreams 最后的输出结果是这个 Map<StreamName, InStream> streams
每个 StreamName 对应的内存流
void createStreams(List<OrcProto.Stream> streamDescriptions, DiskRangeList ranges, boolean[] includeColumn, CompressionCodec codec, int bufferSize, Map<StreamName, InStream> streams) throws IOException { long streamOffset = 0; for (OrcProto.Stream streamDesc : streamDescriptions) { int column = streamDesc.getColumn(); if ((includeColumn != null && (column < includeColumn.length && !includeColumn[column])) || streamDesc.hasKind() && (StreamName.getArea(streamDesc.getKind()) != StreamName.Area.DATA)) { streamOffset += streamDesc.getLength(); continue; } //获得流对应的 撕裂开的 bytebuffer List<DiskRange> buffers = RecordReaderUtils.getStreamBuffers( ranges, streamOffset, streamDesc.getLength()); StreamName name = new StreamName(column, streamDesc.getKind()); // 输出的结果 streams.put(name, InStream.create(name.toString(), buffers, streamDesc.getLength(), codec, bufferSize)); streamOffset += streamDesc.getLength(); } } /** * Create an input stream from a list of disk ranges with data. * @param name the name of the stream * @param input the list of ranges of bytes for the stream; from disk or cache * @param length the length in bytes of the stream * @param codec the compression codec * @param bufferSize the compression buffer size * @return an input stream * @throws IOException */ public static InStream create(String name, List<DiskRange> input, long length, CompressionCodec codec, int bufferSize) throws IOException { if (codec == null) { return new UncompressedStream(name, input, length); } else { return new CompressedStream(name, input, length, codec, bufferSize); } }```