// Build the expected per-field metadata from the test properties (used later for verification).
populateFieldInfoMap(props);
// NOTE(review): file generation is disabled — presumably the Parquet file already exists at
// fileName from a prior run; confirm before relying on this test in a clean environment.
//generateParquetFile(fileName, props);
Configuration dfsConfig = new Configuration();
// Read the Parquet footer metadata up front; for a single file this yields one Footer,
// which is taken from the iterator below and reused across every read pass.
List<Footer> footers = ParquetFileReader.readFooters(dfsConfig, new Path(fileName));
Footer f = footers.iterator().next();
// Columns to project. All paths are nested under "_MAP" — presumably the synthetic
// top-level map the reader places columns into; verify against the reader's schema handling.
List<SchemaPath> columns = Lists.newArrayList();
columns.add(new SchemaPath("_MAP.integer", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.bigInt", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.f", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.d", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.b", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.bin", ExpressionPosition.UNKNOWN));
columns.add(new SchemaPath("_MAP.bin2", ExpressionPosition.UNKNOWN));
// Accumulates rows across iterations (updated beyond this visible chunk).
int totalRowCount = 0;
// Single-file FileSystem wrapper — name suggests it serves the file from a cached copy so
// the 25 passes below don't re-hit real storage; TODO confirm its caching semantics.
FileSystem fs = new CachedSingleFileSystem(fileName);
BufferAllocator allocator = new TopLevelAllocator();
// Repeat the full read 25 times, presumably to warm up and get stable timing numbers.
for(int i = 0; i < 25; i++) {
// Fresh reader per pass. NOTE(review): 256000 looks like a batch/record-count limit and 0 a
// row-group/start index — confirm against the ParquetRecordReader constructor signature.
ParquetRecordReader rr = new ParquetRecordReader(context, 256000, fileName, 0, fs,
new CodecFactoryExposer(dfsConfig), f.getParquetMetadata(), columns);
// Test-only mutator that collects the reader's output vectors via the shared allocator.
TestOutputMutator mutator = new TestOutputMutator(allocator);
rr.setup(mutator);
// Time this pass. NOTE(review): `new Stopwatch()` is the deprecated Guava constructor;
// newer Guava requires Stopwatch.createStarted()/createUnstarted() — worth migrating.
Stopwatch watch = new Stopwatch();
watch.start();