Usage of org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS in the prestodb/presto project.
From the class RcFileTester, method assertFileContentsOld.
/**
 * Verifies that an RCFile written to {@code tempFile} contains exactly {@code expectedValues}
 * when read back through the legacy Hive columnar SerDe path.
 *
 * @param type the Presto type of the single test column
 * @param tempFile the RCFile to read
 * @param format selects the SerDe: BINARY uses LazyBinaryColumnarSerDe, otherwise ColumnarSerDe
 * @param expectedValues the expected decoded value for each row, in file order
 * @throws Exception on any reader/SerDe failure or assertion mismatch
 */
private static <K extends LongWritable, V extends BytesRefArrayWritable> void assertFileContentsOld(Type type, TempFile tempFile, Format format, Iterable<?> expectedValues) throws Exception {
    // Project only column 0 instead of reading all columns
    JobConf configuration = new JobConf(new Configuration(false));
    configuration.set(READ_COLUMN_IDS_CONF_STR, "0");
    configuration.setBoolean(READ_ALL_COLUMNS, false);

    // Single-column schema named "test" with the Hive type name for the value under test
    Properties schema = new Properties();
    schema.setProperty(META_TABLE_COLUMNS, "test");
    schema.setProperty(META_TABLE_COLUMN_TYPES, getJavaObjectInspector(type).getTypeName());

    Deserializer deserializer;
    if (format == Format.BINARY) {
        deserializer = new LazyBinaryColumnarSerDe();
    }
    else {
        deserializer = new ColumnarSerDe();
    }
    deserializer.initialize(configuration, schema);
    configuration.set(SERIALIZATION_LIB, deserializer.getClass().getName());

    InputFormat<K, V> inputFormat = new RCFileInputFormat<>();
    RecordReader<K, V> recordReader = inputFormat.getRecordReader(
            new FileSplit(new Path(tempFile.getFile().getAbsolutePath()), 0, tempFile.getFile().length(), (String[]) null),
            configuration,
            NULL);
    try {
        K key = recordReader.createKey();
        V value = recordReader.createValue();
        StructObjectInspector rowInspector = (StructObjectInspector) deserializer.getObjectInspector();
        StructField field = rowInspector.getStructFieldRef("test");
        Iterator<?> iterator = expectedValues.iterator();
        while (recordReader.next(key, value)) {
            // If the file has more rows than expected, iterator.next() throws NoSuchElementException,
            // which fails the test (though with a less descriptive message than an assertion)
            Object expectedValue = iterator.next();
            Object rowData = deserializer.deserialize(value);
            Object actualValue = rowInspector.getStructFieldData(rowData, field);
            actualValue = decodeRecordReaderValue(type, actualValue);
            assertColumnValueEquals(type, actualValue, expectedValue);
        }
        // The file must not contain fewer rows than expected
        assertFalse(iterator.hasNext());
    }
    finally {
        // The original leaked the reader; close it even when an assertion fails
        recordReader.close();
    }
}
Usage of org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS in the prestodb/presto project.
From the class OrcTester, method assertFileContentsDwrfHive.
/**
 * Verifies that a DWRF file written to {@code tempFile} contains exactly {@code expectedValues}
 * when read back through the Facebook Hive ORC (DWRF) reader.
 *
 * @param types the Presto type of each column, in column order
 * @param tempFile the DWRF file to read
 * @param expectedValues one list per column; each list holds that column's expected values in row order
 * @throws Exception on any reader failure or assertion mismatch
 */
private static void assertFileContentsDwrfHive(List<Type> types, TempFile tempFile, List<List<?>> expectedValues) throws Exception {
    // Project only column 0 instead of reading all columns
    JobConf configuration = new JobConf(new Configuration(false));
    configuration.set(READ_COLUMN_IDS_CONF_STR, "0");
    configuration.setBoolean(READ_ALL_COLUMNS, false);

    Path path = new Path(tempFile.getFile().getAbsolutePath());
    com.facebook.hive.orc.Reader reader = com.facebook.hive.orc.OrcFile.createReader(path.getFileSystem(configuration), path, configuration);

    // Include every type in the file; the array is heavily over-sized (+100000) — presumably
    // to tolerate readers that index past the declared type count. TODO confirm why this slack is needed.
    boolean[] include = new boolean[reader.getTypes().size() + 100000];
    Arrays.fill(include, true);

    com.facebook.hive.orc.RecordReader recordReader = reader.rows(include);
    try {
        StructObjectInspector rowInspector = (StructObjectInspector) reader.getObjectInspector();
        List<StructField> fields = makeColumnNames(types.size()).stream()
                .map(rowInspector::getStructFieldRef)
                .collect(toList());

        // Reuse the same row object across iterations, as the reader API allows
        Object rowData = null;
        int rowCount = 0;
        while (recordReader.hasNext()) {
            rowData = recordReader.next(rowData);
            for (int i = 0; i < fields.size(); i++) {
                Object actualValue = rowInspector.getStructFieldData(rowData, fields.get(i));
                actualValue = decodeRecordReaderValue(types.get(i), actualValue);
                assertColumnValueEquals(types.get(i), actualValue, expectedValues.get(i).get(rowCount));
            }
            rowCount++;
        }
        // Row count must match the expected values (all columns have the same length; check column 0)
        assertEquals(rowCount, expectedValues.get(0).size());
    }
    finally {
        // The original leaked the reader; close it even when an assertion fails
        recordReader.close();
    }
}
Usage of org.apache.hadoop.hive.serde2.ColumnProjectionUtils.READ_ALL_COLUMNS in the prestodb/presto project.
From the class OrcTester, method assertFileContentsOrcHive.
/**
 * Verifies that an ORC file written to {@code tempFile} contains exactly {@code expectedValues}
 * when read back through the Apache Hive ORC reader.
 *
 * @param types the Presto type of each column, in column order
 * @param tempFile the ORC file to read
 * @param expectedValues one list per column; each list holds that column's expected values in row order
 * @throws Exception on any reader failure or assertion mismatch
 */
private static void assertFileContentsOrcHive(List<Type> types, TempFile tempFile, List<List<?>> expectedValues) throws Exception {
    // Project only column 0 instead of reading all columns
    JobConf configuration = new JobConf(new Configuration(false));
    configuration.set(READ_COLUMN_IDS_CONF_STR, "0");
    configuration.setBoolean(READ_ALL_COLUMNS, false);

    Reader reader = OrcFile.createReader(new Path(tempFile.getFile().getAbsolutePath()), new ReaderOptions(configuration));
    org.apache.hadoop.hive.ql.io.orc.RecordReader recordReader = reader.rows();
    try {
        StructObjectInspector rowInspector = (StructObjectInspector) reader.getObjectInspector();
        List<StructField> fields = makeColumnNames(types.size()).stream()
                .map(rowInspector::getStructFieldRef)
                .collect(toList());

        // Reuse the same row object across iterations, as the reader API allows
        Object rowData = null;
        int rowCount = 0;
        while (recordReader.hasNext()) {
            rowData = recordReader.next(rowData);
            for (int i = 0; i < fields.size(); i++) {
                Object actualValue = rowInspector.getStructFieldData(rowData, fields.get(i));
                actualValue = decodeRecordReaderValue(types.get(i), actualValue);
                assertColumnValueEquals(types.get(i), actualValue, expectedValues.get(i).get(rowCount));
            }
            rowCount++;
        }
        // Row count must match the expected values (all columns have the same length; check column 0)
        assertEquals(rowCount, expectedValues.get(0).size());
    }
    finally {
        // The original leaked the reader; close it even when an assertion fails
        recordReader.close();
    }
}
Aggregations