Use of io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT in project hetu-core by openlookeng.
From class TestHiveFileFormats, method testSequenceFile.
@Test(dataProvider = "rowCount")
public void testSequenceFile(int rowCount)
        throws Exception
{
    // every test column except the null-key complex map case
    List<TestColumn> testColumns = TEST_COLUMNS.stream()
            .filter(column -> !column.getName().equals("t_map_null_key_complex_key_value"))
            .collect(toList());

    assertThatFileFormat(SEQUENCEFILE)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .isReadableByRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
}
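All five snippets below chain the same fluent helper. A minimal sketch of that builder pattern, assuming the test class's own types (HiveStorageFormat, TestColumn, HiveRecordCursorProvider) and eliding the round-trip body, might look like:

private static FileFormatAssertion assertThatFileFormat(HiveStorageFormat format)
{
    return new FileFormatAssertion(format);
}

// Sketch only, not the real implementation: the actual helper writes
// `rowCount` rows of `columns` to a temp file in `format`, then reads
// them back through the given provider and compares the values.
private static class FileFormatAssertion
{
    private final HiveStorageFormat format;
    private List<TestColumn> columns = ImmutableList.of();
    private int rowCount;

    private FileFormatAssertion(HiveStorageFormat format)
    {
        this.format = requireNonNull(format, "format is null");
    }

    public FileFormatAssertion withColumns(List<TestColumn> columns)
    {
        this.columns = ImmutableList.copyOf(columns);
        return this;
    }

    public FileFormatAssertion withRowsCount(int rowCount)
    {
        this.rowCount = rowCount;
        return this;
    }

    public FileFormatAssertion isReadableByRecordCursor(HiveRecordCursorProvider provider)
            throws Exception
    {
        // round trip: write, read back through the provider, assert equality
        return this;
    }
}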
Use of io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT in project hetu-core by openlookeng.
From class TestHiveFileFormats, method testRCText.
@Test(dataProvider = "rowCount")
public void testRCText(int rowCount)
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        // RC file does not support complex type as key of a map
        return !testColumn.getName().equals("t_struct_null")
                && !testColumn.getName().equals("t_map_null_key_complex_key_value");
    }));

    assertThatFileFormat(RCTEXT)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .isReadableByRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
}
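This is the only snippet here that filters with Guava's Iterables.filter plus ImmutableList.copyOf; the other tests use the Stream API. A hypothetical stream-based equivalent of the same filter:

// hypothetical stream-based equivalent of the Guava filter above
List<TestColumn> testColumns = TEST_COLUMNS.stream()
        .filter(testColumn -> !testColumn.getName().equals("t_struct_null"))
        .filter(testColumn -> !testColumn.getName().equals("t_map_null_key_complex_key_value"))
        .collect(toImmutableList());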
Use of io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT in project hetu-core by openlookeng.
From class TestHiveFileFormats, method testCsvFile.
@Test(dataProvider = "rowCount")
public void testCsvFile(int rowCount)
        throws Exception
{
    // CSV supports only string data columns, so keep partition keys
    // (read from metadata, not the file) plus string columns whose
    // test values are non-null
    List<TestColumn> testColumns = TEST_COLUMNS.stream()
            .filter(column -> column.isPartitionKey()
                    || ("string".equals(column.getType()) && !column.getName().contains("_null_")))
            .collect(toImmutableList());
    assertTrue(testColumns.size() > 5);

    assertThatFileFormat(CSV)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .isReadableByRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
}
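The CSV predicate mixes two concerns, partition keys and data columns. A hypothetical helper (isCsvCompatible is an assumed name, not a method in the real test) that names the predicate for readability:

// hypothetical extraction, purely for readability
private static boolean isCsvCompatible(TestColumn column)
{
    // partition keys come from metadata rather than the CSV file, so any
    // type works; data columns must be strings with non-null test values
    return column.isPartitionKey()
            || ("string".equals(column.getType()) && !column.getName().contains("_null_"));
}

The filter would then read as TEST_COLUMNS.stream().filter(TestHiveFileFormats::isCsvCompatible).collect(toImmutableList()).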
Use of io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT in project hetu-core by openlookeng.
From class TestHiveFileFormats, method testJson.
@Test(dataProvider = "rowCount")
public void testJson(int rowCount)
        throws Exception
{
    List<TestColumn> testColumns = TEST_COLUMNS.stream()
            // skip binary
            .filter(column -> !column.getName().equals("t_binary"))
            // skip maps with numeric keys
            .filter(column -> !column.getName().equals("t_map_tinyint"))
            .filter(column -> !column.getName().equals("t_map_smallint"))
            .filter(column -> !column.getName().equals("t_map_int"))
            .filter(column -> !column.getName().equals("t_map_bigint"))
            .filter(column -> !column.getName().equals("t_map_float"))
            .filter(column -> !column.getName().equals("t_map_double"))
            // skip null-map-key cases
            .filter(TestHiveFileFormats::withoutNullMapKeyTests)
            // skip decimal(38) cases
            .filter(column -> !column.getName().equals("t_decimal_precision_38"))
            .filter(column -> !column.getName().equals("t_map_decimal_precision_38"))
            .filter(column -> !column.getName().equals("t_array_decimal_precision_38"))
            .collect(toList());

    assertThatFileFormat(JSON)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .isReadableByRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
}
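The per-name filter chain can be collapsed into one set-based exclusion. A hypothetical equivalent rewrite (JSON_EXCLUDED_COLUMNS is an assumed name, not part of the original test):

// hypothetical equivalent of the filter chain above
private static final Set<String> JSON_EXCLUDED_COLUMNS = ImmutableSet.of(
        "t_binary",
        "t_map_tinyint", "t_map_smallint", "t_map_int", "t_map_bigint",
        "t_map_float", "t_map_double",
        "t_decimal_precision_38", "t_map_decimal_precision_38", "t_array_decimal_precision_38");

List<TestColumn> testColumns = TEST_COLUMNS.stream()
        .filter(column -> !JSON_EXCLUDED_COLUMNS.contains(column.getName()))
        .filter(TestHiveFileFormats::withoutNullMapKeyTests)
        .collect(toList());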
Use of io.prestosql.plugin.hive.HiveTestUtils.HDFS_ENVIRONMENT in project boostkit-bigdata by kunpengcompute.
From class TestHiveFileFormats, method testSequenceFile (identical to the hetu-core version above).
@Test(dataProvider = "rowCount")
public void testSequenceFile(int rowCount)
        throws Exception
{
    // every test column except the null-key complex map case
    List<TestColumn> testColumns = TEST_COLUMNS.stream()
            .filter(column -> !column.getName().equals("t_map_null_key_complex_key_value"))
            .collect(toList());

    assertThatFileFormat(SEQUENCEFILE)
            .withColumns(testColumns)
            .withRowsCount(rowCount)
            .isReadableByRecordCursor(new GenericHiveRecordCursorProvider(HDFS_ENVIRONMENT));
}