Use of io.prestosql.plugin.hive.HiveCompressionCodec.NONE in project boostkit-bigdata by kunpengcompute.
From the class TestHiveIntegrationSmokeTest, method createAndGetSplits. The helper fabricates a set of engine-level Splits backed by synthetic ORC HiveSplits, which the smoke test can then feed to split-handling code.
private Set<Split> createAndGetSplits(long start)
{
    // Configure ORC storage with no compression
    HiveConfig config = new HiveConfig();
    config.setHiveStorageFormat(HiveStorageFormat.ORC);
    config.setHiveCompressionCodec(NONE);

    // Describe the input format, serde, and column layout of the synthetic splits
    Properties splitProperties = new Properties();
    splitProperties.setProperty(FILE_INPUT_FORMAT, config.getHiveStorageFormat().getInputFormat());
    splitProperties.setProperty(SERIALIZATION_LIB, config.getHiveStorageFormat().getSerDe());
    splitProperties.setProperty("columns", Joiner.on(',').join(TestHivePageSink.getColumnHandles().stream()
            .map(HiveColumnHandle::getName)
            .collect(toList())));
    splitProperties.setProperty("columns.types", Joiner.on(',').join(TestHivePageSink.getColumnHandles().stream()
            .map(HiveColumnHandle::getHiveType)
            .map(hiveType -> hiveType.getHiveTypeName().toString())
            .collect(toList())));

    // Build three HiveSplitWrappers (j = start, start + 10, start + 20),
    // each wrapping three HiveSplits with distinct offsets and lengths
    List<ConnectorSplit> connectorSplits = new ArrayList<>();
    for (long j = start; j < start + 30; j += 10) {
        List<HiveSplit> hiveSplitList = new ArrayList<>();
        for (int i = 0; i < 3; i++) {
            HiveSplit hiveSplit = new HiveSplit(TEST_SCHEMA, TEST_TABLE, "", "file:///",
                    i + j, 100 + i + j, 100 + i + j, 0, splitProperties,
                    ImmutableList.of(), ImmutableList.of(), OptionalInt.empty(), false,
                    ImmutableMap.of(), Optional.empty(), false, Optional.empty(),
                    Optional.empty(), false, ImmutableMap.of());
            hiveSplitList.add(hiveSplit);
        }
        connectorSplits.add(HiveSplitWrapper.wrap(hiveSplitList, OptionalInt.empty()));
    }

    // Wrap each ConnectorSplit in an engine-level Split and collect them into a set
    ImmutableList.Builder<Split> result = ImmutableList.builder();
    for (ConnectorSplit connectorSplit : connectorSplits) {
        result.add(new Split(CONNECTOR_ID, connectorSplit, Lifespan.taskWide()));
    }
    return result.build().stream().collect(Collectors.toSet());
}
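For context, here is a minimal sketch of a test method that could sit in the same class and call the helper. It assumes TestNG (org.testng.annotations.Test and org.testng.Assert.assertEquals), which these projects use for tests; the method name testCreateAndGetSplits and the assertions are illustrative, not taken from either project.

// Hypothetical test method inside TestHiveIntegrationSmokeTest, not from either repository.
@Test
public void testCreateAndGetSplits()
{
    // The outer loop runs for j = start, start + 10, start + 20, and each
    // iteration wraps three HiveSplits into one HiveSplitWrapper, so the
    // helper should yield exactly three engine-level Splits.
    Set<Split> splits = createAndGetSplits(0);
    assertEquals(splits.size(), 3);

    // Splits built from a different start offset cover different byte ranges,
    // so merging the two results should give six distinct entries.
    Set<Split> merged = createAndGetSplits(100);
    merged.addAll(splits);
    assertEquals(merged.size(), 6);
}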
Use of io.prestosql.plugin.hive.HiveCompressionCodec.NONE in project hetu-core by openlookeng.
From the class TestHiveIntegrationSmokeTest, method createAndGetSplits.
The method body is identical, line for line, to the boostkit-bigdata version above.