Use of org.apache.flink.shaded.guava30.com.google.common.io.Files in project presto by prestodb.
The class TestHivePageSink, method writeTestFile:
private static long writeTestFile(HiveClientConfig config, MetastoreClientConfig metastoreClientConfig, ExtendedHiveMetastore metastore, String outputPath) {
    HiveTransactionHandle transaction = new HiveTransactionHandle();
    HiveWriterStats stats = new HiveWriterStats();
    ConnectorPageSink pageSink = createPageSink(transaction, config, metastoreClientConfig, metastore, new Path("file:///" + outputPath), stats);
    List<LineItemColumn> columns = getTestColumns();
    List<Type> columnTypes = columns.stream()
            .map(LineItemColumn::getType)
            .map(TestHivePageSink::getHiveType)
            .map(hiveType -> hiveType.getType(FUNCTION_AND_TYPE_MANAGER))
            .collect(toList());
    // Generate TPC-H lineitem rows and append them column by column into a single page.
    PageBuilder pageBuilder = new PageBuilder(columnTypes);
    int rows = 0;
    for (LineItem lineItem : new LineItemGenerator(0.01, 1, 1)) {
        rows++;
        if (rows >= NUM_ROWS) {
            break;
        }
        pageBuilder.declarePosition();
        for (int i = 0; i < columns.size(); i++) {
            LineItemColumn column = columns.get(i);
            BlockBuilder blockBuilder = pageBuilder.getBlockBuilder(i);
            switch (column.getType().getBase()) {
                case IDENTIFIER:
                    BIGINT.writeLong(blockBuilder, column.getIdentifier(lineItem));
                    break;
                case INTEGER:
                    INTEGER.writeLong(blockBuilder, column.getInteger(lineItem));
                    break;
                case DATE:
                    DATE.writeLong(blockBuilder, column.getDate(lineItem));
                    break;
                case DOUBLE:
                    DOUBLE.writeDouble(blockBuilder, column.getDouble(lineItem));
                    break;
                case VARCHAR:
                    createUnboundedVarcharType().writeSlice(blockBuilder, Slices.utf8Slice(column.getString(lineItem)));
                    break;
                default:
                    throw new IllegalArgumentException("Unsupported type " + column.getType());
            }
        }
    }
    // Write the page through the sink and wait for the write to complete.
    Page page = pageBuilder.build();
    pageSink.appendPage(page);
    getFutureValue(pageSink.finish());
    // Locate the single data file the sink produced, ignoring CRC checksum files.
    File outputDir = new File(outputPath);
    List<File> files = ImmutableList.copyOf(outputDir.listFiles((dir, name) -> !name.endsWith(".crc")));
    File outputFile = getOnlyElement(files);
    long length = outputFile.length();
    // Read the file back and collect all pages for verification.
    ConnectorPageSource pageSource = createPageSource(transaction, config, metastoreClientConfig, outputFile);
    List<Page> pages = new ArrayList<>();
    while (!pageSource.isFinished()) {
        Page nextPage = pageSource.getNextPage();
        if (nextPage != null) {
            pages.add(nextPage.getLoadedPage());
        }
    }
    // The round-tripped data must match what was written, and the writer stats must reflect the page size.
    MaterializedResult expectedResults = toMaterializedResult(getSession(config), columnTypes, ImmutableList.of(page));
    MaterializedResult results = toMaterializedResult(getSession(config), columnTypes, pages);
    assertEquals(results, expectedResults);
    assertEquals(stats.getInputPageSizeInBytes().getAllTime().getMax(), page.getRetainedSizeInBytes());
    return length;
}
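The Guava Files usage named above sits in the calling test rather than in writeTestFile itself: the outputPath is typically a temporary directory created with Files.createTempDir(). A minimal sketch of such a caller, where the config and metastore arguments are assumed to come from the surrounding test fixture and deleteRecursively is a hypothetical cleanup helper:

    File tempDir = Files.createTempDir(); // Guava's Files creates a unique temporary directory
    try {
        // writeTestFile is the method above; config, metastoreClientConfig and
        // metastore are assumed to be provided by the test fixture.
        long length = writeTestFile(config, metastoreClientConfig, metastore, tempDir.getAbsolutePath());
        assertTrue(length > 0); // the written file should be non-empty
    }
    finally {
        deleteRecursively(tempDir); // hypothetical cleanup helper
    }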
Use of org.apache.flink.shaded.guava30.com.google.common.io.Files in project incubator-gobblin by apache.
The class FsSpecProducerTest, method testAddSpec:
@Test
public void testAddSpec() throws URISyntaxException, ExecutionException, InterruptedException, IOException {
    this._fsSpecProducer.addSpec(createTestJobSpec());
    // Add a random file (with a non-avro extension) to the folder watched by the consumer; it should not be picked up.
    File randomFile = new File(workDir, "random");
    Assert.assertTrue(randomFile.createNewFile());
    randomFile.deleteOnExit();
    // Only the spec added by the producer should be returned.
    List<Pair<SpecExecutor.Verb, Spec>> jobSpecs = this._fsSpecConsumer.changedSpecs().get();
    Assert.assertEquals(jobSpecs.size(), 1);
    Assert.assertEquals(jobSpecs.get(0).getLeft(), SpecExecutor.Verb.ADD);
    Assert.assertEquals(jobSpecs.get(0).getRight().getUri().toString(), "testJob");
    Assert.assertEquals(((JobSpec) jobSpecs.get(0).getRight()).getConfig().getString("key1"), "val1");
    Assert.assertEquals(((JobSpec) jobSpecs.get(0).getRight()).getConfig().getString("key2"), "val2");
    Assert.assertEquals(((JobSpec) jobSpecs.get(0).getRight()).getConfig().getString("key3.1" + ConfigUtils.STRIP_SUFFIX), "val3");
    Assert.assertEquals(((JobSpec) jobSpecs.get(0).getRight()).getConfig().getString("key3.1.1"), "val4");
    jobSpecs.clear();
    // Specs in other .avro files previously added by the producer should still be found alongside the new one.
    this._fsSpecProducer.addSpec(createTestJobSpec("newTestJob"));
    jobSpecs = this._fsSpecConsumer.changedSpecs().get();
    Assert.assertEquals(jobSpecs.size(), 2);
    Assert.assertEquals(jobSpecs.get(0).getLeft(), SpecExecutor.Verb.ADD);
    Assert.assertEquals(jobSpecs.get(1).getLeft(), SpecExecutor.Verb.ADD);
    List<String> uriList = jobSpecs.stream().map(s -> s.getRight().getUri().toString()).collect(Collectors.toList());
    Assert.assertTrue(uriList.contains("testJob"));
    Assert.assertTrue(uriList.contains("newTestJob"));
}
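As in the previous example, the Guava Files call lives in the test setup rather than in testAddSpec: the workDir that both producer and consumer point at is a temporary directory. A plausible sketch of that wiring, assuming FsSpecProducer and FsSpecConsumer accept a Typesafe Config and noting that the config key name below is illustrative, not necessarily the real one:

    private File workDir;
    private FsSpecProducer _fsSpecProducer;
    private FsSpecConsumer _fsSpecConsumer;

    @BeforeMethod
    public void setUp() {
        // Guava's Files creates the directory watched by the consumer.
        this.workDir = Files.createTempDir();
        this.workDir.deleteOnExit();
        // "specConsumer.path" is an illustrative key name, not necessarily the real one.
        Config config = ConfigFactory.empty()
                .withValue("specConsumer.path", ConfigValueFactory.fromAnyRef(this.workDir.getAbsolutePath()));
        this._fsSpecProducer = new FsSpecProducer(config);
        this._fsSpecConsumer = new FsSpecConsumer(config);
    }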