Use of io.prestosql.spi.type.VarcharType.VARCHAR in project hetu-core by openlookeng.
The class TestExpressionRewriteRuleSet, method testAggregationExpressionRewrite.
@Test
public void testAggregationExpressionRewrite() {
    ExpressionRewriteRuleSet functionCallRewriter = new ExpressionRewriteRuleSet((expression, context) -> new FunctionCallBuilder(tester().getMetadata())
            .setName(QualifiedName.of("count"))
            .addArgument(VARCHAR, new SymbolReference("y"))
            .build());
    tester().assertThat(functionCallRewriter.aggregationExpressionRewrite())
            .on(p -> p.aggregation(a -> a.globalGrouping()
                    .addAggregation(
                            p.symbol("count_1", BigintType.BIGINT),
                            new FunctionCallBuilder(tester().getMetadata())
                                    .setName(QualifiedName.of("count"))
                                    .addArgument(VARCHAR, new SymbolReference("x"))
                                    .build(),
                            ImmutableList.of(BigintType.BIGINT))
                    .source(p.values(p.symbol("x"), p.symbol("y")))))
            .matches(PlanMatchPattern.aggregation(
                    ImmutableMap.of("count_1", aliases -> new FunctionCallBuilder(tester().getMetadata())
                            .setName(QualifiedName.of("count"))
                            .addArgument(VARCHAR, new SymbolReference("y"))
                            .build()),
                    values("x", "y")));
}
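The rule under test replaces every aggregation call with count(y), regardless of the original call. A minimal plain-Java sketch of that idea, using hypothetical stand-in types (Expr, Call) rather than the hetu-core planner API:

// Minimal sketch, not the hetu-core API: an expression rewrite is just a function from
// (expression, context) to a replacement expression; here every call becomes count(y).
import java.util.function.BiFunction;

public class RewriteSketch {
    interface Expr {}

    record Call(String name, String arg) implements Expr {
        @Override
        public String toString() {
            return name + "(" + arg + ")";
        }
    }

    public static void main(String[] args) {
        BiFunction<Expr, Object, Expr> rewriter = (expression, context) -> new Call("count", "y");
        Expr original = new Call("count", "x");
        System.out.println(original + " -> " + rewriter.apply(original, null));  // count(x) -> count(y)
    }
}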
Use of io.prestosql.spi.type.VarcharType.VARCHAR in project hetu-core by openlookeng.
The class TestHashJoinOperator, method testProbeOuterJoinWithFilterFunction.
@Test(dataProvider = "hashJoinTestValues")
public void testProbeOuterJoinWithFilterFunction(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled) {
    TaskContext taskContext = createTaskContext();
    InternalJoinFilterFunction filterFunction = new TestInternalJoinFilterFunction(
            (leftPosition, leftPage, rightPosition, rightPage) -> BIGINT.getLong(rightPage.getBlock(1), rightPosition) >= 1025);
    // build factory
    List<Type> buildTypes = ImmutableList.of(VARCHAR, BIGINT, BIGINT);
    RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
            .addSequencePage(10, 20, 30, 40);
    BuildSideSetup buildSideSetup = setupBuildSide(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.of(filterFunction), false, SINGLE_STREAM_SPILLER_FACTORY);
    JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactory = buildSideSetup.getLookupSourceFactoryManager();
    // probe factory
    List<Type> probeTypes = ImmutableList.of(VARCHAR, BIGINT, BIGINT);
    RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = probePages.addSequencePage(15, 20, 1020, 2020).build();
    OperatorFactory joinOperatorFactory = probeOuterJoinOperatorFactory(lookupSourceFactory, probePages);
    // build drivers and operators
    instantiateBuildDrivers(buildSideSetup, taskContext);
    buildLookupSource(buildSideSetup);
    // expected
    MaterializedResult expected = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildTypes))
            .row("20", 1020L, 2020L, null, null, null)
            .row("21", 1021L, 2021L, null, null, null)
            .row("22", 1022L, 2022L, null, null, null)
            .row("23", 1023L, 2023L, null, null, null)
            .row("24", 1024L, 2024L, null, null, null)
            .row("25", 1025L, 2025L, "25", 35L, 45L)
            .row("26", 1026L, 2026L, "26", 36L, 46L)
            .row("27", 1027L, 2027L, "27", 37L, 47L)
            .row("28", 1028L, 2028L, "28", 38L, 48L)
            .row("29", 1029L, 2029L, "29", 39L, 49L)
            .row("30", 1030L, 2030L, null, null, null)
            .row("31", 1031L, 2031L, null, null, null)
            .row("32", 1032L, 2032L, null, null, null)
            .row("33", 1033L, 2033L, null, null, null)
            .row("34", 1034L, 2034L, null, null, null)
            .build();
    assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected, true, getHashChannels(probePages, buildPages));
}
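A rough cross-check of the expectation above, in plain Java with no Presto APIs. Reading the test data (the column roles are inferred from the sequence pages, not from operator internals): the build side holds keys 20..29 with value columns key+10 and key+20, the probe side holds keys 20..34, and the filter only accepts rows whose second probe column is at least 1025, so only keys 25..29 join and the rest pad with nulls.

// Hedged sketch of the expected probe-outer-join output shown above.
public class ProbeOuterJoinSketch {
    public static void main(String[] args) {
        for (int i = 0; i < 15; i++) {
            long key = 20 + i;
            long probeCol1 = 1020 + i;
            long probeCol2 = 2020 + i;
            boolean buildHasKey = key <= 29;              // build keys are 20..29
            boolean passesFilter = probeCol1 >= 1025;     // filter threshold from the test
            if (buildHasKey && passesFilter) {
                System.out.println(key + ", " + probeCol1 + ", " + probeCol2 + ", " + key + ", " + (key + 10) + ", " + (key + 20));
            }
            else {
                System.out.println(key + ", " + probeCol1 + ", " + probeCol2 + ", null, null, null");
            }
        }
    }
}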
Use of io.prestosql.spi.type.VarcharType.VARCHAR in project hetu-core by openlookeng.
The class TestHashJoinOperator, method testOuterJoinSnapshot.
@Test(dataProvider = "hashJoinTestValues")
public void testOuterJoinSnapshot(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled) throws ExecutionException, InterruptedException {
    TaskContext taskContext = createSnapshotTaskContext();
    // build factory
    List<Type> buildTypes = ImmutableList.of(VARCHAR, BIGINT, BIGINT);
    RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), ImmutableList.of(VARCHAR, BIGINT, BIGINT))
            .addSequencePage(10, 20, 30, 40);
    BuildSideSetup buildSideSetup = setupBuildSide(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.empty(), false, SINGLE_STREAM_SPILLER_FACTORY, true);
    JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactory = buildSideSetup.getLookupSourceFactoryManager();
    // probe factory
    List<Type> probeTypes = ImmutableList.of(VARCHAR, BIGINT, BIGINT);
    RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = probePages.addSequencePage(10, 15, 1020, 2020).build();
    // Add markers between pages
    List<Page> withMarkers = new ArrayList<>();
    long snapshotId = 1;
    for (Page page : probeInput) {
        withMarkers.add(page);
        withMarkers.add(MarkerPage.snapshotPage(snapshotId++));
    }
    probeInput = withMarkers;
    OperatorFactory joinOperatorFactory = new LookupJoinOperators().fullOuterJoin(
            0,
            new PlanNodeId("test"),
            lookupSourceFactory,
            probePages.getTypes(),
            Ints.asList(0),
            getHashChannelAsInt(probePages),
            Optional.empty(),
            OptionalInt.of(1),
            PARTITIONING_SPILLER_FACTORY);
    // build drivers and operators
    instantiateBuildDrivers(buildSideSetup, taskContext);
    buildLookupSource(buildSideSetup);
    // Construct lookup-outer operator
    PipelineContext buildPipeline = taskContext.addPipelineContext(2, false, true, false);
    DriverContext outerDriverContext = buildPipeline.addDriverContext(Lifespan.taskWide(), 0);
    Operator lookupOuter = ((LookupJoinOperatorFactory) joinOperatorFactory).createOuterOperatorFactory().get().getOuterOperatorFactory().createOperator(outerDriverContext);
    assertFalse(lookupOuter.isBlocked().isDone());
    // expected
    MaterializedResult expected = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildTypes))
            .row("15", 1020L, 2020L, null, null, null)
            .row("16", 1021L, 2021L, null, null, null)
            .row("17", 1022L, 2022L, null, null, null)
            .row("18", 1023L, 2023L, null, null, null)
            .row("19", 1024L, 2024L, null, null, null)
            .row("20", 1025L, 2025L, "20", 30L, 40L)
            .row("21", 1026L, 2026L, "21", 31L, 41L)
            .row("22", 1027L, 2027L, "22", 32L, 42L)
            .row("23", 1028L, 2028L, "23", 33L, 43L)
            .row("24", 1029L, 2029L, "24", 34L, 44L)
            .build();
    List<Integer> hashChannels = getHashChannels(probePages, buildPages);
    assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected, true, hashChannels);
    assertTrue(lookupOuter.isBlocked().isDone());
    assertTrue(lookupOuter.getOutput() instanceof MarkerPage);
    Object state = lookupOuter.capture(null);
    lookupOuter.isBlocked().get();
    Page page = lookupOuter.getOutput();
    page = dropChannel(ImmutableList.of(page), hashChannels).get(0);
    MaterializedResult outerExpected = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildTypes))
            .row(null, null, null, "25", 35L, 45L)
            .row(null, null, null, "26", 36L, 46L)
            .row(null, null, null, "27", 37L, 47L)
            .row(null, null, null, "28", 38L, 48L)
            .row(null, null, null, "29", 39L, 49L)
            .build();
    List<MaterializedRow> rows = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildTypes))
            .page(page)
            .build()
            .getMaterializedRows();
    rows = new ArrayList<>(rows);
    rows.sort(Comparator.comparing(a -> (String) a.getField(3)));
    MaterializedResult outerResult = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildTypes))
            .rows(rows)
            .build();
    assertEquals(outerResult, outerExpected);
    long matched;
    if (state instanceof boolean[]) {
        boolean[] positions = (boolean[]) state;
        matched = Booleans.asList(positions).stream().filter(e -> e).count();
    }
    else {
        ByteBuffer bb = ByteBuffer.wrap((byte[]) state);
        List<RoaringBitmap> visitedPositions = new ArrayList<>();
        for (int i = 0; i < (parallelBuild ? PARTITION_COUNT : 1); i++) {
            ImmutableRoaringBitmap bm = new ImmutableRoaringBitmap(bb);
            visitedPositions.add(new RoaringBitmap(bm));
            bb.position(bb.position() + visitedPositions.get(i).serializedSizeInBytes());
        }
        matched = visitedPositions.stream().mapToLong(rr -> rr.getCardinality()).sum();
    }
    assertEquals(matched, 5);
}
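The captured state records which build positions the probe already visited, which is what lets the lookup-outer operator emit only still-unmatched build rows after a snapshot. A rough plain-Java sketch of the count being asserted, assuming the boolean[] form of the state and taking the key ranges from the test data (probe keys 15..24, build keys 20..29):

// Hedged sketch: probe keys 15..24 against build keys 20..29 mark five build positions as visited.
public class MatchedCountSketch {
    public static void main(String[] args) {
        boolean[] visited = new boolean[10];      // one flag per build row, keys 20..29
        for (int probeKey = 15; probeKey <= 24; probeKey++) {
            if (probeKey >= 20) {                 // only probe keys 20..24 exist on the build side
                visited[probeKey - 20] = true;
            }
        }
        long matched = 0;
        for (boolean b : visited) {
            if (b) {
                matched++;
            }
        }
        System.out.println(matched);              // 5, matching assertEquals(matched, 5)
    }
}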
Use of io.prestosql.spi.type.VarcharType.VARCHAR in project hetu-core by openlookeng.
The class TestHashJoinOperator, method testOuterJoinWithNullProbeAndFilterFunction.
@Test(dataProvider = "hashJoinTestValues")
public void testOuterJoinWithNullProbeAndFilterFunction(boolean parallelBuild, boolean probeHashEnabled, boolean buildHashEnabled) {
    TaskContext taskContext = createTaskContext();
    InternalJoinFilterFunction filterFunction = new TestInternalJoinFilterFunction(
            (leftPosition, leftPage, rightPosition, rightPage) -> VARCHAR.getSlice(rightPage.getBlock(0), rightPosition).toStringAscii().equals("a"));
    // build factory
    List<Type> buildTypes = ImmutableList.of(VARCHAR);
    RowPagesBuilder buildPages = rowPagesBuilder(buildHashEnabled, Ints.asList(0), buildTypes)
            .row("a")
            .row("b")
            .row("c");
    BuildSideSetup buildSideSetup = setupBuildSide(parallelBuild, taskContext, Ints.asList(0), buildPages, Optional.of(filterFunction), false, SINGLE_STREAM_SPILLER_FACTORY);
    JoinBridgeManager<PartitionedLookupSourceFactory> lookupSourceFactory = buildSideSetup.getLookupSourceFactoryManager();
    // probe factory
    List<Type> probeTypes = ImmutableList.of(VARCHAR);
    RowPagesBuilder probePages = rowPagesBuilder(probeHashEnabled, Ints.asList(0), probeTypes);
    List<Page> probeInput = probePages
            .row("a")
            .row((String) null)
            .row((String) null)
            .row("a")
            .row("b")
            .build();
    OperatorFactory joinOperatorFactory = probeOuterJoinOperatorFactory(lookupSourceFactory, probePages);
    // build drivers and operators
    instantiateBuildDrivers(buildSideSetup, taskContext);
    buildLookupSource(buildSideSetup);
    // expected
    MaterializedResult expected = MaterializedResult.resultBuilder(taskContext.getSession(), concat(probeTypes, buildTypes))
            .row("a", "a")
            .row(null, null)
            .row(null, null)
            .row("a", "a")
            .row("b", null)
            .build();
    assertOperatorEquals(joinOperatorFactory, taskContext.addPipelineContext(0, true, true, false).addDriverContext(), probeInput, expected, true, getHashChannels(probePages, buildPages));
}
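A plain-Java sketch of the expectation above, under the assumptions visible in the test data: a null probe key never matches, and the filter only accepts the build value "a", so the probe row "b" falls back to a null build side.

// Hedged sketch of the probe-outer-join semantics in the expected result above.
import java.util.Arrays;
import java.util.List;

public class NullProbeOuterJoinSketch {
    public static void main(String[] args) {
        List<String> probe = Arrays.asList("a", null, null, "a", "b");
        List<String> build = Arrays.asList("a", "b", "c");
        for (String p : probe) {
            // a probe row matches only if its key is non-null, present on the build side,
            // and the filter (build value must be "a") passes
            boolean matched = p != null && build.contains(p) && p.equals("a");
            System.out.println(p + ", " + (matched ? p : null));
        }
    }
}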
Use of io.prestosql.spi.type.VarcharType.VARCHAR in project hetu-core by openlookeng.
The class ElasticsearchMetadata, method toPrestoType.
private Type toPrestoType(IndexMetadata.Field metaDataField, boolean isArray) {
    IndexMetadata.Type type = metaDataField.getType();
    if (isArray) {
        Type elementType = toPrestoType(metaDataField, false);
        return new ArrayType(elementType);
    }
    if (type instanceof PrimitiveType) {
        switch (((PrimitiveType) type).getName()) {
            case "float":
                return REAL;
            case "double":
                return DOUBLE;
            case "byte":
                return TINYINT;
            case "short":
                return SMALLINT;
            case "integer":
                return INTEGER;
            case "long":
                return BIGINT;
            case "string":
            case "text":
            case "keyword":
                return VARCHAR;
            case "ip":
                return ipAddressType;
            case "boolean":
                return BOOLEAN;
            case "binary":
                return VARBINARY;
            default:
                break;
        }
    }
    else if (type instanceof DateTimeType) {
        if (((DateTimeType) type).getFormats().isEmpty()) {
            return TIMESTAMP;
        }
        // otherwise, skip -- we don't support custom formats, yet
    }
    else if (type instanceof ObjectType) {
        ObjectType objectType = (ObjectType) type;
        List<RowType.Field> fields = objectType.getFields().stream()
                .map(field -> RowType.field(field.getName(), toPrestoType(field)))
                .collect(toImmutableList());
        return RowType.from(fields);
    }
    return null;
}
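For reference, the primitive branch above amounts to a fixed name-to-type lookup. A minimal sketch of that mapping as a plain map, with type names as strings rather than the connector's Type objects (the string "IPADDRESS" stands in for the ipAddressType field used in the real method):

// Hedged sketch of the Elasticsearch-to-Presto primitive type mapping above.
import java.util.Map;

public class EsTypeMappingSketch {
    static final Map<String, String> PRIMITIVE_TO_PRESTO = Map.ofEntries(
            Map.entry("float", "REAL"),
            Map.entry("double", "DOUBLE"),
            Map.entry("byte", "TINYINT"),
            Map.entry("short", "SMALLINT"),
            Map.entry("integer", "INTEGER"),
            Map.entry("long", "BIGINT"),
            Map.entry("string", "VARCHAR"),
            Map.entry("text", "VARCHAR"),
            Map.entry("keyword", "VARCHAR"),
            Map.entry("ip", "IPADDRESS"),
            Map.entry("boolean", "BOOLEAN"),
            Map.entry("binary", "VARBINARY"));

    public static void main(String[] args) {
        // unsupported names fall through, mirroring the default: break in the switch above
        System.out.println(PRIMITIVE_TO_PRESTO.getOrDefault("keyword", "unsupported"));  // VARCHAR
    }
}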