Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
From the class TestScalarStatsCalculator, method testFunctionCall:
@Test
public void testFunctionCall() {
assertCalculate(new FunctionCallBuilder(metadata)
        .setName(QualifiedName.of("length"))
        .addArgument(createVarcharType(10), new Cast(new NullLiteral(), "VARCHAR(10)"))
        .build())
        .distinctValuesCount(0.0)
        .lowValueUnknown()
        .highValueUnknown()
        .nullsFraction(1.0);
assertCalculate(new FunctionCallBuilder(metadata)
        .setName(QualifiedName.of("length"))
        .addArgument(createVarcharType(2), new SymbolReference("x"))
        .build(),
        PlanNodeStatsEstimate.unknown(),
        TypeProvider.viewOf(ImmutableMap.of(new Symbol("x"), createVarcharType(2))))
        .distinctValuesCountUnknown()
        .lowValueUnknown()
        .highValueUnknown()
        .nullsFractionUnknown();
}
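Across the snippets on this page the pattern is the same: Cast wraps an existing Expression together with the target type given as a SQL type name, usually a string literal such as "VARCHAR(10)", "STRING" or "double". The following is a minimal standalone sketch of that constructor usage, assuming the two-argument Cast(Expression, String) form shown above; it is an illustration, not code from hetu-core.

import io.prestosql.sql.tree.Cast;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.NullLiteral;
import io.prestosql.sql.tree.SymbolReference;

public class CastUsageSketch {
    public static void main(String[] args) {
        // CAST(NULL AS VARCHAR(10)) -- the target type is passed as a type name string
        Expression nullAsVarchar = new Cast(new NullLiteral(), "VARCHAR(10)");
        // CAST(orderkey AS BIGINT) -- casting a symbol reference, as in the cost and stats tests
        Expression orderkeyAsBigint = new Cast(new SymbolReference("orderkey"), "BIGINT");
        System.out.println(nullAsVarchar);
        System.out.println(orderkeyAsBigint);
    }
}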
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
From the class TestCostCalculator, method testProject:
@Test
public void testProject() {
TableScanNode tableScan = tableScan("ts", "orderkey");
PlanNode project = project("project", tableScan, "string", new Cast(new SymbolReference("orderkey"), "STRING"));
Map<String, PlanCostEstimate> costs = ImmutableMap.of("ts", cpuCost(1000));
Map<String, PlanNodeStatsEstimate> stats = ImmutableMap.of(
        "project", statsEstimate(project, 4000),
        "ts", statsEstimate(tableScan, 1000));
Map<String, Type> types = ImmutableMap.of("orderkey", BIGINT, "string", VARCHAR);
assertCost(project, costs, stats, types)
        .cpu(1000 + 4000 * OFFSET_AND_IS_NULL_OVERHEAD)
        .memory(0)
        .network(0);
assertCostEstimatedExchanges(project, costs, stats, types)
        .cpu(1000 + 4000 * OFFSET_AND_IS_NULL_OVERHEAD)
        .memory(0)
        .network(0);
assertCostFragmentedPlan(project, costs, stats, types)
        .cpu(1000 + 4000 * OFFSET_AND_IS_NULL_OVERHEAD)
        .memory(0)
        .network(0);
assertCostHasUnknownComponentsForUnknownStats(project, types);
}
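Each of the three assertions above expects the same cpu figure: the 1000 attached to "ts" in the cost map, plus the 4000 from the "project" entry in the stats map scaled by OFFSET_AND_IS_NULL_OVERHEAD. The standalone restatement of that arithmetic below is only an illustration; the constant's value here is a placeholder, since the real OFFSET_AND_IS_NULL_OVERHEAD is defined in TestCostCalculator and not shown in the snippet.

public class ProjectCostSketch {
    // Placeholder value; the real constant is defined in TestCostCalculator.
    private static final double OFFSET_AND_IS_NULL_OVERHEAD = 0.1;

    public static void main(String[] args) {
        double scanCpu = 1000;               // cpuCost(1000) supplied for "ts" in the cost map
        double projectOutputEstimate = 4000; // statsEstimate(project, 4000) from the stats map
        // The expected cpu in all three assertions: scan cost plus the project's scaled output estimate.
        double expectedCpu = scanCpu + projectOutputEstimate * OFFSET_AND_IS_NULL_OVERHEAD;
        System.out.println(expectedCpu);
    }
}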
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
From the class TestComparisonStatsCalculator, method symbolToCastExpressionNotEqual:
@Test
public void symbolToCastExpressionNotEqual() {
double rowCount = 807.3;
assertCalculate(new ComparisonExpression(NOT_EQUAL, new SymbolReference("u"), new Cast(new SymbolReference("w"), BIGINT)))
        .outputRowsCount(rowCount)
        .symbolStats("u", equalTo(capNDV(zeroNullsFraction(uStats), rowCount)))
        .symbolStats("w", equalTo(capNDV(wStats, rowCount)))
        .symbolStats("z", equalTo(capNDV(zStats, rowCount)));
rowCount = 897.0;
assertCalculate(new ComparisonExpression(NOT_EQUAL, new SymbolReference("u"), new Cast(new LongLiteral("10"), BIGINT)))
        .outputRowsCount(rowCount)
        .symbolStats("u", equalTo(capNDV(updateNDV(zeroNullsFraction(uStats), -1), rowCount)))
        .symbolStats("z", equalTo(capNDV(zStats, rowCount)));
}
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
From the class TestScalarStatsCalculator, method testCastBigintToDouble:
@Test
public void testCastBigintToDouble() {
PlanNodeStatsEstimate inputStatistics = PlanNodeStatsEstimate.builder()
        .addSymbolStatistics(new Symbol("a"), SymbolStatsEstimate.builder()
                .setNullsFraction(0.3)
                .setLowValue(2.0)
                .setHighValue(10.0)
                .setDistinctValuesCount(4)
                .setAverageRowSize(2.0)
                .build())
        .build();
assertCalculate(new Cast(new SymbolReference("a"), "double"), inputStatistics)
        .lowValue(2.0)
        .highValue(10.0)
        .distinctValuesCount(4)
        .nullsFraction(0.3)
        .dataSizeUnknown();
}
Use of io.prestosql.sql.tree.Cast in project hetu-core by openlookeng.
From the class TransformCorrelatedScalarSubquery, method apply:
@Override
public Result apply(LateralJoinNode lateralJoinNode, Captures captures, Context context) {
PlanNode subquery = context.getLookup().resolve(lateralJoinNode.getSubquery());
if (!searchFrom(subquery, context.getLookup())
        .where(EnforceSingleRowNode.class::isInstance)
        .recurseOnlyWhen(ProjectNode.class::isInstance)
        .matches()) {
return Result.empty();
}
PlanNode rewrittenSubquery = searchFrom(subquery, context.getLookup())
        .where(EnforceSingleRowNode.class::isInstance)
        .recurseOnlyWhen(ProjectNode.class::isInstance)
        .removeFirst();
Range<Long> subqueryCardinality = extractCardinality(rewrittenSubquery, context.getLookup());
boolean producesAtMostOneRow = Range.closed(0L, 1L).encloses(subqueryCardinality);
if (producesAtMostOneRow) {
boolean producesSingleRow = Range.singleton(1L).encloses(subqueryCardinality);
return Result.ofPlanNode(new LateralJoinNode(
        context.getIdAllocator().getNextId(),
        lateralJoinNode.getInput(),
        rewrittenSubquery,
        lateralJoinNode.getCorrelation(),
        producesSingleRow ? lateralJoinNode.getType() : LEFT,
        lateralJoinNode.getFilter(),
        lateralJoinNode.getOriginSubquery()));
}
Symbol unique = context.getSymbolAllocator().newSymbol("unique", BigintType.BIGINT);
// Join the subquery against the input with each input row tagged by AssignUniqueId,
// so duplicate subquery rows can be detected per input row.
LateralJoinNode rewrittenLateralJoinNode = new LateralJoinNode(
        context.getIdAllocator().getNextId(),
        new AssignUniqueId(context.getIdAllocator().getNextId(), lateralJoinNode.getInput(), unique),
        rewrittenSubquery,
        lateralJoinNode.getCorrelation(),
        LEFT,
        lateralJoinNode.getFilter(),
        lateralJoinNode.getOriginSubquery());
Symbol isDistinct = context.getSymbolAllocator().newSymbol("is_distinct", BooleanType.BOOLEAN);
// Mark the first row per unique input row; any later duplicate gets is_distinct = false.
MarkDistinctNode markDistinctNode = new MarkDistinctNode(
        context.getIdAllocator().getNextId(),
        rewrittenLateralJoinNode,
        isDistinct,
        rewrittenLateralJoinNode.getInput().getOutputSymbols(),
        Optional.empty());
// Filter that passes distinct rows and otherwise raises SUBQUERY_MULTIPLE_ROWS
// via CAST(fail(...) AS boolean).
FilterNode filterNode = new FilterNode(
        context.getIdAllocator().getNextId(),
        markDistinctNode,
        castToRowExpression(new SimpleCaseExpression(
                toSymbolReference(isDistinct),
                ImmutableList.of(new WhenClause(TRUE_LITERAL, TRUE_LITERAL)),
                Optional.of(new Cast(
                        new FunctionCallBuilder(metadata)
                                .setName(QualifiedName.of("fail"))
                                .addArgument(INTEGER, new LongLiteral(Integer.toString(SUBQUERY_MULTIPLE_ROWS.toErrorCode().getCode())))
                                .addArgument(VARCHAR, new StringLiteral("Scalar sub-query has returned multiple rows"))
                                .build(),
                        BOOLEAN)))));
return Result.ofPlanNode(new ProjectNode(
        context.getIdAllocator().getNextId(),
        filterNode,
        AssignmentUtils.identityAsSymbolReferences(lateralJoinNode.getOutputSymbols())));
}
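The branch point in this rule is the cardinality check: when the rewritten subquery provably returns at most one row, the EnforceSingleRowNode can simply be removed; otherwise the rule installs the AssignUniqueId / MarkDistinct / filter combination above, whose CAST(fail(...) AS boolean) branch raises SUBQUERY_MULTIPLE_ROWS at runtime if a second row ever appears. A standalone sketch of the Guava Range logic behind that check follows; the cardinality value here is hypothetical, since in the rule it comes from extractCardinality.

import com.google.common.collect.Range;

public class CardinalityCheckSketch {
    public static void main(String[] args) {
        // Hypothetical cardinality estimate for the rewritten subquery; the rule obtains
        // this from extractCardinality(rewrittenSubquery, context.getLookup()).
        Range<Long> subqueryCardinality = Range.closed(0L, 5L);

        // At most one row: the whole estimate fits inside [0, 1].
        boolean producesAtMostOneRow = Range.closed(0L, 1L).encloses(subqueryCardinality);
        // Exactly one row: the estimate is the single point {1}.
        boolean producesSingleRow = Range.singleton(1L).encloses(subqueryCardinality);

        System.out.println(producesAtMostOneRow); // false for [0, 5]
        System.out.println(producesSingleRow);    // false for [0, 5]
    }
}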