Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
In class ExpressionInterpreter, the method createFailureFunction:
private static Expression createFailureFunction(RuntimeException exception, Type type, Metadata metadata)
{
    requireNonNull(exception, "Exception is null");

    String failureInfo = JsonCodec.jsonCodec(FailureInfo.class).toJson(Failures.toFailure(exception).toFailureInfo());
    FunctionCall jsonParse = new FunctionCallBuilder(metadata)
            .setName(QualifiedName.of("json_parse"))
            .addArgument(VARCHAR, new StringLiteral(failureInfo))
            .build();
    FunctionCall failureFunction = new FunctionCallBuilder(metadata)
            .setName(QualifiedName.of("fail"))
            .addArgument(JSON, jsonParse)
            .build();

    return new Cast(failureFunction, type.getTypeSignature().toString());
}
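This helper defers evaluation errors: the failing sub-expression is replaced by CAST(fail(json_parse('<failure json>')) AS <original type>), so the serialized exception is only re-thrown if that expression is actually evaluated at runtime. The sketch below shows a hypothetical call site during constant folding; the evaluate and toExpression helpers, the subExpression and expressionType variables, and the enclosing try/catch are assumptions for illustration, not code from the class.

try {
    // try to fold the sub-expression to a constant at planning time
    return toExpression(evaluate(subExpression), expressionType);
}
catch (RuntimeException e) {
    // defer the error, e.g. 1 / 0 in a branch that may never run:
    // the original exception resurfaces only if this expression is evaluated
    return createFailureFunction(e, expressionType, metadata);
}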
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
In class ImpalaAstBuilder, the method visitFunctionCall:
@Override
public Node visitFunctionCall(ImpalaSqlParser.FunctionCallContext context)
{
    Optional<Expression> filter = visitIfPresent(context.filter(), Expression.class);
    Optional<Window> window = visitIfPresent(context.over(), Window.class);

    Optional<OrderBy> orderBy = Optional.empty();
    if (context.ORDER() != null) {
        orderBy = Optional.of(new OrderBy(visit(context.sortItem(), SortItem.class)));
    }

    QualifiedName name = getQualifiedName(context.qualifiedName());
    boolean distinct = isDistinct(context.setQuantifier());

    if (name.toString().equalsIgnoreCase("if")) {
        check(context.expression().size() == 2 || context.expression().size() == 3, "Invalid number of arguments for 'if' function", context);
        check(!window.isPresent(), "OVER clause not valid for 'if' function", context);
        check(!distinct, "DISTINCT not valid for 'if' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'if' function", context);

        Expression elseExpression = null;
        if (context.expression().size() == 3) {
            elseExpression = (Expression) visit(context.expression(2));
        }
        return new IfExpression(
                getLocation(context),
                (Expression) visit(context.expression(0)),
                (Expression) visit(context.expression(1)),
                elseExpression);
    }

    if (name.toString().equalsIgnoreCase("nullif")) {
        check(context.expression().size() == 2, "Invalid number of arguments for 'nullif' function", context);
        check(!window.isPresent(), "OVER clause not valid for 'nullif' function", context);
        check(!distinct, "DISTINCT not valid for 'nullif' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'nullif' function", context);

        return new NullIfExpression(
                getLocation(context),
                (Expression) visit(context.expression(0)),
                (Expression) visit(context.expression(1)));
    }

    if (name.toString().equalsIgnoreCase("coalesce")) {
        check(context.expression().size() >= 2, "The 'coalesce' function must have at least two arguments", context);
        check(!window.isPresent(), "OVER clause not valid for 'coalesce' function", context);
        check(!distinct, "DISTINCT not valid for 'coalesce' function", context);
        check(!filter.isPresent(), "FILTER not valid for 'coalesce' function", context);

        return new CoalesceExpression(getLocation(context), visit(context.expression(), Expression.class));
    }

    return new FunctionCall(
            Optional.of(getLocation(context)),
            getQualifiedName(context.qualifiedName()),
            window,
            filter,
            orderBy,
            distinct,
            false,
            visit(context.expression(), Expression.class));
}
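As a quick illustration of the special-casing above, the sketch below builds the AST nodes the visitor returns for if, nullif, and coalesce instead of a generic FunctionCall. The SymbolReference operands and the omission of a NodeLocation are simplifications for the example, not what the grammar actually produces.

import com.google.common.collect.ImmutableList;
import io.prestosql.sql.tree.CoalesceExpression;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.IfExpression;
import io.prestosql.sql.tree.NullIfExpression;
import io.prestosql.sql.tree.SymbolReference;

class ImpalaCallRewriteSketch
{
    static void examples()
    {
        Expression a = new SymbolReference("a");
        Expression b = new SymbolReference("b");
        Expression c = new SymbolReference("c");

        // if(a, b, c) is rewritten to an IfExpression, not a FunctionCall
        Expression ifExpr = new IfExpression(a, b, c);
        // nullif(a, b) becomes a NullIfExpression
        Expression nullIf = new NullIfExpression(a, b);
        // coalesce(a, b, c) becomes a CoalesceExpression over all operands
        Expression coalesce = new CoalesceExpression(ImmutableList.of(a, b, c));
    }
}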
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
In class CreateCubeTask, the method internalExecute:
@VisibleForTesting
public ListenableFuture<?> internalExecute(CreateCube statement, Metadata metadata, AccessControl accessControl, Session session, QueryStateMachine stateMachine, List<Expression> parameters)
{
    Optional<CubeMetaStore> optionalCubeMetaStore = cubeManager.getMetaStore(STAR_TREE);
    if (!optionalCubeMetaStore.isPresent()) {
        throw new RuntimeException("HetuMetaStore is not initialized");
    }

    QualifiedObjectName cubeName = createQualifiedObjectName(session, statement, statement.getCubeName());
    QualifiedObjectName tableName = createQualifiedObjectName(session, statement, statement.getSourceTableName());
    Optional<TableHandle> cubeHandle = metadata.getTableHandle(session, cubeName);
    Optional<TableHandle> tableHandle = metadata.getTableHandle(session, tableName);

    if (optionalCubeMetaStore.get().getMetadataFromCubeName(cubeName.toString()).isPresent()) {
        if (!statement.isNotExists()) {
            throw new SemanticException(CUBE_ALREADY_EXISTS, statement, "Cube '%s' already exists", cubeName);
        }
        return immediateFuture(null);
    }
    if (cubeHandle.isPresent()) {
        if (!statement.isNotExists()) {
            throw new SemanticException(CUBE_OR_TABLE_ALREADY_EXISTS, statement, "Cube or Table '%s' already exists", cubeName);
        }
        return immediateFuture(null);
    }

    CatalogName catalogName = metadata.getCatalogHandle(session, cubeName.getCatalogName())
            .orElseThrow(() -> new PrestoException(NOT_FOUND, "Catalog not found: " + cubeName.getCatalogName()));
    if (!metadata.isPreAggregationSupported(session, catalogName)) {
        throw new PrestoException(StandardErrorCode.NOT_SUPPORTED, String.format("Cube cannot be created on catalog '%s'", catalogName.toString()));
    }
    if (!tableHandle.isPresent()) {
        throw new SemanticException(MISSING_TABLE, statement, "Table '%s' does not exist", tableName);
    }

    TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle.get());
    List<String> groupingSet = statement.getGroupingSet().stream()
            .map(s -> s.getValue().toLowerCase(ENGLISH))
            .collect(Collectors.toList());
    Map<String, ColumnMetadata> sourceTableColumns = tableMetadata.getColumns().stream()
            .collect(Collectors.toMap(ColumnMetadata::getName, col -> col));
    List<ColumnMetadata> cubeColumns = new ArrayList<>();
    Map<String, AggregationSignature> aggregations = new HashMap<>();
    Analysis analysis = analyzeStatement(statement, session, metadata, accessControl, parameters, stateMachine.getWarningCollector());
    Map<String, Field> fields = analysis.getOutputDescriptor().getAllFields().stream()
            .collect(Collectors.toMap(col -> col.getName().map(String::toLowerCase).get(), col -> col));

    for (FunctionCall aggFunction : statement.getAggregations()) {
        String aggFunctionName = aggFunction.getName().toString().toLowerCase(ENGLISH);
        String argument = aggFunction.getArguments().isEmpty() || aggFunction.getArguments().get(0) instanceof LongLiteral
                ? null
                : ((Identifier) aggFunction.getArguments().get(0)).getValue().toLowerCase(ENGLISH);
        boolean distinct = aggFunction.isDistinct();
        String cubeColumnName = aggFunctionName + "_" + (argument == null ? "all" : argument) + (aggFunction.isDistinct() ? "_distinct" : "");
        CubeAggregateFunction cubeAggregateFunction = CubeAggregateFunction.valueOf(aggFunctionName.toUpperCase(ENGLISH));
        switch (cubeAggregateFunction) {
            case SUM:
                aggregations.put(cubeColumnName, AggregationSignature.sum(argument, distinct));
                break;
            case COUNT:
                AggregationSignature aggregationSignature = argument == null
                        ? AggregationSignature.count()
                        : AggregationSignature.count(argument, distinct);
                aggregations.put(cubeColumnName, aggregationSignature);
                break;
            case AVG:
                aggregations.put(cubeColumnName, AggregationSignature.avg(argument, distinct));
                break;
            case MAX:
                aggregations.put(cubeColumnName, AggregationSignature.max(argument, distinct));
                break;
            case MIN:
                aggregations.put(cubeColumnName, AggregationSignature.min(argument, distinct));
                break;
            default:
                throw new PrestoException(NOT_SUPPORTED, format("Unsupported aggregation function : %s", aggFunctionName));
        }
        Field tableField = fields.get(cubeColumnName);
        ColumnMetadata cubeCol = new ColumnMetadata(cubeColumnName, tableField.getType(), true, null, null, false, Collections.emptyMap());
        cubeColumns.add(cubeCol);
    }

    accessControl.checkCanCreateTable(session.getRequiredTransactionId(), session.getIdentity(), tableName);

    Map<String, Expression> sqlProperties = mapFromProperties(statement.getProperties());
    Map<String, Object> properties = metadata.getTablePropertyManager().getProperties(catalogName, cubeName.getCatalogName(), sqlProperties, session, metadata, parameters);

    if (properties.containsKey("partitioned_by")) {
        List<String> partitionCols = new ArrayList<>(((List<String>) properties.get("partitioned_by")));
        // put all partition columns at the end of the list
        groupingSet.removeAll(partitionCols);
        groupingSet.addAll(partitionCols);
    }

    for (String dimension : groupingSet) {
        if (!sourceTableColumns.containsKey(dimension)) {
            throw new SemanticException(MISSING_COLUMN, statement, "Column %s does not exist", dimension);
        }
        ColumnMetadata tableCol = sourceTableColumns.get(dimension);
        ColumnMetadata cubeCol = new ColumnMetadata(dimension, tableCol.getType(), tableCol.isNullable(), null, null, false, tableCol.getProperties());
        cubeColumns.add(cubeCol);
    }

    ConnectorTableMetadata cubeTableMetadata = new ConnectorTableMetadata(cubeName.asSchemaTableName(), ImmutableList.copyOf(cubeColumns), properties);
    try {
        metadata.createTable(session, cubeName.getCatalogName(), cubeTableMetadata, statement.isNotExists());
    }
    catch (PrestoException e) {
        // connectors are not required to handle the ignoreExisting flag
        if (!e.getErrorCode().equals(ALREADY_EXISTS.toErrorCode()) || !statement.isNotExists()) {
            throw e;
        }
    }

    CubeMetadataBuilder builder = optionalCubeMetaStore.get().getBuilder(cubeName.toString(), tableName.toString());
    groupingSet.forEach(dimension -> builder.addDimensionColumn(dimension, dimension));
    aggregations.forEach((column, aggregationSignature) ->
            builder.addAggregationColumn(column, aggregationSignature.getFunction(), aggregationSignature.getDimension(), aggregationSignature.isDistinct()));
    builder.addGroup(new HashSet<>(groupingSet));
    // Status and Table modified time will be updated on the first insert into the cube
    builder.setCubeStatus(CubeStatus.INACTIVE);
    builder.setTableLastUpdatedTime(-1L);
    statement.getSourceFilter().ifPresent(sourceTablePredicate -> {
        sourceTablePredicate = Coercer.addCoercions(sourceTablePredicate, analysis);
        builder.withCubeFilter(new CubeFilter(ExpressionFormatter.formatExpression(sourceTablePredicate, Optional.empty())));
    });
    builder.setCubeLastUpdatedTime(System.currentTimeMillis());
    optionalCubeMetaStore.get().persist(builder.build());
    return immediateFuture(null);
}
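The cube column name derived in the aggregation loop above follows a simple rule: the function name, then the argument (or "all" when the aggregation has no column argument, as in count(*)), then a "_distinct" suffix for DISTINCT aggregations. The helper below merely restates that expression so the mapping is easy to see; the method name and the example values are illustrative only.

// Mirrors the naming expression used in the aggregation loop of internalExecute.
static String cubeColumnName(String aggFunctionName, String argument, boolean distinct)
{
    return aggFunctionName + "_" + (argument == null ? "all" : argument) + (distinct ? "_distinct" : "");
}

// cubeColumnName("sum", "price", false)      -> "sum_price"
// cubeColumnName("count", null, false)       -> "count_all"
// cubeColumnName("count", "orderkey", true)  -> "count_orderkey_distinct"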
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
In class FunctionCallProvider, the method getExpectedValue:
public FunctionCall getExpectedValue(SymbolAliases aliases)
{
    List<Expression> symbolReferences = toSymbolReferences(args, aliases);
    if (isWindowFunction) {
        verify(!distinct, "window does not support distinct");
        verify(orderBy.isEmpty(), "window does not support order by");
        return new ExpectedWindowFunctionCall(symbolReferences);
    }

    Optional<OrderBy> orderByClause = Optional.empty();
    if (!orderBy.isEmpty()) {
        orderByClause = Optional.of(new OrderBy(orderBy.stream()
                .map(item -> new SortItem(
                        new SymbolReference(aliases.get(item.getField()).getName()),
                        item.getOrdering(),
                        item.getNullOrdering()))
                .collect(Collectors.toList())));
    }

    return new FunctionCall(Optional.empty(), name, Optional.empty(), Optional.empty(), orderByClause, distinct, true, symbolReferences);
}
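The ORDER BY branch resolves each test alias to the symbol name the plan actually produced before building the expected SortItem list. The standalone sketch below shows the shape of the expected ORDER BY sub-tree for something like array_agg(x ORDER BY y ASC NULLS LAST); the resolved symbol name "y_1" and the wrapper class are hypothetical, not values from the test framework.

import com.google.common.collect.ImmutableList;
import io.prestosql.sql.tree.OrderBy;
import io.prestosql.sql.tree.SortItem;
import io.prestosql.sql.tree.SymbolReference;

class OrderByExpectationSketch
{
    static OrderBy expectedOrderBy()
    {
        // test alias "y" resolved to plan symbol "y_1" (hypothetical)
        SortItem sortItem = new SortItem(
                new SymbolReference("y_1"),
                SortItem.Ordering.ASCENDING,
                SortItem.NullOrdering.LAST);
        return new OrderBy(ImmutableList.of(sortItem));
    }
}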
Use of io.prestosql.sql.tree.FunctionCall in project hetu-core by openlookeng.
In class TestExpressionRewriteRuleSet, the method testAggregationExpressionNotRewritten:
@Test
public void testAggregationExpressionNotRewritten()
{
    FunctionCall nowCall = new FunctionCallBuilder(tester().getMetadata()).setName(QualifiedName.of("now")).build();
    ExpressionRewriteRuleSet functionCallRewriter = new ExpressionRewriteRuleSet((expression, context) -> nowCall);
    tester().assertThat(functionCallRewriter.aggregationExpressionRewrite())
            .on(p -> p.aggregation(a -> a
                    .globalGrouping()
                    .addAggregation(p.symbol("count_1", DateType.DATE), nowCall, ImmutableList.of())
                    .source(p.values())))
            .doesNotFire();
}
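For reference, FunctionCallBuilder resolves the function signature from the argument types it is given, so the zero-argument now() call above needs no addArgument calls. A one-argument call would be built as in the sketch below; the lower call, the literal, and the metadata variable are illustrative and not part of the test.

// Sketch: building a resolved one-argument call with the same builder API.
FunctionCall lowerCall = new FunctionCallBuilder(metadata)
        .setName(QualifiedName.of("lower"))
        .addArgument(VARCHAR, new StringLiteral("abc"))
        .build();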