Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
The class TestQuerySessionSupplier, method testSqlPathCreation:
@Test
public void testSqlPathCreation() {
    ImmutableList.Builder<SqlPathElement> correctValues = ImmutableList.builder();
    correctValues.add(new SqlPathElement(Optional.of(new Identifier("normal")), new Identifier("schema")));
    correctValues.add(new SqlPathElement(Optional.of(new Identifier("who.uses.periods")), new Identifier("in.schema.names")));
    correctValues.add(new SqlPathElement(Optional.of(new Identifier("same,deal")), new Identifier("with,commas")));
    correctValues.add(new SqlPathElement(Optional.of(new Identifier("aterrible")), new Identifier("thing!@#$%^&*()")));
    List<SqlPathElement> expected = correctValues.build();

    SqlPath path = new SqlPath(Optional.of("normal.schema,"
            + "\"who.uses.periods\".\"in.schema.names\","
            + "\"same,deal\".\"with,commas\","
            + "aterrible.\"thing!@#$%^&*()\""));

    assertEquals(path.getParsedPath(), expected);
    assertEquals(path.toString(), Joiner.on(", ").join(expected));
}
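The test exercises the key parsing rule: periods and commas act as path separators only outside double quotes, so a delimited name such as "who.uses.periods" survives as a single Identifier. A minimal sketch of the same round trip, using only the constructors and accessors already shown in the test above (imports omitted, as in the snippets on this page):

// Minimal sketch reusing the SqlPath/SqlPathElement/Identifier APIs from the test above.
SqlPath path = new SqlPath(Optional.of("normal.schema,\"who.uses.periods\".\"in.schema.names\""));
List<SqlPathElement> parsed = path.getParsedPath();
// Two elements: normal.schema and "who.uses.periods"."in.schema.names" --
// the quoted names keep their embedded periods instead of being split on them.
System.out.println(Joiner.on(", ").join(parsed));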
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
The class CubeRangeCanonicalizer, method mergePredicates:
public Expression mergePredicates(Expression cubePredicate) {
    Set<Identifier> predicateColumns = ExpressionUtils.getIdentifiers(cubePredicate);
    if (predicateColumns.size() > 1) {
        // Only single-column predicates can be merged
        return cubePredicate;
    }
    Expression cubePredicateRewrite = ExpressionUtils.rewriteIdentifiersToSymbolReferences(cubePredicate);
    List<Expression> predicates = ExpressionUtils.extractDisjuncts(cubePredicateRewrite);
    CubeRangeVisitor visitor = new CubeRangeVisitor(types, metadata, session.toConnectorSession());
    Expression transformed = ExpressionUtils.or(predicates.stream().map(visitor::process).collect(Collectors.toList()));
    ExpressionDomainTranslator.ExtractionResult result = ExpressionDomainTranslator.fromPredicate(metadata, session, transformed, types);
    if (!result.getRemainingExpression().equals(BooleanLiteral.TRUE_LITERAL)) {
        log.info("Unable to transform predicate %s into tuple domain completely. Cannot merge ranges into single predicate.", transformed);
        return cubePredicateRewrite;
    }
    ExpressionDomainTranslator domainTranslator = new ExpressionDomainTranslator(new LiteralEncoder(metadata));
    return domainTranslator.toPredicate(result.getTupleDomain());
}
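For intuition, the method collapses disjoint single-column ranges into one predicate derived from the tuple domain; if any disjunct cannot be fully translated, the rewritten input is returned unchanged. The sketch below is illustrative only: it assumes a CubeRangeCanonicalizer (here `canonicalizer`) already wired with the enclosing class's types, metadata, and session, and the exact textual shape of the merged output is an assumption about how ExpressionDomainTranslator renders the resulting domain.

// Illustrative sketch, not taken from hetu-core. Assumes `canonicalizer` is a
// CubeRangeCanonicalizer built with the enclosing class's types/metadata/session.
Expression input = new SqlParser().createExpression(
        "orderdate BETWEEN DATE '1992-01-01' AND DATE '1992-06-30' "
                + "OR orderdate BETWEEN DATE '1992-07-01' AND DATE '1992-12-31'",
        new ParsingOptions());
Expression merged = canonicalizer.mergePredicates(input);
// Expected (assumption): both disjuncts convert cleanly into a tuple domain on orderdate,
// so the result is a single range predicate covering 1992-01-01 through 1992-12-31.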
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
The class QueryPlanner, method planUpdateRowAsInsert:
public UpdateDeleteRelationPlan planUpdateRowAsInsert(Update node) {
    Table table = node.getTable();
    RelationType descriptor = analysis.getOutputDescriptor(table);
    TableHandle handle = analysis.getTableHandle(table);
    ColumnHandle rowIdHandle = analysis.getRowIdHandle(table);
    ColumnMetadata rowIdColumnMetadata = metadata.getColumnMetadata(session, handle, rowIdHandle);

    // add table columns
    ImmutableList.Builder<Symbol> outputSymbols = ImmutableList.builder();
    ImmutableMap.Builder<Symbol, ColumnHandle> columnsBuilder = ImmutableMap.builder();
    ImmutableList.Builder<Field> fields = ImmutableList.builder();
    for (Field field : descriptor.getAllFields()) {
        Symbol symbol = planSymbolAllocator.newSymbol(field);
        outputSymbols.add(symbol);
        columnsBuilder.put(symbol, analysis.getColumn(field));
        fields.add(field);
    }

    // create table scan
    ImmutableMap<Symbol, ColumnHandle> columns = columnsBuilder.build();
    PlanNode tableScan = TableScanNode.newInstance(idAllocator.getNextId(), handle, outputSymbols.build(), columns,
            ReuseExchangeOperator.STRATEGY.REUSE_STRATEGY_DEFAULT, new UUID(0, 0), 0, true);
    Scope scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(fields.build())).build();
    RelationPlan relationPlan = new RelationPlan(tableScan, scope, outputSymbols.build());

    TranslationMap translations = new TranslationMap(relationPlan, analysis, lambdaDeclarationToSymbolMap);
    translations.setFieldMappings(relationPlan.getFieldMappings());
    PlanBuilder builder = new PlanBuilder(translations, relationPlan.getRoot());

    Optional<RowExpression> predicate = Optional.empty();
    if (node.getWhere().isPresent()) {
        builder = filter(builder, node.getWhere().get(), node);
        if (builder.getRoot() instanceof FilterNode) {
            predicate = Optional.of(((FilterNode) builder.getRoot()).getPredicate());
        }
    }

    List<AssignmentItem> assignmentItems = node.getAssignmentItems();
    Analysis.Update update = analysis.getUpdate().get();
    Assignments.Builder assignments = Assignments.builder();
    TableMetadata tableMetadata = metadata.getTableMetadata(session, update.getTarget());
    Symbol orderBySymbol = null;
    for (Map.Entry<Symbol, ColumnHandle> entry : columns.entrySet()) {
        ColumnMetadata column;
        ColumnHandle columnHandle = entry.getValue();
        Symbol input = entry.getKey();
        if (columnHandle.getColumnName().equals(rowIdHandle.getColumnName())) {
            column = rowIdColumnMetadata;
        }
        else {
            column = tableMetadata.getColumn(columnHandle.getColumnName());
        }
        if (column != rowIdColumnMetadata && column.isHidden()) {
            // skip hidden columns (other than the row-id column)
            continue;
        }
        Symbol output = planSymbolAllocator.newSymbol(column.getName(), column.getType());
        Type tableType = column.getType();
        Type queryType = planSymbolAllocator.getTypes().get(input);

        List<AssignmentItem> assignment = assignmentItems.stream()
                .filter(item -> item.getName().equals(QualifiedName.of(column.getName())))
                .collect(Collectors.toList());
        if (!assignment.isEmpty()) {
            // this column has an explicit SET assignment in the UPDATE
            Expression expression = assignment.get(0).getValue();
            Expression cast;
            if (expression instanceof Identifier) {
                // assigning by column reference
                Optional<Symbol> first = columns.entrySet().stream()
                        .filter(e -> e.getValue().getColumnName().equals(((Identifier) expression).getValue()))
                        .map(Entry::getKey)
                        .findFirst();
                Symbol source = first.orElseThrow(() -> new IllegalArgumentException("Unable to find column " + ((Identifier) expression).getValue()));
                cast = new Cast(toSymbolReference(source), tableType.getTypeSignature().toString());
            }
            else {
                cast = new Cast(expression, tableType.getTypeSignature().toString());
            }
            assignments.put(output, castToRowExpression(cast));
        }
        else if (queryType.equals(tableType) || typeCoercion.isTypeOnlyCoercion(queryType, tableType)) {
            // unchanged column with a compatible type: pass the scanned value through
            assignments.put(output, castToRowExpression(toSymbolReference(input)));
        }
        else {
            Expression cast = new Cast(toSymbolReference(input), tableType.getTypeSignature().toString());
            assignments.put(output, castToRowExpression(cast));
        }
        if (column == rowIdColumnMetadata) {
            orderBySymbol = output;
        }
    }

    ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), builder.getRoot(), assignments.build());
    PlanBuilder planBuilder = new PlanBuilder(translations, projectNode);

    // order the output by the row-id symbol
    SortOrder sortOrder = SortOrder.ASC_NULLS_LAST;
    Symbol sortSymbol = orderBySymbol;
    Map<Symbol, SortOrder> sortOrderMap = ImmutableMap.<Symbol, SortOrder>builder().put(sortSymbol, sortOrder).build();
    OrderingScheme orderingScheme = new OrderingScheme(ImmutableList.of(sortSymbol), sortOrderMap);
    builder = sort(planBuilder, Optional.of(orderingScheme));

    ImmutableList.Builder<Field> projectFields = ImmutableList.builder();
    projectFields.addAll(fields.build().stream().filter(x -> !x.isHidden()).collect(toImmutableList()));
    scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(projectFields.build())).build();
    RelationPlan plan = new RelationPlan(builder.getRoot(), scope, projectNode.getOutputSymbols());

    List<String> visibleTableColumnNames = tableMetadata.getColumns().stream()
            .filter(c -> !c.isHidden())
            .map(ColumnMetadata::getName)
            .collect(Collectors.toList());
    visibleTableColumnNames.add(rowIdColumnMetadata.getName());
    return new UpdateDeleteRelationPlan(plan, visibleTableColumnNames, columns, predicate);
}
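The `expression instanceof Identifier` branch is what distinguishes an assignment by column reference (for example `UPDATE orders SET comment = clerk`) from an assignment by literal or computed expression. The sketch below illustrates that distinction at the AST level; it assumes the hetu-core parser accepts this standard UPDATE syntax, and only uses the Update/AssignmentItem accessors and parser call already shown in these snippets.

// Illustrative sketch: inspect the assignment items of a parsed UPDATE statement.
Update update = (Update) new SqlParser().createStatement(
        "UPDATE orders SET comment = clerk, priority = 1 WHERE orderkey = 5",
        new ParsingOptions());
for (AssignmentItem item : update.getAssignmentItems()) {
    // For "comment = clerk" the value is an Identifier (a column reference),
    // so planUpdateRowAsInsert resolves it to the scanned column's symbol;
    // for "priority = 1" the value is a literal and is simply cast to the column type.
    System.out.println(item.getName() + " = " + item.getValue().getClass().getSimpleName());
}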
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
The class LogicalPlanner, method createInsertCubePlan:
private RelationPlan createInsertCubePlan(Analysis analysis, InsertCube insertCubeStatement) {
    Analysis.CubeInsert insert = analysis.getCubeInsert().get();
    TableMetadata tableMetadata = metadata.getTableMetadata(session, insert.getTarget());
    List<ColumnMetadata> visibleTableColumns = tableMetadata.getColumns().stream()
            .filter(column -> !column.isHidden())
            .collect(toImmutableList());
    List<String> visibleTableColumnNames = visibleTableColumns.stream()
            .map(ColumnMetadata::getName)
            .collect(toImmutableList());

    RelationPlan plan = createRelationPlan(analysis, insertCubeStatement.getQuery());

    Map<String, ColumnHandle> columns = metadata.getColumnHandles(session, insert.getTarget());
    Assignments.Builder assignments = Assignments.builder();
    for (ColumnMetadata column : tableMetadata.getColumns()) {
        if (column.isHidden()) {
            continue;
        }
        Symbol output = planSymbolAllocator.newSymbol(column.getName(), column.getType());
        int index = insert.getColumns().indexOf(columns.get(column.getName()));
        if (index < 0) {
            // column is not fed by the insert query: fill it with a typed NULL
            Expression cast = new Cast(new NullLiteral(), column.getType().getTypeSignature().toString());
            assignments.put(output, castToRowExpression(cast));
        }
        else {
            Symbol input = plan.getSymbol(index);
            Type tableType = column.getType();
            Type queryType = planSymbolAllocator.getTypes().get(input);
            if (queryType.equals(tableType) || typeCoercion.isTypeOnlyCoercion(queryType, tableType)) {
                assignments.put(output, castToRowExpression(toSymbolReference(input)));
            }
            else {
                Expression cast = noTruncationCast(toSymbolReference(input), queryType, tableType);
                assignments.put(output, castToRowExpression(cast));
            }
        }
    }
    ProjectNode projectNode = new ProjectNode(idAllocator.getNextId(), plan.getRoot(), assignments.build());

    List<Field> fields = visibleTableColumns.stream()
            .map(column -> Field.newUnqualified(column.getName(), column.getType()))
            .collect(toImmutableList());
    Scope scope = Scope.builder().withRelationType(RelationId.anonymous(), new RelationType(fields)).build();
    plan = new RelationPlan(projectNode, scope, projectNode.getOutputSymbols());

    Optional<NewTableLayout> newTableLayout = metadata.getInsertLayout(session, insert.getTarget());
    String catalogName = insert.getTarget().getCatalogName().getCatalogName();
    TableStatisticsMetadata statisticsMetadata = metadata.getStatisticsCollectionMetadataForWrite(session, catalogName, tableMetadata.getMetadata());
    RelationPlan tableWriterPlan = createTableWriterPlan(analysis, plan,
            new InsertReference(insert.getTarget(), analysis.isCubeOverwrite()),
            visibleTableColumnNames, newTableLayout, statisticsMetadata);

    Expression rewritten = null;
    Set<Identifier> predicateColumns = new HashSet<>();
    if (insertCubeStatement.getWhere().isPresent()) {
        rewritten = new QueryPlanner(analysis, planSymbolAllocator, idAllocator,
                buildLambdaDeclarationToSymbolMap(analysis, planSymbolAllocator), metadata, session, namedSubPlan, uniqueIdAllocator)
                .rewriteExpression(tableWriterPlan, insertCubeStatement.getWhere().get(), analysis,
                        buildLambdaDeclarationToSymbolMap(analysis, planSymbolAllocator));
        predicateColumns.addAll(ExpressionUtils.getIdentifiers(rewritten));
    }

    CubeMetadata cubeMetadata = insert.getMetadata();
    if (!insertCubeStatement.isOverwrite() && !insertCubeStatement.getWhere().isPresent() && cubeMetadata.getCubeStatus() != CubeStatus.INACTIVE) {
        // Some data was inserted before, but this statement tries to insert the entire dataset
        throw new PrestoException(QUERY_REJECTED, "Cannot allow insert. Inserting entire dataset but cube already has partial data");
    }
    else if (insertCubeStatement.getWhere().isPresent()) {
        if (!canSupportPredicate(rewritten)) {
            throw new PrestoException(QUERY_REJECTED, String.format("Cannot support predicate '%s'",
                    ExpressionFormatter.formatExpression(rewritten, Optional.empty())));
        }
        if (!insertCubeStatement.isOverwrite() && arePredicatesOverlapping(rewritten, cubeMetadata)) {
            throw new PrestoException(QUERY_REJECTED, String.format("Cannot allow insert. Cube already contains data for the given predicate '%s'",
                    ExpressionFormatter.formatExpression(insertCubeStatement.getWhere().get(), Optional.empty())));
        }
    }

    TableHandle sourceTableHandle = insert.getSourceTable();
    // At this point it has been verified that the source table has not been updated,
    // so insert into cube should be allowed
    LongSupplier tableLastModifiedTimeSupplier = metadata.getTableLastModifiedTimeSupplier(session, sourceTableHandle);
    checkState(tableLastModifiedTimeSupplier != null, "Table last modified time is null");

    Map<Symbol, Type> predicateColumnsType = predicateColumns.stream()
            .map(identifier -> new Symbol(identifier.getValue()))
            .collect(Collectors.toMap(Function.identity(), symbol -> planSymbolAllocator.getTypes().get(symbol), (key1, ignored) -> key1));
    CubeFinishNode cubeFinishNode = new CubeFinishNode(idAllocator.getNextId(), tableWriterPlan.getRoot(),
            planSymbolAllocator.newSymbol("rows", BIGINT),
            new CubeUpdateMetadata(tableMetadata.getQualifiedName().toString(),
                    tableLastModifiedTimeSupplier.getAsLong(),
                    rewritten != null ? ExpressionFormatter.formatExpression(rewritten, Optional.empty()) : null,
                    insertCubeStatement.isOverwrite()),
            predicateColumnsType);
    return new RelationPlan(cubeFinishNode, analysis.getScope(insertCubeStatement), cubeFinishNode.getOutputSymbols());
}
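The CubeUpdateMetadata carries, per column, the type of every Identifier found in the rewritten WHERE clause, which is what lets later cube inserts detect overlapping predicates. A minimal isolated sketch of that extraction follows; the predicate text is an assumed example, `planSymbolAllocator` is taken as given from the enclosing class, and the ExpressionUtils.getIdentifiers helper is the same hetu-core utility used in the method above.

// Illustrative sketch: which Identifiers a cube-insert predicate constrains, and their types.
Expression where = new SqlParser().createExpression(
        "orderdate >= DATE '1992-01-01' AND orderstatus = 'F'",
        new ParsingOptions());
Set<Identifier> predicateColumns = ExpressionUtils.getIdentifiers(where); // orderdate, orderstatus
Map<Symbol, Type> predicateColumnsType = predicateColumns.stream()
        .map(identifier -> new Symbol(identifier.getValue()))
        .collect(Collectors.toMap(Function.identity(), symbol -> planSymbolAllocator.getTypes().get(symbol)));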
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
The class CubeConsole, method createCubeCommand:
/**
 * Process the Create Cube Query
 *
 * @param query the create cube query text
 * @param queryRunner queryRunner
 * @param outputFormat outputFormat
 * @param schemaChanged schemaChanged
 * @param usePager usePager
 * @param showProgress showProgress
 * @param terminal terminal
 * @param out out
 * @param errorChannel errorChannel
 * @return boolean after processing the create cube query command.
 */
public boolean createCubeCommand(String query, QueryRunner queryRunner, ClientOptions.OutputFormat outputFormat,
        Runnable schemaChanged, boolean usePager, boolean showProgress, Terminal terminal, PrintStream out, PrintStream errorChannel) {
    boolean success = true;
    SqlParser parser = new SqlParser();
    QualifiedName cubeName = null;
    try {
        CreateCube createCube = (CreateCube) parser.createStatement(query, new ParsingOptions(ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE));
        cubeName = createCube.getCubeName();
        QualifiedName sourceTableName = createCube.getSourceTableName();
        String whereClause = createCube.getWhere().get().toString();
        Set<FunctionCall> aggregations = createCube.getAggregations();
        List<Identifier> groupingSet = createCube.getGroupingSet();
        List<Property> properties = createCube.getProperties();
        boolean notExists = createCube.isNotExists();
        CreateCube modifiedCreateCube = new CreateCube(cubeName, sourceTableName, groupingSet, aggregations, notExists, properties, Optional.empty(), createCube.getSourceFilter().orElse(null));
        String queryCreateCube = SqlFormatter.formatSql(modifiedCreateCube, Optional.empty());
        if (!console.runQuery(queryRunner, queryCreateCube, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
            return false;
        }
        // check whether the create cube expression can be processed
        if (isSupportedExpression(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
            if (createCube.getWhere().get() instanceof BetweenPredicate) {
                // process the between predicate in the create cube query's where clause
                success = processBetweenPredicate(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel, parser);
            }
            if (createCube.getWhere().get() instanceof ComparisonExpression) {
                // process the comparison expression in the create cube query's where clause
                success = processComparisonExpression(createCube, queryRunner, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel, parser);
            }
        }
        else {
            // if the create cube query cannot be processed with multiple inserts, only a single insert is run internally
            String queryInsert = String.format(INSERT_INTO_CUBE_STRING, cubeName, whereClause);
            success = console.runQuery(queryRunner, queryInsert, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
        if (!success) {
            // roll back mechanism for unsuccessful create cube query
            String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
            console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
    }
    catch (ParsingException e) {
        if (cubeName != null) {
            // roll back mechanism for unsuccessful create cube query
            String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
            console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
        System.out.println(e.getMessage());
        Query.renderErrorLocation(query, new ErrorLocation(e.getLineNumber(), e.getColumnNumber()), errorChannel);
        success = false;
    }
    catch (Exception e) {
        if (cubeName != null) {
            // roll back mechanism for unsuccessful create cube query
            String dropCubeQuery = String.format(DROP_CUBE_STRING, cubeName);
            console.runQuery(queryRunner, dropCubeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel);
        }
        // Add blank line after progress bar
        System.out.println();
        System.out.println(e.getMessage());
        success = false;
    }
    return success;
}
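The Identifier usage in this command is the grouping set: CreateCube exposes the GROUP columns as a List<Identifier>, which createCubeCommand passes unchanged into the rewritten statement while dropping the WHERE clause and replaying it later through INSERT statements. A hedged sketch of the parse-and-inspect step follows; the CREATE CUBE text is an assumed example (the exact WITH-clause syntax is not shown in this snippet), and only getters used above are called.

// Hedged sketch: parse an assumed CREATE CUBE statement and inspect the pieces the command reuses.
SqlParser parser = new SqlParser();
CreateCube createCube = (CreateCube) parser.createStatement(
        "CREATE CUBE orders_cube ON orders WITH (AGGREGATIONS = (count(*)), GROUP = (orderdate)) WHERE orderdate > DATE '1992-01-01'",
        new ParsingOptions(ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE));
QualifiedName cubeName = createCube.getCubeName();             // orders_cube
List<Identifier> groupingSet = createCube.getGroupingSet();    // [orderdate]
Set<FunctionCall> aggregations = createCube.getAggregations(); // [count(*)]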