Use of io.prestosql.sql.tree.QualifiedName in project hetu-core by openlookeng.
The class CubeConsole, method isSupportedExpression.
/**
* Gets whether the WHERE expression of the CREATE CUBE statement is supported for processing.
* The remaining parameters are passed through to the auxiliary metadata queries that this check runs.
*
* @param createCube the CREATE CUBE statement being analyzed
* @param queryRunner the query runner used to execute the auxiliary metadata queries
* @return true if the expression is supported, false otherwise
*/
private boolean isSupportedExpression(CreateCube createCube, QueryRunner queryRunner, ClientOptions.OutputFormat outputFormat, Runnable schemaChanged, boolean usePager, boolean showProgress, Terminal terminal, PrintStream out, PrintStream errorChannel) {
boolean supportedExpression = false;
boolean success = true;
Optional<Expression> expression = createCube.getWhere();
if (expression.isPresent()) {
ImmutableSet.Builder<Identifier> identifierBuilder = new ImmutableSet.Builder<>();
new DefaultExpressionTraversalVisitor<Void, ImmutableSet.Builder<Identifier>>() {
@Override
protected Void visitIdentifier(Identifier node, ImmutableSet.Builder<Identifier> builder) {
builder.add(node);
return null;
}
}.process(expression.get(), identifierBuilder);
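// Collect every identifier referenced in the WHERE expression; the predicate is only examined further when the count matches SUPPORTED_INDENTIFIER_SIZE.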
int sizeIdentifiers = identifierBuilder.build().asList().size();
if (sizeIdentifiers == SUPPORTED_INDENTIFIER_SIZE) {
String whereClause = createCube.getWhere().get().toString();
QualifiedName sourceTableName = createCube.getSourceTableName();
if (expression.get() instanceof BetweenPredicate) {
BetweenPredicate betweenPredicate = (BetweenPredicate) (expression.get());
String columnName = betweenPredicate.getValue().toString();
String columnDataTypeQuery;
String catalogName;
String tableName = sourceTableName.getSuffix();
checkArgument(tableName.matches("[\\p{Alnum}_]+"), "Invalid table name");
if (hasInvalidSymbol(columnName)) {
return false;
}
if (sourceTableName.getPrefix().isPresent() && sourceTableName.getPrefix().get().getPrefix().isPresent()) {
catalogName = sourceTableName.getPrefix().get().getPrefix().get().toString();
checkArgument(catalogName.matches("[\\p{Alnum}_]+"), "Invalid catalog name");
columnDataTypeQuery = String.format(SELECT_DATA_TYPE_STRING, catalogName, tableName, columnName);
} else if (queryRunner.getSession().getCatalog() != null) {
catalogName = queryRunner.getSession().getCatalog();
checkArgument(catalogName.matches("[\\p{Alnum}_]+"), "Invalid catalog name");
columnDataTypeQuery = String.format(SELECT_DATA_TYPE_STRING, catalogName, tableName, columnName);
} else {
return false;
}
if (!processCubeInitialQuery(queryRunner, columnDataTypeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
return false;
}
String resInitCubeQuery = getResultInitCubeQuery();
if (resInitCubeQuery != null) {
cubeColumnDataType = resInitCubeQuery;
}
if (cubeColumnDataType.contains(DATATYPE_DECIMAL)) {
cubeColumnDataType = DATATYPE_DECIMAL;
}
if (cubeColumnDataType.contains(DATATYPE_VARCHAR)) {
cubeColumnDataType = DATATYPE_VARCHAR;
}
if (!isSupportedDatatype(cubeColumnDataType)) {
return false;
}
if (betweenPredicate.getMin() instanceof LongLiteral || betweenPredicate.getMin() instanceof TimestampLiteral || betweenPredicate.getMin() instanceof GenericLiteral || betweenPredicate.getMin() instanceof StringLiteral || betweenPredicate.getMin() instanceof DoubleLiteral) {
if (betweenPredicate.getMax() instanceof LongLiteral || betweenPredicate.getMax() instanceof TimestampLiteral || betweenPredicate.getMax() instanceof GenericLiteral || betweenPredicate.getMax() instanceof StringLiteral || betweenPredicate.getMax() instanceof DoubleLiteral) {
// initial query to get the total number of distinct column values in the table
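// Assuming SELECT_COUNT_DISTINCT_FROM_STRING follows the shape "select count(distinct %s) from %s where %s"; the third argument is the cube's filter expression.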
String countDistinctQuery = String.format(SELECT_COUNT_DISTINCT_FROM_STRING, columnName, sourceTableName.toString(), whereClause);
if (!processCubeInitialQuery(queryRunner, countDistinctQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
return false;
}
Long valueCountDistinctQuery = INITIAL_QUERY_RESULT_VALUE;
resInitCubeQuery = getResultInitCubeQuery();
if (resInitCubeQuery != null) {
valueCountDistinctQuery = Long.parseLong(resInitCubeQuery);
}
if (valueCountDistinctQuery < MAX_BUFFERED_ROWS && valueCountDistinctQuery * rowBufferTempMultiplier < Integer.MAX_VALUE) {
supportedExpression = true;
rowBufferListSize = (int) ((valueCountDistinctQuery).intValue() * rowBufferTempMultiplier);
}
}
}
}
if (expression.get() instanceof ComparisonExpression) {
ComparisonExpression comparisonExpression = (ComparisonExpression) (createCube.getWhere().get());
ComparisonExpression.Operator operator = comparisonExpression.getOperator();
Expression left = comparisonExpression.getLeft();
Expression right = comparisonExpression.getRight();
if (!(left instanceof SymbolReference) && right instanceof SymbolReference) {
comparisonExpression = new ComparisonExpression(operator.flip(), right, left);
}
if (left instanceof Literal && !(right instanceof Literal)) {
comparisonExpression = new ComparisonExpression(operator.flip(), right, left);
}
if (comparisonExpression.getRight() instanceof LongLiteral) {
supportedExpression = true;
}
String catalogName;
String tableName = sourceTableName.getSuffix();
String columnName = comparisonExpression.getLeft().toString();
String columnDataTypeQuery;
checkArgument(tableName.matches("[\\p{Alnum}_]+"), "Invalid table name");
if (hasInvalidSymbol(columnName)) {
return false;
}
if (sourceTableName.getPrefix().isPresent() && sourceTableName.getPrefix().get().getPrefix().isPresent()) {
catalogName = sourceTableName.getPrefix().get().getPrefix().get().toString();
checkArgument(catalogName.matches("[\\p{Alnum}_]+"), "Invalid catalog name");
columnDataTypeQuery = String.format(SELECT_DATA_TYPE_STRING, catalogName, tableName, columnName);
} else if (queryRunner.getSession().getCatalog() != null) {
catalogName = queryRunner.getSession().getCatalog();
checkArgument(catalogName.matches("[\\p{Alnum}_]+"), "Invalid catalog name");
columnDataTypeQuery = String.format(SELECT_DATA_TYPE_STRING, catalogName, tableName, columnName);
} else {
return false;
}
if (!processCubeInitialQuery(queryRunner, columnDataTypeQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
return false;
}
String resInitCubeQuery = getResultInitCubeQuery();
if (resInitCubeQuery != null) {
cubeColumnDataType = resInitCubeQuery.toLowerCase(Locale.ENGLISH);
}
if (cubeColumnDataType.contains(DATATYPE_DECIMAL)) {
cubeColumnDataType = DATATYPE_DECIMAL;
}
if (cubeColumnDataType.contains(DATATYPE_VARCHAR)) {
cubeColumnDataType = DATATYPE_VARCHAR;
}
if (!isSupportedDatatype(cubeColumnDataType)) {
return false;
}
if (comparisonExpression.getRight() instanceof GenericLiteral || comparisonExpression.getRight() instanceof StringLiteral || comparisonExpression.getRight() instanceof DoubleLiteral || comparisonExpression.getRight() instanceof LongLiteral || comparisonExpression.getRight() instanceof TimestampLiteral) {
// initial query to get the total number of distinct column values in the table
String countDistinctQuery = String.format(SELECT_COUNT_DISTINCT_FROM_STRING, columnName, sourceTableName.toString(), whereClause);
if (!processCubeInitialQuery(queryRunner, countDistinctQuery, outputFormat, schemaChanged, usePager, showProgress, terminal, out, errorChannel)) {
return false;
}
Long valueCountDistinctQuery = INITIAL_QUERY_RESULT_VALUE;
resInitCubeQuery = getResultInitCubeQuery();
if (resInitCubeQuery != null) {
valueCountDistinctQuery = Long.parseLong(resInitCubeQuery);
}
if (valueCountDistinctQuery < MAX_BUFFERED_ROWS) {
supportedExpression = true;
}
}
}
}
}
return supportedExpression;
}
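The catalog and table names above are peeled off the source table's QualifiedName through nested getPrefix()/getSuffix() calls. Below is a minimal sketch of that decomposition, assuming a fully qualified catalog.schema.table name; the names used here are illustrative.

import io.prestosql.sql.tree.QualifiedName;

public class QualifiedNameDecompositionSketch {
    public static void main(String[] args) {
        // hypothetical fully qualified source table name: catalog.schema.table
        QualifiedName sourceTableName = QualifiedName.of("hive", "test_schema", "orders");
        // getSuffix() returns the last part, i.e. the unqualified table name
        String tableName = sourceTableName.getSuffix();
        // getPrefix() drops the last part: the first call yields catalog.schema,
        // the second yields just the catalog, mirroring the lookup in isSupportedExpression
        String catalogName = sourceTableName.getPrefix()
                .flatMap(QualifiedName::getPrefix)
                .map(QualifiedName::toString)
                .orElseThrow(() -> new IllegalStateException("table name is not fully qualified"));
        System.out.println(catalogName + "." + tableName); // prints "hive.orders"
    }
}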
Use of io.prestosql.sql.tree.QualifiedName in project hetu-core by openlookeng.
The class TestNodeScheduler, method testSplitCacheAwareScheduling.
@Test
public void testSplitCacheAwareScheduling() {
setUpNodes();
PropertyService.setProperty(HetuConstant.SPLIT_CACHE_MAP_ENABLED, true);
SplitCacheMap splitCacheMap = SplitCacheMap.getInstance();
QualifiedName tableQN = QualifiedName.of(CONNECTOR_ID.toString(), TEST_SCHEMA, TEST_TABLE);
MockSplit mock = new MockSplit("hdfs://hacluster/user/hive/warehouse/test_schema.db/test_table/a=23/000000_0", 0, 10, System.currentTimeMillis(), true);
MockSplit mock2 = new MockSplit("hdfs://hacluster/user/hive/warehouse/test_schema.db/test_table/b=33/000000_0", 0, 10, System.currentTimeMillis(), false);
MockSplit mock3 = new MockSplit("hdfs://hacluster/user/hive/warehouse/test_schema.db/test_table/a=23/000001_0", 0, 10, System.currentTimeMillis(), true);
Split split = new Split(CONNECTOR_ID, mock, Lifespan.taskWide());
Split split2 = new Split(CONNECTOR_ID, mock2, Lifespan.taskWide());
Split split3 = new Split(CONNECTOR_ID, mock3, Lifespan.taskWide());
Set<Split> splits = ImmutableSet.of(split, split2, split3);
assertFalse(splitCacheMap.cacheExists(tableQN));
Map splitInfoMap = (Map) split.getConnectorSplit().getInfo();
SplitKey splitKey = new SplitKey(split, split.getCatalogName().getCatalogName(), TEST_SCHEMA, splitInfoMap.get("table").toString());
assertFalse(splitCacheMap.getCachedNodeId(splitKey).isPresent());
Map split2InfoMap = (Map) split2.getConnectorSplit().getInfo();
SplitKey split2Key = new SplitKey(split2, split2.getCatalogName().getCatalogName(), TEST_SCHEMA, split2InfoMap.get("table").toString());
Map split3InfoMap = (Map) split3.getConnectorSplit().getInfo();
SplitKey split3Key = new SplitKey(split3, split3.getCatalogName().getCatalogName(), TEST_SCHEMA, split3InfoMap.get("table").toString());
TestNetworkTopology topology = new TestNetworkTopology();
NetworkLocationCache locationCache = new NetworkLocationCache(topology);
// contents of taskMap indicate the node-task map for the current stage
NodeSchedulerConfig nodeSchedulerConfig = new NodeSchedulerConfig().setMaxSplitsPerNode(20).setIncludeCoordinator(false).setMaxPendingSplitsPerTask(10);
NodeScheduler nodeScheduler = new NodeScheduler(locationCache, topology, nodeManager, nodeSchedulerConfig, nodeTaskMap);
NodeSelector selector = nodeScheduler.createNodeSelector(CONNECTOR_ID, false, null);
assertTrue(selector instanceof SplitCacheAwareNodeSelector);
Multimap<InternalNode, Split> assignment1 = selector.computeAssignments(splits, ImmutableList.copyOf(taskMap.values()), Optional.empty()).getAssignments();
assertEquals(3, assignment1.size());
// No cache predicates defined, thus the split to worker mapping will not be saved
assertFalse(splitCacheMap.getCachedNodeId(splitKey).isPresent());
// Add cache predicate
ColumnMetadata columnMetadataA = new ColumnMetadata("a", BIGINT);
TupleDomain<ColumnMetadata> tupleDomainA = TupleDomain.withColumnDomains(ImmutableMap.of(columnMetadataA, Domain.singleValue(BIGINT, 23L)));
splitCacheMap.addCache(tableQN, tupleDomainA, "a = 23");
assertFalse(splitCacheMap.getCachedNodeId(splitKey).isPresent());
Multimap<InternalNode, Split> assignment2 = selector.computeAssignments(splits, ImmutableList.copyOf(taskMap.values()), Optional.empty()).getAssignments();
// Split will be assigned by default node selector and the mapping cached
assertTrue(assignment2.containsValue(split));
assertTrue(assignment2.containsValue(split2));
assertTrue(assignment2.containsValue(split3));
assertFalse(splitCacheMap.getCachedNodeId(split2Key).isPresent());
Multimap<String, Split> nodeIdToSplits = ArrayListMultimap.create();
assignment2.forEach((node, spl) -> nodeIdToSplits.put(node.getNodeIdentifier(), spl));
assertTrue(nodeIdToSplits.get(splitCacheMap.getCachedNodeId(splitKey).get()).contains(split));
assertTrue(nodeIdToSplits.get(splitCacheMap.getCachedNodeId(split3Key).get()).contains(split3));
// Schedule split again and the same assignments should be returned
Multimap<InternalNode, Split> assignment3 = selector.computeAssignments(splits, ImmutableList.copyOf(taskMap.values()), Optional.empty()).getAssignments();
// Split will be assigned by default node selector and the mapping cached
assertTrue(assignment3.containsValue(split));
assertTrue(assignment3.containsValue(split2));
assertTrue(assignment3.containsValue(split3));
assertFalse(splitCacheMap.getCachedNodeId(split2Key).isPresent());
Multimap<String, Split> nodeIdToSplits3 = ArrayListMultimap.create();
assignment3.forEach((node, spl) -> nodeIdToSplits3.put(node.getNodeIdentifier(), spl));
assertTrue(nodeIdToSplits3.get(splitCacheMap.getCachedNodeId(splitKey).get()).contains(split));
assertTrue(nodeIdToSplits3.get(splitCacheMap.getCachedNodeId(split3Key).get()).contains(split3));
}
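The block that groups assignments by node id and checks that cached splits land on their cached workers appears once per scheduling round. A small helper along these lines (a sketch; the method and parameter names are illustrative) would keep additional rounds readable:

// Sketch of a reusable assertion helper for the repeated check above.
private static void assertCachedSplitsOnCachedNodes(Multimap<InternalNode, Split> assignment, SplitCacheMap splitCacheMap, Map<SplitKey, Split> cachedSplits) {
    Multimap<String, Split> nodeIdToSplits = ArrayListMultimap.create();
    assignment.forEach((node, split) -> nodeIdToSplits.put(node.getNodeIdentifier(), split));
    cachedSplits.forEach((key, split) -> {
        // every split with a cached node id must have been assigned to exactly that worker
        String cachedNodeId = splitCacheMap.getCachedNodeId(key).orElseThrow(() -> new AssertionError("no cached node for " + key));
        assertTrue(nodeIdToSplits.get(cachedNodeId).contains(split));
    });
}

Each assertion block above then collapses to a single call such as assertCachedSplitsOnCachedNodes(assignment2, splitCacheMap, ImmutableMap.of(splitKey, split, split3Key, split3)).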
Use of io.prestosql.sql.tree.QualifiedName in project hetu-core by openlookeng.
The class TreePrinter, method print.
public void print(Node root) {
AstVisitor<Void, Integer> printer = new DefaultTraversalVisitor<Void, Integer>() {
@Override
protected Void visitNode(Node node, Integer indentLevel) {
throw new UnsupportedOperationException("not yet implemented: " + node);
}
@Override
protected Void visitQuery(Query node, Integer indentLevel) {
print(indentLevel, "Query ");
Integer tmpIndentLevel = indentLevel;
tmpIndentLevel++;
print(tmpIndentLevel, "QueryBody");
process(node.getQueryBody(), tmpIndentLevel);
if (node.getOrderBy().isPresent()) {
print(tmpIndentLevel, "OrderBy");
process(node.getOrderBy().get(), tmpIndentLevel + 1);
}
if (node.getLimit().isPresent()) {
print(tmpIndentLevel, "Limit: " + node.getLimit().get());
}
return null;
}
@Override
protected Void visitQuerySpecification(QuerySpecification node, Integer indentLevel) {
print(indentLevel, "QuerySpecification ");
Integer tmpIndentLevel = indentLevel;
tmpIndentLevel++;
process(node.getSelect(), tmpIndentLevel);
if (node.getFrom().isPresent()) {
print(tmpIndentLevel, "From");
process(node.getFrom().get(), tmpIndentLevel + 1);
}
if (node.getWhere().isPresent()) {
print(tmpIndentLevel, "Where");
process(node.getWhere().get(), tmpIndentLevel + 1);
}
if (node.getGroupBy().isPresent()) {
String distinct = "";
if (node.getGroupBy().get().isDistinct()) {
distinct = "[DISTINCT]";
}
print(tmpIndentLevel, "GroupBy" + distinct);
for (GroupingElement groupingElement : node.getGroupBy().get().getGroupingElements()) {
print(tmpIndentLevel, "SimpleGroupBy");
if (groupingElement instanceof SimpleGroupBy) {
for (Expression column : groupingElement.getExpressions()) {
process(column, tmpIndentLevel + 1);
}
} else if (groupingElement instanceof GroupingSets) {
print(tmpIndentLevel + 1, "GroupingSets");
for (List<Expression> set : ((GroupingSets) groupingElement).getSets()) {
print(tmpIndentLevel + 2, "GroupingSet[");
for (Expression expression : set) {
process(expression, tmpIndentLevel + 3);
}
print(tmpIndentLevel + 2, "]");
}
} else if (groupingElement instanceof Cube) {
print(tmpIndentLevel + 1, "Cube");
for (Expression column : groupingElement.getExpressions()) {
process(column, tmpIndentLevel + 1);
}
} else if (groupingElement instanceof Rollup) {
print(tmpIndentLevel + 1, "Rollup");
for (Expression column : groupingElement.getExpressions()) {
process(column, tmpIndentLevel + 1);
}
}
}
}
if (node.getHaving().isPresent()) {
print(tmpIndentLevel, "Having");
process(node.getHaving().get(), tmpIndentLevel + 1);
}
if (node.getOrderBy().isPresent()) {
print(tmpIndentLevel, "OrderBy");
process(node.getOrderBy().get(), tmpIndentLevel + 1);
}
if (node.getLimit().isPresent()) {
print(tmpIndentLevel, "Limit: " + node.getLimit().get());
}
return null;
}
@Override
protected Void visitOrderBy(OrderBy node, Integer indentLevel) {
for (SortItem sortItem : node.getSortItems()) {
process(sortItem, indentLevel);
}
return null;
}
@Override
protected Void visitSelect(Select node, Integer indentLevel) {
String distinct = "";
if (node.isDistinct()) {
distinct = "[DISTINCT]";
}
print(indentLevel, "Select" + distinct);
// visit children
super.visitSelect(node, indentLevel + 1);
return null;
}
@Override
protected Void visitAllColumns(AllColumns node, Integer indent) {
if (node.getPrefix().isPresent()) {
print(indent, node.getPrefix().get() + ".*");
} else {
print(indent, "*");
}
return null;
}
@Override
protected Void visitSingleColumn(SingleColumn node, Integer indent) {
if (node.getAlias().isPresent()) {
print(indent, "Alias: " + node.getAlias().get());
}
// visit children
super.visitSingleColumn(node, indent + 1);
return null;
}
@Override
protected Void visitComparisonExpression(ComparisonExpression node, Integer indentLevel) {
print(indentLevel, node.getOperator().toString());
super.visitComparisonExpression(node, indentLevel + 1);
return null;
}
@Override
protected Void visitArithmeticBinary(ArithmeticBinaryExpression node, Integer indentLevel) {
print(indentLevel, node.getOperator().toString());
super.visitArithmeticBinary(node, indentLevel + 1);
return null;
}
@Override
protected Void visitLogicalBinaryExpression(LogicalBinaryExpression node, Integer indentLevel) {
print(indentLevel, node.getOperator().toString());
super.visitLogicalBinaryExpression(node, indentLevel + 1);
return null;
}
@Override
protected Void visitStringLiteral(StringLiteral node, Integer indentLevel) {
print(indentLevel, "String[" + node.getValue() + "]");
return null;
}
@Override
protected Void visitBinaryLiteral(BinaryLiteral node, Integer indentLevel) {
print(indentLevel, "Binary[" + node.toHexString() + "]");
return null;
}
@Override
protected Void visitBooleanLiteral(BooleanLiteral node, Integer indentLevel) {
print(indentLevel, "Boolean[" + node.getValue() + "]");
return null;
}
@Override
protected Void visitLongLiteral(LongLiteral node, Integer indentLevel) {
print(indentLevel, "Long[" + node.getValue() + "]");
return null;
}
@Override
protected Void visitLikePredicate(LikePredicate node, Integer indentLevel) {
print(indentLevel, "LIKE");
super.visitLikePredicate(node, indentLevel + 1);
return null;
}
@Override
protected Void visitIdentifier(Identifier node, Integer indentLevel) {
QualifiedName resolved = resolvedNameReferences.get(node);
String resolvedName = "";
if (resolved != null) {
resolvedName = "=>" + resolved.toString();
}
print(indentLevel, "Identifier[" + node.getValue() + resolvedName + "]");
return null;
}
@Override
protected Void visitDereferenceExpression(DereferenceExpression node, Integer indentLevel) {
QualifiedName resolved = resolvedNameReferences.get(node);
String resolvedName = "";
if (resolved != null) {
resolvedName = "=>" + resolved.toString();
}
print(indentLevel, "DereferenceExpression[" + node + resolvedName + "]");
return null;
}
@Override
protected Void visitFunctionCall(FunctionCall node, Integer indentLevel) {
String name = Joiner.on('.').join(node.getName().getParts());
print(indentLevel, "FunctionCall[" + name + "]");
super.visitFunctionCall(node, indentLevel + 1);
return null;
}
@Override
protected Void visitTable(Table node, Integer indentLevel) {
String name = Joiner.on('.').join(node.getName().getParts());
print(indentLevel, "Table[" + name + "]");
return null;
}
@Override
protected Void visitValues(Values node, Integer indentLevel) {
print(indentLevel, "Values");
super.visitValues(node, indentLevel + 1);
return null;
}
@Override
protected Void visitRow(Row node, Integer indentLevel) {
print(indentLevel, "Row");
super.visitRow(node, indentLevel + 1);
return null;
}
@Override
protected Void visitAliasedRelation(AliasedRelation node, Integer indentLevel) {
print(indentLevel, "Alias[" + node.getAlias() + "]");
super.visitAliasedRelation(node, indentLevel + 1);
return null;
}
@Override
protected Void visitSampledRelation(SampledRelation node, Integer indentLevel) {
print(indentLevel, "TABLESAMPLE[" + node.getType() + " (" + node.getSamplePercentage() + ")]");
super.visitSampledRelation(node, indentLevel + 1);
return null;
}
@Override
protected Void visitTableSubquery(TableSubquery node, Integer indentLevel) {
print(indentLevel, "SubQuery");
super.visitTableSubquery(node, indentLevel + 1);
return null;
}
@Override
protected Void visitInPredicate(InPredicate node, Integer indentLevel) {
print(indentLevel, "IN");
super.visitInPredicate(node, indentLevel + 1);
return null;
}
@Override
protected Void visitSubqueryExpression(SubqueryExpression node, Integer indentLevel) {
print(indentLevel, "SubQuery");
super.visitSubqueryExpression(node, indentLevel + 1);
return null;
}
};
printer.process(root, 0);
}
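A quick way to exercise this printer is to feed it an AST produced by the SQL parser. The sketch below assumes TreePrinter is constructed from a resolved-name-reference map and an output PrintStream (matching the resolvedNameReferences field and print(...) calls used above); the exact constructor signature and package should be checked against the class.

import java.util.IdentityHashMap;

import io.prestosql.sql.TreePrinter; // assumption about the package of the class shown above
import io.prestosql.sql.parser.ParsingOptions;
import io.prestosql.sql.parser.SqlParser;
import io.prestosql.sql.tree.Expression;
import io.prestosql.sql.tree.QualifiedName;
import io.prestosql.sql.tree.Statement;

public class TreePrinterSketch {
    public static void main(String[] args) {
        Statement statement = new SqlParser().createStatement(
                "SELECT a, count(*) FROM orders WHERE a BETWEEN 1 AND 10 GROUP BY a",
                new ParsingOptions());
        // Assumption: TreePrinter takes the resolved-name map and an output stream.
        TreePrinter printer = new TreePrinter(new IdentityHashMap<Expression, QualifiedName>(), System.out);
        printer.print(statement);
    }
}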
Use of io.prestosql.sql.tree.QualifiedName in project hetu-core by openlookeng.
The class TestSqlParser, method testAnalyze.
@Test
public void testAnalyze() {
QualifiedName table = QualifiedName.of("foo");
assertStatement("ANALYZE foo", new Analyze(table, ImmutableList.of()));
assertStatement("ANALYZE foo WITH ( \"string\" = 'bar', \"long\" = 42, computed = concat('ban', 'ana'), a = ARRAY[ 'v1', 'v2' ] )", new Analyze(table, ImmutableList.of(new Property(new Identifier("string"), new StringLiteral("bar")), new Property(new Identifier("long"), new LongLiteral("42")), new Property(new Identifier("computed"), new FunctionCall(QualifiedName.of("concat"), ImmutableList.of(new StringLiteral("ban"), new StringLiteral("ana")))), new Property(new Identifier("a"), new ArrayConstructor(ImmutableList.of(new StringLiteral("v1"), new StringLiteral("v2")))))));
assertStatement("EXPLAIN ANALYZE foo", new Explain(new Analyze(table, ImmutableList.of()), false, false, ImmutableList.of()));
assertStatement("EXPLAIN ANALYZE ANALYZE foo", new Explain(new Analyze(table, ImmutableList.of()), true, false, ImmutableList.of()));
}
Use of io.prestosql.sql.tree.QualifiedName in project hetu-core by openlookeng.
The class TestSqlParser, method testInsertOverwrite.
@Test
public void testInsertOverwrite() {
QualifiedName table = QualifiedName.of("a");
Query query = simpleQuery(selectList(new AllColumns()), table(QualifiedName.of("t")));
assertStatement("INSERT OVERWRITE a SELECT * FROM t", new Insert(table, Optional.empty(), query, true));
assertStatement("INSERT OVERWRITE a (c1, c2) SELECT * FROM t", new Insert(table, Optional.of(ImmutableList.of(identifier("c1"), identifier("c2"))), query, true));
}
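Both parser tests build their expected ASTs around QualifiedName.of. The reverse direction works the same way: parse the statement and read the table name back off the Insert node. A minimal sketch, assuming the hetu-core Insert node exposes its target via getTarget() as in upstream Presto:

import io.prestosql.sql.parser.ParsingOptions;
import io.prestosql.sql.parser.SqlParser;
import io.prestosql.sql.tree.Insert;
import io.prestosql.sql.tree.QualifiedName;

public class InsertTargetSketch {
    public static void main(String[] args) {
        Insert insert = (Insert) new SqlParser().createStatement(
                "INSERT OVERWRITE a (c1, c2) SELECT * FROM t",
                new ParsingOptions());
        // the insert target is a QualifiedName, here just "a"
        QualifiedName target = insert.getTarget();
        System.out.println(target); // a
        System.out.println(target.equals(QualifiedName.of("a"))); // true
    }
}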