Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
From the class StatementAnalyzer, method validateCreateIndex:
private void validateCreateIndex(Table table, Optional<Scope> scope) {
    CreateIndex createIndex = (CreateIndex) analysis.getOriginalStatement();
    QualifiedObjectName tableFullName = createQualifiedObjectName(session, createIndex, createIndex.getTableName());
    accessControl.checkCanCreateIndex(session.getRequiredTransactionId(), session.getIdentity(), tableFullName);
    String tableName = tableFullName.toString();

    // check whether the catalog supports index creation
    if (!metadata.isHeuristicIndexSupported(session, tableFullName)) {
        throw new SemanticException(NOT_SUPPORTED, createIndex, "CREATE INDEX is not supported in catalog '%s'", tableFullName.getCatalogName());
    }

    List<String> partitions = new ArrayList<>();
    String partitionColumn = null;
    if (createIndex.getExpression().isPresent()) {
        partitions = HeuristicIndexUtils.extractPartitions(createIndex.getExpression().get());
        // validate the partition name, e.g. create index …… where pt_d = xxx;
        // pt_d must be a partition column
        Set<String> partitionColumns = partitions.stream().map(k -> k.substring(0, k.indexOf("="))).collect(Collectors.toSet());
        if (partitionColumns.size() > 1) {
            // currently only one partition column is supported
            throw new IllegalArgumentException("Heuristic index only supports predicates on one column");
        }
        // the only entry in the set is the partition column name
        partitionColumn = partitionColumns.iterator().next();
    }

    Optional<TableHandle> tableHandle = metadata.getTableHandle(session, tableFullName);
    if (tableHandle.isPresent()) {
        if (!tableHandle.get().getConnectorHandle().isHeuristicIndexSupported()) {
            throw new SemanticException(NOT_SUPPORTED, table, "Catalog supported, but table storage format is not supported by heuristic index");
        }
        TableMetadata tableMetadata = metadata.getTableMetadata(session, tableHandle.get());
        List<String> availableColumns = tableMetadata.getColumns().stream().map(ColumnMetadata::getName).collect(Collectors.toList());
        for (Identifier column : createIndex.getColumnAliases()) {
            if (!availableColumns.contains(column.getValue().toLowerCase(Locale.ROOT))) {
                throw new SemanticException(MISSING_ATTRIBUTE, table, "Column '%s' cannot be resolved", column.getValue());
            }
        }
        if (partitionColumn != null && !tableHandle.get().getConnectorHandle().isPartitionColumn(partitionColumn)) {
            throw new SemanticException(NOT_SUPPORTED, table, "Heuristic index creation is only supported for predicates on partition columns");
        }
    } else {
        throw new SemanticException(MISSING_ATTRIBUTE, table, "Table '%s' is invalid", tableFullName);
    }

    List<Pair<String, Type>> indexColumns = new LinkedList<>();
    for (Identifier i : createIndex.getColumnAliases()) {
        indexColumns.add(new Pair<>(i.toString(), UNKNOWN));
    }

    // for now, creating an index on multiple columns is not supported
    if (indexColumns.size() > 1) {
        throw new SemanticException(NOT_SUPPORTED, table, "Multi-column indexes are currently not supported");
    }

    try {
        // use this placeholder record to check whether the index already exists and to reserve the name
        Properties properties = new Properties();
        properties.setProperty(INPROGRESS_PROPERTY_KEY, "TRUE");
        CreateIndexMetadata placeHolder = new CreateIndexMetadata(createIndex.getIndexName().toString(), tableName, createIndex.getIndexType(), 0L, indexColumns, partitions, properties, session.getUser(), UNDEFINED);
        synchronized (StatementAnalyzer.class) {
            IndexClient.RecordStatus recordStatus = heuristicIndexerManager.getIndexClient().lookUpIndexRecord(placeHolder);
            switch (recordStatus) {
                case SAME_NAME:
                    throw new SemanticException(INDEX_ALREADY_EXISTS, createIndex, "Index '%s' already exists", createIndex.getIndexName().toString());
                case SAME_CONTENT:
                    throw new SemanticException(INDEX_ALREADY_EXISTS, createIndex, "Index with same (table,column,indexType) already exists");
                case SAME_INDEX_PART_CONFLICT:
                    throw new SemanticException(INDEX_ALREADY_EXISTS, createIndex, "Index with same (table,column,indexType) already exists and partition(s) contain conflicts");
                case IN_PROGRESS_SAME_NAME:
                    throw new SemanticException(INDEX_ALREADY_EXISTS, createIndex, "Index '%s' is being created by another user. Check running queries for details. If there is no running query for this index, "
                            + "the index may be in an unexpected error state and should be dropped using 'DROP INDEX %s'", createIndex.getIndexName().toString(), createIndex.getIndexName().toString());
                case IN_PROGRESS_SAME_CONTENT:
                    throw new SemanticException(INDEX_ALREADY_EXISTS, createIndex, "Index with same (table,column,indexType) is being created by another user. Check running queries for details. "
                            + "If there is no running query for this index, the index may be in an unexpected error state and should be dropped using 'DROP INDEX'");
                case IN_PROGRESS_SAME_INDEX_PART_CONFLICT:
                    if (partitions.isEmpty()) {
                        throw new SemanticException(INDEX_ALREADY_EXISTS, createIndex, "Index with same (table,column,indexType) is being created by another user. Check running queries for details. "
                                + "If there is no running query for this index, the index may be in an unexpected error state and should be dropped using 'DROP INDEX %s'", createIndex.getIndexName().toString());
                    }
                    // fall through: allow different queries to run against explicitly identical partitions
                case SAME_INDEX_PART_CAN_MERGE:
                case IN_PROGRESS_SAME_INDEX_PART_CAN_MERGE:
                    break;
                case NOT_FOUND:
                    heuristicIndexerManager.getIndexClient().addIndexRecord(placeHolder);
            }
        }
    } catch (IOException e) {
        throw new UncheckedIOException(e);
    }
}
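The partition check above keeps only the column name to the left of each "=" in the extracted predicate strings and rejects predicates that span more than one column. A minimal, self-contained sketch of that step; the class name and sample predicate values are assumptions for illustration:

import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

// Sketch only: mirrors the partitionColumns check in validateCreateIndex.
public class PartitionColumnCheck {
    public static void main(String[] args) {
        // e.g. predicates extracted from "CREATE INDEX ... WHERE pt_d = 20220101 OR pt_d = 20220102"
        List<String> partitions = Arrays.asList("pt_d=20220101", "pt_d=20220102");
        Set<String> partitionColumns = partitions.stream()
                .map(k -> k.substring(0, k.indexOf("=")))
                .collect(Collectors.toSet());
        if (partitionColumns.size() > 1) {
            throw new IllegalArgumentException("Heuristic index only supports predicates on one column");
        }
        String partitionColumn = partitionColumns.iterator().next();
        System.out.println(partitionColumn); // prints "pt_d"
    }
}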
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
From the class TestSetPathTask, method testSetPath:
@Test
public void testSetPath() {
    PathSpecification pathSpecification = new PathSpecification(Optional.empty(), ImmutableList.of(new PathElement(Optional.empty(), new Identifier("foo"))));
    QueryStateMachine stateMachine = createQueryStateMachine("SET PATH foo");
    executeSetPathTask(pathSpecification, stateMachine);
    assertEquals(stateMachine.getSetPath(), "foo");
}
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
From the class TestSetPathTask, method testSetPathInvalidCatalog:
@Test(expectedExceptions = PrestoException.class, expectedExceptionsMessageRegExp = "Catalog does not exist: .*")
public void testSetPathInvalidCatalog() {
    PathSpecification invalidPathSpecification = new PathSpecification(Optional.empty(), ImmutableList.of(new PathElement(Optional.of(new Identifier("invalidCatalog")), new Identifier("thisDoesNotMatter"))));
    QueryStateMachine stateMachine = createQueryStateMachine("SET PATH invalidCatalog.thisDoesNotMatter");
    executeSetPathTask(invalidPathSpecification, stateMachine);
}
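Both tests build path elements from io.prestosql.sql.tree.Identifier values. A minimal sketch of just the Identifier calls the tests rely on; only the constructor and getValue() seen above are assumed:

import io.prestosql.sql.tree.Identifier;

// Sketch only: the Identifier API exercised by the SET PATH tests.
public class IdentifierSketch {
    public static void main(String[] args) {
        Identifier schema = new Identifier("foo");
        Identifier catalog = new Identifier("invalidCatalog");
        // getValue() returns the identifier text that ends up in the session path
        System.out.println(schema.getValue());  // foo
        System.out.println(catalog.getValue()); // invalidCatalog
    }
}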
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
From the class HiveAstBuilder, method visitCreateTableAsSelect:
@Override
public Node visitCreateTableAsSelect(HiveSqlParser.CreateTableAsSelectContext context) {
    if (context.TEMPORARY() != null) {
        addDiff(DiffType.UNSUPPORTED, context.TEMPORARY().getText(), "[TEMPORARY] is not supported");
        throw unsupportedError(ErrorType.UNSUPPORTED_STATEMENT, "Unsupported statement: CREATE TEMPORARY TABLE", context);
    }
    Optional<String> comment = Optional.empty();
    if (context.COMMENT() != null) {
        comment = Optional.of(((StringLiteral) visit(context.string(0))).getValue());
    }
    Optional<List<Identifier>> columnAliases = Optional.empty();
    if (context.columnAliases() != null) {
        columnAliases = Optional.of(visit(context.columnAliases().identifier(), Identifier.class));
    }
    List<Property> properties = new ArrayList<>();
    if (context.TRANSACTIONAL() != null) {
        Identifier name = new Identifier("transactional");
        Expression value = new Identifier("true");
        properties.add(new Property(name, value));
        addDiff(DiffType.MODIFIED, context.TRANSACTIONAL().getText(), "transactional = true", "[TRANSACTIONAL] is formatted");
    }
    if (context.STORED_AS() != null) {
        Identifier name = new Identifier("format");
        String storedAsString = ((Identifier) visit(context.stored_as)).getValue();
        Expression value = new StringLiteral(getFileFormat(storedAsString));
        properties.add(new Property(name, value));
        addDiff(DiffType.MODIFIED, context.STORED_AS().getText(), FORMAT, "[STORED AS] is formatted");
    }
    if (context.LOCATION() != null) {
        Identifier name = new Identifier("location");
        Expression value = (StringLiteral) visit(context.location);
        properties.add(new Property(name, value));
    }
    if (context.TBLPROPERTIES() != null) {
        List<Property> tableProperties = visit(context.properties().property(), Property.class);
        for (int i = 0; i < tableProperties.size(); i++) {
            Property property = tableProperties.get(i);
            if (property.getName().getValue().equalsIgnoreCase("transactional")) {
                Identifier name = new Identifier("transactional");
                Expression value = new Identifier(unquote(property.getValue().toString()));
                properties.add(new Property(name, value));
                addDiff(DiffType.MODIFIED, property.getName().getValue(), "transactional = ", "[TRANSACTIONAL] is formatted");
            } else {
                addDiff(DiffType.UNSUPPORTED, property.getName().getValue(), "[TBLPROPERTIES] has unsupported properties");
                throw unsupportedError(ErrorType.UNSUPPORTED_STATEMENT, format("Unsupported attribute: %s", property.getName().getValue()), context.properties());
            }
        }
    }
    return new CreateTableAsSelect(getLocation(context), getQualifiedName(context.qualifiedName()), (Query) visit(context.query()), context.EXISTS() != null, properties, true, columnAliases, comment);
}
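The builder above turns Hive clauses (TRANSACTIONAL, STORED AS, LOCATION, TBLPROPERTIES) into Property entries whose names are Identifier nodes. A hedged, self-contained sketch of that mapping; the concrete values ("ORC", "/tmp/t") are assumptions for illustration:

import io.prestosql.sql.tree.Identifier;
import io.prestosql.sql.tree.Property;
import io.prestosql.sql.tree.StringLiteral;

import java.util.ArrayList;
import java.util.List;

// Sketch only: how the Hive clauses above become Property entries keyed by Identifier.
public class TablePropertySketch {
    public static void main(String[] args) {
        List<Property> properties = new ArrayList<>();
        // TRANSACTIONAL      ->  transactional = true
        properties.add(new Property(new Identifier("transactional"), new Identifier("true")));
        // STORED AS ORC      ->  format = 'ORC' (file-format mapping assumed; the builder calls getFileFormat)
        properties.add(new Property(new Identifier("format"), new StringLiteral("ORC")));
        // LOCATION '/tmp/t'  ->  location = '/tmp/t' (path assumed for the example)
        properties.add(new Property(new Identifier("location"), new StringLiteral("/tmp/t")));
        properties.forEach(p -> System.out.println(p.getName().getValue() + " = " + p.getValue()));
    }
}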
Use of io.prestosql.sql.tree.Identifier in project hetu-core by openlookeng.
From the class HiveAstBuilder, method visitAddReplaceColumn:
@Override
public Node visitAddReplaceColumn(HiveSqlParser.AddReplaceColumnContext context) {
    if (context.CASCADE() != null) {
        addDiff(DiffType.UNSUPPORTED, context.CASCADE().getText(), "[CASCADE] is not supported");
        throw unsupportedError(ErrorType.UNSUPPORTED_STATEMENT, "Unsupported statement: CASCADE", context);
    }
    if (context.REPLACE() != null) {
        addDiff(DiffType.UNSUPPORTED, context.REPLACE().getText(), "[REPLACE] is not supported");
        throw unsupportedError(ErrorType.UNSUPPORTED_STATEMENT, "Unsupported statement: REPLACE", context);
    }
    if (context.PARTITION() != null) {
        addDiff(DiffType.UNSUPPORTED, context.PARTITION().getText(), "[PARTITION] is not supported");
        throw unsupportedError(ErrorType.UNSUPPORTED_STATEMENT, "Unsupported statement: PARTITION", context.partition);
    }
    if (context.tableElement().size() > 1) {
        addDiff(DiffType.UNSUPPORTED, context.ADD().getText(), "[ADD MULTIPLE COLUMNS] is not supported");
        addDiff(DiffType.UNSUPPORTED, context.COLUMNS().getText(), null);
        throw unsupportedError(ErrorType.UNSUPPORTED_STATEMENT, "Unsupported add multiple columns", context.tableElement(0));
    }
    HiveSqlParser.ColumnDefinitionContext columnDefinitionContext = context.tableElement(0).columnDefinition();
    Identifier identifier = (Identifier) visit(columnDefinitionContext.identifier());
    String type = getType(columnDefinitionContext.type());
    Optional<String> comment = Optional.empty();
    if (columnDefinitionContext.COMMENT() != null) {
        comment = Optional.of(((StringLiteral) visit(columnDefinitionContext.string())).getValue());
    }
    ColumnDefinition columnDefinition = new ColumnDefinition(identifier, type, true, ImmutableList.of(), comment);
    return new AddColumn(getLocation(context), getQualifiedName(context.qualifiedName()), columnDefinition);
}
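visitAddReplaceColumn rewrites a single added column into a ColumnDefinition whose name is an Identifier, then wraps it in an AddColumn node. A minimal sketch using the same constructor arguments as above; the column name, type, and comment are made up for illustration:

import com.google.common.collect.ImmutableList;
import io.prestosql.sql.tree.ColumnDefinition;
import io.prestosql.sql.tree.Identifier;

import java.util.Optional;

// Sketch only: building the ColumnDefinition that visitAddReplaceColumn passes to AddColumn.
public class AddColumnSketch {
    public static void main(String[] args) {
        Identifier name = new Identifier("age");  // column name (assumed)
        ColumnDefinition column = new ColumnDefinition(
                name,
                "int",                            // type string, as returned by getType() above
                true,                             // nullable
                ImmutableList.of(),               // no column properties
                Optional.of("age in years"));     // comment (assumed)
        System.out.println(name.getValue());      // age
    }
}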