Use of io.crate.sql.tree.CreateTable in project crate by crate.
The class TestStatementBuilder, method printStatement.
private static void printStatement(String sql) {
    println(sql.trim());
    println("");
    Statement statement = SqlParser.createStatement(sql);
    println(statement.toString());
    println("");
    // TODO: support formatting all statement types
    if (statement instanceof Query || statement instanceof CreateTable || statement instanceof CopyFrom
        || statement instanceof SwapTable || statement instanceof GCDanglingArtifacts
        || statement instanceof CreateFunction || statement instanceof CreateUser
        || statement instanceof GrantPrivilege || statement instanceof DenyPrivilege
        || statement instanceof RevokePrivilege || statement instanceof DropUser
        || statement instanceof DropAnalyzer || statement instanceof DropFunction
        || statement instanceof DropTable || statement instanceof DropBlobTable
        || statement instanceof DropView || statement instanceof DropRepository
        || statement instanceof DropSnapshot || statement instanceof Update
        || statement instanceof Insert || statement instanceof SetSessionAuthorizationStatement
        || statement instanceof Window) {
        println(SqlFormatter.formatSql(statement));
        println("");
        assertFormattedSql(statement);
    }
    println("=".repeat(60));
    println("");
}
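A minimal sketch of how printStatement might be exercised from a test. The SQL strings and the test name are illustrative only, assuming the JUnit @Test annotation used elsewhere in TestStatementBuilder.

@Test
public void testPrintCreateTableStatements() {
    // Illustrative statements only; any SQL accepted by SqlParser.createStatement works here.
    printStatement("CREATE TABLE t (id INTEGER PRIMARY KEY, name TEXT)");
    printStatement("CREATE TABLE IF NOT EXISTS p (ts TIMESTAMP WITH TIME ZONE, val INTEGER) PARTITIONED BY (ts)");
    printStatement("SELECT id, name FROM t WHERE id > 10");
}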
Use of io.crate.sql.tree.CreateTable in project crate by crate.
The class CreateTableStatementAnalyzer, method analyze.
public AnalyzedCreateTable analyze(CreateTable<Expression> createTable,
                                   ParamTypeHints paramTypeHints,
                                   CoordinatorTxnCtx txnCtx) {
    RelationName relationName = RelationName.of(
        createTable.name().getName(), txnCtx.sessionContext().searchPath().currentSchema());
    relationName.ensureValidForRelationCreation();
    var exprAnalyzerWithoutFields = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.UNSUPPORTED, null);
    var exprAnalyzerWithFieldsAsString = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    Function<Expression, Symbol> exprMapper = y -> exprAnalyzerWithFieldsAsString.convert(y, exprCtx);

    // 1st phase, map and analyze everything EXCEPT:
    //   - check constraints defined at any level (table or column)
    //   - generated expressions
    //   - default expressions
    Map<TableElement<Symbol>, TableElement<Expression>> analyzed = new LinkedHashMap<>();
    List<CheckConstraint<Expression>> checkConstraints = new ArrayList<>();
    for (int i = 0; i < createTable.tableElements().size(); i++) {
        TableElement<Expression> te = createTable.tableElements().get(i);
        if (te instanceof CheckConstraint) {
            checkConstraints.add((CheckConstraint<Expression>) te);
            continue;
        }
        TableElement<Symbol> analyzedTe = null;
        if (te instanceof ColumnDefinition) {
            ColumnDefinition<Expression> def = (ColumnDefinition<Expression>) te;
            List<ColumnConstraint<Symbol>> analyzedColumnConstraints = new ArrayList<>();
            for (int j = 0; j < def.constraints().size(); j++) {
                ColumnConstraint<Expression> cc = def.constraints().get(j);
                if (cc instanceof CheckColumnConstraint) {
                    // Re-frame the column check constraint as a table check constraint
                    CheckColumnConstraint<Expression> columnCheck = (CheckColumnConstraint<Expression>) cc;
                    checkConstraints.add(new CheckConstraint<>(
                        columnCheck.name(), def.ident(), columnCheck.expression(), columnCheck.expressionStr()));
                    continue;
                }
                analyzedColumnConstraints.add(cc.map(exprMapper));
            }
            analyzedTe = new ColumnDefinition<>(
                def.ident(),
                null,
                null,
                def.type() == null ? null : def.type().map(exprMapper),
                analyzedColumnConstraints,
                false,
                def.isGenerated());
        }
        analyzed.put(analyzedTe == null ? te.map(exprMapper) : analyzedTe, te);
    }
    CreateTable<Symbol> analyzedCreateTable = new CreateTable<>(
        createTable.name().map(exprMapper),
        new ArrayList<>(analyzed.keySet()),
        createTable.partitionedBy().map(x -> x.map(exprMapper)),
        createTable.clusteredBy().map(x -> x.map(exprMapper)),
        createTable.properties().map(x -> exprAnalyzerWithoutFields.convert(x, exprCtx)),
        createTable.ifNotExists());
    AnalyzedTableElements<Symbol> analyzedTableElements =
        TableElementsAnalyzer.analyze(analyzedCreateTable.tableElements(), relationName, null);

    // 2nd phase, analyze and map with a reference resolver:
    //   - generated/default expressions
    //   - check constraints
    TableReferenceResolver referenceResolver = analyzedTableElements.referenceResolver(relationName);
    var exprAnalyzerWithReferences = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, referenceResolver, null);
    List<TableElement<Symbol>> tableElementsWithExpressions = new ArrayList<>();
    for (int i = 0; i < analyzedCreateTable.tableElements().size(); i++) {
        TableElement<Symbol> elementSymbol = analyzedCreateTable.tableElements().get(i);
        TableElement<Expression> elementExpression = analyzed.get(elementSymbol);
        tableElementsWithExpressions.add(elementExpression.mapExpressions(elementSymbol, x -> {
            Symbol symbol = exprAnalyzerWithReferences.convert(x, exprCtx);
            EnsureNoMatchPredicate.ensureNoMatchPredicate(symbol, "Cannot use MATCH in CREATE TABLE statements");
            return symbol;
        }));
    }
    checkConstraints.stream()
        .map(x -> x.map(y -> exprAnalyzerWithReferences.convert(y, exprCtx)))
        .forEach(te -> {
            analyzedCreateTable.tableElements().add(te);
            tableElementsWithExpressions.add(te);
            analyzedTableElements.addCheckConstraint(relationName, (CheckConstraint<Symbol>) te);
        });
    AnalyzedTableElements<Symbol> analyzedTableElementsWithExpressions =
        TableElementsAnalyzer.analyze(tableElementsWithExpressions, relationName, null, false);
    return new AnalyzedCreateTable(
        relationName, analyzedCreateTable, analyzedTableElements, analyzedTableElementsWithExpressions);
}
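A rough sketch of how this analyzer could be invoked in isolation, mirroring the test helper shown further below. nodeCtx is assumed to be an existing NodeContext, and the SQL string is only an example.

// Parse a CREATE TABLE statement and run the two-phase analysis described above.
Statement stmt = SqlParser.createStatement("CREATE TABLE doc.users (id INTEGER PRIMARY KEY, name TEXT)");
CoordinatorTxnCtx txnCtx = new CoordinatorTxnCtx(SessionContext.systemSessionContext());
CreateTableStatementAnalyzer analyzer = new CreateTableStatementAnalyzer(nodeCtx);
AnalyzedCreateTable analyzedCreateTable =
    analyzer.analyze((CreateTable<Expression>) stmt, ParamTypeHints.EMPTY, txnCtx);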
Use of io.crate.sql.tree.CreateTable in project crate by crate.
The class CreateTableAsAnalyzer, method analyze.
public AnalyzedCreateTableAs analyze(CreateTableAs createTableAs, ParamTypeHints paramTypeHints, CoordinatorTxnCtx txnCtx) {
    RelationName relationName = RelationName.of(
        createTableAs.name().getName(), txnCtx.sessionContext().searchPath().currentSchema());
    relationName.ensureValidForRelationCreation();
    AnalyzedRelation analyzedSourceQuery = relationAnalyzer.analyze(
        createTableAs.query(), new StatementAnalysisContext(paramTypeHints, Operation.READ, txnCtx));
    List<TableElement<Expression>> tableElements =
        Lists2.map(analyzedSourceQuery.outputs(), Symbols::toColumnDefinition);
    CreateTable<Expression> createTable = new CreateTable<Expression>(
        createTableAs.name(), tableElements, Optional.empty(), Optional.empty(), GenericProperties.empty(), false);
    // This is only a preliminary analysis to have the source available for privilege checks.
    // It will be analyzed again with the target columns from the target table once
    // the table has been created.
    AnalyzedRelation sourceRelation = relationAnalyzer.analyze(
        createTableAs.query(), new StatementAnalysisContext(paramTypeHints, Operation.READ, txnCtx));
    // Postpone the analysis of the insert statement, since the table has not been created yet.
    Supplier<AnalyzedInsertStatement> postponedInsertAnalysis = () -> {
        Insert<Expression> insert = new Insert<Expression>(
            createTableAs.name(), createTableAs.query(),
            Collections.emptyList(), Collections.emptyList(), Insert.DuplicateKeyContext.none());
        return insertAnalyzer.analyze(insert, paramTypeHints, txnCtx);
    };
    return new AnalyzedCreateTableAs(
        createTableStatementAnalyzer.analyze(createTable, paramTypeHints, txnCtx),
        sourceRelation,
        postponedInsertAnalysis);
}
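A hedged sketch of a possible call site. createTableAsAnalyzer stands for an already constructed CreateTableAsAnalyzer, and the statement text is illustrative.

// CREATE TABLE ... AS: column definitions are derived from the source query's outputs,
// and the INSERT analysis is deferred via the returned supplier until the table exists.
Statement stmt = SqlParser.createStatement("CREATE TABLE doc.users_copy AS SELECT id, name FROM doc.users");
CoordinatorTxnCtx txnCtx = new CoordinatorTxnCtx(SessionContext.systemSessionContext());
AnalyzedCreateTableAs analyzedCreateTableAs =
    createTableAsAnalyzer.analyze((CreateTableAs) stmt, ParamTypeHints.EMPTY, txnCtx);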
Use of io.crate.sql.tree.CreateTable in project crate by crate.
The class CreateTablePlan, method bind.
@VisibleForTesting
public static BoundCreateTable bind(AnalyzedCreateTable createTable, CoordinatorTxnCtx txnCtx, NodeContext nodeCtx,
                                    Row params, SubQueryResults subQueryResults, NumberOfShards numberOfShards,
                                    Schemas schemas, FulltextAnalyzerResolver fulltextAnalyzerResolver) {
    Function<? super Symbol, Object> eval = x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, params, subQueryResults);
    CreateTable<Symbol> table = createTable.createTable();
    RelationName relationName = createTable.relationName();
    GenericProperties<Object> properties = table.properties().map(eval);
    AnalyzedTableElements<Object> tableElements = createTable.analyzedTableElements().map(eval);
    TableParameter tableParameter = new TableParameter();
    Optional<ClusteredBy<Object>> mappedClusteredBy = table.clusteredBy().map(x -> x.map(eval));
    Integer numShards = mappedClusteredBy
        .flatMap(ClusteredBy::numberOfShards)
        .map(numberOfShards::fromNumberOfShards)
        .orElseGet(numberOfShards::defaultNumberOfShards);
    tableParameter.settingsBuilder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards);
    // apply default in case it is not specified in the properties,
    // if it is it will get overwritten afterwards.
    TablePropertiesAnalyzer.analyzeWithBoundValues(
        tableParameter, TableParameters.TABLE_CREATE_PARAMETER_INFO, properties, true);
    AnalyzedTableElements<Symbol> tableElementsWithExpressions = createTable.analyzedTableElementsWithExpressions()
        .map(x -> SubQueryAndParamBinder.convert(x, params, subQueryResults));
    // validate table elements
    AnalyzedTableElements.finalizeAndValidate(relationName, tableElementsWithExpressions, tableElements);
    // update table settings
    Settings tableSettings = AnalyzedTableElements.validateAndBuildSettings(tableElements, fulltextAnalyzerResolver);
    tableParameter.settingsBuilder().put(tableSettings);
    ColumnIdent routingColumn = mappedClusteredBy
        .map(clusteredBy -> resolveRoutingFromClusteredBy(clusteredBy, tableElements))
        .orElse(null);
    Optional<PartitionedBy<Object>> partitionedByOptional = table.partitionedBy().map(x -> x.map(eval));
    partitionedByOptional.ifPresent(
        partitionedBy -> processPartitionedBy(partitionedByOptional.get(), tableElements, relationName, routingColumn));
    return new BoundCreateTable(relationName, tableElements, tableParameter, routingColumn, table.ifNotExists(), schemas);
}
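For reference, a trimmed sketch of how bind is typically called; the same shape appears in the DocIndexMetadataTest helper below. Collaborators such as schemas and fulltextAnalyzerResolver are assumed to be constructed elsewhere.

// Bind the analyzed statement against concrete parameter values and cluster services.
BoundCreateTable bound = CreateTablePlan.bind(
    analyzedCreateTable, txnCtx, nodeCtx, Row.EMPTY, SubQueryResults.EMPTY,
    new NumberOfShards(clusterService), schemas, fulltextAnalyzerResolver);
Settings resolvedSettings = bound.tableParameter().settings();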
Use of io.crate.sql.tree.CreateTable in project crate by crate.
The class DocIndexMetadataTest, method getDocIndexMetadataFromStatement.
private DocIndexMetadata getDocIndexMetadataFromStatement(String stmt) throws IOException {
    Statement statement = SqlParser.createStatement(stmt);
    DocTableInfoFactory docTableInfoFactory = new InternalDocTableInfoFactory(nodeCtx, new IndexNameExpressionResolver());
    ViewInfoFactory viewInfoFactory = (ident, state) -> null;
    DocSchemaInfo docSchemaInfo = new DocSchemaInfo(Schemas.DOC_SCHEMA_NAME, clusterService, nodeCtx, udfService, viewInfoFactory, docTableInfoFactory);
    Path homeDir = createTempDir();
    Schemas schemas = new Schemas(Map.of("doc", docSchemaInfo), clusterService,
        new DocSchemaInfoFactory(docTableInfoFactory, viewInfoFactory, nodeCtx, udfService));
    FulltextAnalyzerResolver fulltextAnalyzerResolver = new FulltextAnalyzerResolver(
        clusterService,
        new AnalysisRegistry(
            new Environment(Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), homeDir.toString()).build(),
                            homeDir.resolve("config")),
            emptyMap(), emptyMap(), emptyMap(), emptyMap(), emptyMap(),
            emptyMap(), emptyMap(), emptyMap(), emptyMap()));
    CreateTableStatementAnalyzer analyzer = new CreateTableStatementAnalyzer(nodeCtx);
    Analysis analysis = new Analysis(new CoordinatorTxnCtx(SessionContext.systemSessionContext()), ParamTypeHints.EMPTY);
    CoordinatorTxnCtx txnCtx = new CoordinatorTxnCtx(SessionContext.systemSessionContext());
    AnalyzedCreateTable analyzedCreateTable = analyzer.analyze(
        (CreateTable<Expression>) statement, analysis.paramTypeHints(), analysis.transactionContext());
    BoundCreateTable analyzedStatement = CreateTablePlan.bind(
        analyzedCreateTable, txnCtx, nodeCtx, Row.EMPTY, SubQueryResults.EMPTY,
        new NumberOfShards(clusterService), schemas, fulltextAnalyzerResolver);
    Settings.Builder settingsBuilder = Settings.builder()
        .put("index.number_of_shards", 1).put("index.number_of_replicas", 0)
        .put("index.version.created", org.elasticsearch.Version.CURRENT)
        .put(analyzedStatement.tableParameter().settings());
    IndexMetadata indexMetadata = IndexMetadata.builder(analyzedStatement.tableIdent().name())
        .settings(settingsBuilder)
        .putMapping(new MappingMetadata(Constants.DEFAULT_MAPPING_TYPE, analyzedStatement.mapping())).build();
    return newMeta(indexMetadata, analyzedStatement.tableIdent().name());
}
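A hypothetical test built on the helper above; the table definition is a placeholder and the assertions are left as a comment.

@Test
public void testCreateTableColumns() throws IOException {
    DocIndexMetadata md = getDocIndexMetadataFromStatement(
        "create table t (id integer primary key, name text)");
    // Inspect the resulting metadata here, e.g. its columns and primary keys.
}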