Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.
Class AlterTableAddColumnPlan, method bind:
@VisibleForTesting
public static BoundAddColumn bind(AnalyzedAlterTableAddColumn alterTable,
                                  CoordinatorTxnCtx txnCtx,
                                  NodeContext nodeCtx,
                                  Row params,
                                  SubQueryResults subQueryResults,
                                  FulltextAnalyzerResolver fulltextAnalyzerResolver) {
    Function<? super Symbol, Object> eval =
        x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, params, subQueryResults);
    DocTableInfo tableInfo = alterTable.tableInfo();
    AnalyzedTableElements<Object> tableElements = alterTable.analyzedTableElements().map(eval);
    for (AnalyzedColumnDefinition<Object> column : tableElements.columns()) {
        ensureColumnLeafsAreNew(column, tableInfo);
    }
    addExistingPrimaryKeys(tableInfo, tableElements);
    ensureNoIndexDefinitions(tableElements.columns());
    addExistingCheckConstraints(tableInfo, tableElements);
    // validate table elements
    AnalyzedTableElements<Symbol> tableElementsUnboundWithExpressions =
        alterTable.analyzedTableElementsWithExpressions();
    Settings tableSettings =
        AnalyzedTableElements.validateAndBuildSettings(tableElements, fulltextAnalyzerResolver);
    Map<String, Object> mapping = AnalyzedTableElements.finalizeAndValidate(
        tableInfo.ident(), tableElementsUnboundWithExpressions, tableElements);
    int numCurrentPks = tableInfo.primaryKey().size();
    if (tableInfo.primaryKey().contains(DocSysColumns.ID)) {
        numCurrentPks -= 1;
    }
    boolean hasNewPrimaryKeys = AnalyzedTableElements.primaryKeys(tableElements).size() > numCurrentPks;
    boolean hasGeneratedColumns = tableElementsUnboundWithExpressions.hasGeneratedColumns();
    return new BoundAddColumn(
        tableInfo, tableElements, tableSettings, mapping, hasNewPrimaryKeys, hasGeneratedColumns);
}
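Here CoordinatorTxnCtx supplies the session and transaction state that SymbolEvaluator needs to fold parameter placeholders and sub-query results into concrete values. Since the method is @VisibleForTesting, a minimal test-style invocation might look like the sketch below; `analyzedAddColumn`, `nodeCtx` and `resolver` are assumed fixtures not shown above, and `CoordinatorTxnCtx.systemTransactionContext()` is assumed to be the usual factory for a context outside a client session.

    // Hedged sketch, not taken from the crate sources: driving bind()
    // directly with no bind parameters and no sub-queries.
    CoordinatorTxnCtx txnCtx = CoordinatorTxnCtx.systemTransactionContext();
    BoundAddColumn bound = AlterTableAddColumnPlan.bind(
        analyzedAddColumn,      // AnalyzedAlterTableAddColumn from the analyzer (assumed fixture)
        txnCtx,
        nodeCtx,                // NodeContext (assumed fixture)
        Row.EMPTY,              // no ? placeholders to substitute
        SubQueryResults.EMPTY,  // no uncorrelated sub-query results
        resolver);              // FulltextAnalyzerResolver (assumed fixture)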
Class AlterTablePlan, method bind:
public static BoundAlterTable bind(AnalyzedAlterTable analyzedAlterTable,
                                   CoordinatorTxnCtx txnCtx,
                                   NodeContext nodeCtx,
                                   Row params,
                                   SubQueryResults subQueryResults) {
    Function<? super Symbol, Object> eval =
        x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, params, subQueryResults);
    DocTableInfo docTableInfo = analyzedAlterTable.tableInfo();
    AlterTable<Object> alterTable = analyzedAlterTable.alterTable().map(eval);
    Table<Object> table = alterTable.table();
    PartitionName partitionName =
        PartitionPropertiesAnalyzer.createPartitionName(table.partitionProperties(), docTableInfo);
    TableParameters tableParameters = getTableParameterInfo(table, partitionName);
    TableParameter tableParameter = getTableParameter(alterTable, tableParameters);
    maybeRaiseBlockedException(docTableInfo, tableParameter.settings());
    return new BoundAlterTable(
        docTableInfo, partitionName, tableParameter, table.excludePartitions(), docTableInfo.isPartitioned());
}
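The eval function here repeats the pattern from AlterTableAddColumnPlan.bind: given a CoordinatorTxnCtx and a NodeContext, SymbolEvaluator.evaluate reduces any Symbol in the mapped AST to a plain Java value. A hedged sketch of that call in isolation, with Literal.of standing in for a real parameter symbol:

    // Sketch only: the same evaluator call both bind() methods wrap in a
    // Function. Literal.of("crate") plays the role of a bound symbol.
    Symbol symbol = Literal.of("crate");
    Object value = SymbolEvaluator.evaluate(
        txnCtx, nodeCtx, symbol, Row.EMPTY, SubQueryResults.EMPTY);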
Class UpdateById, method createExecutor:
private ShardRequestExecutor<ShardUpsertRequest> createExecutor(DependencyCarrier dependencies,
                                                                PlannerContext plannerContext) {
    ClusterService clusterService = dependencies.clusterService();
    CoordinatorTxnCtx txnCtx = plannerContext.transactionContext();
    ShardUpsertRequest.Builder requestBuilder = new ShardUpsertRequest.Builder(
        txnCtx.sessionSettings(),
        ShardingUpsertExecutor.BULK_REQUEST_TIMEOUT_SETTING.get(clusterService.state().metadata().settings()),
        ShardUpsertRequest.DuplicateKeyAction.UPDATE_OR_FAIL,
        true,
        assignments.targetNames(),
        null,   // missing assignments are for INSERT .. ON DUPLICATE KEY UPDATE
        returnValues,
        plannerContext.jobId(),
        false);
    UpdateRequests updateRequests = new UpdateRequests(requestBuilder, table, assignments);
    return new ShardRequestExecutor<>(
        clusterService,
        txnCtx,
        dependencies.nodeContext(),
        table,
        updateRequests,
        dependencies.transportActionProvider().transportShardUpsertAction()::execute,
        docKeys);
}
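Unlike the two bind methods above, this executor never constructs a CoordinatorTxnCtx itself: it reuses the one already carried by the PlannerContext, so the shard-level upsert requests run with the same session settings (current user, search path and so on) as the statement that produced them. Condensed from the snippet:

    // The only pieces of CoordinatorTxnCtx this executor touches.
    CoordinatorTxnCtx txnCtx = plannerContext.transactionContext();
    var sessionSettings = txnCtx.sessionSettings();  // forwarded into ShardUpsertRequest.Builder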
Class Session, method singleExec:
@VisibleForTesting
CompletableFuture<?> singleExec(Portal portal, ResultReceiver<?> resultReceiver, int maxRows) {
    var activeConsumer = portal.activeConsumer();
    if (activeConsumer != null && activeConsumer.suspended()) {
        activeConsumer.replaceResultReceiver(resultReceiver, maxRows);
        activeConsumer.resume();
        return resultReceiver.completionFuture();
    }
    var jobId = UUIDs.dirtyUUID();
    var routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes());
    var clusterState = executor.clusterService().state();
    var txnCtx = new CoordinatorTxnCtx(sessionContext);
    var nodeCtx = executor.nodeContext();
    var params = new RowN(portal.params().toArray());
    var plannerContext = new PlannerContext(
        clusterState, routingProvider, jobId, txnCtx, nodeCtx, maxRows, params);
    var analyzedStmt = portal.analyzedStatement();
    String rawStatement = portal.preparedStmt().rawStatement();
    if (analyzedStmt == null) {
        String errorMsg = "Statement must have been analyzed: " + rawStatement;
        jobsLogs.logPreExecutionFailure(jobId, rawStatement, errorMsg, sessionContext.sessionUser());
        throw new IllegalStateException(errorMsg);
    }
    Plan plan;
    try {
        plan = planner.plan(analyzedStmt, plannerContext);
    } catch (Throwable t) {
        jobsLogs.logPreExecutionFailure(jobId, rawStatement, SQLExceptions.messageOf(t), sessionContext.sessionUser());
        throw t;
    }
    if (!analyzedStmt.isWriteOperation()) {
        resultReceiver = new RetryOnFailureResultReceiver(
            executor.clusterService(),
            clusterState,
            indexName -> executor.clusterService().state().metadata().hasIndex(indexName),
            resultReceiver,
            jobId,
            (newJobId, resultRec) -> retryQuery(
                newJobId,
                analyzedStmt,
                routingProvider,
                new RowConsumerToResultReceiver(resultRec, maxRows, new JobsLogsUpdateListener(newJobId, jobsLogs)),
                params,
                txnCtx,
                nodeCtx));
    }
    jobsLogs.logExecutionStart(jobId, rawStatement, sessionContext.sessionUser(), StatementClassifier.classify(plan));
    RowConsumerToResultReceiver consumer =
        new RowConsumerToResultReceiver(resultReceiver, maxRows, new JobsLogsUpdateListener(jobId, jobsLogs));
    portal.setActiveConsumer(consumer);
    plan.execute(executor, plannerContext, consumer, params, SubQueryResults.EMPTY);
    return resultReceiver.completionFuture();
}
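This method is where the contexts used in the other snippets originate: each execution wraps the per-connection sessionContext in a fresh CoordinatorTxnCtx and threads it into the PlannerContext, from which planners and executors (such as UpdateById above) retrieve it again. A condensed sketch of that flow, using only calls that appear in the snippet:

    var txnCtx = new CoordinatorTxnCtx(sessionContext);   // one per execution
    var plannerContext = new PlannerContext(
        clusterState, routingProvider, jobId, txnCtx, nodeCtx, maxRows, params);
    // ... later, deep inside planning/execution:
    CoordinatorTxnCtx same = plannerContext.transactionContext();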
Class AlterTableAddColumnAnalyzer, method analyze:
public AnalyzedAlterTableAddColumn analyze(AlterTableAddColumn<Expression> alterTable,
                                           ParamTypeHints paramTypeHints,
                                           CoordinatorTxnCtx txnCtx) {
    if (!alterTable.table().partitionProperties().isEmpty()) {
        throw new UnsupportedOperationException("Adding a column to a single partition is not supported");
    }
    DocTableInfo tableInfo = (DocTableInfo) schemas.resolveTableInfo(
        alterTable.table().getName(),
        Operation.ALTER,
        txnCtx.sessionContext().sessionUser(),
        txnCtx.sessionContext().searchPath());
    TableReferenceResolver referenceResolver = new TableReferenceResolver(tableInfo.columns(), tableInfo.ident());
    var exprAnalyzerWithReferenceResolver =
        new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, referenceResolver, null);
    var exprAnalyzerWithFieldsAsString =
        new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    AddColumnDefinition<Expression> tableElement = alterTable.tableElement();
    // convert and validate the column name
    ExpressionToColumnIdentVisitor.convert(tableElement.name());
    // 1st phase, exclude check constraints (their expressions contain column
    // references) and generated expressions
    AddColumnDefinition<Symbol> addColumnDefinition = new AddColumnDefinition<>(
        exprAnalyzerWithFieldsAsString.convert(tableElement.name(), exprCtx),
        // expression must be mapped later on using mapExpressions()
        null,
        tableElement.type() == null
            ? null
            : tableElement.type().map(y -> exprAnalyzerWithFieldsAsString.convert(y, exprCtx)),
        tableElement.constraints().stream()
            .filter(c -> false == c instanceof CheckColumnConstraint)
            .map(x -> x.map(y -> exprAnalyzerWithFieldsAsString.convert(y, exprCtx)))
            .collect(Collectors.toList()),
        false,
        tableElement.generatedExpression() != null);
    AnalyzedTableElements<Symbol> analyzedTableElements =
        TableElementsAnalyzer.analyze(singletonList(addColumnDefinition), tableInfo.ident(), tableInfo);
    // 2nd phase, analyze possible generated expressions
    AddColumnDefinition<Symbol> addColumnDefinitionWithExpression =
        (AddColumnDefinition<Symbol>) tableElement.mapExpressions(
            addColumnDefinition,
            x -> exprAnalyzerWithReferenceResolver.convert(x, exprCtx));
    AnalyzedTableElements<Symbol> analyzedTableElementsWithExpressions =
        TableElementsAnalyzer.analyze(singletonList(addColumnDefinitionWithExpression), tableInfo.ident(), tableInfo);
    // now analyze possible check expressions
    var checkColumnConstraintsAnalyzer = new ExpressionAnalyzer(
        txnCtx,
        nodeCtx,
        paramTypeHints,
        new SelfReferenceFieldProvider(tableInfo.ident(), referenceResolver, analyzedTableElements.columns()),
        null);
    tableElement.constraints().stream()
        .filter(CheckColumnConstraint.class::isInstance)
        .map(x -> x.map(y -> checkColumnConstraintsAnalyzer.convert(y, exprCtx)))
        .forEach(c -> {
            CheckColumnConstraint<Symbol> check = (CheckColumnConstraint<Symbol>) c;
            analyzedTableElements.addCheckColumnConstraint(tableInfo.ident(), check);
            analyzedTableElementsWithExpressions.addCheckColumnConstraint(tableInfo.ident(), check);
        });
    return new AnalyzedAlterTableAddColumn(tableInfo, analyzedTableElements, analyzedTableElementsWithExpressions);
}
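In the analyzer, CoordinatorTxnCtx plays two roles: its session context drives name resolution (which user is asking, and against which search path), and it seeds every ExpressionAnalyzer as well as the ExpressionAnalysisContext. Condensed from the snippet above, with `tableName` and `fieldProvider` standing in for the concrete arguments:

    // Role 1: resolve the table respecting the session's user and search path.
    DocTableInfo tableInfo = (DocTableInfo) schemas.resolveTableInfo(
        tableName, Operation.ALTER,
        txnCtx.sessionContext().sessionUser(),
        txnCtx.sessionContext().searchPath());
    // Role 2: seed expression analysis.
    var exprAnalyzer = new ExpressionAnalyzer(txnCtx, nodeCtx, paramTypeHints, fieldProvider, null);
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());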