Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.
The class WhereClauseAnalyzer, method tieBreakPartitionQueries.
@Nullable
private static PartitionResult tieBreakPartitionQueries(EvaluatingNormalizer normalizer,
                                                        Map<Symbol, List<Literal>> queryPartitionMap,
                                                        CoordinatorTxnCtx coordinatorTxnCtx) throws UnsupportedOperationException {
    /*
     * Got multiple normalized queries which all could match.
     * This might be the case if one partition resolved to null.
     *
     * e.g.
     *
     *   p = 1 and x = 2
     *
     * might lead to
     *
     *   null and x = 2
     *   true and x = 2
     *
     * At this point it is unknown if they really match.
     * In order to figure out if they could potentially match, all conditions involving references are now set to true:
     *
     *   null and true -> can't match
     *   true and true -> can match, can use this query + partition
     *
     * If there is still more than one query that can match, it's not possible to execute the query :(
     */
    List<Tuple<Symbol, List<Literal>>> canMatch = new ArrayList<>();
    for (Map.Entry<Symbol, List<Literal>> entry : queryPartitionMap.entrySet()) {
        Symbol query = entry.getKey();
        List<Literal> partitions = entry.getValue();
        Symbol normalized = normalizer.normalize(ScalarsAndRefsToTrue.rewrite(query), coordinatorTxnCtx);
        assert normalized instanceof Literal :
            "after normalization and replacing all reference occurrences with true there must only be a literal left";
        Object value = ((Literal) normalized).value();
        if (value != null && (Boolean) value) {
            canMatch.add(new Tuple<>(query, partitions));
        }
    }
    if (canMatch.size() == 1) {
        Tuple<Symbol, List<Literal>> symbolListTuple = canMatch.get(0);
        return new PartitionResult(
            symbolListTuple.v1(),
            Lists2.map(symbolListTuple.v2(), literal -> nullOrString(literal.value())));
    }
    return null;
}
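The comment above hinges on SQL three-valued logic: once every reference condition is rewritten to TRUE, a partition survives only if the remaining expression still evaluates to TRUE (not NULL, not FALSE). The following self-contained sketch (illustrative only, not CrateDB code; class and variable names are made up) shows that evaluation on the example from the comment:

import java.util.LinkedHashMap;
import java.util.Map;

// A minimal sketch of the tie-break idea: evaluate "partition condition AND TRUE"
// under three-valued logic and keep only partitions that can still come out TRUE.
final class PartitionTieBreakSketch {

    // Three-valued AND: NULL AND TRUE -> NULL, so that partition can't match.
    static Boolean and3(Boolean left, Boolean right) {
        if (Boolean.FALSE.equals(left) || Boolean.FALSE.equals(right)) {
            return false;
        }
        if (left == null || right == null) {
            return null;
        }
        return true;
    }

    public static void main(String[] args) {
        // "p = 1 and x = 2" per partition: the partition condition normalized to
        // NULL or TRUE, with the reference condition "x = 2" replaced by TRUE.
        Map<String, Boolean> normalizedPerPartition = new LinkedHashMap<>();
        normalizedPerPartition.put("partition-a", and3(null, true)); // null and true
        normalizedPerPartition.put("partition-b", and3(true, true)); // true and true

        normalizedPerPartition.forEach((partition, result) ->
            System.out.println(partition + " can match: " + Boolean.TRUE.equals(result)));
        // Only partition-b survives; with exactly one survivor the query is executable.
    }
}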
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.
The class ExpressionAnalyzerTest, method testNonDeterministicFunctionsAlwaysNew.
@Test
public void testNonDeterministicFunctionsAlwaysNew() throws Exception {
    CoordinatorTxnCtx txnCtx = CoordinatorTxnCtx.systemTransactionContext();
    ExpressionAnalysisContext localContext = new ExpressionAnalysisContext(txnCtx.sessionContext());
    String functionName = CoalesceFunction.NAME;
    Symbol fn1 = ExpressionAnalyzer.allocateFunction(
        functionName, List.of(Literal.BOOLEAN_FALSE), null, localContext, txnCtx, expressions.nodeCtx);
    Symbol fn2 = ExpressionAnalyzer.allocateFunction(
        functionName, List.of(Literal.BOOLEAN_FALSE), null, localContext, txnCtx, expressions.nodeCtx);
    Symbol fn3 = ExpressionAnalyzer.allocateFunction(
        functionName, List.of(Literal.BOOLEAN_TRUE), null, localContext, txnCtx, expressions.nodeCtx);
    // different instances
    assertThat(fn1, allOf(not(sameInstance(fn2)), not(sameInstance(fn3))));
    // but equal
    assertThat(fn1, is(equalTo(fn2)));
    assertThat(fn1, is(not(equalTo(fn3))));
}
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.
The class Session, method quickExec.
/**
 * Execute a query in one step, avoiding the parse/bind/execute/sync procedure.
 * As opposed to parse/bind/execute/sync, this method is thread-safe.
 *
 * @param parse A function to parse the statement; this can be used to cache the parsed statement.
 *              Use {@link #quickExec(String, ResultReceiver, Row)} to use the regular parser.
 */
public void quickExec(String statement, Function<String, Statement> parse, ResultReceiver<?> resultReceiver, Row params) {
    CoordinatorTxnCtx txnCtx = new CoordinatorTxnCtx(sessionContext);
    Statement parsedStmt = parse.apply(statement);
    AnalyzedStatement analyzedStatement = analyzer.analyze(parsedStmt, sessionContext, ParamTypeHints.EMPTY);
    RoutingProvider routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes());
    UUID jobId = UUIDs.dirtyUUID();
    ClusterState clusterState = planner.currentClusterState();
    PlannerContext plannerContext = new PlannerContext(clusterState, routingProvider, jobId, txnCtx, nodeCtx, 0, params);
    Plan plan;
    try {
        plan = planner.plan(analyzedStatement, plannerContext);
    } catch (Throwable t) {
        jobsLogs.logPreExecutionFailure(jobId, statement, SQLExceptions.messageOf(t), sessionContext.sessionUser());
        throw t;
    }
    StatementClassifier.Classification classification = StatementClassifier.classify(plan);
    jobsLogs.logExecutionStart(jobId, statement, sessionContext.sessionUser(), classification);
    JobsLogsUpdateListener jobsLogsUpdateListener = new JobsLogsUpdateListener(jobId, jobsLogs);
    if (!analyzedStatement.isWriteOperation()) {
        resultReceiver = new RetryOnFailureResultReceiver(
            executor.clusterService(),
            clusterState, // the clusterState at the time of the index check is used
            indexName -> clusterState.metadata().hasIndex(indexName),
            resultReceiver,
            jobId,
            (newJobId, retryResultReceiver) -> retryQuery(
                newJobId,
                analyzedStatement,
                routingProvider,
                new RowConsumerToResultReceiver(retryResultReceiver, 0, jobsLogsUpdateListener),
                params,
                txnCtx,
                nodeCtx
            ));
    }
    RowConsumerToResultReceiver consumer = new RowConsumerToResultReceiver(resultReceiver, 0, jobsLogsUpdateListener);
    plan.execute(executor, plannerContext, consumer, params, SubQueryResults.EMPTY);
}
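A minimal caller sketch, assuming a live Session instance named session, SqlParser::createStatement as the "regular parser" the javadoc refers to, and a hypothetical ResultReceiver implementation called CollectingResultReceiver (not a CrateDB class):

// Hypothetical usage; CollectingResultReceiver is an assumed stand-in
// for whatever ResultReceiver implementation the caller provides.
ResultReceiver<?> receiver = new CollectingResultReceiver();
session.quickExec(
    "SELECT name FROM sys.cluster",
    SqlParser::createStatement, // or a caching wrapper, as the javadoc suggests
    receiver,
    Row.EMPTY);                 // no bind parameters

Because quickExec skips the parse/bind/execute/sync portal machinery, each call is independent, which is what makes it safe to invoke from multiple threads.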
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.
The class Session, method bulkExec.
private CompletableFuture<?> bulkExec(Statement statement, List<DeferredExecution> toExec) {
    assert toExec.size() >= 1 : "Must have at least 1 deferred execution for bulk exec";
    var jobId = UUIDs.dirtyUUID();
    var routingProvider = new RoutingProvider(Randomness.get().nextInt(), planner.getAwarenessAttributes());
    var clusterState = executor.clusterService().state();
    var txnCtx = new CoordinatorTxnCtx(sessionContext);
    var plannerContext = new PlannerContext(clusterState, routingProvider, jobId, txnCtx, nodeCtx, 0, null);
    PreparedStmt firstPreparedStatement = toExec.get(0).portal().preparedStmt();
    AnalyzedStatement analyzedStatement = firstPreparedStatement.analyzedStatement();
    Plan plan;
    try {
        plan = planner.plan(analyzedStatement, plannerContext);
    } catch (Throwable t) {
        jobsLogs.logPreExecutionFailure(jobId, firstPreparedStatement.rawStatement(), SQLExceptions.messageOf(t), sessionContext.sessionUser());
        throw t;
    }
    jobsLogs.logExecutionStart(jobId, firstPreparedStatement.rawStatement(), sessionContext.sessionUser(), StatementClassifier.classify(plan));
    var bulkArgs = Lists2.map(toExec, x -> (Row) new RowN(x.portal().params().toArray()));
    List<CompletableFuture<Long>> rowCounts = plan.executeBulk(executor, plannerContext, bulkArgs, SubQueryResults.EMPTY);
    CompletableFuture<Void> allRowCounts = CompletableFuture.allOf(rowCounts.toArray(new CompletableFuture[0]));
    List<CompletableFuture<?>> resultReceiverFutures = Lists2.map(toExec, x -> x.resultReceiver().completionFuture());
    CompletableFuture<Void> allResultReceivers = CompletableFuture.allOf(resultReceiverFutures.toArray(new CompletableFuture[0]));
    return allRowCounts
        .exceptionally(t -> null) // swallow exception - failures are set per item in emitResults
        .thenAccept(ignored -> emitRowCountsToResultReceivers(jobId, jobsLogs, toExec, rowCounts))
        .runAfterBoth(allResultReceivers, () -> {
        });
}
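The tail of bulkExec is a deliberate future-combination pattern: allOf fails as soon as any per-item future fails, so the aggregate failure is swallowed with exceptionally(t -> null), each item is then reported individually, and completion waits for both the row counts and the result receivers. A self-contained sketch of the same pattern (illustrative only, plain java.util.concurrent, no CrateDB types):

import java.util.List;
import java.util.concurrent.CompletableFuture;

// Sketch: swallow the aggregate failure so per-item results can still be
// reported, and only finish once both future sets are done.
final class BulkCompletionSketch {
    public static void main(String[] args) {
        List<CompletableFuture<Long>> rowCounts = List.of(
            CompletableFuture.completedFuture(1L),
            CompletableFuture.failedFuture(new RuntimeException("item 2 failed")));

        CompletableFuture<Void> allRowCounts =
            CompletableFuture.allOf(rowCounts.toArray(new CompletableFuture[0]));
        CompletableFuture<Void> allReceivers = CompletableFuture.completedFuture(null);

        allRowCounts
            // allOf fails if any item fails; swallow it, items are inspected below
            .exceptionally(t -> null)
            .thenAccept(ignored -> {
                for (CompletableFuture<Long> rowCount : rowCounts) {
                    // per-item reporting, like emitRowCountsToResultReceivers above
                    System.out.println(rowCount.isCompletedExceptionally()
                        ? "item failed"
                        : "row count: " + rowCount.join());
                }
            })
            .runAfterBoth(allReceivers, () -> System.out.println("bulk done"))
            .join();
    }
}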
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate.
The class CreateTableStatementAnalyzer, method analyze.
public AnalyzedCreateTable analyze(CreateTable<Expression> createTable,
                                   ParamTypeHints paramTypeHints,
                                   CoordinatorTxnCtx txnCtx) {
    RelationName relationName = RelationName.of(
        createTable.name().getName(),
        txnCtx.sessionContext().searchPath().currentSchema());
    relationName.ensureValidForRelationCreation();
    var exprAnalyzerWithoutFields = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.UNSUPPORTED, null);
    var exprAnalyzerWithFieldsAsString = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    Function<Expression, Symbol> exprMapper = y -> exprAnalyzerWithFieldsAsString.convert(y, exprCtx);

    // 1st phase: map and analyze everything EXCEPT:
    //   - check constraints defined at any level (table or column)
    //   - generated expressions
    //   - default expressions
    Map<TableElement<Symbol>, TableElement<Expression>> analyzed = new LinkedHashMap<>();
    List<CheckConstraint<Expression>> checkConstraints = new ArrayList<>();
    for (int i = 0; i < createTable.tableElements().size(); i++) {
        TableElement<Expression> te = createTable.tableElements().get(i);
        if (te instanceof CheckConstraint) {
            checkConstraints.add((CheckConstraint<Expression>) te);
            continue;
        }
        TableElement<Symbol> analyzedTe = null;
        if (te instanceof ColumnDefinition) {
            ColumnDefinition<Expression> def = (ColumnDefinition<Expression>) te;
            List<ColumnConstraint<Symbol>> analyzedColumnConstraints = new ArrayList<>();
            for (int j = 0; j < def.constraints().size(); j++) {
                ColumnConstraint<Expression> cc = def.constraints().get(j);
                if (cc instanceof CheckColumnConstraint) {
                    // Re-frame the column check constraint as a table check constraint
                    CheckColumnConstraint<Expression> columnCheck = (CheckColumnConstraint<Expression>) cc;
                    checkConstraints.add(new CheckConstraint<>(
                        columnCheck.name(),
                        def.ident(),
                        columnCheck.expression(),
                        columnCheck.expressionStr()));
                    continue;
                }
                analyzedColumnConstraints.add(cc.map(exprMapper));
            }
            analyzedTe = new ColumnDefinition<>(
                def.ident(),
                null,
                null,
                def.type() == null ? null : def.type().map(exprMapper),
                analyzedColumnConstraints,
                false,
                def.isGenerated());
        }
        analyzed.put(analyzedTe == null ? te.map(exprMapper) : analyzedTe, te);
    }
    CreateTable<Symbol> analyzedCreateTable = new CreateTable<>(
        createTable.name().map(exprMapper),
        new ArrayList<>(analyzed.keySet()),
        createTable.partitionedBy().map(x -> x.map(exprMapper)),
        createTable.clusteredBy().map(x -> x.map(exprMapper)),
        createTable.properties().map(x -> exprAnalyzerWithoutFields.convert(x, exprCtx)),
        createTable.ifNotExists());
    AnalyzedTableElements<Symbol> analyzedTableElements =
        TableElementsAnalyzer.analyze(analyzedCreateTable.tableElements(), relationName, null);

    // 2nd phase: analyze and map with a reference resolver:
    //   - generated/default expressions
    //   - check constraints
    TableReferenceResolver referenceResolver = analyzedTableElements.referenceResolver(relationName);
    var exprAnalyzerWithReferences = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, referenceResolver, null);
    List<TableElement<Symbol>> tableElementsWithExpressions = new ArrayList<>();
    for (int i = 0; i < analyzedCreateTable.tableElements().size(); i++) {
        TableElement<Symbol> elementSymbol = analyzedCreateTable.tableElements().get(i);
        TableElement<Expression> elementExpression = analyzed.get(elementSymbol);
        tableElementsWithExpressions.add(elementExpression.mapExpressions(elementSymbol, x -> {
            Symbol symbol = exprAnalyzerWithReferences.convert(x, exprCtx);
            EnsureNoMatchPredicate.ensureNoMatchPredicate(symbol, "Cannot use MATCH in CREATE TABLE statements");
            return symbol;
        }));
    }
    checkConstraints.stream()
        .map(x -> x.map(y -> exprAnalyzerWithReferences.convert(y, exprCtx)))
        .forEach(te -> {
            analyzedCreateTable.tableElements().add(te);
            tableElementsWithExpressions.add(te);
            analyzedTableElements.addCheckConstraint(relationName, (CheckConstraint<Symbol>) te);
        });
    AnalyzedTableElements<Symbol> analyzedTableElementsWithExpressions =
        TableElementsAnalyzer.analyze(tableElementsWithExpressions, relationName, null, false);
    return new AnalyzedCreateTable(
        relationName, analyzedCreateTable, analyzedTableElements, analyzedTableElementsWithExpressions);
}
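The two phases exist because generated expressions, default expressions, and check constraints may reference sibling columns, so they can only be resolved once every column of the table is known from the first pass. A minimal sketch of that dependency (illustrative only, plain Java, no CrateDB types; names are made up):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.function.Function;

// Sketch: phase 1 collects column definitions without touching expressions;
// phase 2 uses that result as a reference resolver for expressions like
// "y GENERATED ALWAYS AS (x + 1)".
final class TwoPhaseAnalysisSketch {
    public static void main(String[] args) {
        // Phase 1: collect all column definitions, skipping expressions.
        Map<String, String> columns = new LinkedHashMap<>();
        columns.put("x", "integer");
        columns.put("y", "integer"); // y GENERATED ALWAYS AS (x + 1)

        // Phase 2: a resolver backed by the phase-1 result can now validate
        // that `x` inside y's generated expression actually exists.
        Function<String, String> referenceResolver = name -> {
            String type = columns.get(name);
            if (type == null) {
                throw new IllegalArgumentException("Column not found: " + name);
            }
            return type;
        };
        System.out.println("x resolves to " + referenceResolver.apply("x"));
    }
}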