Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate: class CopyAnalyzer, method analyzeCopyTo.
AnalyzedCopyTo analyzeCopyTo(CopyTo<Expression> node,
                             ParamTypeHints paramTypeHints,
                             CoordinatorTxnCtx txnCtx) {
    if (!node.directoryUri()) {
        throw new UnsupportedOperationException(
            "Using COPY TO without specifying a DIRECTORY is not supported");
    }
    TableInfo tableInfo = schemas.resolveTableInfo(
        node.table().getName(),
        Operation.COPY_TO,
        txnCtx.sessionContext().sessionUser(),
        txnCtx.sessionContext().searchPath());
    Operation.blockedRaiseException(tableInfo, Operation.READ);
    DocTableRelation tableRelation = new DocTableRelation((DocTableInfo) tableInfo);
    EvaluatingNormalizer normalizer =
        new EvaluatingNormalizer(nodeCtx, RowGranularity.CLUSTER, null, tableRelation);
    var exprCtx = new ExpressionAnalysisContext(txnCtx.sessionContext());
    var expressionAnalyzer = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, new NameFieldProvider(tableRelation), null);
    var exprAnalyzerWithFieldsAsString = new ExpressionAnalyzer(
        txnCtx, nodeCtx, paramTypeHints, FieldProvider.FIELDS_AS_LITERAL, null);
    var uri = expressionAnalyzer.convert(node.targetUri(), exprCtx);
    var table = node.table().map(x -> exprAnalyzerWithFieldsAsString.convert(x, exprCtx));
    var properties = node.properties().map(x -> expressionAnalyzer.convert(x, exprCtx));
    var columns = Lists2.map(
        node.columns(),
        c -> normalizer.normalize(expressionAnalyzer.convert(c, exprCtx), txnCtx));
    var whereClause = node.whereClause()
        .map(w -> normalizer.normalize(expressionAnalyzer.convert(w, exprCtx), txnCtx))
        .orElse(null);
    return new AnalyzedCopyTo(tableInfo, table, normalizer.normalize(uri, txnCtx),
                              properties, columns, whereClause);
}
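The method follows a convert-then-normalize pattern: each AST Expression is turned into a Symbol by an ExpressionAnalyzer and then constant-folded against the cluster context by the EvaluatingNormalizer, all under the same CoordinatorTxnCtx. A minimal caller sketch follows, assuming a CopyAnalyzer instance and an already parsed CopyTo<Expression> node; CoordinatorTxnCtx.systemTransactionContext() and ParamTypeHints.EMPTY are assumed helper names and may differ in this version of the code base.

// Hedged sketch, not CrateDB source. `copyAnalyzer` and `copyToNode` are assumed
// to exist; the node would normally come from parsing a statement such as
// "COPY users TO DIRECTORY '/tmp/dump'".
CoordinatorTxnCtx txnCtx = CoordinatorTxnCtx.systemTransactionContext();  // assumed factory
AnalyzedCopyTo analyzed =
    copyAnalyzer.analyzeCopyTo(copyToNode, ParamTypeHints.EMPTY, txnCtx); // EMPTY: assumed constant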
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate: class CreateAnalyzerPlan, method createRequest.
@VisibleForTesting
public static ClusterUpdateSettingsRequest createRequest(AnalyzedCreateAnalyzer createAnalyzer,
                                                          CoordinatorTxnCtx txnCtx,
                                                          NodeContext nodeCtx,
                                                          Row parameters,
                                                          SubQueryResults subQueryResults,
                                                          FulltextAnalyzerResolver ftResolver) {
    Function<? super Symbol, Object> eval =
        x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, parameters, subQueryResults);
    var analyzerIdent = createAnalyzer.ident();
    var tokenizer = bindTokenizer(createAnalyzer.tokenizer(), analyzerIdent, eval, ftResolver);
    var tokenFilters = bindTokenFilters(createAnalyzer.tokenFilters(), analyzerIdent, eval, ftResolver);
    var charFilters = bindCharFilters(createAnalyzer.charFilters(), analyzerIdent, eval, ftResolver);
    var genericAnalyzerSettings = bindGenericAnalyzerProperties(
        createAnalyzer.genericAnalyzerProperties(), analyzerIdent, eval);
    var analyzerSettings = buildAnalyzerSettings(
        analyzerIdent, createAnalyzer.extendedAnalyzerName(), tokenizer,
        tokenFilters, charFilters, genericAnalyzerSettings, ftResolver);
    Settings.Builder encodedSettingsBuilder = Settings.builder();
    encodedSettingsBuilder.put(
        ANALYZER.buildSettingName(analyzerIdent),
        encodeSettings(analyzerSettings).utf8ToString());
    if (tokenizer != null && !tokenizer.v2().isEmpty()) {
        encodedSettingsBuilder.put(
            TOKENIZER.buildSettingName(tokenizer.v1()),
            encodeSettings(tokenizer.v2()).utf8ToString());
    }
    for (Map.Entry<String, Settings> tokenFilter : tokenFilters.entrySet()) {
        if (!tokenFilter.getValue().isEmpty()) {
            encodedSettingsBuilder.put(
                TOKEN_FILTER.buildSettingName(tokenFilter.getKey()),
                encodeSettings(tokenFilter.getValue()).utf8ToString());
        }
    }
    for (Map.Entry<String, Settings> charFilter : charFilters.entrySet()) {
        if (!charFilter.getValue().isEmpty()) {
            encodedSettingsBuilder.put(
                CHAR_FILTER.buildSettingName(charFilter.getKey()),
                encodeSettings(charFilter.getValue()).utf8ToString());
        }
    }
    return new ClusterUpdateSettingsRequest().persistentSettings(encodedSettingsBuilder.build());
}
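The Function<? super Symbol, Object> closure at the top is the recurring pattern in these planner classes: it uses SymbolEvaluator.evaluate to bind parameter placeholders and sub-query results into concrete Java values, so the rest of the method works with plain objects rather than Symbols. A minimal sketch of that binding step in isolation; someProperty is a hypothetical Symbol, the other names mirror the method arguments above.

// Hedged sketch, not CrateDB source: the binding closure applied to one value.
Function<? super Symbol, Object> eval =
    x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, parameters, subQueryResults);
Object boundValue = eval.apply(someProperty);  // e.g. a literal, or the value bound to a ? placeholder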
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate: class CreateSnapshotPlan, method createRequest.
@VisibleForTesting
public static CreateSnapshotRequest createRequest(AnalyzedCreateSnapshot createSnapshot,
                                                  CoordinatorTxnCtx txnCtx,
                                                  NodeContext nodeCtx,
                                                  Row parameters,
                                                  SubQueryResults subQueryResults,
                                                  Schemas schemas) {
    Function<? super Symbol, Object> eval =
        x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, parameters, subQueryResults);
    Settings settings = GenericPropertiesConverter.genericPropertiesToSettings(
        createSnapshot.properties().map(eval), SnapshotSettings.SETTINGS);
    boolean ignoreUnavailable = IGNORE_UNAVAILABLE.get(settings);
    final HashSet<String> snapshotIndices;
    final HashSet<String> templates = new HashSet<>();
    if (createSnapshot.tables().isEmpty()) {
        for (SchemaInfo schemaInfo : schemas) {
            for (TableInfo tableInfo : schemaInfo.getTables()) {
                // only check for user generated tables
                if (tableInfo instanceof DocTableInfo) {
                    Operation.blockedRaiseException(tableInfo, Operation.READ);
                }
            }
        }
        snapshotIndices = new HashSet<>(AnalyzedCreateSnapshot.ALL_INDICES);
    } else {
        snapshotIndices = new HashSet<>(createSnapshot.tables().size());
        for (Table<Symbol> table : createSnapshot.tables()) {
            DocTableInfo docTableInfo;
            try {
                docTableInfo = (DocTableInfo) schemas.resolveTableInfo(
                    table.getName(),
                    Operation.CREATE_SNAPSHOT,
                    txnCtx.sessionContext().sessionUser(),
                    txnCtx.sessionContext().searchPath());
            } catch (Exception e) {
                if (ignoreUnavailable && e instanceof ResourceUnknownException) {
                    LOGGER.info("Ignore unknown relation '{}' for the '{}' snapshot'",
                                table.getName(), createSnapshot.snapshot());
                    continue;
                } else {
                    throw e;
                }
            }
            if (docTableInfo.isPartitioned()) {
                templates.add(PartitionName.templateName(
                    docTableInfo.ident().schema(), docTableInfo.ident().name()));
            }
            if (table.partitionProperties().isEmpty()) {
                snapshotIndices.addAll(Arrays.asList(docTableInfo.concreteIndices()));
            } else {
                var partitionName = toPartitionName(
                    docTableInfo,
                    Lists2.map(table.partitionProperties(), x -> x.map(eval)));
                if (!docTableInfo.partitions().contains(partitionName)) {
                    if (!ignoreUnavailable) {
                        throw new PartitionUnknownException(partitionName);
                    } else {
                        LOGGER.info("ignoring unknown partition of table '{}' with ident '{}'",
                                    partitionName.relationName(), partitionName.ident());
                    }
                } else {
                    snapshotIndices.add(partitionName.asIndexName());
                }
            }
        }
    }
    return new CreateSnapshotRequest(
            createSnapshot.snapshot().getRepository(),
            createSnapshot.snapshot().getSnapshotId().getName())
        .includeGlobalState(createSnapshot.tables().isEmpty())
        .waitForCompletion(WAIT_FOR_COMPLETION.get(settings))
        .indices(snapshotIndices.toArray(new String[0]))
        .indicesOptions(IndicesOptions.fromOptions(
            ignoreUnavailable, true, true, false, IndicesOptions.lenientExpandOpen()))
        .templates(templates.stream().toList())
        .settings(settings);
}
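For partitioned tables the method records both the partition template (so new partitions inherit the table configuration on restore) and the concrete index name of each requested partition. A hedged sketch of the else-branch above for a statement roughly like CREATE SNAPSHOT r1.s1 TABLE parted PARTITION (day = '2021-01-01') WITH (ignore_unavailable = true); the index-name comment is an assumption about the naming scheme, not taken from this file.

// Hedged sketch, not CrateDB source: the partition branch, using the loop
// variables `docTableInfo`, `table` and `eval` from the method above.
var partitionName = toPartitionName(
    docTableInfo,
    Lists2.map(table.partitionProperties(), x -> x.map(eval)));  // bind PARTITION (day = ...) values
snapshotIndices.add(partitionName.asIndexName());  // assumed form: "<schema>..partitioned.<table>.<encoded values>"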
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate: class CreateRepositoryPlan, method createRequest.
@VisibleForTesting
public static PutRepositoryRequest createRequest(AnalyzedCreateRepository createRepository,
                                                 CoordinatorTxnCtx txnCtx,
                                                 NodeContext nodeCtx,
                                                 Row parameters,
                                                 SubQueryResults subQueryResults,
                                                 RepositoryParamValidator repositoryParamValidator) {
    Function<? super Symbol, Object> eval =
        x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, parameters, subQueryResults);
    var genericProperties = createRepository.properties().map(eval);
    var settings = genericPropertiesToSettings(
        genericProperties,
        // supported settings for the repository type
        repositoryParamValidator.settingsForType(createRepository.type()).all());
    repositoryParamValidator.validate(createRepository.type(), createRepository.properties(), settings);
    PutRepositoryRequest request = new PutRepositoryRequest(createRepository.name());
    request.type(createRepository.type());
    request.settings(settings);
    return request;
}
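A hedged sketch of what this produces for a typical statement; the CREATE REPOSITORY syntax and the 'location' setting for an fs repository are usage assumptions, not taken from this file.

// Hedged sketch, not CrateDB source. For a statement roughly like
//   CREATE REPOSITORY my_repo TYPE fs WITH (location = '/data/backups')
// the request built above is equivalent to:
PutRepositoryRequest request = new PutRepositoryRequest("my_repo");
request.type("fs");
request.settings(Settings.builder().put("location", "/data/backups").build());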
Use of io.crate.metadata.CoordinatorTxnCtx in project crate by crate: class CreateTablePlan, method bind.
@VisibleForTesting
public static BoundCreateTable bind(AnalyzedCreateTable createTable,
                                    CoordinatorTxnCtx txnCtx,
                                    NodeContext nodeCtx,
                                    Row params,
                                    SubQueryResults subQueryResults,
                                    NumberOfShards numberOfShards,
                                    Schemas schemas,
                                    FulltextAnalyzerResolver fulltextAnalyzerResolver) {
    Function<? super Symbol, Object> eval =
        x -> SymbolEvaluator.evaluate(txnCtx, nodeCtx, x, params, subQueryResults);
    CreateTable<Symbol> table = createTable.createTable();
    RelationName relationName = createTable.relationName();
    GenericProperties<Object> properties = table.properties().map(eval);
    AnalyzedTableElements<Object> tableElements = createTable.analyzedTableElements().map(eval);
    TableParameter tableParameter = new TableParameter();
    Optional<ClusteredBy<Object>> mappedClusteredBy = table.clusteredBy().map(x -> x.map(eval));
    Integer numShards = mappedClusteredBy
        .flatMap(ClusteredBy::numberOfShards)
        .map(numberOfShards::fromNumberOfShards)
        .orElseGet(numberOfShards::defaultNumberOfShards);
    tableParameter.settingsBuilder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards);
    // apply default in case it is not specified in the properties,
    // if it is it will get overwritten afterwards.
    TablePropertiesAnalyzer.analyzeWithBoundValues(
        tableParameter, TableParameters.TABLE_CREATE_PARAMETER_INFO, properties, true);
    AnalyzedTableElements<Symbol> tableElementsWithExpressions =
        createTable.analyzedTableElementsWithExpressions()
            .map(x -> SubQueryAndParamBinder.convert(x, params, subQueryResults));
    // validate table elements
    AnalyzedTableElements.finalizeAndValidate(relationName, tableElementsWithExpressions, tableElements);
    // update table settings
    Settings tableSettings = AnalyzedTableElements.validateAndBuildSettings(tableElements, fulltextAnalyzerResolver);
    tableParameter.settingsBuilder().put(tableSettings);
    ColumnIdent routingColumn = mappedClusteredBy
        .map(clusteredBy -> resolveRoutingFromClusteredBy(clusteredBy, tableElements))
        .orElse(null);
    Optional<PartitionedBy<Object>> partitionedByOptional = table.partitionedBy().map(x -> x.map(eval));
    partitionedByOptional.ifPresent(
        partitionedBy -> processPartitionedBy(partitionedByOptional.get(), tableElements, relationName, routingColumn));
    return new BoundCreateTable(relationName, tableElements, tableParameter,
                                routingColumn, table.ifNotExists(), schemas);
}
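The shard-count resolution is the one non-obvious step: an explicit CLUSTERED ... INTO n SHARDS clause wins, otherwise the cluster-wide default applies. A minimal sketch that restates that chain for a hypothetical statement such as CREATE TABLE t (id int) CLUSTERED BY (id) INTO 6 SHARDS; the SQL and the inline comments are illustrative assumptions.

// Hedged sketch, not CrateDB source: the same resolution chain as in bind().
Integer numShards = mappedClusteredBy
    .flatMap(ClusteredBy::numberOfShards)               // the bound "6" from INTO 6 SHARDS, if present
    .map(numberOfShards::fromNumberOfShards)            // validate/convert the user-supplied value
    .orElseGet(numberOfShards::defaultNumberOfShards);  // fall back to the cluster default
tableParameter.settingsBuilder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numShards);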