Search in sources :

Example 6 with Symbol

use of io.crate.analyze.symbol.Symbol in project crate by crate.

The following is the convertFunctionCall method of the class ValuesAwareExpressionAnalyzer.

/**
 * Intercepts the pseudo-function {@code VALUES(col)} used inside
 * ON DUPLICATE KEY UPDATE clauses and resolves it against the columns of the
 * enclosing INSERT statement; every other function call is delegated to the
 * parent analyzer unchanged.
 *
 * @throws IllegalArgumentException if the referenced column is unknown
 *                                  (undefined type) or is not a plain column
 *                                  reference (Field)
 */
@Override
protected Symbol convertFunctionCall(FunctionCall node, ExpressionAnalysisContext context) {
    if (!node.getName().getParts().get(0).equals("values")) {
        // not the VALUES(..) pseudo-function -> regular function resolution
        return super.convertFunctionCall(node, context);
    }
    Expression argument = node.getArguments().get(0);
    Symbol column = super.convert(argument, context);
    if (column.valueType().equals(DataTypes.UNDEFINED)) {
        throw new IllegalArgumentException(
            SymbolFormatter.format("Referenced column '%s' in VALUES expression not found", column));
    }
    if (!(column instanceof Field)) {
        throw new IllegalArgumentException(
            SymbolFormatter.format("Argument to VALUES expression must reference a column that " +
                                   "is part of the INSERT statement. %s is invalid", column));
    }
    return valuesResolver.allocateAndResolve((Field) column);
}
Also used : Field(io.crate.analyze.symbol.Field) ParameterExpression(io.crate.sql.tree.ParameterExpression) Expression(io.crate.sql.tree.Expression) Symbol(io.crate.analyze.symbol.Symbol)

Example 7 with Symbol

use of io.crate.analyze.symbol.Symbol in project crate by crate.

The following is the normalize method of the class WhereClause.

/**
 * Returns a WhereClause whose query has been normalized with the given
 * normalizer. If there is nothing to normalize (no-match clause, absent
 * query, or normalization left the query untouched) this instance is
 * returned unchanged.
 */
public WhereClause normalize(EvaluatingNormalizer normalizer, TransactionContext transactionContext) {
    if (noMatch || query == null) {
        return this;
    }
    Symbol normalized = normalizer.normalize(query, transactionContext);
    if (normalized == query) {
        // normalization was a no-op; avoid allocating a new clause
        return this;
    }
    WhereClause result = new WhereClause(normalized, docKeys.orElse(null), partitions);
    result.clusteredBy = clusteredBy;
    return result;
}
Also used : Symbol(io.crate.analyze.symbol.Symbol)

Example 8 with Symbol

use of io.crate.analyze.symbol.Symbol in project crate by crate.

The following is the analyze method of the class InsertFromSubQueryAnalyzer.

/**
 * Analyzes an INSERT ... (SELECT ...) statement: resolves the writable target
 * table, analyzes the sub-query as the row source, maps its outputs onto the
 * target columns (adding casts where necessary), and processes any
 * ON DUPLICATE KEY assignments.
 *
 * @return the analyzed statement describing source, target table, target
 *         columns and optional on-duplicate-key assignments
 */
public AnalyzedStatement analyze(InsertFromSubquery node, Analysis analysis) {
    DocTableInfo targetTable = schemas.getWritableTable(
        TableIdent.of(node.table(), analysis.sessionContext().defaultSchema()));
    Operation.blockedRaiseException(targetTable, Operation.INSERT);
    DocTableRelation targetRelation = new DocTableRelation(targetTable);
    FieldProvider fieldProvider = new NameFieldProvider(targetRelation);
    QueriedRelation subQueryRelation = (QueriedRelation) relationAnalyzer.analyze(node.subQuery(), analysis);
    List<Reference> columns = new ArrayList<>(
        resolveTargetColumns(node.columns(), targetTable, subQueryRelation.fields().size()));
    validateColumnsAndAddCastsIfNecessary(columns, subQueryRelation.querySpec());
    // null signals "no ON DUPLICATE KEY clause present"
    Map<Reference, Symbol> dupKeyAssignments = null;
    if (!node.onDuplicateKeyAssignments().isEmpty()) {
        dupKeyAssignments = processUpdateAssignments(
            targetRelation,
            columns,
            analysis.sessionContext(),
            analysis.parameterContext(),
            analysis.transactionContext(),
            fieldProvider,
            node.onDuplicateKeyAssignments());
    }
    return new InsertFromSubQueryAnalyzedStatement(subQueryRelation, targetTable, columns, dupKeyAssignments);
}
Also used : DocTableInfo(io.crate.metadata.doc.DocTableInfo) DynamicReference(io.crate.analyze.symbol.DynamicReference) Symbol(io.crate.analyze.symbol.Symbol)

Example 9 with Symbol

use of io.crate.analyze.symbol.Symbol in project crate by crate.

The following is the validateColumnsAndAddCastsIfNecessary method of the class InsertFromSubQueryAnalyzer.

/**
     * validate that result columns from subquery match explicit insert columns
     * or complete table schema
     *
     * @throws IllegalArgumentException if the number of subquery outputs does
     *         not match the number of target columns, or if a subquery output
     *         cannot be cast to the corresponding target column's type
     */
private static void validateColumnsAndAddCastsIfNecessary(List<Reference> targetColumns, QuerySpec querySpec) {
    if (targetColumns.size() != querySpec.outputs().size()) {
        Joiner commaJoiner = Joiner.on(", ");
        throw new IllegalArgumentException(String.format(Locale.ENGLISH, "Number of target columns (%s) of insert statement doesn't match number of source columns (%s)", commaJoiner.join(Iterables.transform(targetColumns, Reference.TO_COLUMN_NAME)), commaJoiner.join(Iterables.transform(querySpec.outputs(), SymbolPrinter.FUNCTION))));
    }
    // castOutputs inserts implicit casts where possible and returns the first
    // output position for which no cast exists (-1 when all outputs are fine)
    int failedCastPosition = querySpec.castOutputs(Iterators.transform(targetColumns.iterator(), Symbol::valueType));
    if (failedCastPosition >= 0) {
        Symbol failedSource = querySpec.outputs().get(failedCastPosition);
        Reference failedTarget = targetColumns.get(failedCastPosition);
        // fixed: the original message was garbled ("does not match is not convertable to")
        throw new IllegalArgumentException(String.format(Locale.ENGLISH, "Type of subquery column %s (%s) is not convertible to the type of table column %s (%s)", failedSource, failedSource.valueType(), failedTarget.ident().columnIdent().fqn(), failedTarget.valueType()));
    }
}
Also used : Joiner(com.google.common.base.Joiner) Symbol(io.crate.analyze.symbol.Symbol) DynamicReference(io.crate.analyze.symbol.DynamicReference)

Example 10 with Symbol

use of io.crate.analyze.symbol.Symbol in project crate by crate.

The following is the addValues method of the class InsertFromValuesAnalyzer.

/**
 * Analyzes one VALUES (...) tuple of an INSERT statement: converts and
 * normalizes every value expression, collects primary-key and routing values,
 * separates partitioned-by values from the row source, processes any
 * ON DUPLICATE KEY assignments and generated-column expressions, and finally
 * registers the resulting row (source values, document id, routing value and
 * optional bulk index) on the analyzed statement context.
 *
 * NOTE(review): assumes context.columns() is positionally aligned with
 * node.values() — confirm the caller establishes this invariant.
 *
 * @param bulkIdx index of this tuple within a bulk request; pass a negative
 *                value when the insert is not part of a bulk operation
 * @throws ColumnValidationException if a value cannot be normalized to its
 *                                   target column or is not a constant input
 * @throws IllegalArgumentException  if a primary key value is NULL
 */
private void addValues(DocTableRelation tableRelation, ValueNormalizer valueNormalizer, EvaluatingNormalizer normalizer, ExpressionAnalyzer expressionAnalyzer, ExpressionAnalysisContext expressionAnalysisContext, TransactionContext transactionContext, ValuesResolver valuesResolver, ExpressionAnalyzer valuesAwareExpressionAnalyzer, ValuesList node, List<Assignment> assignments, InsertFromValuesAnalyzedStatement context, ReferenceToLiteralConverter.Context referenceToLiteralContext, int numPrimaryKeys, Function<List<BytesRef>, String> idFunction, int bulkIdx) throws IOException {
    if (context.tableInfo().isPartitioned()) {
        // each row of a partitioned table gets its own partition-value map
        context.newPartitionMap();
    }
    List<BytesRef> primaryKeyValues = new ArrayList<>(numPrimaryKeys);
    String routingValue = null;
    List<ColumnIdent> primaryKey = context.tableInfo().primaryKey();
    Object[] insertValues = new Object[node.values().size()];
    for (int i = 0, valuesSize = node.values().size(); i < valuesSize; i++) {
        Expression expression = node.values().get(i);
        Symbol valuesSymbol = normalizer.normalize(expressionAnalyzer.convert(expression, expressionAnalysisContext), transactionContext);
        // implicit type conversion
        Reference column = context.columns().get(i);
        final ColumnIdent columnIdent = column.ident().columnIdent();
        Object value;
        try {
            // coerce the value to the column's type, then extract the constant
            valuesSymbol = valueNormalizer.normalizeInputForReference(valuesSymbol, column);
            value = ((Input) valuesSymbol).value();
        } catch (IllegalArgumentException | UnsupportedOperationException e) {
            throw new ColumnValidationException(columnIdent.sqlFqn(), e);
        } catch (ClassCastException e) {
            // symbol is no Input
            throw new ColumnValidationException(columnIdent.name(), SymbolFormatter.format("Invalid value '%s' in insert statement", valuesSymbol));
        }
        if (context.primaryKeyColumnIndices().contains(i)) {
            if (value == null) {
                throw new IllegalArgumentException("Primary key value must not be NULL");
            }
            int idx = primaryKey.indexOf(columnIdent);
            if (idx < 0) {
                // oh look, one or more nested primary keys!
                assert value instanceof Map : "value must be instance of Map";
                for (ColumnIdent pkIdent : primaryKey) {
                    if (!pkIdent.getRoot().equals(columnIdent)) {
                        continue;
                    }
                    // extract the nested value by walking the path inside the map
                    int pkIdx = primaryKey.indexOf(pkIdent);
                    Object nestedValue = StringObjectMaps.fromMapByPath((Map) value, pkIdent.path());
                    addPrimaryKeyValue(pkIdx, nestedValue, primaryKeyValues);
                }
            } else {
                addPrimaryKeyValue(idx, value, primaryKeyValues);
            }
        }
        if (i == context.routingColumnIndex()) {
            routingValue = extractRoutingValue(columnIdent, value, context);
        }
        if (context.partitionedByIndices().contains(i)) {
            // partition values go into the partition map; only the non-partition
            // remainder (if any) stays in the row source
            Object rest = processPartitionedByValues(columnIdent, value, context);
            if (rest != null) {
                insertValues[i] = rest;
            }
        } else {
            insertValues[i] = value;
        }
    }
    if (!assignments.isEmpty()) {
        // resolve ON DUPLICATE KEY assignments; VALUES(col) references are
        // resolved against this row's insertValues via the valuesResolver
        valuesResolver.insertValues = insertValues;
        valuesResolver.columns = context.columns();
        Symbol[] onDupKeyAssignments = new Symbol[assignments.size()];
        valuesResolver.assignmentColumns = new ArrayList<>(assignments.size());
        expressionAnalyzer.setResolveFieldsOperation(Operation.UPDATE);
        for (int i = 0; i < assignments.size(); i++) {
            Assignment assignment = assignments.get(i);
            Reference columnName = tableRelation.resolveField((Field) expressionAnalyzer.convert(assignment.columnName(), expressionAnalysisContext));
            assert columnName != null : "columnName must not be null";
            Symbol valueSymbol = normalizer.normalize(valuesAwareExpressionAnalyzer.convert(assignment.expression(), expressionAnalysisContext), transactionContext);
            Symbol assignmentExpression = valueNormalizer.normalizeInputForReference(valueSymbol, columnName);
            onDupKeyAssignments[i] = assignmentExpression;
            // only record the column name once per assignment position
            if (valuesResolver.assignmentColumns.size() == i) {
                valuesResolver.assignmentColumns.add(columnName.ident().columnIdent().fqn());
            }
        }
        context.addOnDuplicateKeyAssignments(onDupKeyAssignments);
        context.addOnDuplicateKeyAssignmentsColumns(valuesResolver.assignmentColumns.toArray(new String[valuesResolver.assignmentColumns.size()]));
    }
    // process generated column expressions and add columns + values
    GeneratedExpressionContext ctx = new GeneratedExpressionContext(tableRelation, context, normalizer, transactionContext, referenceToLiteralContext, primaryKeyValues, insertValues, routingValue);
    processGeneratedExpressions(ctx);
    // generated expressions may have extended the row and/or set the routing value
    insertValues = ctx.insertValues;
    routingValue = ctx.routingValue;
    context.sourceMaps().add(insertValues);
    String id = idFunction.apply(primaryKeyValues);
    context.addIdAndRouting(id, routingValue);
    if (bulkIdx >= 0) {
        context.bulkIndices().add(bulkIdx);
    }
}
Also used : Symbol(io.crate.analyze.symbol.Symbol) ColumnValidationException(io.crate.exceptions.ColumnValidationException) BytesRef(org.apache.lucene.util.BytesRef)

Aggregations

Symbol (io.crate.analyze.symbol.Symbol)109 Test (org.junit.Test)51 CrateUnitTest (io.crate.test.integration.CrateUnitTest)40 Function (io.crate.analyze.symbol.Function)14 Input (io.crate.data.Input)14 Reference (io.crate.metadata.Reference)11 WhereClause (io.crate.analyze.WhereClause)10 InputColumn (io.crate.analyze.symbol.InputColumn)8 InputFactory (io.crate.operation.InputFactory)8 OrderBy (io.crate.analyze.OrderBy)7 ExpressionAnalysisContext (io.crate.analyze.expressions.ExpressionAnalysisContext)6 ExpressionAnalyzer (io.crate.analyze.expressions.ExpressionAnalyzer)6 AbstractScalarFunctionsTest (io.crate.operation.scalar.AbstractScalarFunctionsTest)6 BytesStreamOutput (org.elasticsearch.common.io.stream.BytesStreamOutput)6 StreamInput (org.elasticsearch.common.io.stream.StreamInput)6 ImmutableList (com.google.common.collect.ImmutableList)5 DocTableInfo (io.crate.metadata.doc.DocTableInfo)5 TableInfo (io.crate.metadata.table.TableInfo)5 QuerySpec (io.crate.analyze.QuerySpec)4 Literal (io.crate.analyze.symbol.Literal)4