Usage of org.apache.calcite.sql.SqlLiteral in the hazelcast/hazelcast project:
class HazelcastSqlToRelConverter, method convertJsonValueCall.
/**
 * Converts JSON_VALUE calls with extended syntax, with RETURNING clause among other things.
 * Because there is no RexNode for type reference in Calcite (see CAST implementation),
 * the type has to be instead set as the type of the parent (JSON_VALUE's RexCall), which is
 * then interpreted as the desired type of the expression.
 * <p>
 * Supported syntax:
 * JSON_VALUE(jsonArg, jsonPathArg [returning] [onEmpty|onError])
 * returning: RETURNING dataType
 * onEmpty: (DEFAULT value | NULL | ERROR) ON EMPTY
 * onError: (DEFAULT value | NULL | ERROR) ON ERROR
 *
 * @param call the validated JSON_VALUE call to convert
 * @param bb the blackboard used to convert operand sub-expressions
 * @return a call to HazelcastJsonValueFunction whose type carries the RETURNING type
 */
private RexNode convertJsonValueCall(SqlCall call, Blackboard bb) {
// Operand 0 is the JSON input, operand 1 the JSON path expression.
RexNode target = bb.convertExpression(call.operand(0));
RexNode path = bb.convertExpression(call.operand(1));
// Defaults when the corresponding clause is absent: NULL ON ERROR, NULL ON EMPTY,
// RETURNING VARCHAR, and typed NULL placeholders for the DEFAULT values.
SqlJsonValueEmptyOrErrorBehavior onError = SqlJsonValueEmptyOrErrorBehavior.NULL;
SqlJsonValueEmptyOrErrorBehavior onEmpty = SqlJsonValueEmptyOrErrorBehavior.NULL;
RelDataType returning = validator.getTypeFactory().createSqlType(SqlTypeName.VARCHAR);
RexNode defaultValueOnError = getRexBuilder().makeNullLiteral(typeFactory.createSqlType(SqlTypeName.ANY));
RexNode defaultValueOnEmpty = getRexBuilder().makeNullLiteral(typeFactory.createSqlType(SqlTypeName.ANY));
// Start at 3rd Arg
int tokenIndex = 2;
// RETURNING can only be placed at the beginning, never in the middle or the end of the list of tokens.
if (call.operandCount() > 2 && isJsonValueReturningClause(call.operand(tokenIndex))) {
// The RETURNING type is the validated type of the operand after the keyword.
returning = validator.getValidatedNodeType(call.operand(tokenIndex + 1));
tokenIndex += 2;
}
// ON EMPTY / ON ERROR may each appear at most once, in either order.
boolean onEmptyDefined = false;
boolean onErrorDefined = false;
while (tokenIndex < call.operandCount()) {
// Each clause starts with a behavior literal (DEFAULT | NULL | ERROR).
if (!(call.operand(tokenIndex) instanceof SqlLiteral)) {
throw QueryException.error(SqlErrorCode.PARSING, "Unsupported JSON_VALUE extended syntax");
}
final SqlJsonValueEmptyOrErrorBehavior behavior = (SqlJsonValueEmptyOrErrorBehavior) ((SqlLiteral) call.operand(tokenIndex)).getValue();
// Placeholder; only overwritten for DEFAULT, which carries a value expression.
RexNode defaultExpr = getRexBuilder().makeNullLiteral(typeFactory.createSqlType(SqlTypeName.ANY));
if (behavior == null) {
throw QueryException.error(SqlErrorCode.PARSING, "Failed to extract ON behavior for JSON_VALUE call");
}
// Consume the behavior token (and, for DEFAULT, its value expression) so that
// tokenIndex lands on the clause target literal (EMPTY or ERROR).
switch(behavior) {
case DEFAULT:
defaultExpr = bb.convertExpression(call.operand(tokenIndex + 1));
tokenIndex += 2;
break;
case NULL:
case ERROR:
tokenIndex++;
break;
default:
// guard against possible unsupported updates to syntax, should never be thrown.
throw QueryException.error(SqlErrorCode.PARSING, "Unsupported JSON_VALUE OnEmptyOrErrorBehavior");
}
// NOTE(review): no bounds or instanceof check before reading the target literal here;
// this relies on the parser always emitting an EMPTY/ERROR target after the
// behavior token — confirm against the JSON_VALUE grammar.
final SqlJsonEmptyOrError onTarget = (SqlJsonEmptyOrError) ((SqlLiteral) call.operand(tokenIndex)).getValue();
if (onTarget == null) {
throw QueryException.error(SqlErrorCode.PARSING, "Failed to extract ON-behavior target for JSON_VALUE call");
}
switch(onTarget) {
case EMPTY:
if (onEmptyDefined) {
throw QueryException.error(SqlErrorCode.PARSING, "Duplicate ON EMPTY clause in JSON_VALUE call");
}
if (behavior == SqlJsonValueEmptyOrErrorBehavior.DEFAULT) {
defaultValueOnEmpty = defaultExpr;
}
onEmpty = behavior;
onEmptyDefined = true;
break;
case ERROR:
if (onErrorDefined) {
throw QueryException.error(SqlErrorCode.PARSING, "Duplicate ON ERROR clause in JSON_VALUE call");
}
if (behavior == SqlJsonValueEmptyOrErrorBehavior.DEFAULT) {
defaultValueOnError = defaultExpr;
}
onError = behavior;
onErrorDefined = true;
break;
default:
// guard against possible unsupported updates to syntax, should never be thrown.
throw QueryException.error(SqlErrorCode.PARSING, "Unsupported JSON_VALUE EmptyOrErrorBehavior target");
}
// Step past the EMPTY/ERROR target literal to the next clause, if any.
tokenIndex++;
}
// The RETURNING type becomes the call's result type; the behaviors are passed
// to the function as symbol literals.
return getRexBuilder().makeCall(returning, HazelcastJsonValueFunction.INSTANCE, asList(target, path, defaultValueOnEmpty, defaultValueOnError, bb.convertLiteral(onEmpty.symbol(SqlParserPos.ZERO)), bb.convertLiteral(onError.symbol(SqlParserPos.ZERO))));
}
Usage of org.apache.calcite.sql.SqlLiteral in the apache/calcite project:
class RelOptUtil, method validateValueAgainstConstraint.
/**
 * Ensures that a source value does not violate the constraint of the target
 * column.
 *
 * @param sourceValue The insert value being validated
 * @param targetConstraint The constraint applied to sourceValue for validation
 * @param errorSupplier The function to apply when validation fails
 */
public static void validateValueAgainstConstraint(SqlNode sourceValue, RexNode targetConstraint, Supplier<CalciteContextException> errorSupplier) {
  // Only a literal insert value can be checked statically; anything else
  // cannot be guaranteed to satisfy the constraint, so reject it.
  if (!(sourceValue instanceof SqlLiteral)) {
    throw errorSupplier.get();
  }
  final SqlLiteral insertLiteral = (SqlLiteral) sourceValue;
  // Render the constraint's RexLiteral back into SqlNode form so the two
  // literals can be compared directly.
  final RexToSqlNodeConverter rexToSqlConverter =
      new RexToSqlNodeConverterImpl(new RexSqlStandardConvertletTable());
  final SqlLiteral constraintLiteral =
      (SqlLiteral) rexToSqlConverter.convertLiteral((RexLiteral) targetConstraint);
  // Unequal literals mean the value violates the constraint.
  if (!insertLiteral.equals(constraintLiteral)) {
    throw errorSupplier.get();
  }
}
Usage of org.apache.calcite.sql.SqlLiteral in the apache/calcite project:
class SqlOperatorBaseTest, method testLiteralBeyondLimit.
/**
 * Tests that CAST fails when given a value just outside the valid range for
 * that type. For example,
 *
 * <ul>
 * <li>CAST(-200 AS TINYINT) fails because the value is less than -128;
 * <li>CAST(1E-999 AS FLOAT) fails because the value underflows;
 * <li>CAST(123.4567891234567 AS FLOAT) fails because the value loses
 * precision.
 * </ul>
 */
@Test
public void testLiteralBeyondLimit() {
  tester.setFor(SqlStdOperatorTable.CAST);
  final List<RelDataType> types =
      SqlLimitsTest.getTypes(tester.getValidator().getTypeFactory());
  for (RelDataType type : types) {
    final SqlTypeName typeName = type.getSqlTypeName();
    for (Object value : getValues((BasicSqlType) type, false)) {
      final SqlLiteral literal = typeName.createLiteral(value, SqlParserPos.ZERO);
      final SqlString literalString = literal.toSqlString(AnsiSqlDialect.DEFAULT);
      // Overlarge BIGINT / 19-digit DECIMAL literals are rejected by the
      // validator itself.
      final boolean failsAtValidateTime =
          typeName == SqlTypeName.BIGINT
              || (typeName == SqlTypeName.DECIMAL && type.getPrecision() == 19);
      // Overlarge string/binary values are truncated by CAST, not rejected.
      final boolean truncatesSilently =
          typeName == SqlTypeName.CHAR
              || typeName == SqlTypeName.VARCHAR
              || typeName == SqlTypeName.BINARY
              || typeName == SqlTypeName.VARBINARY;
      if (failsAtValidateTime) {
        // Values which are too large to be literals fail at validate time.
        tester.checkFails("CAST(^" + literalString + "^ AS " + type + ")", "Numeric literal '.*' out of range", false);
      } else if (truncatesSilently) {
        // Casting overlarge string/binary values do not fail -
        // they are truncated. See testCastTruncates().
      } else {
        // Value outside legal bound should fail at runtime (not
        // validate time).
        //
        // NOTE: Because Java and Fennel calcs give
        // different errors, the pattern hedges its bets.
        tester.checkFails("CAST(" + literalString + " AS " + type + ")", "(?s).*(Overflow during calculation or cast\\.|Code=22003).*", true);
      }
    }
  }
}
Usage of org.apache.calcite.sql.SqlLiteral in the apache/calcite project:
class SqlTesterImpl, method buildQuery2.
/**
 * Builds a query that extracts all literals as columns in an underlying
 * select.
 *
 * <p>For example,</p>
 *
 * <blockquote>{@code 1 < 5}</blockquote>
 *
 * <p>becomes</p>
 *
 * <blockquote>{@code SELECT p0 < p1
 * FROM (VALUES (1, 5)) AS t(p0, p1)}</blockquote>
 *
 * <p>Null literals don't have enough type information to be extracted.
 * We push down {@code CAST(NULL AS type)} but raw nulls such as
 * {@code CASE 1 WHEN 2 THEN 'a' ELSE NULL END} are left as is.</p>
 *
 * @param expression Scalar expression
 * @return Query that evaluates a scalar expression
 */
private String buildQuery2(String expression) {
// "values (1 < 5)"
// becomes
// "select p0 < p1 from (values (1, 5)) as t(p0, p1)"
SqlNode x;
final String sql = "values (" + expression + ")";
try {
x = parseQuery(sql);
} catch (SqlParseException e) {
throw new RuntimeException(e);
}
// Collect every extractable literal; LinkedHashSet de-duplicates while
// preserving discovery order.
final Collection<SqlNode> literalSet = new LinkedHashSet<>();
x.accept(new SqlShuttle() {
// Operators whose calls are treated as opaque literal-like units; the
// shuttle does not descend into their operands.
private final List<SqlOperator> ops = ImmutableList.of(SqlStdOperatorTable.LITERAL_CHAIN, SqlStdOperatorTable.LOCALTIME, SqlStdOperatorTable.LOCALTIMESTAMP, SqlStdOperatorTable.CURRENT_TIME, SqlStdOperatorTable.CURRENT_TIMESTAMP);
@Override
public SqlNode visit(SqlLiteral literal) {
// Skip raw NULLs (no type information) and SYMBOL literals (keywords).
if (!isNull(literal) && literal.getTypeName() != SqlTypeName.SYMBOL) {
literalSet.add(literal);
}
return literal;
}
@Override
public SqlNode visit(SqlCall call) {
final SqlOperator operator = call.getOperator();
if (operator == SqlStdOperatorTable.CAST && isNull(call.operand(0))) {
// CAST(NULL AS type) carries a usable type; extract the whole call.
literalSet.add(call);
return call;
} else if (ops.contains(operator)) {
// Treat the whole call as a literal; do not visit its operands.
return call;
} else {
return super.visit(call);
}
}
// True for a raw NULL literal, i.e. one with no type information.
private boolean isNull(SqlNode sqlNode) {
return sqlNode instanceof SqlLiteral && ((SqlLiteral) sqlNode).getTypeName() == SqlTypeName.NULL;
}
});
final List<SqlNode> nodes = new ArrayList<>(literalSet);
// Sort by DESCENDING source position (note the negated comparisons) so the
// splice loop below rewrites the string back-to-front; replacing a later
// literal first never invalidates the offsets of the earlier ones.
Collections.sort(nodes, new Comparator<SqlNode>() {
public int compare(SqlNode o1, SqlNode o2) {
final SqlParserPos pos0 = o1.getParserPosition();
final SqlParserPos pos1 = o2.getParserPosition();
int c = -Utilities.compare(pos0.getLineNum(), pos1.getLineNum());
if (c != 0) {
return c;
}
return -Utilities.compare(pos0.getColumnNum(), pos1.getColumnNum());
}
});
String sql2 = sql;
final List<Pair<String, String>> values = new ArrayList<>();
int p = 0;
// Replace each literal's source text with a parameter name pN, remembering
// (original text, parameter) pairs. Offsets are computed against the
// ORIGINAL sql; valid because replacements proceed from the end backwards.
for (SqlNode literal : nodes) {
final SqlParserPos pos = literal.getParserPosition();
final int start = SqlParserUtil.lineColToIndex(sql, pos.getLineNum(), pos.getColumnNum());
final int end = SqlParserUtil.lineColToIndex(sql, pos.getEndLineNum(), pos.getEndColumnNum()) + 1;
String param = "p" + (p++);
values.add(Pair.of(sql2.substring(start, end), param));
sql2 = sql2.substring(0, start) + param + sql2.substring(end);
}
if (values.isEmpty()) {
// No literals were found; supply a dummy column so the VALUES row is non-empty.
values.add(Pair.of("1", "p0"));
}
// Strip the "values (" wrapper and ")" suffix, then re-emit as a SELECT over
// a VALUES row of the extracted literal texts, aliased t(p0, p1, ...).
return "select " + sql2.substring("values (".length(), sql2.length() - 1) + " from (values (" + Util.commaList(Pair.left(values)) + ")) as t(" + Util.commaList(Pair.right(values)) + ")";
}
Usage of org.apache.calcite.sql.SqlLiteral in the apache/flink project:
class SqlValidatorImpl, method validateMatchRecognize.
/**
 * Validates a MATCH_RECOGNIZE call: its PARTITION BY and ORDER BY columns, the
 * row pattern, the WITHIN interval, DEFINE clauses, SUBSET definitions,
 * AFTER ... SKIP TO target and MEASURES — and derives the output row type for
 * the MATCH_RECOGNIZE namespace.
 *
 * @param call the MATCH_RECOGNIZE call to validate
 */
@Override
public void validateMatchRecognize(SqlCall call) {
final SqlMatchRecognize matchRecognize = (SqlMatchRecognize) call;
final MatchRecognizeScope scope = (MatchRecognizeScope) getMatchRecognizeScope(matchRecognize);
final MatchRecognizeNamespace ns = getNamespace(call).unwrap(MatchRecognizeNamespace.class);
// This namespace must not have been validated yet.
assert ns.rowType == null;
// rows per match
final SqlLiteral rowsPerMatch = matchRecognize.getRowsPerMatch();
// ALL ROWS PER MATCH emits every input row, so the output type must also
// carry the ordering and pass-through input columns (handled below).
final boolean allRows = rowsPerMatch != null && rowsPerMatch.getValue() == SqlMatchRecognize.RowsPerMatchOption.ALL_ROWS;
final RelDataTypeFactory.Builder typeBuilder = typeFactory.builder();
// parse PARTITION BY column
SqlNodeList partitionBy = matchRecognize.getPartitionList();
if (partitionBy != null) {
for (SqlNode node : partitionBy) {
SqlIdentifier identifier = (SqlIdentifier) node;
identifier.validate(this, scope);
RelDataType type = deriveType(scope, identifier);
// NOTE(review): names.get(1) assumes a two-part qualified identifier
// (e.g. table.column) — confirm simple identifiers cannot reach here.
String name = identifier.names.get(1);
typeBuilder.add(name, type);
}
}
// parse ORDER BY column
SqlNodeList orderBy = matchRecognize.getOrderList();
if (orderBy != null) {
for (SqlNode node : orderBy) {
node.validate(this, scope);
SqlIdentifier identifier;
// An ORDER BY item may be wrapped in a call (e.g. DESC); the column
// identifier is then its first operand.
if (node instanceof SqlBasicCall) {
identifier = (SqlIdentifier) ((SqlBasicCall) node).getOperands()[0];
} else {
identifier = (SqlIdentifier) node;
}
// With ALL ROWS PER MATCH, ordering columns also appear in the output.
if (allRows) {
RelDataType type = deriveType(scope, identifier);
String name = identifier.names.get(1);
if (!typeBuilder.nameExists(name)) {
typeBuilder.add(name, type);
}
}
}
}
// With ALL ROWS PER MATCH, every remaining input column not already added is
// passed through to the output row type.
if (allRows) {
final SqlValidatorNamespace sqlNs = getNamespace(matchRecognize.getTableRef());
final RelDataType inputDataType = sqlNs.getRowType();
for (RelDataTypeField fs : inputDataType.getFieldList()) {
if (!typeBuilder.nameExists(fs.getName())) {
typeBuilder.add(fs);
}
}
}
// retrieve pattern variables used in pattern and subset
SqlNode pattern = matchRecognize.getPattern();
PatternVarVisitor visitor = new PatternVarVisitor(scope);
pattern.accept(visitor);
// Validate the WITHIN interval: it must be non-negative, requires an ORDER BY,
// and the first ordering column must be a (local-zone) timestamp.
SqlLiteral interval = matchRecognize.getInterval();
if (interval != null) {
interval.validate(this, scope);
if (((SqlIntervalLiteral) interval).signum() < 0) {
throw newValidationError(interval, RESOURCE.intervalMustBeNonNegative(interval.toValue()));
}
if (orderBy == null || orderBy.size() == 0) {
throw newValidationError(interval, RESOURCE.cannotUseWithinWithoutOrderBy());
}
SqlNode firstOrderByColumn = orderBy.getList().get(0);
SqlIdentifier identifier;
if (firstOrderByColumn instanceof SqlBasicCall) {
identifier = (SqlIdentifier) ((SqlBasicCall) firstOrderByColumn).getOperands()[0];
} else {
identifier = (SqlIdentifier) firstOrderByColumn;
}
RelDataType firstOrderByColumnType = deriveType(scope, identifier);
if (!(firstOrderByColumnType.getSqlTypeName() == SqlTypeName.TIMESTAMP || firstOrderByColumnType.getSqlTypeName() == SqlTypeName.TIMESTAMP_WITH_LOCAL_TIME_ZONE)) {
throw newValidationError(interval, RESOURCE.firstColumnOfOrderByMustBeTimestamp());
}
// Expand and type the interval expression, recording its validated type.
SqlNode expand = expand(interval, scope);
RelDataType type = deriveType(scope, expand);
setValidatedNodeType(interval, type);
}
validateDefinitions(matchRecognize, scope);
// Validate SUBSET clauses: the subset name must not collide with an existing
// pattern variable, and every member on the right must already be defined.
SqlNodeList subsets = matchRecognize.getSubsetList();
if (subsets != null && subsets.size() > 0) {
for (SqlNode node : subsets) {
List<SqlNode> operands = ((SqlCall) node).getOperandList();
String leftString = ((SqlIdentifier) operands.get(0)).getSimple();
if (scope.getPatternVars().contains(leftString)) {
throw newValidationError(operands.get(0), RESOURCE.patternVarAlreadyDefined(leftString));
}
scope.addPatternVar(leftString);
for (SqlNode right : (SqlNodeList) operands.get(1)) {
SqlIdentifier id = (SqlIdentifier) right;
if (!scope.getPatternVars().contains(id.getSimple())) {
throw newValidationError(id, RESOURCE.unknownPattern(id.getSimple()));
}
scope.addPatternVar(id.getSimple());
}
}
}
// validate AFTER ... SKIP TO
final SqlNode skipTo = matchRecognize.getAfter();
if (skipTo instanceof SqlCall) {
final SqlCall skipToCall = (SqlCall) skipTo;
final SqlIdentifier id = skipToCall.operand(0);
// The skip target must be a known pattern variable (or subset name).
if (!scope.getPatternVars().contains(id.getSimple())) {
throw newValidationError(id, RESOURCE.unknownPattern(id.getSimple()));
}
}
// MEASURES columns extend the output row type (skipping duplicates).
List<Map.Entry<String, RelDataType>> measureColumns = validateMeasure(matchRecognize, scope, allRows);
for (Map.Entry<String, RelDataType> c : measureColumns) {
if (!typeBuilder.nameExists(c.getKey())) {
typeBuilder.add(c.getKey(), c.getValue());
}
}
final RelDataType rowType = typeBuilder.build();
// Without MEASURES the output row type is simply the input's row type.
if (matchRecognize.getMeasureList().size() == 0) {
ns.setType(getNamespace(matchRecognize.getTableRef()).getRowType());
} else {
ns.setType(rowType);
}
}
Aggregations