Usage example of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.validate.SqlValidator in the druid project (druid-io): class DruidPlanner, method validate().
/**
 * Validates a SQL query and populates {@link PlannerContext#getResourceActions()}.
 *
 * @return set of {@link Resource} corresponding to any Druid datasources or views which are taking part in the query.
 */
public ValidationResult validate() throws SqlParseException, ValidationException {
  resetPlanner();
  final ParsedNodes parsedNodes = ParsedNodes.create(planner.parse(plannerContext.getSql()));
  final SqlValidator sqlValidator = getValidator();
  final SqlNode validatedNode;
  try {
    // Substitute dynamic parameters before handing the tree to the validator.
    final SqlNode rewrittenQuery = rewriteDynamicParameters(parsedNodes.getQueryNode());
    validatedNode = sqlValidator.validate(rewrittenQuery);
  } catch (RuntimeException e) {
    // Calcite surfaces validation problems as unchecked exceptions; convert to the
    // checked type callers expect, preserving the cause.
    throw new ValidationException(e);
  }
  // Walk the validated tree to collect every datasource/view the query reads.
  final SqlResourceCollectorShuttle collector = new SqlResourceCollectorShuttle(sqlValidator, plannerContext);
  validatedNode.accept(collector);
  final Set<ResourceAction> actions = new HashSet<>(collector.getResourceActions());
  // An INSERT additionally needs WRITE permission on its target datasource.
  if (parsedNodes.getInsertNode() != null) {
    final String targetDataSource = validateAndGetDataSourceForInsert(parsedNodes.getInsertNode());
    actions.add(new ResourceAction(new Resource(targetDataSource, ResourceType.DATASOURCE), Action.WRITE));
  }
  plannerContext.setResourceActions(actions);
  return new ValidationResult(actions);
}
Usage example of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.validate.SqlValidator in the druid project (druid-io): class DruidPlanner, method prepare().
/**
 * Prepare an SQL query for execution, including some initial parsing and validation and any dynamic parameter type
 * resolution, to support prepared statements via JDBC.
 *
 * In some future this could perhaps re-use some of the work done by {@link #validate()}
 * instead of repeating it, but that day is not today.
 *
 * @return a {@link PrepareResult} carrying the row type of the result and the row type of any dynamic parameters
 * @throws SqlParseException if the SQL text cannot be parsed
 * @throws ValidationException if the query fails validation
 * @throws RelConversionException if the validated query cannot be converted to a relational expression
 */
public PrepareResult prepare() throws SqlParseException, ValidationException, RelConversionException {
// Start from a clean planner state; parse the SQL fresh rather than reusing validate()'s work.
resetPlanner();
final ParsedNodes parsed = ParsedNodes.create(planner.parse(plannerContext.getSql()));
final SqlNode validatedQueryNode = planner.validate(parsed.getQueryNode());
final RelRoot rootQueryRel = planner.rel(validatedQueryNode);
final SqlValidator validator = getValidator();
final RelDataTypeFactory typeFactory = rootQueryRel.rel.getCluster().getTypeFactory();
// NOTE(review): the node was already validated by planner.validate() above, yet it is validated
// a second time here just to obtain the parameter row type. Presumably getValidator() and the
// planner's internal validator are configured identically — confirm before attempting to
// collapse this into validator.getParameterRowType(validatedQueryNode).
final RelDataType parameterTypes = validator.getParameterRowType(validator.validate(validatedQueryNode));
final RelDataType returnedRowType;
if (parsed.getExplainNode() != null) {
// EXPLAIN queries return a fixed explanation schema rather than the query's own row type.
returnedRowType = getExplainStructType(typeFactory);
} else {
returnedRowType = buildQueryMaker(rootQueryRel, parsed.getInsertNode()).getResultType();
}
return new PrepareResult(returnedRowType, parameterTypes);
}
Usage example of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.validate.SqlValidator in the hazelcast project: class HazelcastCallBinding, method newValidationSignatureError().
/**
 * Builds the validation error raised when a call's operand types do not match any
 * signature of its operator, using a function-specific message for function-style
 * syntax and a generic operator message otherwise.
 */
@Override
public CalciteException newValidationSignatureError() {
    SqlOperator op = getOperator();
    SqlValidator sqlValidator = getValidator();
    SqlCall sqlCall = getCall();
    String signature = getOperandTypes(sqlValidator, sqlCall, getScope());
    String quotedName = '\'' + op.getName() + '\'';
    final Resources.ExInst<SqlValidatorException> signatureError;
    switch (op.getSyntax()) {
        case FUNCTION:
        case FUNCTION_STAR:
        case FUNCTION_ID:
            // Function-style invocations (f(x), COUNT(*), CURRENT_DATE) get the
            // function-specific message.
            signatureError = RESOURCES.invalidFunctionOperands(quotedName, signature);
            break;
        default:
            // Everything else (binary, prefix, postfix, special syntax).
            signatureError = RESOURCES.invalidOperatorOperands(quotedName, signature);
            break;
    }
    return sqlValidator.newValidationError(sqlCall, signatureError);
}
Usage example of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.validate.SqlValidator in the hazelcast project: class WindowUtils, method getOrderingColumnType().
/**
 * Return the datatype of the target column referenced by the DESCRIPTOR argument.
 */
public static RelDataType getOrderingColumnType(SqlCallBinding binding, int orderingColumnParameterIndex) {
    SqlNode inputNode = binding.operand(0);
    SqlCall descriptorCall = (SqlCall) unwrapFunctionOperand(binding.operand(orderingColumnParameterIndex));
    List<SqlNode> descriptorOperands = descriptorCall.getOperandList();
    // Exactly one ordering column is supported.
    if (descriptorOperands.size() != 1) {
        throw SqlUtil.newContextException(descriptorCall.getParserPosition(), ValidatorResource.RESOURCE.mustUseSingleOrderingColumn());
    }
    // The DESCRIPTOR call's sole operand is an SqlIdentifier naming the column.
    SqlIdentifier columnIdentifier = (SqlIdentifier) descriptorOperands.get(0);
    String columnName = columnIdentifier.getSimple();
    SqlValidator sqlValidator = binding.getValidator();
    // Resolve the column against the input row type, honoring the catalog's case sensitivity.
    boolean caseSensitive = sqlValidator.getCatalogReader().nameMatcher().isCaseSensitive();
    RelDataTypeField columnField = sqlValidator.getValidatedNodeType(inputNode).getField(columnName, caseSensitive, false);
    if (columnField == null) {
        throw SqlUtil.newContextException(descriptorCall.getParserPosition(), RESOURCE.unknownIdentifier(columnName));
    }
    return columnField.getType();
}
Usage example of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.validate.SqlValidator in the hazelcast project: class SqlExtendedInsert, method validate().
/**
 * Validates this extended INSERT: resolves the target table, fills in an explicit column
 * list (excluding hidden columns) when the statement omitted one, delegates standard
 * validation to the superclass, and then rejects writes to top-level OBJECT map fields.
 *
 * @param validator the validator performing this pass
 * @param scope the scope in which this statement is validated
 */
@Override
public void validate(SqlValidator validator, SqlValidatorScope scope) {
SqlValidatorTable table0 = validator.getCatalogReader().getTable(tableNames());
if (table0 == null) {
// Let standard validation produce the proper "Object not found" error for us.
super.validate(validator, scope);
// should have failed with "Object not found"
// NOTE(review): if super.validate() ever returns normally here and assertions are
// disabled, execution falls through and table0.unwrap below throws NPE — confirm
// super.validate() is guaranteed to throw for a missing table.
assert false;
}
HazelcastTable table = table0.unwrap(HazelcastTable.class);
if (getTargetColumnList() == null) {
// No explicit column list: synthesize one from the row type, skipping hidden columns,
// so the superclass validates against the visible columns only.
RelDataType rowType = table.getRowType(validator.getTypeFactory());
List<SqlNode> columnListWithoutHidden = new ArrayList<>();
for (RelDataTypeField f : rowType.getFieldList()) {
if (!table.isHidden(f.getName())) {
columnListWithoutHidden.add(new SqlIdentifier(f.getName(), SqlParserPos.ZERO));
}
}
overrideColumnList = new SqlNodeList(columnListWithoutHidden, SqlParserPos.ZERO);
}
super.validate(validator, scope);
// Index target fields by name for the per-column checks below.
Map<String, TableField> fieldsMap = table.getTarget().getFields().stream().collect(Collectors.toMap(TableField::getName, f -> f));
for (SqlNode fieldNode : getTargetColumnList()) {
TableField field = fieldsMap.get(((SqlIdentifier) fieldNode).getSimple());
if (field instanceof MapTableField) {
QueryPath path = ((MapTableField) field).getPath();
// A null path with OBJECT type denotes the map entry's top-level key/value itself,
// which cannot be a direct INSERT target.
if (path.getPath() == null && field.getType().getTypeFamily() == QueryDataTypeFamily.OBJECT) {
throw validator.newValidationError(fieldNode, RESOURCE.insertToTopLevelObject());
}
}
}
}
Aggregations