Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project flink by apache.
The class SqlValidatorImpl, method getNamespace:
private SqlValidatorNamespace getNamespace(SqlNode node, SqlValidatorScope scope) {
    if (node instanceof SqlIdentifier && scope instanceof DelegatingScope) {
        final SqlIdentifier id = (SqlIdentifier) node;
        final DelegatingScope idScope = (DelegatingScope) ((DelegatingScope) scope).getParent();
        return getNamespace(id, idScope);
    } else if (node instanceof SqlCall) {
        // Handle extended identifiers.
        final SqlCall call = (SqlCall) node;
        switch (call.getOperator().getKind()) {
            case TABLE_REF:
                return getNamespace(call.operand(0), scope);
            case EXTEND:
                final SqlNode operand0 = call.getOperandList().get(0);
                final SqlIdentifier identifier =
                        operand0.getKind() == SqlKind.TABLE_REF
                                ? ((SqlCall) operand0).operand(0)
                                : (SqlIdentifier) operand0;
                final DelegatingScope idScope = (DelegatingScope) scope;
                return getNamespace(identifier, idScope);
            case AS:
                final SqlNode nested = call.getOperandList().get(0);
                switch (nested.getKind()) {
                    case TABLE_REF:
                    case EXTEND:
                        return getNamespace(nested, scope);
                }
                break;
        }
    }
    return getNamespace(node);
}
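This helper resolves a node in the FROM clause to the namespace the validator registered for it: TABLE_REF, EXTEND, and AS wrappers are peeled off until the underlying SqlIdentifier is reached, and identifiers are looked up via the parent of the delegating scope. Below is a minimal sketch of the same unwrapping idea, written against the plain Apache Calcite API (not the Beam-vendored package) with a made-up query:

import org.apache.calcite.sql.SqlCall;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlSelect;
import org.apache.calcite.sql.parser.SqlParser;

public class UnwrapFromClause {
    public static void main(String[] args) throws Exception {
        SqlNode query = SqlParser.create("SELECT * FROM emp AS e").parseQuery();
        SqlNode from = ((SqlSelect) query).getFrom();
        // "emp AS e" parses as an AS call; operand 0 is the underlying table reference.
        if (from.getKind() == SqlKind.AS) {
            from = ((SqlCall) from).operand(0);
        }
        SqlIdentifier table = (SqlIdentifier) from;
        // Prints [EMP] with the default parser config (unquoted identifiers are upper-cased).
        System.out.println(table.names);
    }
}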
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project flink by apache.
The class ParserImpl, method parseIdentifier:
@Override
public UnresolvedIdentifier parseIdentifier(String identifier) {
    CalciteParser parser = calciteParserSupplier.get();
    SqlIdentifier sqlIdentifier = parser.parseIdentifier(identifier);
    return UnresolvedIdentifier.of(sqlIdentifier.names);
}
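The parser turns an identifier string (possibly a qualified name) into a SqlIdentifier and hands its name segments to UnresolvedIdentifier.of(...). As a hypothetical illustration of what those segments look like, independent of Flink's CalciteParser and using the plain Calcite classes:

import java.util.Arrays;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.parser.SqlParserPos;

public class IdentifierParts {
    public static void main(String[] args) {
        // A qualified name is just the list of its segments.
        SqlIdentifier id =
                new SqlIdentifier(Arrays.asList("cat", "db", "tbl"), SqlParserPos.ZERO);
        System.out.println(id.names);      // [cat, db, tbl]
        System.out.println(id.isSimple()); // false: more than one segment
    }
}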
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project beam by apache.
The class SqlOperators, method createUdfOperator:
private static SqlUserDefinedFunction createUdfOperator(
        String name, Method method, final SqlSyntax syntax, String funGroup, String jarPath) {
    Function function = ZetaSqlScalarFunctionImpl.create(method, funGroup, jarPath);
    final RelDataTypeFactory typeFactory = createTypeFactory();

    // Derive the argument types and their SQL type families from the function's parameters.
    List<RelDataType> argTypes = new ArrayList<>();
    List<SqlTypeFamily> typeFamilies = new ArrayList<>();
    for (FunctionParameter o : function.getParameters()) {
        final RelDataType type = o.getType(typeFactory);
        argTypes.add(type);
        typeFamilies.add(Util.first(type.getSqlTypeName().getFamily(), SqlTypeFamily.ANY));
    }

    final FamilyOperandTypeChecker typeChecker =
            OperandTypes.family(typeFamilies, i -> function.getParameters().get(i).isOptional());
    final List<RelDataType> paramTypes = toSql(typeFactory, argTypes);

    // The operator's name is wrapped in a SqlIdentifier; the anonymous subclass only
    // overrides the syntax used when the operator is unparsed.
    return new SqlUserDefinedFunction(
            new SqlIdentifier(name, SqlParserPos.ZERO),
            infer((ScalarFunction) function),
            InferTypes.explicit(argTypes),
            typeChecker,
            paramTypes,
            function) {
        @Override
        public SqlSyntax getSyntax() {
            return syntax;
        }
    };
}
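The SqlIdentifier here only carries the operator's name (at SqlParserPos.ZERO, since a UDF has no source position), while the operand type checker is built from each parameter's SQL type family, falling back to ANY when a type has no family. A minimal sketch of that per-parameter mapping, using plain Calcite classes rather than the Beam-vendored package:

import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeSystem;
import org.apache.calcite.sql.type.SqlTypeFactoryImpl;
import org.apache.calcite.sql.type.SqlTypeFamily;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.Util;

public class TypeFamilyMapping {
    public static void main(String[] args) {
        RelDataTypeFactory typeFactory = new SqlTypeFactoryImpl(RelDataTypeSystem.DEFAULT);
        RelDataType intType = typeFactory.createSqlType(SqlTypeName.INTEGER);
        // INTEGER belongs to the NUMERIC family; types without a family fall back to ANY.
        SqlTypeFamily family =
                Util.first(intType.getSqlTypeName().getFamily(), SqlTypeFamily.ANY);
        System.out.println(family); // NUMERIC
    }
}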
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project beam by apache.
The class SqlSetOptionBeam, method execute:
@Override
public void execute(CalcitePrepare.Context context) {
    final SqlIdentifier name = getName();
    final SqlNode value = getValue();
    final Pair<CalciteSchema, String> pair = SqlDdlNodes.schema(context, true, name);
    if (!(pair.left.schema instanceof BeamCalciteSchema)) {
        throw SqlUtil.newContextException(
                name.getParserPosition(),
                RESOURCE.internal("Schema is not instanceof BeamCalciteSchema"));
    }
    BeamCalciteSchema schema = (BeamCalciteSchema) pair.left.schema;
    if (value != null) {
        // SET <option> = <value>
        schema.setPipelineOption(pair.right, SqlDdlNodes.getString(value));
    } else if ("ALL".equals(pair.right)) {
        // RESET ALL
        schema.removeAllPipelineOptions();
    } else {
        // RESET <option>
        schema.removePipelineOption(pair.right);
    }
}
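The identifier names the pipeline option to change: a non-null value corresponds to SET, a null value with the special name ALL resets every option, and a null value otherwise resets a single option. A self-contained, hypothetical sketch of those three branches, with a plain Map standing in for BeamCalciteSchema's option storage:

import java.util.HashMap;
import java.util.Map;

public class PipelineOptionsSketch {
    private final Map<String, String> options = new HashMap<>();

    // Mirrors execute(): SET key = value, RESET ALL, and RESET key.
    void apply(String key, String valueOrNull) {
        if (valueOrNull != null) {
            options.put(key, valueOrNull);  // SET foo = 'bar'
        } else if ("ALL".equals(key)) {
            options.clear();                // RESET ALL
        } else {
            options.remove(key);            // RESET foo
        }
    }
}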
Use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.sql.SqlIdentifier in project beam by apache.
The class SqlDdlNodes, method schema:
/** Returns the schema in which to create an object. */
static Pair<CalciteSchema, String> schema(
        CalcitePrepare.Context context, boolean mutable, SqlIdentifier id) {
    final List<String> path;
    if (id.isSimple()) {
        // Simple name: create the object in the default schema.
        path = context.getDefaultSchemaPath();
    } else {
        // Compound name: everything but the last segment names the schema.
        path = Util.skipLast(id.names);
    }
    CalciteSchema schema = mutable ? context.getMutableRootSchema() : context.getRootSchema();
    for (String p : path) {
        schema = schema.getSubSchema(p, true);
        if (schema == null) {
            throw new AssertionError(
                    String.format("Got null sub-schema for path '%s' in %s", p, path));
        }
    }
    return Pair.of(schema, name(id));
}
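For a simple identifier the object lands in the default schema; for a compound identifier, all segments except the last select the target (sub-)schema and the last segment becomes the object name. A minimal sketch of that split using Calcite's SqlIdentifier and Util helpers (the names are made up for illustration):

import java.util.Arrays;
import java.util.List;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.Util;

public class SplitIdentifier {
    public static void main(String[] args) {
        // A compound name such as my_schema.my_table; a simple name would
        // instead fall back to the context's default schema path.
        SqlIdentifier id =
                new SqlIdentifier(Arrays.asList("my_schema", "my_table"), SqlParserPos.ZERO);
        List<String> schemaPath = Util.skipLast(id.names); // [my_schema]
        String objectName = Util.last(id.names);           // my_table
        System.out.println(schemaPath + " -> " + objectName);
    }
}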