use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.CalciteSchema in project drill by axbaretto.
the class DynamicRootSchema method getSubSchema.
@Override
public CalciteSchema getSubSchema(String schemaName, boolean caseSensitive) {
  CalciteSchema retSchema = getSubSchemaMap().get(schemaName);
  if (retSchema != null) {
    return retSchema;
  }
  loadSchemaFactory(schemaName, caseSensitive);
  retSchema = getSubSchemaMap().get(schemaName);
  return retSchema;
}
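The pattern above is check-then-load: return the sub-schema from the cached map if present, otherwise ask the schema factory to load it and look the name up again. A minimal sketch of a caller resolving a sub-schema against a plain (non-vendored) Calcite root schema; the "dfs" schema name and the eager registration are illustrative only, since DynamicRootSchema would instead populate the map lazily in loadSchemaFactory():

import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.schema.impl.AbstractSchema;

public class SubSchemaLookupSketch {
  public static void main(String[] args) {
    CalciteSchema root = CalciteSchema.createRootSchema(false);
    // Register a schema up front; a dynamic root schema would create it
    // on demand when the first lookup misses.
    root.add("dfs", new AbstractSchema());
    CalciteSchema sub = root.getSubSchema("dfs", false);
    System.out.println(sub != null ? "resolved sub-schema 'dfs'" : "not found");
  }
}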
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.CalciteSchema in project drill by apache.
the class DrillCalciteCatalogReader method isValidSchema.
/**
 * Checks that the schema provided is a valid schema. The schema is considered valid
 * when it resolves under one of the configured schema paths, or when no schema is
 * indicated (only one element in the names list).
 *
 * @param names list of schema and table names; the table name is always the last element
 * @throws UserException if the schema is not valid
 */
void isValidSchema(List<String> names) throws UserException {
  List<String> schemaPath = Util.skipLast(names);
  for (List<String> currentSchema : getSchemaPaths()) {
    List<String> fullSchemaPath = new ArrayList<>(currentSchema);
    fullSchemaPath.addAll(schemaPath);
    CalciteSchema schema = SqlValidatorUtil.getSchema(getRootSchema(), fullSchemaPath, nameMatcher());
    if (schema != null) {
      return;
    }
  }
  SchemaUtilites.throwSchemaNotFoundException(defaultSchemaSupplier.get(), schemaPath);
}
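The loop prepends each configured schema path to the user-supplied prefix and accepts the first combination that resolves. A small stand-alone sketch of that prefix search using plain Java collections; the schemaExists helper is a hypothetical stand-in for SqlValidatorUtil.getSchema(...) returning non-null:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class SchemaPrefixSearchSketch {
  // Hypothetical stand-in for SqlValidatorUtil.getSchema(...) != null.
  static boolean schemaExists(List<String> path) {
    return Arrays.asList("dfs", "tmp").equals(path);
  }

  public static void main(String[] args) {
    List<String> names = Arrays.asList("tmp", "my_table");          // schema.table
    List<String> schemaPath = names.subList(0, names.size() - 1);   // drop the table name (Util.skipLast equivalent)
    for (List<String> currentSchema : Arrays.asList(Arrays.asList("dfs"), Arrays.<String>asList())) {
      List<String> fullSchemaPath = new ArrayList<>(currentSchema);
      fullSchemaPath.addAll(schemaPath);
      if (schemaExists(fullSchemaPath)) {
        System.out.println("resolved under " + fullSchemaPath);
        return;
      }
    }
    System.out.println("schema not found: " + schemaPath);
  }
}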
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.CalciteSchema in project beam by apache.
the class BeamZetaSqlCatalogTest method rejectsScalarFunctionImplWithUnsupportedParameterType.
@Test
public void rejectsScalarFunctionImplWithUnsupportedParameterType() throws NoSuchMethodException {
  JdbcConnection jdbcConnection = createJdbcConnection();
  SchemaPlus calciteSchema = jdbcConnection.getCurrentSchemaPlus();
  Method method = TakesArrayTimeFn.class.getMethod("eval", List.class);
  calciteSchema.add("take_array", ScalarFunctionImpl.create(method));
  thrown.expect(UnsupportedOperationException.class);
  thrown.expectMessage("Calcite type TIME not allowed in function take_array");
  BeamZetaSqlCatalog.create(calciteSchema, jdbcConnection.getTypeFactory(), SqlAnalyzer.baseAnalyzerOptions());
}
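The test registers a plain Java method as a Calcite scalar function and then expects catalog creation to reject the unsupported TIME parameter type. A hedged sketch of the registration step on its own, using non-vendored Calcite packages and an illustrative function whose String parameter Calcite does accept:

import java.lang.reflect.Method;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.impl.ScalarFunctionImpl;

public class RegisterScalarFnSketch {
  // Illustrative UDF; the String parameter maps cleanly to a Calcite type.
  public static class UpperFn {
    public String eval(String s) {
      return s == null ? null : s.toUpperCase();
    }
  }

  public static void main(String[] args) throws NoSuchMethodException {
    SchemaPlus schema = CalciteSchema.createRootSchema(false).plus();
    Method method = UpperFn.class.getMethod("eval", String.class);
    schema.add("my_upper", ScalarFunctionImpl.create(method));
    System.out.println("registered overloads: " + schema.getFunctions("my_upper").size());
  }
}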
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.CalciteSchema in project beam by apache.
the class BeamZetaSqlCatalog method addTableToLeafCatalog.
/**
 * Assumes the last element in tablePath is a table name and everything before it is
 * catalogs, so the logic is to create nested catalogs down to the last level and then
 * add a table at that level.
 *
 * <p>The table schema is extracted from the Calcite schema based on the table name
 * resolution strategy, e.g. either by drilling down the schema.getSubSchema() path or by
 * joining the table name with dots to construct a single compound identifier (e.g. the
 * Data Catalog use case).
 */
private void addTableToLeafCatalog(List<String> tablePath, QueryTrait queryTrait) {
  SimpleCatalog leafCatalog = createNestedCatalogs(zetaSqlCatalog, tablePath);
  org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.schema.Table calciteTable =
      TableResolution.resolveCalciteTable(calciteSchema, tablePath);
  if (calciteTable == null) {
    throw new ZetaSqlException("Wasn't able to resolve the path " + tablePath + " in schema: " + calciteSchema.getName());
  }
  RelDataType rowType = calciteTable.getRowType(typeFactory);
  TableResolution.SimpleTableWithPath tableWithPath = TableResolution.SimpleTableWithPath.of(tablePath);
  queryTrait.addResolvedTable(tableWithPath);
  addFieldsToTable(tableWithPath, rowType);
  leafCatalog.addSimpleTable(tableWithPath.getTable());
}
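One of the resolution strategies mentioned in the comment is drilling down getSubSchema() along the table path and looking the table up at the leaf. A minimal sketch of that walk with plain Calcite packages; it is not the Beam TableResolution implementation, and it prints null here because nothing is registered under the illustrative path:

import java.util.Arrays;
import java.util.List;
import org.apache.calcite.jdbc.CalciteSchema;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.schema.Table;

public class TableDrillDownSketch {
  static Table resolve(SchemaPlus schema, List<String> tablePath) {
    SchemaPlus current = schema;
    // Everything before the last element is treated as nested schemas.
    for (String subSchema : tablePath.subList(0, tablePath.size() - 1)) {
      current = current.getSubSchema(subSchema);
      if (current == null) {
        return null;
      }
    }
    return current.getTable(tablePath.get(tablePath.size() - 1));
  }

  public static void main(String[] args) {
    SchemaPlus root = CalciteSchema.createRootSchema(false).plus();
    System.out.println(resolve(root, Arrays.asList("catalog", "my_table")));
  }
}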
use of org.apache.beam.vendor.calcite.v1_28_0.org.apache.calcite.jdbc.CalciteSchema in project beam by apache.
the class SqlCreateFunction method execute.
@Override
public void execute(CalcitePrepare.Context context) {
  final Pair<CalciteSchema, String> pair = SqlDdlNodes.schema(context, true, functionName);
  SchemaPlus schema = pair.left.plus();
  String lastName = pair.right;
  if (!schema.getFunctions(lastName).isEmpty()) {
    throw SqlUtil.newContextException(functionName.getParserPosition(), RESOURCE.internal(String.format("Function %s is already defined.", lastName)));
  }
  JavaUdfLoader udfLoader = new JavaUdfLoader();
  // TODO(BEAM-12355) Support qualified function names.
  List<String> functionPath = ImmutableList.of(lastName);
  if (!(jarPath instanceof SqlCharStringLiteral)) {
    throw SqlUtil.newContextException(jarPath.getParserPosition(), RESOURCE.internal("Jar path is not instanceof SqlCharStringLiteral."));
  }
  String unquotedJarPath = ((SqlCharStringLiteral) jarPath).getNlsString().getValue();
  if (isAggregate) {
    // Try loading the aggregate function just to make sure it exists. LazyAggregateCombineFn
    // will need to fetch it again at runtime.
    udfLoader.loadAggregateFunction(functionPath, unquotedJarPath);
    LazyAggregateCombineFn<?, ?, ?> combineFn = new LazyAggregateCombineFn<>(functionPath, unquotedJarPath);
    schema.add(lastName, combineFn.getUdafImpl());
  } else {
    ScalarFn scalarFn = udfLoader.loadScalarFunction(functionPath, unquotedJarPath);
    Method method = ScalarFnReflector.getApplyMethod(scalarFn);
    Function function = ScalarFunctionImpl.create(method, unquotedJarPath);
    schema.add(lastName, function);
  }
}
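Before loading anything from the jar, execute() insists that the jar path node is a SqlCharStringLiteral and unquotes it through its NlsString. A small sketch of that unquoting step in isolation, with plain Calcite packages and an illustrative path value:

import org.apache.calcite.sql.SqlCharStringLiteral;
import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.parser.SqlParserPos;

public class JarPathLiteralSketch {
  public static void main(String[] args) {
    SqlNode jarPath = SqlLiteral.createCharString("/tmp/udfs.jar", SqlParserPos.ZERO);
    if (!(jarPath instanceof SqlCharStringLiteral)) {
      throw new IllegalArgumentException("Jar path is not a SqlCharStringLiteral.");
    }
    // getNlsString().getValue() yields the string value without surrounding quotes.
    String unquotedJarPath = ((SqlCharStringLiteral) jarPath).getNlsString().getValue();
    System.out.println(unquotedJarPath);   // /tmp/udfs.jar
  }
}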