Usage of org.apache.flink.table.api.TableException in the Apache Flink project.
From the class BuiltInFunctionDefinitions, method getDefinitions.
@Internal
public static List<BuiltInFunctionDefinition> getDefinitions() {
    final Field[] candidates = BuiltInFunctionDefinitions.class.getFields();
    final List<BuiltInFunctionDefinition> definitions = new ArrayList<>(candidates.length);
    for (final Field field : candidates) {
        // Only collect fields that actually hold function definitions.
        if (!FunctionDefinition.class.isAssignableFrom(field.getType())) {
            continue;
        }
        try {
            // Static field access: Field#get ignores the passed instance argument.
            final BuiltInFunctionDefinition definition =
                    (BuiltInFunctionDefinition) field.get(BuiltInFunctionDefinitions.class);
            definitions.add(Preconditions.checkNotNull(definition));
        } catch (IllegalAccessException e) {
            throw new TableException(
                    "The function definition for field " + field.getName() + " is not accessible.", e);
        }
    }
    return definitions;
}
Usage of org.apache.flink.table.api.TableException in the Apache Flink project.
From the class TableFactoryService, method discoverFactories.
/**
 * Searches for factories using Java service providers.
 *
 * @return all factories in the classpath
 */
private static List<TableFactory> discoverFactories(Optional<ClassLoader> classLoader) {
    final ClassLoader loader =
            classLoader.orElse(Thread.currentThread().getContextClassLoader());
    try {
        final List<TableFactory> factories = new LinkedList<>();
        // ServiceLoader is lazy; provider instantiation errors surface during iteration.
        for (final TableFactory factory : ServiceLoader.load(TableFactory.class, loader)) {
            factories.add(factory);
        }
        return factories;
    } catch (ServiceConfigurationError e) {
        LOG.error("Could not load service provider for table factories.", e);
        throw new TableException("Could not load service provider for table factories.", e);
    }
}
Usage of org.apache.flink.table.api.TableException in the Apache Flink project.
From the class UserDefinedFunctionHelper, method createSpecializedFunction.
/**
 * Creates the runtime implementation of a {@link FunctionDefinition} as an instance of {@link
 * UserDefinedFunction}.
 *
 * @see SpecializedFunction
 */
public static UserDefinedFunction createSpecializedFunction(String functionName, FunctionDefinition definition, CallContext callContext, ClassLoader builtInClassLoader, @Nullable ReadableConfig configuration) {
    // Guard: definitions that are neither specializable nor already UDFs are unsupported.
    if (!(definition instanceof SpecializedFunction)) {
        if (definition instanceof UserDefinedFunction) {
            return (UserDefinedFunction) definition;
        }
        throw new TableException(String.format("Could not find a runtime implementation for function definition '%s'.", functionName));
    }
    final SpecializedContext context = new SpecializedContext() {
        @Override
        public CallContext getCallContext() {
            return callContext;
        }
        @Override
        public ReadableConfig getConfiguration() {
            // Not every call path provides a configuration; fail loudly if absent.
            if (configuration == null) {
                throw new TableException("Access to configuration is currently not supported for all kinds of calls.");
            }
            return configuration;
        }
        @Override
        public ClassLoader getBuiltInClassLoader() {
            return builtInClassLoader;
        }
    };
    final UserDefinedFunction specialized = ((SpecializedFunction) definition).specialize(context);
    // Specialization may swap the implementation but must keep the function kind stable.
    checkState(specialized.getKind() == definition.getKind(), "Function kind must not change during specialization.");
    return specialized;
}
Usage of org.apache.flink.table.api.TableException in the Apache Flink project.
From the class DefaultExecutor, method createPipeline.
@Override
public Pipeline createPipeline(List<Transformation<?>> transformations, ReadableConfig tableConfiguration, @Nullable String defaultJobName) {
    // Apply the table-layer options before any stream graph is generated.
    executionEnvironment.configure(tableConfiguration);
    final RuntimeExecutionMode mode = getConfiguration().get(ExecutionOptions.RUNTIME_MODE);
    if (mode == RuntimeExecutionMode.BATCH) {
        configureBatchSpecificProperties();
    } else if (mode != RuntimeExecutionMode.STREAMING) {
        // AUTOMATIC (and any unknown mode) must have been resolved before this point.
        throw new TableException(String.format("Unsupported runtime mode: %s", mode));
    }
    final StreamGraph streamGraph = executionEnvironment.generateStreamGraph(transformations);
    setJobName(streamGraph, defaultJobName);
    return streamGraph;
}
Usage of org.apache.flink.table.api.TableException in the Apache Flink project.
From the class StructuredObjectConverter, method open.
@Override
public void open(ClassLoader classLoader) {
    // Open the nested field converters first; the generated converter delegates to them.
    for (final DataStructureConverter<Object, Object> converter : fieldConverters) {
        converter.open(classLoader);
    }
    try {
        final Class<?> converterClass =
                CompileUtils.compile(classLoader, generatedName, generatedCode);
        // Instantiate the generated class via its (FieldGetter[], DataStructureConverter[]) ctor.
        generatedConverter =
                (DataStructureConverter<RowData, T>)
                        converterClass
                                .getConstructor(RowData.FieldGetter[].class, DataStructureConverter[].class)
                                .newInstance(fieldGetters, fieldConverters);
    } catch (Throwable t) {
        // Catch Throwable deliberately: compilation/reflection can raise Errors, not just Exceptions.
        throw new TableException("Error while generating structured type converter.", t);
    }
    generatedConverter.open(classLoader);
}
Aggregations