use of org.apache.flink.table.api.TableException in project flink by apache.
the class AggFunctionTestBase method accumulateValues.
protected ACC accumulateValues(List<T> values)
        throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    AggregateFunction<T, ACC> aggregator = getAggregator();
    ACC accumulator = getAggregator().createAccumulator();
    Method accumulateFunc = getAccumulateFunc();
    for (T value : values) {
        if (accumulateFunc.getParameterCount() == 1) {
            accumulateFunc.invoke(aggregator, accumulator);
        } else if (accumulateFunc.getParameterCount() == 2) {
            accumulateFunc.invoke(aggregator, accumulator, value);
        } else {
            throw new TableException("Unsupported now");
        }
    }
    return accumulator;
}
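For context, a minimal sketch of an aggregate function whose accumulate method takes an accumulator and a value, which is the shape exercised by the two-parameter branch above. The class, accumulator, and field names are illustrative and not part of Flink's test base.

import org.apache.flink.table.functions.AggregateFunction;

// Hypothetical sum aggregate used only to illustrate the reflective dispatch above.
public class SumAggFunction extends AggregateFunction<Long, SumAggFunction.SumAcc> {

    // Simple mutable accumulator holding the running sum.
    public static class SumAcc {
        public long sum = 0L;
    }

    @Override
    public SumAcc createAccumulator() {
        return new SumAcc();
    }

    @Override
    public Long getValue(SumAcc acc) {
        return acc.sum;
    }

    // Found reflectively by getAccumulateFunc(); its parameter count of 2 selects the
    // accumulateFunc.invoke(aggregator, accumulator, value) branch in accumulateValues.
    public void accumulate(SumAcc acc, Long value) {
        if (value != null) {
            acc.sum += value;
        }
    }
}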
use of org.apache.flink.table.api.TableException in project flink by apache.
the class AggFunctionTestBase method retractValues.
protected void retractValues(ACC accumulator, List<T> values)
        throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    AggregateFunction<T, ACC> aggregator = getAggregator();
    Method retractFunc = getRetractFunc();
    for (T value : values) {
        if (retractFunc.getParameterCount() == 1) {
            retractFunc.invoke(aggregator, accumulator);
        } else if (retractFunc.getParameterCount() == 2) {
            retractFunc.invoke(aggregator, accumulator, value);
        } else {
            throw new TableException("Unsupported now");
        }
    }
}
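Continuing the hypothetical SumAggFunction sketched above, a matching retract method takes the same two parameters and is therefore routed through the two-parameter invoke branch of retractValues. Again, this is an illustrative sketch rather than Flink's own test fixture.

// Hypothetical retract counterpart to the accumulate method above; its parameter
// count of 2 selects the retractFunc.invoke(aggregator, accumulator, value) branch.
public void retract(SumAcc acc, Long value) {
    if (value != null) {
        acc.sum -= value;
    }
}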
use of org.apache.flink.table.api.TableException in project flink by apache.
the class ProjectionOperationFactory method validateAndGetUniqueNames.
private String[] validateAndGetUniqueNames(List<ResolvedExpression> namedExpressions) {
    // we need to maintain field names order to match with types
    final Set<String> names = new LinkedHashSet<>();
    extractNames(namedExpressions).stream()
            .map(name -> name.orElseThrow(
                    () -> new TableException("Could not name a field in a projection.")))
            .forEach(name -> {
                if (!names.add(name)) {
                    throw new ValidationException("Ambiguous column name: " + name);
                }
            });
    return names.toArray(new String[0]);
}
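As a standalone illustration of the same pattern (not Flink code), the snippet below shows how a LinkedHashSet preserves the insertion order of field names while a rejected add() flags the first duplicate; the class and column names are made up.

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class UniqueNamesDemo {
    public static void main(String[] args) {
        List<String> projected = List.of("id", "amount", "id");
        Set<String> names = new LinkedHashSet<>();
        for (String name : projected) {
            if (!names.add(name)) {
                // Mirrors the "Ambiguous column name" check in validateAndGetUniqueNames.
                throw new IllegalStateException("Ambiguous column name: " + name);
            }
        }
        // Not reached for this input; with unique names it would print them in order.
        System.out.println(String.join(", ", names));
    }
}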
use of org.apache.flink.table.api.TableException in project flink by apache.
the class AggregateOperationFactory method createResolvedWindow.
/**
 * Converts an API class to a resolved window for planning, with expressions already resolved. It
 * performs the following validations:
*
* <ul>
* <li>The alias is represented with an unresolved reference
* <li>The time attribute is a single field reference of a {@link
* TimeIndicatorTypeInfo}(stream), {@link SqlTimeTypeInfo}(batch), or {@link
* BasicTypeInfo#LONG_TYPE_INFO}(batch) type
* <li>The size & slide are value literals of either {@link BasicTypeInfo#LONG_TYPE_INFO}, or
* {@link TimeIntervalTypeInfo} type
* <li>The size & slide are of the same type
* <li>The gap is a value literal of a {@link TimeIntervalTypeInfo} type
* </ul>
*
* @param window window to resolve
* @param resolver resolver to resolve potential unresolved field references
* @return window with expressions resolved
*/
ResolvedGroupWindow createResolvedWindow(GroupWindow window, ExpressionResolver resolver) {
    Expression alias = window.getAlias();
    if (!(alias instanceof UnresolvedReferenceExpression)) {
        throw new ValidationException("Only unresolved reference supported for alias of a group window.");
    }
    final String windowName = ((UnresolvedReferenceExpression) alias).getName();
    FieldReferenceExpression timeField = getValidatedTimeAttribute(window, resolver);
    if (window instanceof TumbleWithSizeOnTimeWithAlias) {
        return validateAndCreateTumbleWindow((TumbleWithSizeOnTimeWithAlias) window, windowName, timeField);
    } else if (window instanceof SlideWithSizeAndSlideOnTimeWithAlias) {
        return validateAndCreateSlideWindow((SlideWithSizeAndSlideOnTimeWithAlias) window, windowName, timeField);
    } else if (window instanceof SessionWithGapOnTimeWithAlias) {
        return validateAndCreateSessionWindow((SessionWithGapOnTimeWithAlias) window, windowName, timeField);
    } else {
        throw new TableException("Unknown window type: " + window);
    }
}
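As a hedged usage sketch, the Table API call below defines a tumbling group window of the kind this method resolves; the table, field, and alias names ("orders", "rowtime", "w", and so on) are illustrative.

import static org.apache.flink.table.api.Expressions.$;
import static org.apache.flink.table.api.Expressions.lit;

import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.Tumble;

public class TumbleWindowExample {
    public static Table applyWindow(Table orders) {
        // The alias "w" is the unresolved reference checked above, and "rowtime"
        // must resolve to a valid time attribute of the input table.
        return orders
                .window(Tumble.over(lit(10).minutes()).on($("rowtime")).as("w"))
                .groupBy($("w"), $("user"))
                .select($("user"), $("amount").sum().as("total"));
    }
}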
use of org.apache.flink.table.api.TableException in project flink by apache.
the class ParserImpl method parse.
/**
 * When parsing a statement, this method first uses the {@link ExtendedParser}. If the {@link
 * ExtendedParser} fails to parse the statement, it falls back to the {@link CalciteParser}.
*
* @param statement input statement.
* @return parsed operations.
*/
@Override
public List<Operation> parse(String statement) {
    CalciteParser parser = calciteParserSupplier.get();
    FlinkPlannerImpl planner = validatorSupplier.get();
    Optional<Operation> command = EXTENDED_PARSER.parse(statement);
    if (command.isPresent()) {
        return Collections.singletonList(command.get());
    }
    // parse the sql query
    // use parseSqlList here because we need to support statements ending with ';' in the sql client
    SqlNodeList sqlNodeList = parser.parseSqlList(statement);
    List<SqlNode> parsed = sqlNodeList.getList();
    Preconditions.checkArgument(parsed.size() == 1, "only single statement supported");
    return Collections.singletonList(
            SqlToOperationConverter.convert(planner, catalogManager, parsed.get(0))
                    .orElseThrow(() -> new TableException("Unsupported query: " + statement)));
}
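As a hedged usage sketch, the snippet below submits a single SQL statement through a TableEnvironment, which is the usual path into ParserImpl#parse; the table definition and connector choice are illustrative.

import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.TableEnvironment;

public class ParseExample {
    public static void main(String[] args) {
        TableEnvironment tEnv =
                TableEnvironment.create(EnvironmentSettings.newInstance().inStreamingMode().build());
        // A single statement per call; statements that neither parser can convert
        // surface as the TableException("Unsupported query: ...") thrown above.
        tEnv.executeSql(
                "CREATE TABLE orders (id BIGINT, amount DOUBLE) WITH ('connector' = 'datagen')");
    }
}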