Example use of org.apache.flink.table.api.TableException in the Apache Flink project: class ShuffleJsonDeserializer, method deserialize.
@Override
public Shuffle deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    // Read the whole node first; the "type" field selects the shuffle strategy.
    final JsonNode root = jsonParser.getCodec().readTree(jsonParser);
    final Shuffle.Type shuffleType =
            Shuffle.Type.valueOf(root.get("type").asText().toUpperCase());
    switch (shuffleType) {
        case ANY:
            return ExecEdge.ANY_SHUFFLE;
        case SINGLETON:
            return ExecEdge.SINGLETON_SHUFFLE;
        case BROADCAST:
            return ExecEdge.BROADCAST_SHUFFLE;
        case FORWARD:
            return ExecEdge.FORWARD_SHUFFLE;
        case HASH:
            // A hash shuffle is only meaningful with at least one key field.
            final JsonNode keyArray = root.get("keys");
            if (keyArray == null || keyArray.size() == 0) {
                throw new TableException("Hash shuffle requires non-empty hash keys.");
            }
            final int[] hashKeys = new int[keyArray.size()];
            for (int idx = 0; idx < hashKeys.length; idx++) {
                hashKeys[idx] = keyArray.get(idx).asInt();
            }
            return ExecEdge.hashShuffle(hashKeys);
        default:
            throw new TableException("Unsupported shuffle type: " + shuffleType);
    }
}
Example use of org.apache.flink.table.api.TableException in the Apache Flink project: class RequiredDistributionJsonDeserializer, method deserialize.
@Override
public RequiredDistribution deserialize(JsonParser jsonParser, DeserializationContext ctx) throws IOException {
    // Read the whole node first; the "type" field selects the distribution kind.
    JsonNode jsonNode = jsonParser.getCodec().readTree(jsonParser);
    DistributionType type = DistributionType.valueOf(jsonNode.get("type").asText().toUpperCase());
    switch(type) {
        case ANY:
            return InputProperty.ANY_DISTRIBUTION;
        case SINGLETON:
            return InputProperty.SINGLETON_DISTRIBUTION;
        case BROADCAST:
            return InputProperty.BROADCAST_DISTRIBUTION;
        case UNKNOWN:
            return InputProperty.UNKNOWN_DISTRIBUTION;
        case HASH:
            JsonNode keysNode = jsonNode.get("keys");
            // Reject a missing OR empty "keys" array: the error message promises
            // "non-empty hash keys", and the companion shuffle deserializer applies
            // the same size check. Previously an empty array slipped through and
            // produced a hash distribution with zero keys.
            if (keysNode == null || keysNode.size() == 0) {
                throw new TableException("Hash distribution requires non-empty hash keys.");
            }
            int[] keys = new int[keysNode.size()];
            for (int i = 0; i < keysNode.size(); ++i) {
                keys[i] = keysNode.get(i).asInt();
            }
            return InputProperty.hashDistribution(keys);
        default:
            throw new TableException("Unsupported distribution type: " + type);
    }
}
Example use of org.apache.flink.table.api.TableException in the Apache Flink project: class RexNodeJsonDeserializer, method deserializeLiteral.
private static RexNode deserializeLiteral(JsonNode jsonNode, SerdeContext serdeContext) {
    // Resolve the literal's declared type before inspecting its payload.
    final JsonNode typeNode = jsonNode.required(FIELD_NAME_TYPE);
    final RelDataType literalType = RelDataTypeJsonDeserializer.deserialize(typeNode, serdeContext);
    // A literal carries either a search argument (Sarg) or a plain value.
    if (jsonNode.has(FIELD_NAME_SARG)) {
        return deserializeSarg(jsonNode.required(FIELD_NAME_SARG), literalType, serdeContext);
    }
    if (jsonNode.has(FIELD_NAME_VALUE)) {
        final Object literalValue =
                deserializeLiteralValue(jsonNode, literalType.getSqlTypeName(), serdeContext);
        return literalValue == null
                ? serdeContext.getRexBuilder().makeNullLiteral(literalType)
                : serdeContext.getRexBuilder().makeLiteral(literalValue, literalType, true);
    }
    // Neither field present — the node is not a recognizable literal.
    throw new TableException("Unknown literal: " + jsonNode.toPrettyString());
}
Example use of org.apache.flink.table.api.TableException in the Apache Flink project: class RexNodeJsonDeserializer, method deserializeInternalFunction.
private static SqlOperator deserializeInternalFunction(String internalName, SqlSyntax syntax, SerdeContext serdeContext) {
    // First attempt: look up the fully qualified internal name ($FUNC$1).
    Optional<SqlOperator> candidate =
            lookupOptionalSqlOperator(FunctionIdentifier.of(internalName), syntax, serdeContext, false);
    if (!candidate.isPresent()) {
        // Second attempt: fall back to the public name (FUNC), latest version.
        final String publicName = BuiltInSqlOperator.extractNameFromQualifiedName(internalName);
        candidate =
                lookupOptionalSqlOperator(FunctionIdentifier.of(publicName), syntax, serdeContext, true);
    }
    // Internal functions must always resolve; a miss indicates a planner bug.
    return candidate.orElseThrow(
            () -> new TableException(
                    String.format(
                            "Could not resolve internal system function '%s'. "
                                    + "This is a bug, please file an issue.",
                            internalName)));
}
Example use of org.apache.flink.table.api.TableException in the Apache Flink project: class RexNodeJsonDeserializer, method deserializeFunctionClass.
private static SqlOperator deserializeFunctionClass(JsonNode jsonNode, SerdeContext serdeContext) {
    // Instantiate the UDF from its serialized implementation class.
    final String implementationClassName = jsonNode.required(FIELD_NAME_CLASS).asText();
    final Class<?> implementationClass = loadClass(implementationClassName, serdeContext, "function");
    final UserDefinedFunction udf = UserDefinedFunctionHelper.instantiateFunction(implementationClass);

    // A catalog identifier marks a permanent (catalog) function; its absence
    // means an anonymous function — system functions never serialize classes.
    final ContextResolvedFunction resolved;
    if (jsonNode.has(FIELD_NAME_CATALOG_NAME)) {
        final ObjectIdentifier catalogIdentifier =
                ObjectIdentifierJsonDeserializer.deserialize(
                        jsonNode.required(FIELD_NAME_CATALOG_NAME).asText(), serdeContext);
        resolved = ContextResolvedFunction.permanent(FunctionIdentifier.of(catalogIdentifier), udf);
    } else {
        resolved = ContextResolvedFunction.anonymous(udf);
    }

    // Bridge into the matching Calcite operator for the function's kind.
    switch (udf.getKind()) {
        case SCALAR:
        case TABLE:
            return BridgingSqlFunction.of(
                    serdeContext.getFlinkContext(), serdeContext.getTypeFactory(), resolved);
        case AGGREGATE:
            return BridgingSqlAggFunction.of(
                    serdeContext.getFlinkContext(), serdeContext.getTypeFactory(), resolved);
        default:
            throw new TableException(
                    String.format(
                            "Unsupported anonymous function kind '%s' for class '%s'.",
                            udf.getKind(), implementationClassName));
    }
}
Aggregations