Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
Class InputConversionOperator, method processElement.
/**
 * Converts one external DataStream record into the Table API's internal row format and
 * forwards it downstream.
 *
 * <p>The conversion goes through {@code converter.toInternal}. Depending on configuration the
 * result is wrapped into a single-field row ({@code requiresWrapping}), checked to be
 * insert-only ({@code isInsertOnly}), and optionally joined with a rowtime column derived from
 * the record's timestamp ({@code produceRowtimeMetadata}).
 *
 * @param element the incoming DataStream record
 * @throws Exception if conversion fails or the record violates the configured constraints
 */
@Override
public void processElement(StreamRecord<E> element) throws Exception {
    final E external = element.getValue();

    Object internal;
    try {
        internal = converter.toInternal(external);
    } catch (Exception e) {
        throw new FlinkRuntimeException(
                String.format(
                        "Error during input conversion from external DataStream API to "
                                + "internal Table API data structures. Make sure that the "
                                + "provided data types that configure the converters are "
                                + "correctly declared in the schema. Affected record:\n%s",
                        external),
                e);
    }

    final RowData payload;
    if (requiresWrapping) {
        // Wrap the converted value as the single field of an INSERT row.
        final GenericRowData wrapper = new GenericRowData(RowKind.INSERT, 1);
        wrapper.setField(0, internal);
        payload = wrapper;
    } else if (internal == null) {
        // top-level records must not be null and will be skipped
        return;
    } else {
        payload = (RowData) internal;
    }

    final RowKind kind = payload.getRowKind();
    if (isInsertOnly && kind != RowKind.INSERT) {
        throw new FlinkRuntimeException(
                String.format(
                        "Error during input conversion. Conversion expects insert-only "
                                + "records but DataStream API record contains: %s",
                        kind));
    }

    if (produceRowtimeMetadata) {
        // Rowtime metadata requires an assigned event-time timestamp on the record.
        if (!element.hasTimestamp()) {
            throw new FlinkRuntimeException(
                    "Could not find timestamp in DataStream API record. "
                            + "Make sure that timestamps have been assigned before and "
                            + "the event-time characteristic is enabled.");
        }
        final GenericRowData rowtime = new GenericRowData(1);
        rowtime.setField(0, TimestampData.fromEpochMillis(element.getTimestamp()));
        // Append the rowtime column to the payload row, preserving the payload's row kind.
        output.collect(outRecord.replace(new JoinedRowData(kind, payload, rowtime)));
    } else {
        output.collect(outRecord.replace(payload));
    }
}
Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
Class AbstractCodeGeneratorCastRule, method create.
/**
 * Generates, compiles, and instantiates a {@code CastExecutor} that casts values of
 * {@code inputLogicalType} to {@code targetLogicalType}.
 *
 * <p>The method assembles Java source for a one-off class (fields for type serializers, a
 * constructor receiving them, and a {@code cast(Object)} method body produced by
 * {@link #generateCodeBlock}), compiles it with Janino via {@code CompileUtils}, and invokes
 * its single constructor reflectively.
 *
 * @param castRuleContext  context carrying the class loader and code-generation settings
 * @param inputLogicalType  logical type of the value passed to the executor
 * @param targetLogicalType logical type the executor produces
 * @return a freshly compiled executor instance
 * @throws FlinkRuntimeException if the generated class cannot be compiled or instantiated
 *     (indicates a bug in the code generator; the generated source is included in the message)
 */
@SuppressWarnings("unchecked")
@Override
public CastExecutor<IN, OUT> create(CastRule.Context castRuleContext, LogicalType inputLogicalType, LogicalType targetLogicalType) {
// Fixed variable names used inside the generated source for the (cast) input value
// and its null flag.
final String inputTerm = "_myInput";
final String inputIsNullTerm = "_myInputIsNull";
// Unique class name so repeated calls don't collide in the compiler cache/class loader.
final String castExecutorClassName = CodeGenUtils.newName("GeneratedCastExecutor");
final String inputTypeTerm = CodeGenUtils.boxedTypeTermForType(inputLogicalType);
final CastExecutorCodeGeneratorContext ctx = new CastExecutorCodeGeneratorContext(castRuleContext);
// Delegate the actual cast logic to the concrete rule; the block exposes a return term
// referenced below when emitting the method body.
final CastCodeBlock codeBlock = generateCodeBlock(ctx, inputTerm, inputIsNullTerm, inputLogicalType, targetLogicalType);
// Class fields can contain type serializers
final String classFieldDecls = Stream.concat(ctx.typeSerializers.values().stream().map(entry -> "private final " + className(entry.getValue().getClass()) + " " + entry.getKey() + ";"), ctx.getClassFields().stream()).collect(Collectors.joining("\n"));
// Constructor takes one parameter per serializer field, in the same iteration order as
// the field declarations above.
final String constructorSignature = "public " + castExecutorClassName + "(" + ctx.typeSerializers.values().stream().map(entry -> className(entry.getValue().getClass()) + " " + entry.getKey()).collect(Collectors.joining(", ")) + ")";
final String constructorBody = ctx.getDeclaredTypeSerializers().stream().map(name -> "this." + name + " = " + name + ";\n").collect(Collectors.joining());
// Because janino doesn't support generics, we need to manually cast the input variable of
// the cast method
final String functionSignature = "@Override public Object cast(Object _myInputObj) throws " + className(TableException.class);
// Write the function body
final CastRuleUtils.CodeWriter bodyWriter = new CastRuleUtils.CodeWriter();
bodyWriter.declStmt(inputTypeTerm, inputTerm, cast(inputTypeTerm, "_myInputObj"));
bodyWriter.declStmt("boolean", inputIsNullTerm, "_myInputObj == null");
// Variable declarations collected by the rule during code-block generation must precede
// the generated cast logic.
ctx.variableDeclarationStatements.forEach(decl -> bodyWriter.appendBlock(decl + "\n"));
if (this.canFail(inputLogicalType, targetLogicalType)) {
// Fallible casts are wrapped in try/catch so runtime failures surface as a
// TableException naming the involved types rather than a raw Janino exception.
bodyWriter.tryCatchStmt(tryWriter -> tryWriter.append(codeBlock).stmt("return " + codeBlock.getReturnTerm()), (exceptionTerm, catchWriter) -> catchWriter.throwStmt(constructorCall(TableException.class, strLiteral("Error when casting " + inputLogicalType + " to " + targetLogicalType + "."), exceptionTerm)));
} else {
bodyWriter.append(codeBlock).stmt("return " + codeBlock.getReturnTerm());
}
// Assemble the complete class source: fields, constructor, and the cast method.
final String classCode = "public final class " + castExecutorClassName + " implements " + className(CastExecutor.class) + " {\n" + classFieldDecls + "\n" + constructorSignature + " {\n" + constructorBody + "}\n" + functionSignature + " {\n" + bodyWriter + "}\n}";
try {
// Serializer instances are passed positionally; order matches the constructor
// signature built from the same ctx.typeSerializers map.
Object[] constructorArgs = ctx.getTypeSerializersInstances().toArray(new TypeSerializer[0]);
return (CastExecutor<IN, OUT>) CompileUtils.compile(castRuleContext.getClassLoader(), castExecutorClassName, classCode).getConstructors()[0].newInstance(constructorArgs);
} catch (Throwable e) {
throw new FlinkRuntimeException("Cast executor cannot be instantiated. This is a bug. Please file an issue. Code:\n" + classCode, e);
}
}
Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
Class TestManagedIterableSourceSplit, method getIterator.
/**
 * Lazily creates (on first call) and returns an iterator over the rows of this split's file.
 *
 * <p>Each emitted row is {@code (id, filePath, line)} as string data, one row per text line.
 * Subsequent calls return the same (partially consumed) iterator.
 *
 * @return iterator over the file's rows
 * @throws FlinkRuntimeException if the file cannot be opened
 */
@Override
public Iterator<RowData> getIterator() {
    if (iterator == null) {
        try {
            // Decode explicitly as UTF-8: the charset-less FileReader constructor used
            // previously falls back to the JVM's platform-default charset (pre-Java 18),
            // which makes the produced rows platform-dependent for non-ASCII content.
            BufferedReader reader =
                    new BufferedReader(
                            new java.io.InputStreamReader(
                                    new java.io.FileInputStream(filePath.getPath()),
                                    java.nio.charset.StandardCharsets.UTF_8));
            // NOTE(review): the reader is deliberately left open because the returned
            // iterator consumes it lazily; it is only reclaimed when this split is
            // garbage-collected. Confirm the enclosing source reader closes/drains
            // splits if file-handle leaks matter here (test-only code, presumably fine).
            iterator = Iterators.transform(reader.lines().iterator(), line -> GenericRowData.of(StringData.fromString(id), StringData.fromString(filePath.getPath()), StringData.fromString(line)));
        } catch (IOException e) {
            throw new FlinkRuntimeException(e);
        }
    }
    return iterator;
}
Aggregations