Example 76 with FlinkRuntimeException

Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.

Class InputConversionOperator, method processElement:

@Override
public void processElement(StreamRecord<E> element) throws Exception {
    final E externalRecord = element.getValue();
    final Object internalRecord;
    try {
        internalRecord = converter.toInternal(externalRecord);
    } catch (Exception e) {
        throw new FlinkRuntimeException(String.format("Error during input conversion from external DataStream API to " + "internal Table API data structures. Make sure that the " + "provided data types that configure the converters are " + "correctly declared in the schema. Affected record:\n%s", externalRecord), e);
    }
    final RowData payloadRowData;
    if (requiresWrapping) {
        final GenericRowData wrapped = new GenericRowData(RowKind.INSERT, 1);
        wrapped.setField(0, internalRecord);
        payloadRowData = wrapped;
    } else {
        // top-level records must not be null; null records are skipped instead of emitted
        if (internalRecord == null) {
            return;
        }
        payloadRowData = (RowData) internalRecord;
    }
    final RowKind kind = payloadRowData.getRowKind();
    if (isInsertOnly && kind != RowKind.INSERT) {
        throw new FlinkRuntimeException(String.format("Error during input conversion. Conversion expects insert-only " + "records but DataStream API record contains: %s", kind));
    }
    if (!produceRowtimeMetadata) {
        output.collect(outRecord.replace(payloadRowData));
        return;
    }
    if (!element.hasTimestamp()) {
        throw new FlinkRuntimeException("Could not find timestamp in DataStream API record. " + "Make sure that timestamps have been assigned before and " + "the event-time characteristic is enabled.");
    }
    final GenericRowData rowtimeRowData = new GenericRowData(1);
    rowtimeRowData.setField(0, TimestampData.fromEpochMillis(element.getTimestamp()));
    final JoinedRowData joinedRowData = new JoinedRowData(kind, payloadRowData, rowtimeRowData);
    output.collect(outRecord.replace(joinedRowData));
}
Also used: RowData (org.apache.flink.table.data.RowData), GenericRowData (org.apache.flink.table.data.GenericRowData), JoinedRowData (org.apache.flink.table.data.utils.JoinedRowData), RowKind (org.apache.flink.types.RowKind), FlinkRuntimeException (org.apache.flink.util.FlinkRuntimeException)
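
The pattern to note here is that a checked conversion failure is rethrown as the unchecked FlinkRuntimeException with the offending record embedded in the message. Below is a minimal, self-contained sketch of that pattern; the Converter interface and the convertOrFail helper are hypothetical stand-ins for the operator's converter field, and only FlinkRuntimeException is the real Flink class.

import org.apache.flink.util.FlinkRuntimeException;

public class ConversionSketch {

    // Simplified stand-in for the converter used by InputConversionOperator.
    interface Converter<E> {
        Object toInternal(E external) throws Exception;
    }

    static <E> Object convertOrFail(Converter<E> converter, E externalRecord) {
        try {
            return converter.toInternal(externalRecord);
        } catch (Exception e) {
            // Wrap the checked cause and include the record that triggered it,
            // mirroring the error handling in processElement above.
            throw new FlinkRuntimeException(
                    String.format(
                            "Error during input conversion. Affected record:%n%s", externalRecord),
                    e);
        }
    }

    public static void main(String[] args) {
        Converter<String> parseInt = Integer::parseInt;
        System.out.println(convertOrFail(parseInt, "42")); // prints 42
        // convertOrFail(parseInt, "not a number") throws FlinkRuntimeException
    }
}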

Example 77 with FlinkRuntimeException

Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.

Class AbstractCodeGeneratorCastRule, method create:

@SuppressWarnings("unchecked")
@Override
public CastExecutor<IN, OUT> create(CastRule.Context castRuleContext, LogicalType inputLogicalType, LogicalType targetLogicalType) {
    final String inputTerm = "_myInput";
    final String inputIsNullTerm = "_myInputIsNull";
    final String castExecutorClassName = CodeGenUtils.newName("GeneratedCastExecutor");
    final String inputTypeTerm = CodeGenUtils.boxedTypeTermForType(inputLogicalType);
    final CastExecutorCodeGeneratorContext ctx = new CastExecutorCodeGeneratorContext(castRuleContext);
    final CastCodeBlock codeBlock = generateCodeBlock(ctx, inputTerm, inputIsNullTerm, inputLogicalType, targetLogicalType);
    // Class fields can contain type serializers
    final String classFieldDecls =
            Stream.concat(
                            ctx.typeSerializers.values().stream()
                                    .map(entry -> "private final "
                                            + className(entry.getValue().getClass())
                                            + " " + entry.getKey() + ";"),
                            ctx.getClassFields().stream())
                    .collect(Collectors.joining("\n"));
    final String constructorSignature =
            "public " + castExecutorClassName + "("
                    + ctx.typeSerializers.values().stream()
                            .map(entry -> className(entry.getValue().getClass()) + " " + entry.getKey())
                            .collect(Collectors.joining(", "))
                    + ")";
    final String constructorBody =
            ctx.getDeclaredTypeSerializers().stream()
                    .map(name -> "this." + name + " = " + name + ";\n")
                    .collect(Collectors.joining());
    // Because janino doesn't support generics, we need to manually cast the input variable of
    // the cast method
    final String functionSignature = "@Override public Object cast(Object _myInputObj) throws " + className(TableException.class);
    // Write the function body
    final CastRuleUtils.CodeWriter bodyWriter = new CastRuleUtils.CodeWriter();
    bodyWriter.declStmt(inputTypeTerm, inputTerm, cast(inputTypeTerm, "_myInputObj"));
    bodyWriter.declStmt("boolean", inputIsNullTerm, "_myInputObj == null");
    ctx.variableDeclarationStatements.forEach(decl -> bodyWriter.appendBlock(decl + "\n"));
    if (this.canFail(inputLogicalType, targetLogicalType)) {
        bodyWriter.tryCatchStmt(
                tryWriter ->
                        tryWriter.append(codeBlock).stmt("return " + codeBlock.getReturnTerm()),
                (exceptionTerm, catchWriter) ->
                        catchWriter.throwStmt(
                                constructorCall(
                                        TableException.class,
                                        strLiteral("Error when casting " + inputLogicalType
                                                + " to " + targetLogicalType + "."),
                                        exceptionTerm)));
    } else {
        bodyWriter.append(codeBlock).stmt("return " + codeBlock.getReturnTerm());
    }
    final String classCode =
            "public final class " + castExecutorClassName
                    + " implements " + className(CastExecutor.class) + " {\n"
                    + classFieldDecls + "\n"
                    + constructorSignature + " {\n" + constructorBody + "}\n"
                    + functionSignature + " {\n" + bodyWriter + "}\n}";
    try {
        Object[] constructorArgs =
                ctx.getTypeSerializersInstances().toArray(new TypeSerializer[0]);
        return (CastExecutor<IN, OUT>)
                CompileUtils.compile(
                                castRuleContext.getClassLoader(), castExecutorClassName, classCode)
                        .getConstructors()[0]
                        .newInstance(constructorArgs);
    } catch (Throwable e) {
        throw new FlinkRuntimeException(
                "Cast executor cannot be instantiated. This is a bug. Please file an issue. Code:\n"
                        + classCode,
                e);
    }
}
Also used: TypeSerializer (org.apache.flink.api.common.typeutils.TypeSerializer), CastRuleUtils.strLiteral (org.apache.flink.table.planner.functions.casting.CastRuleUtils.strLiteral), CompileUtils (org.apache.flink.table.runtime.generated.CompileUtils), FlinkRuntimeException (org.apache.flink.util.FlinkRuntimeException), TableException (org.apache.flink.table.api.TableException), CodeGenUtils.className (org.apache.flink.table.planner.codegen.CodeGenUtils.className), SimpleImmutableEntry (java.util.AbstractMap.SimpleImmutableEntry), Collectors (java.util.stream.Collectors), ArrayList (java.util.ArrayList), LinkedHashMap (java.util.LinkedHashMap), CastRuleUtils.cast (org.apache.flink.table.planner.functions.casting.CastRuleUtils.cast), CastExecutor (org.apache.flink.table.data.utils.CastExecutor), List (java.util.List), CodeGenUtils (org.apache.flink.table.planner.codegen.CodeGenUtils), Stream (java.util.stream.Stream), LogicalType (org.apache.flink.table.types.logical.LogicalType), CastRuleUtils.constructorCall (org.apache.flink.table.planner.functions.casting.CastRuleUtils.constructorCall), InternalSerializers (org.apache.flink.table.runtime.typeutils.InternalSerializers), Map (java.util.Map)
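
Example 77 builds Java source as a string and compiles it at runtime; any failure, including errors, is caught as Throwable and resurfaced as a FlinkRuntimeException that carries the full generated code. The sketch below shows just the compile-and-instantiate step. It uses janino's SimpleCompiler directly rather than Flink's internal CompileUtils, the class name CodeGenSketch and helper instantiate are illustrative, and it assumes a janino dependency on the classpath.

import org.apache.flink.util.FlinkRuntimeException;
import org.codehaus.janino.SimpleCompiler;

public class CodeGenSketch {

    static Object instantiate(String className, String classCode) {
        try {
            // Compile the generated source in memory and instantiate the result
            // reflectively, roughly what CompileUtils.compile(...) does above.
            SimpleCompiler compiler = new SimpleCompiler();
            compiler.cook(classCode);
            return compiler.getClassLoader()
                    .loadClass(className)
                    .getDeclaredConstructor()
                    .newInstance();
        } catch (Throwable t) {
            // Catch Throwable, not just Exception: compilation and class loading
            // can fail with errors as well as exceptions. Shipping the generated
            // code in the message makes the failure reproducible from the report.
            throw new FlinkRuntimeException(
                    "Generated class cannot be instantiated. Code:\n" + classCode, t);
        }
    }

    public static void main(String[] args) {
        Object obj = instantiate(
                "Hello",
                "public final class Hello { public String toString() { return \"hello\"; } }");
        System.out.println(obj); // prints: hello
    }
}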

Example 78 with FlinkRuntimeException

Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.

Class TestManagedIterableSourceSplit, method getIterator:

@Override
public Iterator<RowData> getIterator() {
    if (iterator == null) {
        try {
            BufferedReader reader = new BufferedReader(new FileReader(filePath.getPath()));
            iterator =
                    Iterators.transform(
                            reader.lines().iterator(),
                            line ->
                                    GenericRowData.of(
                                            StringData.fromString(id),
                                            StringData.fromString(filePath.getPath()),
                                            StringData.fromString(line)));
        } catch (IOException e) {
            throw new FlinkRuntimeException(e);
        }
    }
    return iterator;
}
Also used: StringData (org.apache.flink.table.data.StringData), GenericRowData (org.apache.flink.table.data.GenericRowData), RowData (org.apache.flink.table.data.RowData), Iterator (java.util.Iterator), Path (org.apache.flink.core.fs.Path), FlinkRuntimeException (org.apache.flink.util.FlinkRuntimeException), IOException (java.io.IOException), IteratorSourceSplit (org.apache.flink.api.connector.source.lib.util.IteratorSourceSplit), BufferedReader (java.io.BufferedReader), FileReader (java.io.FileReader), Iterators (org.apache.flink.shaded.guava30.com.google.common.collect.Iterators), Serializable (java.io.Serializable)
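
Here the split opens its backing file lazily on first access, and the checked IOException is rethrown as the unchecked FlinkRuntimeException because getIterator cannot declare checked exceptions. A stripped-down sketch of the same pattern follows, with the hypothetical class name LazyLinesIterator and plain String lines in place of the Guava transform into GenericRowData.

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import org.apache.flink.util.FlinkRuntimeException;

public class LazyLinesIterator {

    private final String path;
    private Iterator<String> iterator;

    public LazyLinesIterator(String path) {
        this.path = path;
    }

    public Iterator<String> getIterator() {
        if (iterator == null) {
            try {
                // As in the original, the reader intentionally stays open for the
                // lifetime of the iterator that streams its lines.
                BufferedReader reader = new BufferedReader(new FileReader(path));
                iterator = reader.lines().iterator();
            } catch (IOException e) {
                // Convert the checked exception into Flink's unchecked wrapper.
                throw new FlinkRuntimeException(e);
            }
        }
        return iterator;
    }

    public static void main(String[] args) {
        new LazyLinesIterator(args[0]).getIterator().forEachRemaining(System.out::println);
    }
}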

Aggregations

FlinkRuntimeException (org.apache.flink.util.FlinkRuntimeException): 78
IOException (java.io.IOException): 28
Test (org.junit.Test): 13
JobID (org.apache.flink.api.common.JobID): 10
HashMap (java.util.HashMap): 8
ArrayList (java.util.ArrayList): 7
CompletableFuture (java.util.concurrent.CompletableFuture): 7
ExecutionException (java.util.concurrent.ExecutionException): 7
Nonnull (javax.annotation.Nonnull): 7
Configuration (org.apache.flink.configuration.Configuration): 6
Collectors (java.util.stream.Collectors): 5
JobGraph (org.apache.flink.runtime.jobgraph.JobGraph): 5
JobResultStore (org.apache.flink.runtime.highavailability.JobResultStore): 4
RocksDBException (org.rocksdb.RocksDBException): 4
List (java.util.List): 3
Map (java.util.Map): 3
CheckpointMetrics (org.apache.flink.runtime.checkpoint.CheckpointMetrics): 3
TaskStateSnapshot (org.apache.flink.runtime.checkpoint.TaskStateSnapshot): 3
ExecutionAttemptID (org.apache.flink.runtime.executiongraph.ExecutionAttemptID): 3
JobResult (org.apache.flink.runtime.jobmaster.JobResult): 3