Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
The class SourceCoordinatorContext, method assignSplits:
@Override
public void assignSplits(SplitsAssignment<SplitT> assignment) {
    // Ensure the split assignment is done by the coordinator executor.
    callInCoordinatorThread(
            () -> {
                // Ensure all the subtasks in the assignment have registered.
                for (Integer subtaskId : assignment.assignment().keySet()) {
                    if (!registeredReaders.containsKey(subtaskId)) {
                        // Report the splits that could not be assigned; note that
                        // registeredReaders.get(subtaskId) would always be null here.
                        throw new IllegalArgumentException(
                                String.format(
                                        "Cannot assign splits %s to subtask %d because the subtask is not registered.",
                                        assignment.assignment().get(subtaskId), subtaskId));
                    }
                }

                assignmentTracker.recordSplitAssignment(assignment);
                assignment.assignment()
                        .forEach(
                                (id, splits) -> {
                                    final OperatorCoordinator.SubtaskGateway gateway =
                                            getGatewayAndCheckReady(id);

                                    final AddSplitEvent<SplitT> addSplitEvent;
                                    try {
                                        addSplitEvent = new AddSplitEvent<>(splits, splitSerializer);
                                    } catch (IOException e) {
                                        throw new FlinkRuntimeException("Failed to serialize splits.", e);
                                    }
                                    gateway.sendEvent(addSplitEvent);
                                });
                return null;
            },
            String.format("Failed to assign splits %s due to ", assignment));
}
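The interesting detail is the lambda passed to callInCoordinatorThread: the AddSplitEvent constructor declares a checked IOException, which the lambda cannot rethrow directly, so it is wrapped in the unchecked FlinkRuntimeException. A minimal sketch of that same pattern, where serializeOrWrap is a hypothetical helper (SimpleVersionedSerializer and FlinkRuntimeException are the real Flink classes):

import java.io.IOException;

import org.apache.flink.core.io.SimpleVersionedSerializer;
import org.apache.flink.util.FlinkRuntimeException;

public class SerializeOrWrap {

    // Hypothetical helper: serialize a split, turning the checked IOException
    // into an unchecked FlinkRuntimeException so the call is usable inside
    // lambdas (Runnable, Callable, forEach) that cannot throw checked exceptions.
    public static <T> byte[] serializeOrWrap(T split, SimpleVersionedSerializer<T> serializer) {
        try {
            return serializer.serialize(split);
        } catch (IOException e) {
            throw new FlinkRuntimeException("Failed to serialize splits.", e);
        }
    }
}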
Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
The class DetachedApplicationRunner, method tryExecuteJobs:
private List<JobID> tryExecuteJobs(
        final DispatcherGateway dispatcherGateway,
        final PackagedProgram program,
        final Configuration configuration) {
    configuration.set(DeploymentOptions.ATTACHED, false);

    final List<JobID> applicationJobIds = new ArrayList<>();
    final PipelineExecutorServiceLoader executorServiceLoader =
            new WebSubmissionExecutorServiceLoader(applicationJobIds, dispatcherGateway);

    try {
        ClientUtils.executeProgram(
                executorServiceLoader, configuration, program, enforceSingleJobExecution, true);
    } catch (ProgramInvocationException e) {
        LOG.warn("Could not execute application: ", e);
        throw new FlinkRuntimeException("Could not execute application.", e);
    }

    return applicationJobIds;
}
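The pattern here is log-then-wrap: the checked ProgramInvocationException is logged at WARN with its full stack trace for the operator, then rethrown as an unchecked FlinkRuntimeException so callers only deal with a single failure type. A minimal self-contained sketch of that pattern (CheckedRunnable and runApplication are hypothetical; only FlinkRuntimeException is the real Flink class):

import org.apache.flink.util.FlinkRuntimeException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogAndRethrow {

    private static final Logger LOG = LoggerFactory.getLogger(LogAndRethrow.class);

    // Hypothetical stand-in for code that declares checked exceptions,
    // like ClientUtils.executeProgram above.
    @FunctionalInterface
    interface CheckedRunnable {
        void run() throws Exception;
    }

    // Log-then-wrap: record the stack trace once, then rethrow unchecked.
    public static void runApplication(CheckedRunnable userCode) {
        try {
            userCode.run();
        } catch (Exception e) {
            LOG.warn("Could not execute application: ", e);
            throw new FlinkRuntimeException("Could not execute application.", e);
        }
    }
}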
Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
The class ElasticsearchWriter, method extractFailures:
private void extractFailures(BulkRequest request, BulkResponse response) {
    if (!response.hasFailures()) {
        pendingActions -= request.numberOfActions();
        return;
    }

    Throwable chainedFailures = null;
    for (int i = 0; i < response.getItems().length; i++) {
        final BulkItemResponse itemResponse = response.getItems()[i];
        if (!itemResponse.isFailed()) {
            continue;
        }
        final Throwable failure = itemResponse.getFailure().getCause();
        if (failure == null) {
            continue;
        }
        final RestStatus restStatus = itemResponse.getFailure().getStatus();
        final DocWriteRequest<?> actionRequest = request.requests().get(i);

        chainedFailures =
                firstOrSuppressed(wrapException(restStatus, failure, actionRequest), chainedFailures);
    }
    if (chainedFailures == null) {
        return;
    }
    throw new FlinkRuntimeException(chainedFailures);
}
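Here firstOrSuppressed comes from org.apache.flink.util.ExceptionUtils: the first failure encountered becomes the primary exception and each later failure is attached to it as a suppressed exception, so one FlinkRuntimeException can report every failed bulk item at once. A minimal sketch of that accumulation, with throwIfAnyFailed as a hypothetical helper:

import java.util.List;

import org.apache.flink.util.FlinkRuntimeException;

import static org.apache.flink.util.ExceptionUtils.firstOrSuppressed;

public class FailureAccumulation {

    // Hypothetical helper: the first failure becomes the primary exception,
    // every later failure is added to it as a suppressed exception, and a
    // single FlinkRuntimeException reports them all.
    public static void throwIfAnyFailed(List<Throwable> failures) {
        Throwable chained = null;
        for (Throwable failure : failures) {
            chained = firstOrSuppressed(failure, chained);
        }
        if (chained != null) {
            throw new FlinkRuntimeException(chained);
        }
    }
}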
Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
The class CompileUtils, method compileExpression:
/**
 * Compiles expression code to a janino {@link ExpressionEvaluator}.
 *
 * @param code the expression code
 * @param argumentNames the expression argument names
 * @param argumentClasses the expression argument classes
 * @param returnClass the return type of the expression
 * @return the compiled expression evaluator
 */
public static ExpressionEvaluator compileExpression(
        String code,
        List<String> argumentNames,
        List<Class<?>> argumentClasses,
        Class<?> returnClass) {
    try {
        ExpressionEntry key = new ExpressionEntry(code, argumentNames, argumentClasses, returnClass);
        return COMPILED_EXPRESSION_CACHE.get(
                key,
                () -> {
                    ExpressionEvaluator expressionEvaluator = new ExpressionEvaluator();
                    // Input args
                    expressionEvaluator.setParameters(
                            argumentNames.toArray(new String[0]),
                            argumentClasses.toArray(new Class[0]));
                    // Result type
                    expressionEvaluator.setExpressionType(returnClass);
                    try {
                        // Compile
                        expressionEvaluator.cook(code);
                    } catch (CompileException e) {
                        throw new InvalidProgramException(
                                "Table program cannot be compiled. This is a bug. Please file an issue.\nExpression: "
                                        + code,
                                e);
                    }
                    return expressionEvaluator;
                });
    } catch (Exception e) {
        throw new FlinkRuntimeException(e.getMessage(), e);
    }
}
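The outer try/catch exists because the cache's get(key, loader) method declares a checked ExecutionException; wrapping any failure, including one thrown from the loader, in FlinkRuntimeException keeps compileExpression's signature free of checked exceptions. A minimal sketch of the same cache-and-wrap shape, assuming plain Guava on the classpath (Flink itself uses its shaded Guava; the CACHE field and lengthOf helper are hypothetical):

import com.google.common.cache.Cache;
import com.google.common.cache.CacheBuilder;

import org.apache.flink.util.FlinkRuntimeException;

public class CachedCompilation {

    // Hypothetical cache keyed by source code, mirroring the shape of
    // COMPILED_EXPRESSION_CACHE above.
    private static final Cache<String, Integer> CACHE =
            CacheBuilder.newBuilder().maximumSize(100).build();

    // Cache.get(key, loader) declares a checked ExecutionException; wrapping
    // it keeps the caller's signature clean, as in compileExpression above.
    public static Integer lengthOf(String code) {
        try {
            return CACHE.get(code, code::length);
        } catch (Exception e) {
            throw new FlinkRuntimeException(e.getMessage(), e);
        }
    }
}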
Use of org.apache.flink.util.FlinkRuntimeException in project flink by apache.
The class OutputConversionOperator, method processElement:
@Override
public void processElement(StreamRecord<RowData> element) throws Exception {
    final RowData rowData = element.getValue();

    if (consumeRowtimeMetadata) {
        // timestamp is TIMESTAMP_LTZ
        final long rowtime = rowData.getTimestamp(rowData.getArity() - 1, 3).getMillisecond();
        outRecord.setTimestamp(rowtime);
    } else if (rowtimeIndex != -1) {
        // timestamp might be TIMESTAMP or TIMESTAMP_LTZ
        final long rowtime = rowData.getTimestamp(rowtimeIndex, 3).getMillisecond();
        outRecord.setTimestamp(rowtime);
    }

    final Object internalRecord;
    if (atomicFieldGetter != null) {
        internalRecord = atomicFieldGetter.getFieldOrNull(rowData);
    } else {
        internalRecord = rowData;
    }

    final Object externalRecord;
    try {
        externalRecord = converter.toExternal(internalRecord);
    } catch (Exception e) {
        throw new FlinkRuntimeException(
                String.format(
                        "Error during output conversion from internal Table API to external "
                                + "DataStream API data structures. Make sure that the provided "
                                + "data types that configure the converters are correctly "
                                + "declared in the schema. Affected record:\n%s",
                        internalRecord),
                e);
    }

    outRecord.replace(externalRecord);
    output.collect(outRecord);
}
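The catch block is a good example of attaching context to the wrapping exception: the failing record itself is embedded in the message, which is usually the fastest way to spot a schema mismatch from the job logs. A minimal sketch of that wrap-with-context pattern (the Converter interface and convertOrExplain are hypothetical stand-ins for the operator's converter):

import org.apache.flink.util.FlinkRuntimeException;

public class ConvertOrExplain {

    // Hypothetical stand-in for the operator's internal-to-external converter.
    interface Converter {
        Object toExternal(Object internalRecord) throws Exception;
    }

    // Wrap-with-context: embed the failing record in the exception message so
    // the root cause can be diagnosed without a debugger.
    public static Object convertOrExplain(Converter converter, Object internalRecord) {
        try {
            return converter.toExternal(internalRecord);
        } catch (Exception e) {
            throw new FlinkRuntimeException(
                    String.format("Error during output conversion. Affected record:%n%s", internalRecord),
                    e);
        }
    }
}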