Use of org.apache.flink.util.OptionalFailure in project flink by apache.
In class JobResultDeserializer, method parseAccumulatorResults:
@SuppressWarnings("unchecked")
private Map<String, SerializedValue<OptionalFailure<Object>>> parseAccumulatorResults(
        final JsonParser p, final DeserializationContext ctxt) throws IOException {
    final Map<String, SerializedValue<OptionalFailure<Object>>> accumulatorResults = new HashMap<>();
    // Read name/value pairs until the enclosing JSON object ends.
    while (true) {
        final JsonToken jsonToken = p.nextToken();
        assertNotEndOfInput(p, jsonToken);
        if (jsonToken == JsonToken.END_OBJECT) {
            break;
        }
        // The current token is the field name (the accumulator name); advance to its value.
        final String accumulatorName = p.getValueAsString();
        p.nextValue();
        accumulatorResults.put(
                accumulatorName,
                (SerializedValue<OptionalFailure<Object>>) serializedValueDeserializer.deserialize(p, ctxt));
    }
    return accumulatorResults;
}
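The map returned above still contains serialized, wrapped values; unwrapping is left to the caller. Below is a minimal, hypothetical sketch of that unwrapping step, assuming the SerializedValue#deserializeValue(ClassLoader) and OptionalFailure#getUnchecked() accessors; the accumulator name and class loader choice are illustrative, and exception handling is omitted.

// Hypothetical caller-side unwrapping of one entry of the returned map.
Map<String, SerializedValue<OptionalFailure<Object>>> results = parseAccumulatorResults(p, ctxt);
SerializedValue<OptionalFailure<Object>> serialized = results.get("myAccumulator");
// deserializeValue() restores the OptionalFailure wrapper from its serialized form.
OptionalFailure<Object> wrapped = serialized.deserializeValue(Thread.currentThread().getContextClassLoader());
// getUnchecked() is assumed to return the value, or rethrow the recorded failure as a runtime exception.
Object accumulatorValue = wrapped.getUnchecked();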
Use of org.apache.flink.util.OptionalFailure in project flink by apache.
In class CollectResultIteratorTest, method createIteratorAndJobClient:
private Tuple2<CollectResultIterator<Integer>, JobClient> createIteratorAndJobClient(
        AbstractCollectResultBuffer<Integer> buffer,
        AbstractTestCoordinationRequestHandler<Integer> handler) {
    CollectResultIterator<Integer> iterator =
            new CollectResultIterator<>(
                    buffer, CompletableFuture.completedFuture(TEST_OPERATOR_ID), ACCUMULATOR_NAME, 0);

    TestJobClient.JobInfoProvider infoProvider =
            new TestJobClient.JobInfoProvider() {

                @Override
                public boolean isJobFinished() {
                    return handler.isClosed();
                }

                @Override
                public Map<String, OptionalFailure<Object>> getAccumulatorResults() {
                    return handler.getAccumulatorResults();
                }
            };

    TestJobClient jobClient = new TestJobClient(TEST_JOB_ID, TEST_OPERATOR_ID, handler, infoProvider);
    iterator.setJobClient(jobClient);
    return Tuple2.of(iterator, jobClient);
}
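Every value the JobInfoProvider hands back is wrapped in OptionalFailure, so the iterator side can distinguish a collected result from a failure recorded against the accumulator. A minimal, hypothetical sketch of such an inspection, assuming the isFailure() and getUnchecked() accessors:

// Hypothetical inspection of the handler-backed accumulator map.
Map<String, OptionalFailure<Object>> accumulators = handler.getAccumulatorResults();
OptionalFailure<Object> entry = accumulators.get(ACCUMULATOR_NAME);
if (entry != null && !entry.isFailure()) {
    // getUnchecked() is assumed to return the wrapped value for a successful entry.
    Object collectedResults = entry.getUnchecked();
}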
Use of org.apache.flink.util.OptionalFailure in project flink by apache.
In class CollectionExecutor, method execute:
// --------------------------------------------------------------------------------------------
// General execution methods
// --------------------------------------------------------------------------------------------
public JobExecutionResult execute(Plan program) throws Exception {
    long startTime = System.currentTimeMillis();

    JobID jobID = program.getJobId() == null ? new JobID() : program.getJobId();
    initCache(program.getCachedFiles());

    // Execute every sink of the plan (and, transitively, its inputs).
    Collection<? extends GenericDataSinkBase<?>> sinks = program.getDataSinks();
    for (Operator<?> sink : sinks) {
        execute(sink, jobID);
    }

    long endTime = System.currentTimeMillis();

    // Each accumulator value is wrapped in OptionalFailure by AccumulatorHelper.toResultMap.
    Map<String, OptionalFailure<Object>> accumulatorResults = AccumulatorHelper.toResultMap(accumulators);
    return new JobExecutionResult(null, endTime - startTime, accumulatorResults);
}
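Because AccumulatorHelper.toResultMap wraps each accumulator value in OptionalFailure, one accumulator that could not be materialized does not invalidate the whole JobExecutionResult. A consumer-side sketch, assuming the isFailure(), getFailureCause(), and getUnchecked() accessors:

// Hypothetical consumer of the accumulator map built above.
for (Map.Entry<String, OptionalFailure<Object>> entry : accumulatorResults.entrySet()) {
    if (entry.getValue().isFailure()) {
        // getFailureCause() is assumed to expose the exception recorded for this accumulator.
        Throwable cause = entry.getValue().getFailureCause();
    } else {
        Object value = entry.getValue().getUnchecked();
    }
}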
Use of org.apache.flink.util.OptionalFailure in project flink by apache.
In class SubtaskExecutionAttemptAccumulatorsHandlerTest, method testHandleRequest:
@Test
public void testHandleRequest() throws Exception {
    // Instantiate the handler.
    final RestHandlerConfiguration restHandlerConfiguration =
            RestHandlerConfiguration.fromConfiguration(new Configuration());
    final SubtaskExecutionAttemptAccumulatorsHandler handler =
            new SubtaskExecutionAttemptAccumulatorsHandler(
                    () -> null,
                    Time.milliseconds(100L),
                    Collections.emptyMap(),
                    SubtaskExecutionAttemptAccumulatorsHeaders.getInstance(),
                    new DefaultExecutionGraphCache(
                            restHandlerConfiguration.getTimeout(),
                            Time.milliseconds(restHandlerConfiguration.getRefreshInterval())),
                    TestingUtils.defaultExecutor());

    // Instantiate an empty request.
    final HandlerRequest<EmptyRequestBody> request =
            HandlerRequest.create(EmptyRequestBody.getInstance(), new SubtaskAttemptMessageParameters());

    // Two successful accumulators and one recorded failure.
    final Map<String, OptionalFailure<Accumulator<?, ?>>> userAccumulators = new HashMap<>(3);
    userAccumulators.put("IntCounter", OptionalFailure.of(new IntCounter(10)));
    userAccumulators.put("LongCounter", OptionalFailure.of(new LongCounter(100L)));
    userAccumulators.put("Failure", OptionalFailure.ofFailure(new FlinkRuntimeException("Test")));

    // Build the expected result.
    final StringifiedAccumulatorResult[] accumulatorResults =
            StringifiedAccumulatorResult.stringifyAccumulatorResults(userAccumulators);
    final int attemptNum = 1;
    final int subtaskIndex = 2;

    // Instantiate the tested execution.
    final ArchivedExecution execution = new ArchivedExecution(
            accumulatorResults, null, new ExecutionAttemptID(), attemptNum, ExecutionState.FINISHED,
            null, null, null, subtaskIndex, new long[ExecutionState.values().length]);

    // Invoke the tested method.
    final SubtaskExecutionAttemptAccumulatorsInfo accumulatorsInfo = handler.handleRequest(request, execution);

    final ArrayList<UserAccumulator> userAccumulatorList = new ArrayList<>(userAccumulators.size());
    for (StringifiedAccumulatorResult accumulatorResult : accumulatorResults) {
        userAccumulatorList.add(new UserAccumulator(
                accumulatorResult.getName(), accumulatorResult.getType(), accumulatorResult.getValue()));
    }
    final SubtaskExecutionAttemptAccumulatorsInfo expected = new SubtaskExecutionAttemptAccumulatorsInfo(
            subtaskIndex, attemptNum, execution.getAttemptId().toString(), userAccumulatorList);

    // Verify.
    assertEquals(expected, accumulatorsInfo);
}
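The "Failure" entry above is created with OptionalFailure.ofFailure, so stringification can report it next to the successful counters instead of throwing. A short, hypothetical assertion on that entry, assuming the isFailure() and getFailureCause() accessors:

// Hypothetical direct check of the failed entry (not part of the original test).
final OptionalFailure<Accumulator<?, ?>> failedEntry = userAccumulators.get("Failure");
assertTrue(failedEntry.isFailure());
assertTrue(failedEntry.getFailureCause() instanceof FlinkRuntimeException);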
Use of org.apache.flink.util.OptionalFailure in project flink by apache.
In class StringifiedAccumulatorResultTest, method stringifyingResultsShouldIncorporateAccumulatorLocalValueDirectly:
@Test
public void stringifyingResultsShouldIncorporateAccumulatorLocalValueDirectly() {
    final String name = "a";
    final int targetValue = 314159;
    final IntCounter acc = new IntCounter();
    acc.add(targetValue);
    final Map<String, OptionalFailure<Accumulator<?, ?>>> accumulatorMap = new HashMap<>();
    accumulatorMap.put(name, OptionalFailure.of(acc));

    final StringifiedAccumulatorResult[] results =
            StringifiedAccumulatorResult.stringifyAccumulatorResults(accumulatorMap);

    assertEquals(1, results.length);
    final StringifiedAccumulatorResult firstResult = results[0];
    assertEquals(name, firstResult.getName());
    assertEquals("IntCounter", firstResult.getType());
    assertEquals(Integer.toString(targetValue), firstResult.getValue());
}
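For contrast, a failed accumulator can be wrapped with OptionalFailure.ofFailure and run through the same stringification. The exact stringified value of a failure is formatting-specific, so this hypothetical companion sketch only asserts the count and name:

// Hypothetical companion case: a failed accumulator instead of a local value.
final Map<String, OptionalFailure<Accumulator<?, ?>>> failureMap = new HashMap<>();
failureMap.put("b", OptionalFailure.ofFailure(new FlinkRuntimeException("failed accumulator")));
final StringifiedAccumulatorResult[] failureResults =
        StringifiedAccumulatorResult.stringifyAccumulatorResults(failureMap);
assertEquals(1, failureResults.length);
assertEquals("b", failureResults[0].getName());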