Use of co.cask.cdap.api.flow.flowlet.FailurePolicy in project cdap by caskdata.
In class FlowletSpecificationCodec, method deserialize:
@Override
public FlowletSpecification deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
    throws JsonParseException {
  JsonObject jsonObj = json.getAsJsonObject();

  String className = jsonObj.get("className").getAsString();
  String name = jsonObj.get("name").getAsString();
  String description = jsonObj.get("description").getAsString();
  FailurePolicy policy = FailurePolicy.valueOf(jsonObj.get("failurePolicy").getAsString());
  // deserializeSet/deserializeMap are helpers inherited from the codec's base class.
  Set<String> dataSets = deserializeSet(jsonObj.get("datasets"), context, String.class);
  Map<String, String> properties = deserializeMap(jsonObj.get("properties"), context, String.class);
  Resources resources = context.deserialize(jsonObj.get("resources"), Resources.class);

  return new DefaultFlowletSpecification(className, name, description, policy, dataSets, properties, resources);
}
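For context, a codec like this would typically be registered with Gson via registerTypeAdapter so that FlowletSpecification round-trips through the method above. The snippet below is a hypothetical usage sketch, not CDAP source: the wrapper class, the JSON literal, and the printed getters are assumptions for illustration.

import co.cask.cdap.api.flow.flowlet.FlowletSpecification;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;

public class FlowletSpecCodecExample {
  public static void main(String[] args) {
    // Register the codec so Gson delegates FlowletSpecification handling to it.
    Gson gson = new GsonBuilder()
      .registerTypeAdapter(FlowletSpecification.class, new FlowletSpecificationCodec())
      .create();

    // Hypothetical JSON shaped like the fields read in deserialize() above.
    String json = "{\"className\":\"com.example.MyFlowlet\",\"name\":\"myFlowlet\","
      + "\"description\":\"demo\",\"failurePolicy\":\"RETRY\","
      + "\"datasets\":[],\"properties\":{},\"resources\":{}}";

    FlowletSpecification spec = gson.fromJson(json, FlowletSpecification.class);
    System.out.println(spec.getName() + " -> " + spec.getFailurePolicy());
  }
}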
Use of co.cask.cdap.api.flow.flowlet.FailurePolicy in project cdap by caskdata.
In class FlowletProcessDriver, method processMethodCallback:
private <T> ProcessMethodCallback processMethodCallback(
    final PriorityQueue<FlowletProcessEntry<?>> processQueue,
    final FlowletProcessEntry<T> processEntry,
    final InputDatum<T> input) {
  // If it is a generator flowlet (no input needed), processedCount is 1.
  final int processedCount = processEntry.getProcessSpec().getProcessMethod().needsInput() ? input.size() : 1;

  return new ProcessMethodCallback() {
    private final LoadingCache<String, MetricsContext> queueMetricsCollectors =
      CacheBuilder.newBuilder()
        .expireAfterAccess(1, TimeUnit.HOURS)
        .build(new CacheLoader<String, MetricsContext>() {
          @Override
          public MetricsContext load(String key) throws Exception {
            return flowletContext.getProgramMetrics().childContext(Constants.Metrics.Tag.FLOWLET_QUEUE, key);
          }
        });

    @Override
    public void onSuccess(Object object, InputContext inputContext) {
      try {
        gaugeEventProcessed(input.getQueueName());
        txCallback.onSuccess(object, inputContext);
      } catch (Throwable t) {
        LOG.error("Exception on onSuccess call: {}", flowletContext, t);
      } finally {
        enqueueEntry();
      }
    }

    @Override
    public void onFailure(Object inputObject, InputContext inputContext,
                          FailureReason reason, InputAcknowledger inputAcknowledger) {
      LOG.warn("Process failure: {}, {}, input: {}", flowletContext, reason.getMessage(), input, reason.getCause());
      FailurePolicy failurePolicy;
      try {
        flowletContext.getProgramMetrics().increment("process.errors", 1);
        failurePolicy = txCallback.onFailure(inputObject, inputContext, reason);
        if (failurePolicy == null) {
          failurePolicy = FailurePolicy.RETRY;
          LOG.info("Callback returned null for failure policy. Defaulting to {}.", failurePolicy);
        }
      } catch (Throwable t) {
        LOG.error("Exception on onFailure call: {}", flowletContext, t);
        failurePolicy = FailurePolicy.RETRY;
      }
      // Cap retries: once the limit is reached, force IGNORE so the input is acked and dropped.
      if (input.getRetry() >= processEntry.getProcessSpec().getProcessMethod().getMaxRetries()) {
        LOG.info("Too many retries, ignoring the input: {}", input);
        failurePolicy = FailurePolicy.IGNORE;
      }
      if (failurePolicy == FailurePolicy.RETRY) {
        // Re-queue the same input for another attempt, wrapped in a single-item reader.
        FlowletProcessEntry retryEntry = processEntry.isRetry()
          ? processEntry
          : FlowletProcessEntry.create(processEntry.getProcessSpec(),
                                       new ProcessSpecification<>(new SingleItemQueueReader<>(input),
                                                                  processEntry.getProcessSpec().getProcessMethod(),
                                                                  null));
        processQueue.offer(retryEntry);
      } else if (failurePolicy == FailurePolicy.IGNORE) {
        try {
          gaugeEventProcessed(input.getQueueName());
          inputAcknowledger.ack();
        } catch (Throwable t) {
          LOG.error("Fatal problem, failed to ack an input: {}", flowletContext, t);
        } finally {
          enqueueEntry();
        }
      }
    }

    private void enqueueEntry() {
      processQueue.offer(processEntry.resetRetry());
    }

    private void gaugeEventProcessed(QueueName inputQueueName) {
      if (processEntry.isTick()) {
        flowletContext.getProgramMetrics().increment("process.ticks.processed", processedCount);
      } else if (inputQueueName == null) {
        flowletContext.getProgramMetrics().increment("process.events.processed", processedCount);
      } else {
        queueMetricsCollectors.getUnchecked(inputQueueName.getSimpleName())
          .increment("process.events.processed", processedCount);
      }
    }
  };
}
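The FailurePolicy consumed above comes from the flowlet's Callback: txCallback delegates to the user flowlet, a null return falls back to RETRY, and exhausted retries force IGNORE. Below is a hypothetical flowlet showing how a policy might be chosen per failure; the class name, input type, and the IOException-based rule are assumptions for illustration, not CDAP source.

import java.io.IOException;
import co.cask.cdap.api.annotation.ProcessInput;
import co.cask.cdap.api.flow.flowlet.AbstractFlowlet;
import co.cask.cdap.api.flow.flowlet.Callback;
import co.cask.cdap.api.flow.flowlet.FailurePolicy;
import co.cask.cdap.api.flow.flowlet.FailureReason;
import co.cask.cdap.api.flow.flowlet.InputContext;

public class ParsingFlowlet extends AbstractFlowlet implements Callback {

  @ProcessInput
  public void process(String event) {
    // ... any exception thrown here surfaces as a FailureReason in onFailure ...
  }

  @Override
  public void onSuccess(Object input, InputContext inputContext) {
    // No-op: success needs no special handling in this sketch.
  }

  @Override
  public FailurePolicy onFailure(Object input, InputContext inputContext, FailureReason reason) {
    // Retry transient I/O problems; drop inputs that can never succeed.
    return reason.getCause() instanceof IOException
      ? FailurePolicy.RETRY
      : FailurePolicy.IGNORE;
  }
}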