Use of org.apache.nifi.processor.Relationship in project nifi by apache.
The class DistributeLoad, method createWeightedList.
@OnScheduled
public void createWeightedList(final ProcessContext context) {
    final Map<Integer, Integer> weightings = new LinkedHashMap<>();
    String distStrat = context.getProperty(DISTRIBUTION_STRATEGY).getValue();
    if (distStrat.equals(STRATEGY_LOAD_DISTRIBUTION_SERVICE)) {
        String hostNamesValue = context.getProperty(HOSTNAMES).getValue();
        String[] hostNames = hostNamesValue.split("(?:,+|;+|\\s+)");
        Set<String> hostNameSet = new HashSet<>();
        for (String hostName : hostNames) {
            if (StringUtils.isNotBlank(hostName)) {
                hostNameSet.add(hostName);
            }
        }
        LoadDistributionService svc = context.getProperty(LOAD_DISTRIBUTION_SERVICE_TEMPLATE).asControllerService(LoadDistributionService.class);
        myListener = new LoadDistributionListener() {

            @Override
            public void update(Map<String, Integer> loadInfo) {
                for (Relationship rel : relationshipsRef.get()) {
                    String hostname = rel.getDescription();
                    Integer weight = 1;
                    if (loadInfo.containsKey(hostname)) {
                        weight = loadInfo.get(hostname);
                    }
                    weightings.put(Integer.decode(rel.getName()), weight);
                }
                updateWeightedRelationships(weightings);
            }
        };
        Map<String, Integer> loadInfo = svc.getLoadDistribution(hostNameSet, myListener);
        for (Relationship rel : relationshipsRef.get()) {
            String hostname = rel.getDescription();
            Integer weight = 1;
            if (loadInfo.containsKey(hostname)) {
                weight = loadInfo.get(hostname);
            }
            weightings.put(Integer.decode(rel.getName()), weight);
        }
    } else {
        final int numRelationships = context.getProperty(NUM_RELATIONSHIPS).asInteger();
        for (int i = 1; i <= numRelationships; i++) {
            weightings.put(i, 1);
        }
        for (final PropertyDescriptor propDesc : context.getProperties().keySet()) {
            if (!this.properties.contains(propDesc)) {
                final int relationship = Integer.parseInt(propDesc.getName());
                final int weighting = context.getProperty(propDesc).asInteger();
                weightings.put(relationship, weighting);
            }
        }
    }
    updateWeightedRelationships(weightings);
}
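In the non-service branch, every relationship is seeded with weight 1 and dynamic properties whose names are relationship numbers override individual weights. Below is a minimal plain-Java sketch of just that bookkeeping, with no NiFi ProcessContext involved; the class and method names are illustrative, not part of the processor.

import java.util.LinkedHashMap;
import java.util.Map;

// Sketch: default every relationship to weight 1, then let dynamic
// properties named after relationship numbers override single weights.
public class WeightedListSketch {

    public static Map<Integer, Integer> buildWeightings(int numRelationships, Map<String, String> dynamicProperties) {
        final Map<Integer, Integer> weightings = new LinkedHashMap<>();
        for (int i = 1; i <= numRelationships; i++) {
            weightings.put(i, 1);
        }
        for (Map.Entry<String, String> entry : dynamicProperties.entrySet()) {
            // a property named "2" with value "5" gives relationship 2 a weight of 5
            weightings.put(Integer.parseInt(entry.getKey()), Integer.parseInt(entry.getValue()));
        }
        return weightings;
    }

    public static void main(String[] args) {
        Map<String, String> dynamic = Map.of("2", "5");
        // prints {1=1, 2=5, 3=1}
        System.out.println(buildWeightings(3, dynamic));
    }
}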
Use of org.apache.nifi.processor.Relationship in project nifi by apache.
The class DistributeLoad, method updateWeightedRelationships.
private void updateWeightedRelationships(final Map<Integer, Integer> weightings) {
    final List<Relationship> relationshipList = new ArrayList<>();
    for (final Map.Entry<Integer, Integer> entry : weightings.entrySet()) {
        final String relationshipName = String.valueOf(entry.getKey());
        final Relationship relationship = new Relationship.Builder().name(relationshipName).build();
        for (int i = 0; i < entry.getValue(); i++) {
            relationshipList.add(relationship);
        }
    }
    this.weightedRelationshipListRef.set(Collections.unmodifiableList(relationshipList));
}
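Repeating each relationship as many times as its weight is what makes a simple counter-based round robin proportional. A small sketch of that idea, assuming nifi-api is on the classpath; only Relationship.Builder and getName() come from the NiFi API, the counter and class names are illustrative.

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.nifi.processor.Relationship;

// Sketch: an incrementing counter modulo the weighted list size yields
// each relationship in proportion to its weight.
public class WeightedRoundRobinSketch {

    private final AtomicLong counter = new AtomicLong(0L);

    public Relationship next(List<Relationship> weightedList) {
        int index = (int) (counter.getAndIncrement() % weightedList.size());
        return weightedList.get(index);
    }

    public static void main(String[] args) {
        Relationship one = new Relationship.Builder().name("1").build();
        Relationship two = new Relationship.Builder().name("2").build();
        List<Relationship> weighted = new ArrayList<>();
        // weight 1 for relationship "1", weight 3 for relationship "2"
        weighted.add(one);
        weighted.add(two);
        weighted.add(two);
        weighted.add(two);
        WeightedRoundRobinSketch sketch = new WeightedRoundRobinSketch();
        for (int i = 0; i < 8; i++) {
            // prints: 1 2 2 2 1 2 2 2
            System.out.print(sketch.next(weighted).getName() + " ");
        }
    }
}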
Use of org.apache.nifi.processor.Relationship in project nifi by apache.
The class DistributeLoad, method customValidate.
@Override
protected Collection<ValidationResult> customValidate(ValidationContext validationContext) {
    Collection<ValidationResult> results = new ArrayList<>();
    if (doCustomValidate.getAndSet(false)) {
        String distStrat = validationContext.getProperty(DISTRIBUTION_STRATEGY).getValue();
        if (distStrat.equals(STRATEGY_LOAD_DISTRIBUTION_SERVICE)) {
            // make sure Hostnames and Controller service are set
            PropertyValue propDesc = validationContext.getProperty(HOSTNAMES);
            if (null == propDesc || null == propDesc.getValue() || propDesc.getValue().isEmpty()) {
                results.add(new ValidationResult.Builder().subject(HOSTNAMES.getName()).explanation("Must specify Hostnames when using 'Load Distribution Strategy'").valid(false).build());
            }
            propDesc = validationContext.getProperty(LOAD_DISTRIBUTION_SERVICE_TEMPLATE);
            if (null == propDesc || null == propDesc.getValue() || propDesc.getValue().isEmpty()) {
                results.add(new ValidationResult.Builder().subject(LOAD_DISTRIBUTION_SERVICE_TEMPLATE.getName()).explanation("Must specify 'Load Distribution Service ID' when using 'Load Distribution Strategy'").valid(false).build());
            }
            if (results.isEmpty()) {
                int numRels = validationContext.getProperty(NUM_RELATIONSHIPS).asInteger();
                String hostNamesValue = validationContext.getProperty(HOSTNAMES).getValue();
                String[] hostNames = hostNamesValue.split("(?:,+|;+|\\s+)");
                int numHosts = 0;
                for (String hostName : hostNames) {
                    if (StringUtils.isNotBlank(hostName)) {
                        hostNames[numHosts++] = hostName;
                    }
                }
                if (numHosts > numRels) {
                    results.add(new ValidationResult.Builder().subject("Number of Relationships and Hostnames").explanation("Number of Relationships must be equal to, or greater than, the number of host names").valid(false).build());
                } else {
                    // create new relationships with descriptions of hostname
                    Set<Relationship> relsWithDesc = new TreeSet<>();
                    for (int i = 0; i < numHosts; i++) {
                        relsWithDesc.add(new Relationship.Builder().name(String.valueOf(i + 1)).description(hostNames[i]).build());
                    }
                    // add add'l rels if configuration requires it...it probably shouldn't
                    for (int i = numHosts + 1; i <= numRels; i++) {
                        relsWithDesc.add(createRelationship(i));
                    }
                    relationshipsRef.set(Collections.unmodifiableSet(relsWithDesc));
                }
            }
        }
    }
    return results;
}
Use of org.apache.nifi.processor.Relationship in project nifi by apache.
The class DistributeLoad, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) {
    final FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final DistributionStrategy strategy = strategyRef.get();
    final Set<Relationship> available = context.getAvailableRelationships();
    final int numRelationships = context.getProperty(NUM_RELATIONSHIPS).asInteger();
    final boolean allDestinationsAvailable = (available.size() == numRelationships);
    if (!allDestinationsAvailable && strategy.requiresAllDestinationsAvailable()) {
        // can't transfer the FlowFiles. Roll back and yield
        session.rollback();
        context.yield();
        return;
    }
    final Relationship relationship = strategy.mapToRelationship(context, flowFile);
    if (relationship == null) {
        // can't transfer the FlowFiles. Roll back and yield
        session.rollback();
        context.yield();
        return;
    }
    session.transfer(flowFile, relationship);
    session.getProvenanceReporter().route(flowFile, relationship);
}
Use of org.apache.nifi.processor.Relationship in project nifi by apache.
The class ExecuteStreamCommand, method onTrigger.
@Override
public void onTrigger(ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile inputFlowFile = session.get();
    if (null == inputFlowFile) {
        return;
    }
    final ArrayList<String> args = new ArrayList<>();
    final boolean putToAttribute = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).isSet();
    final Integer attributeSize = context.getProperty(PUT_ATTRIBUTE_MAX_LENGTH).asInteger();
    final String attributeName = context.getProperty(PUT_OUTPUT_IN_ATTRIBUTE).getValue();
    final String executeCommand = context.getProperty(EXECUTION_COMMAND).evaluateAttributeExpressions(inputFlowFile).getValue();
    args.add(executeCommand);
    final String commandArguments = context.getProperty(EXECUTION_ARGUMENTS).evaluateAttributeExpressions(inputFlowFile).getValue();
    final boolean ignoreStdin = Boolean.parseBoolean(context.getProperty(IGNORE_STDIN).getValue());
    if (!StringUtils.isBlank(commandArguments)) {
        for (String arg : ArgumentUtils.splitArgs(commandArguments, context.getProperty(ARG_DELIMITER).getValue().charAt(0))) {
            args.add(arg);
        }
    }
    final String workingDir = context.getProperty(WORKING_DIR).evaluateAttributeExpressions(inputFlowFile).getValue();
    final ProcessBuilder builder = new ProcessBuilder();
    logger.debug("Executing and waiting for command {} with arguments {}", new Object[] { executeCommand, commandArguments });
    File dir = null;
    if (!StringUtils.isBlank(workingDir)) {
        dir = new File(workingDir);
        if (!dir.exists() && !dir.mkdirs()) {
            logger.warn("Failed to create working directory {}, using current working directory {}", new Object[] { workingDir, System.getProperty("user.dir") });
        }
    }
    final Map<String, String> environment = new HashMap<>();
    for (final Map.Entry<PropertyDescriptor, String> entry : context.getProperties().entrySet()) {
        if (entry.getKey().isDynamic()) {
            environment.put(entry.getKey().getName(), entry.getValue());
        }
    }
    builder.environment().putAll(environment);
    builder.command(args);
    builder.directory(dir);
    builder.redirectInput(Redirect.PIPE);
    builder.redirectOutput(Redirect.PIPE);
    final Process process;
    try {
        process = builder.start();
    } catch (IOException e) {
        logger.error("Could not create external process to run command", e);
        throw new ProcessException(e);
    }
    try (final OutputStream pos = process.getOutputStream();
         final InputStream pis = process.getInputStream();
         final InputStream pes = process.getErrorStream();
         final BufferedInputStream bis = new BufferedInputStream(pis);
         final BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(pes))) {
        int exitCode = -1;
        final BufferedOutputStream bos = new BufferedOutputStream(pos);
        FlowFile outputFlowFile = putToAttribute ? inputFlowFile : session.create(inputFlowFile);
        ProcessStreamWriterCallback callback = new ProcessStreamWriterCallback(ignoreStdin, bos, bis, logger, attributeName, session, outputFlowFile, process, putToAttribute, attributeSize);
        session.read(inputFlowFile, callback);
        outputFlowFile = callback.outputFlowFile;
        if (putToAttribute) {
            outputFlowFile = session.putAttribute(outputFlowFile, attributeName, new String(callback.outputBuffer, 0, callback.size));
        }
        exitCode = callback.exitCode;
        logger.debug("Execution complete for command: {}. Exited with code: {}", new Object[] { executeCommand, exitCode });
        Map<String, String> attributes = new HashMap<>();
        final StringBuilder strBldr = new StringBuilder();
        try {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                strBldr.append(line).append("\n");
            }
        } catch (IOException e) {
            strBldr.append("Unknown...could not read Process's Std Error");
        }
        int length = strBldr.length() > 4000 ? 4000 : strBldr.length();
        attributes.put("execution.error", strBldr.substring(0, length));
        final Relationship outputFlowFileRelationship = putToAttribute ? ORIGINAL_RELATIONSHIP : (exitCode != 0) ? NONZERO_STATUS_RELATIONSHIP : OUTPUT_STREAM_RELATIONSHIP;
        if (exitCode == 0) {
            logger.info("Transferring flow file {} to {}", new Object[] { outputFlowFile, outputFlowFileRelationship.getName() });
        } else {
            logger.error("Transferring flow file {} to {}. Executable command {} ended in an error: {}", new Object[] { outputFlowFile, outputFlowFileRelationship.getName(), executeCommand, strBldr.toString() });
        }
        attributes.put("execution.status", Integer.toString(exitCode));
        attributes.put("execution.command", executeCommand);
        attributes.put("execution.command.args", commandArguments);
        outputFlowFile = session.putAllAttributes(outputFlowFile, attributes);
        if (NONZERO_STATUS_RELATIONSHIP.equals(outputFlowFileRelationship)) {
            outputFlowFile = session.penalize(outputFlowFile);
        }
        // This will transfer the FlowFile that received the stream output to its destined relationship.
        // In the event the stream output is put into an attribute of the original FlowFile, it will be transferred here.
        session.transfer(outputFlowFile, outputFlowFileRelationship);
        if (!putToAttribute) {
            logger.info("Transferring flow file {} to original", new Object[] { inputFlowFile });
            inputFlowFile = session.putAllAttributes(inputFlowFile, attributes);
            session.transfer(inputFlowFile, ORIGINAL_RELATIONSHIP);
        }
    } catch (final IOException ex) {
        // could not close Process related streams
        logger.warn("Problem terminating Process {}", new Object[] { process }, ex);
    } finally {
        // last ditch effort to clean up that process.
        process.destroy();
    }
}
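Stripped of the NiFi session handling, the core of this method is a standard ProcessBuilder pattern: build the argument list, set the working directory and environment, pipe the streams, capture stderr (capped at 4000 characters, as the processor does for the "execution.error" attribute), and read the exit code. A stand-alone sketch of that pattern, using only the JDK; the "echo" command assumes a Unix-like environment and the class name is illustrative.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.ProcessBuilder.Redirect;
import java.util.Arrays;
import java.util.List;

// Sketch: launch a command, capture its stderr and exit code, cap stderr at 4000 chars.
public class StreamCommandSketch {

    public static void main(String[] args) throws IOException, InterruptedException {
        final List<String> command = Arrays.asList("echo", "hello");
        final ProcessBuilder builder = new ProcessBuilder(command);
        builder.environment().put("EXAMPLE_VAR", "value");   // dynamic properties become environment variables
        builder.redirectInput(Redirect.PIPE);
        builder.redirectOutput(Redirect.PIPE);

        final Process process = builder.start();
        final StringBuilder stdErr = new StringBuilder();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                stdErr.append(line).append("\n");
            }
        }
        final int exitCode = process.waitFor();
        final int capped = Math.min(stdErr.length(), 4000);  // same cap applied to execution.error above
        System.out.println("exit code: " + exitCode);
        System.out.println("stderr: " + stdErr.substring(0, capped));
    }
}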