Use of org.apache.nifi.components.PropertyValue in project kylo by Teradata.
From the class LoadHighWaterMark, the onTrigger method:
/* (non-Javadoc)
 * @see org.apache.nifi.processor.AbstractProcessor#onTrigger(org.apache.nifi.processor.ProcessContext, org.apache.nifi.processor.ProcessSession)
 */
@Override
public void onTrigger(ProcessContext context, ProcessSession session) throws ProcessException {
    FlowFile inputFF = session.get();
    FlowFile outputFF = inputFF;
    boolean createdFlowfile = false;
    // Create the flow file if we are the start of the flow.
    if (outputFF == null && !context.hasNonLoopConnection()) {
        outputFF = session.create();
        createdFlowfile = true;
    }
    if (outputFF != null) {
        try {
            outputFF = initialize(context, session, outputFF);
        } catch (FeedIdNotFoundException e) {
            // If the feed ID is not yet available for a flow file this processor created, yield and retry later; otherwise re-throw the exception.
            if (createdFlowfile && outputFF.getAttribute(FEED_ID_ATTR) == null) {
                getLog().debug("ID for feed was not available yet - yielding");
                session.remove(outputFF);
                context.yield();
                return;
            } else {
                throw e;
            }
        }
        MetadataRecorder recorder = context.getProperty(CommonProperties.METADATA_SERVICE).asControllerService(MetadataProviderService.class).getRecorder();
        String waterMark = context.getProperty(HIGH_WATER_MARK).getValue();
        String propName = context.getProperty(PROPERTY_NAME).getValue();
        String initialValue = context.getProperty(INITIAL_VALUE).getValue();
        try {
            String feedId = getFeedId(context, outputFF);
            try {
                outputFF = recorder.loadWaterMark(session, outputFF, feedId, waterMark, propName, initialValue);
            } catch (WaterMarkActiveException e) {
                throw e;
            } catch (Exception e) {
                getLog().error("Failed to load the current high-water mark: {} for feed {}", new Object[] { waterMark, feedId }, e);
                session.transfer(outputFF, CommonProperties.REL_FAILURE);
                // Return here so the flow file is not also transferred to success below.
                return;
            }
            this.yieldCount.set(0);
            session.transfer(outputFF, CommonProperties.REL_SUCCESS);
        } catch (WaterMarkActiveException e) {
            String activeStrategy = context.getProperty(ACTIVE_WATER_MARK_STRATEGY).getValue();
            if ("ROUTE".equals(activeStrategy)) {
                handleRouteOnActive(session, outputFF, waterMark);
            } else {
                PropertyValue value = context.getProperty(MAX_YIELD_COUNT);
                int maxCount = value.isSet() ? value.asInteger() : Integer.MAX_VALUE - 1;
                int count = this.yieldCount.incrementAndGet();
                if (maxCount > 0 && count > maxCount) {
                    handleMaxYieldCount(context, session, recorder, outputFF, waterMark, propName, initialValue);
                } else {
                    // If this processor created this flow file (1st processor in flow) then we will yield no matter what the strategy.
                    handleYieldOnActive(context, session, outputFF, createdFlowfile, activeStrategy, waterMark, count, maxCount);
                }
            }
        }
    }
}
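The PropertyValue call worth noting above is the MAX_YIELD_COUNT lookup: isSet() distinguishes an unconfigured property (no value and no default) from a configured one, so asInteger() is only invoked when it cannot return null. A minimal sketch of the same pattern with a hypothetical descriptor (the name and validator below are illustrative, not taken from Kylo):

import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.components.PropertyValue;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.util.StandardValidators;

public class YieldCountExample {

    // Hypothetical optional property with no default value.
    static final PropertyDescriptor MAX_YIELD_COUNT = new PropertyDescriptor.Builder()
        .name("Max Yield Count")
        .required(false)
        .addValidator(StandardValidators.NON_NEGATIVE_INTEGER_VALIDATOR)
        .build();

    int resolveMaxYieldCount(ProcessContext context) {
        PropertyValue value = context.getProperty(MAX_YIELD_COUNT);
        // isSet() is false when the property has neither a configured value nor a default,
        // so asInteger() is only called when it cannot return null.
        return value.isSet() ? value.asInteger() : Integer.MAX_VALUE - 1;
    }
}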
Use of org.apache.nifi.components.PropertyValue in project kylo by Teradata.
From the class GetTableData, the onTrigger method:
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    FlowFile flowFile = null;
    if (context.hasIncomingConnection()) {
        flowFile = session.get();
        // If there is no FlowFile but connections arrive from other processors, we should run only when a FlowFile arrives.
        if (flowFile == null && context.hasNonLoopConnection()) {
            return;
        }
    }
    final FlowFile incoming = flowFile;
    final ComponentLog logger = getLog();
    final DBCPService dbcpService = context.getProperty(JDBC_SERVICE).asControllerService(DBCPService.class);
    final MetadataProviderService metadataService = context.getProperty(METADATA_SERVICE).asControllerService(MetadataProviderService.class);
    final String loadStrategy = context.getProperty(LOAD_STRATEGY).getValue();
    final String categoryName = context.getProperty(FEED_CATEGORY).evaluateAttributeExpressions(incoming).getValue();
    final String feedName = context.getProperty(FEED_NAME).evaluateAttributeExpressions(incoming).getValue();
    final String tableName = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(incoming).getValue();
    final String fieldSpecs = context.getProperty(TABLE_SPECS).evaluateAttributeExpressions(incoming).getValue();
    final String dateField = context.getProperty(DATE_FIELD).evaluateAttributeExpressions(incoming).getValue();
    final Integer queryTimeout = context.getProperty(QUERY_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).intValue();
    final Integer overlapTime = context.getProperty(OVERLAP_TIME).evaluateAttributeExpressions(incoming).asTimePeriod(TimeUnit.SECONDS).intValue();
    final Integer backoffTime = context.getProperty(BACKOFF_PERIOD).asTimePeriod(TimeUnit.SECONDS).intValue();
    final String unitSize = context.getProperty(UNIT_SIZE).getValue();
    final String outputType = context.getProperty(OUTPUT_TYPE).getValue();
    String outputDelimiter = context.getProperty(OUTPUT_DELIMITER).evaluateAttributeExpressions(incoming).getValue();
    final String delimiter = StringUtils.isBlank(outputDelimiter) ? "," : outputDelimiter;
    final PropertyValue waterMarkPropName = context.getProperty(HIGH_WATER_MARK_PROP).evaluateAttributeExpressions(incoming);
    final String[] selectFields = parseFields(fieldSpecs);
    final LoadStrategy strategy = LoadStrategy.valueOf(loadStrategy);
    final StopWatch stopWatch = new StopWatch(true);

    try (final Connection conn = dbcpService.getConnection()) {
        FlowFile outgoing = (incoming == null ? session.create() : incoming);
        final AtomicLong nrOfRows = new AtomicLong(0L);
        final LastFieldVisitor visitor = new LastFieldVisitor(dateField, null);
        final FlowFile current = outgoing;
        outgoing = session.write(outgoing, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream out) throws IOException {
                ResultSet rs = null;
                try {
                    GetTableDataSupport support = new GetTableDataSupport(conn, queryTimeout);
                    if (strategy == LoadStrategy.FULL_LOAD) {
                        rs = support.selectFullLoad(tableName, selectFields);
                    } else if (strategy == LoadStrategy.INCREMENTAL) {
                        String waterMarkValue = getIncrementalWaterMarkValue(current, waterMarkPropName);
                        LocalDateTime waterMarkTime = LocalDateTime.parse(waterMarkValue, DATE_TIME_FORMAT);
                        Date lastLoadDate = toDate(waterMarkTime);
                        visitor.setLastModifyDate(lastLoadDate);
                        rs = support.selectIncremental(tableName, selectFields, dateField, overlapTime, lastLoadDate, backoffTime, GetTableDataSupport.UnitSizes.valueOf(unitSize));
                    } else {
                        throw new RuntimeException("Unsupported loadStrategy [" + loadStrategy + "]");
                    }
                    if (GetTableDataSupport.OutputType.DELIMITED.equals(GetTableDataSupport.OutputType.valueOf(outputType))) {
                        nrOfRows.set(JdbcCommon.convertToDelimitedStream(rs, out, (strategy == LoadStrategy.INCREMENTAL ? visitor : null), delimiter));
                    } else if (GetTableDataSupport.OutputType.AVRO.equals(GetTableDataSupport.OutputType.valueOf(outputType))) {
                        // avroSchema is an instance field, so the schema remains available for the feed-setup attributes below.
                        avroSchema = JdbcCommon.createSchema(rs);
                        nrOfRows.set(JdbcCommon.convertToAvroStream(rs, out, (strategy == LoadStrategy.INCREMENTAL ? visitor : null), avroSchema));
                    } else {
                        throw new RuntimeException("Unsupported output format type [" + outputType + "]");
                    }
                } catch (final SQLException e) {
                    throw new IOException("SQL execution failure", e);
                } finally {
                    if (rs != null) {
                        try {
                            if (rs.getStatement() != null) {
                                rs.getStatement().close();
                            }
                            rs.close();
                        } catch (SQLException e) {
                            getLog().error("Error closing sql statement and resultset");
                        }
                    }
                }
            }
        });

        // Set an attribute recording how many rows were selected.
        outgoing = session.putAttribute(outgoing, RESULT_ROW_COUNT, Long.toString(nrOfRows.get()));
        // Set the output format type and, if available, the Avro schema for feed setup.
        outgoing = session.putAttribute(outgoing, "db.table.output.format", outputType);
        String avroSchemaForFeedSetup = (avroSchema != null) ? JdbcCommon.getAvroSchemaForFeedSetup(avroSchema) : EMPTY_STRING;
        outgoing = session.putAttribute(outgoing, "db.table.avro.schema", avroSchemaForFeedSetup);
        session.getProvenanceReporter().modifyContent(outgoing, "Retrieved " + nrOfRows.get() + " rows", stopWatch.getElapsed(TimeUnit.MILLISECONDS));

        // Terminate the flow file (route to 'nodata') if the query returned no rows.
        Long rowcount = nrOfRows.get();
        outgoing = session.putAttribute(outgoing, ComponentAttributes.NUM_SOURCE_RECORDS.key(), String.valueOf(rowcount));
        if (nrOfRows.get() == 0L) {
            logger.info("{} contains no data; transferring to 'nodata'", new Object[] { outgoing });
            session.transfer(outgoing, REL_NO_DATA);
        } else {
            logger.info("{} contains {} records; transferring to 'success'", new Object[] { outgoing, nrOfRows.get() });
            if (strategy == LoadStrategy.INCREMENTAL) {
                String newWaterMarkStr = format(visitor.getLastModifyDate());
                outgoing = setIncrementalWaterMarkValue(session, outgoing, waterMarkPropName, newWaterMarkStr);
                logger.info("Recorded load status feed {} date {}", new Object[] { feedName, newWaterMarkStr });
            }
            session.transfer(outgoing, REL_SUCCESS);
        }
    } catch (final Exception e) {
        if (incoming == null) {
            logger.error("Unable to execute SQL select from table due to {}. No incoming flow file to route to failure", new Object[] { e });
        } else {
            logger.error("Unable to execute SQL select from table due to {}; routing {} to failure", new Object[] { e, incoming });
            session.transfer(incoming, REL_FAILURE);
        }
    }
}
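Two PropertyValue conveniences carry most of the configuration handling above: evaluateAttributeExpressions(incoming) resolves NiFi Expression Language against the flow file's attributes, and asTimePeriod(TimeUnit) parses duration strings such as "30 sec" into numbers. A self-contained sketch under assumed descriptor names (SOURCE_TABLE and QUERY_TIMEOUT are hypothetical, not Kylo's):

import java.util.concurrent.TimeUnit;
import org.apache.nifi.components.PropertyDescriptor;
import org.apache.nifi.expression.ExpressionLanguageScope;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.util.StandardValidators;

public class PropertyEvaluationExample {

    // NiFi 1.7+ style scope; older code passes a boolean to expressionLanguageSupported(...).
    static final PropertyDescriptor SOURCE_TABLE = new PropertyDescriptor.Builder()
        .name("Source Table")
        .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES)
        .addValidator(StandardValidators.NON_EMPTY_VALIDATOR)
        .build();

    static final PropertyDescriptor QUERY_TIMEOUT = new PropertyDescriptor.Builder()
        .name("Query Timeout")
        .defaultValue("30 sec")
        .addValidator(StandardValidators.TIME_PERIOD_VALIDATOR)
        .build();

    void readProperties(ProcessContext context, FlowFile flowFile) {
        // An expression such as "${table.name}" in the property value is replaced
        // from the flow file's attributes before getValue() returns.
        String tableName = context.getProperty(SOURCE_TABLE).evaluateAttributeExpressions(flowFile).getValue();
        // "30 sec" becomes 30 when requested in seconds.
        int timeoutSeconds = context.getProperty(QUERY_TIMEOUT).asTimePeriod(TimeUnit.SECONDS).intValue();
    }
}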
Use of org.apache.nifi.components.PropertyValue in project nifi by apache.
From the class TestStandardReportingContext, the testGetPropertyReportingTask method:
@Test
public void testGetPropertyReportingTask() throws ReportingTaskInstantiationException {
    ReportingTaskNode reportingTask = controller.createReportingTask(DummyScheduledReportingTask.class.getName(), systemBundle.getBundleDetails().getCoordinate());
    // Descriptors are matched by name; the "nifi" default asserted below is supplied by the
    // corresponding descriptor declared in DummyScheduledReportingTask, not by these local builders.
    PropertyDescriptor TEST_WITHOUT_DEFAULT_VALUE = new PropertyDescriptor.Builder().name("Test without default value").build();
    PropertyDescriptor TEST_WITH_DEFAULT_VALUE = new PropertyDescriptor.Builder().name("Test with default value").build();
    PropertyValue defaultValue = reportingTask.getReportingContext().getProperty(TEST_WITH_DEFAULT_VALUE);
    assertEquals("nifi", defaultValue.getValue());
    PropertyValue value = reportingTask.getReportingContext().getProperty(TEST_WITHOUT_DEFAULT_VALUE);
    assertEquals(null, value.getValue());
}
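Since the locally built descriptor has no default, the "nifi" default must come from the descriptor that DummyScheduledReportingTask itself declares under the same name. A sketch of what such a declaration would look like (assumed for illustration, not copied from the NiFi source):

import org.apache.nifi.components.PropertyDescriptor;

public class DefaultValueExample {
    // A descriptor declared this way yields "nifi" from getValue() when the property is unconfigured.
    static final PropertyDescriptor TEST_WITH_DEFAULT_VALUE = new PropertyDescriptor.Builder()
        .name("Test with default value")
        .defaultValue("nifi")
        .build();
}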
Use of org.apache.nifi.components.PropertyValue in project nifi by apache.
From the class TestWriteAheadLocalStateProvider, the setup method:
@Before
public void setup() throws IOException {
    provider = new WriteAheadLocalStateProvider();
    final Map<PropertyDescriptor, PropertyValue> properties = new HashMap<>();
    properties.put(WriteAheadLocalStateProvider.PATH, new StandardPropertyValue("target/local-state-provider/" + UUID.randomUUID().toString(), null));
    properties.put(WriteAheadLocalStateProvider.ALWAYS_SYNC, new StandardPropertyValue("false", null));
    properties.put(WriteAheadLocalStateProvider.CHECKPOINT_INTERVAL, new StandardPropertyValue("2 mins", null));
    properties.put(WriteAheadLocalStateProvider.NUM_PARTITIONS, new StandardPropertyValue("16", null));
    provider.initialize(new StateProviderInitializationContext() {

        @Override
        public String getIdentifier() {
            return "Unit Test Provider Initialization Context";
        }

        @Override
        public Map<PropertyDescriptor, PropertyValue> getProperties() {
            return Collections.unmodifiableMap(properties);
        }

        @Override
        public Map<String, String> getAllProperties() {
            final Map<String, String> propValueMap = new LinkedHashMap<>();
            for (final Map.Entry<PropertyDescriptor, PropertyValue> entry : getProperties().entrySet()) {
                propValueMap.put(entry.getKey().getName(), entry.getValue().getValue());
            }
            return propValueMap;
        }

        @Override
        public PropertyValue getProperty(final PropertyDescriptor property) {
            final PropertyValue prop = properties.get(property);
            if (prop == null) {
                return new StandardPropertyValue(null, null);
            }
            return prop;
        }

        @Override
        public SSLContext getSSLContext() {
            return null;
        }

        @Override
        public ComponentLog getLogger() {
            return null;
        }
    });
}
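StandardPropertyValue here simply wraps a raw string; the second constructor argument is the ControllerServiceLookup, which tests commonly pass as null. The typed accessors then parse the raw value on demand, as this small standalone sketch shows:

import java.util.concurrent.TimeUnit;
import org.apache.nifi.attribute.expression.language.StandardPropertyValue;
import org.apache.nifi.components.PropertyValue;

public class StandardPropertyValueExample {
    public static void main(String[] args) {
        // The second argument is the ControllerServiceLookup; null is fine when no
        // controller-service references need resolving.
        PropertyValue syncValue = new StandardPropertyValue("false", null);
        PropertyValue interval = new StandardPropertyValue("2 mins", null);
        // Typed accessors parse the raw string on demand.
        boolean alwaysSync = syncValue.asBoolean();                           // false
        long intervalMillis = interval.asTimePeriod(TimeUnit.MILLISECONDS);  // 120000
        System.out.println(alwaysSync + " / " + intervalMillis);
    }
}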
Use of org.apache.nifi.components.PropertyValue in project nifi by apache.
From the class TestStandardPropertyValue, the testSubstituteAttributesWithNoMatch method:
@Test
public void testSubstituteAttributesWithNoMatch() {
    final PropertyValue value = new StandardPropertyValue("Hello, ${audience}${comma}${question:replaceNull('')}!", lookup);
    final Map<String, String> attributes = new HashMap<>();
    assertEquals("Hello, !", value.evaluateAttributeExpressions(createFlowFile(attributes)).getValue());
}
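For contrast with the no-match case, here is a companion sketch showing the same expression when the attributes are supplied (lookup and createFlowFile are the test class's own helpers, assumed to behave as above):

@Test
public void testSubstituteAttributesWithMatches() {
    final PropertyValue value = new StandardPropertyValue("Hello, ${audience}${comma}${question:replaceNull('')}!", lookup);
    final Map<String, String> attributes = new HashMap<>();
    attributes.put("audience", "World");
    attributes.put("comma", ",");
    attributes.put("question", " how are you?");
    // Each ${...} expression is substituted from the flow file's attributes.
    assertEquals("Hello, World, how are you?!", value.evaluateAttributeExpressions(createFlowFile(attributes)).getValue());
}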