Use of org.apache.nifi.processor.exception.ProcessException in project kylo by Teradata.
The class MergeHiveTableMetadata, method onTrigger.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    try {
        // Each property names the JSON field to read from the incoming records
        final String databaseNameField = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String databaseOwnerField = context.getProperty(DATABASE_OWNER).evaluateAttributeExpressions(flowFile).getValue();
        final String tableCreateTimeField = context.getProperty(TABLE_CREATE_TIME).evaluateAttributeExpressions(flowFile).getValue();
        final String tableNameField = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String tableTypeField = context.getProperty(TABLE_TYPE).evaluateAttributeExpressions(flowFile).getValue();
        final String columnNameField = context.getProperty(COLUMN_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String columnTypeField = context.getProperty(COLUMN_TYPE).evaluateAttributeExpressions(flowFile).getValue();
        final String columnCommentField = context.getProperty(COLUMN_COMMENT).evaluateAttributeExpressions(flowFile).getValue();

        // Buffer the incoming JSON so it can be parsed after the read callback completes
        final StringBuffer sb = new StringBuffer();
        session.read(flowFile, new InputStreamCallback() {
            @Override
            public void process(InputStream in) throws IOException {
                sb.append(IOUtils.toString(in, Charset.defaultCharset()));
            }
        });
        logger.debug("The JSON that was received is: " + sb);

        flowFile = session.write(flowFile, new OutputStreamCallback() {
            @Override
            public void process(final OutputStream out) throws IOException {
                try {
                    JSONArray array = new JSONArray(sb.toString());
                    // Group the flat rows by database + table, accumulating one HiveColumn per row
                    Map<String, Metadata> tables = new HashMap<>();
                    for (int i = 0; i < array.length(); i++) {
                        JSONObject jsonObj = array.getJSONObject(i);
                        String databaseName = jsonObj.getString(databaseNameField);
                        String databaseOwner = jsonObj.getString(databaseOwnerField);
                        String tableName = jsonObj.getString(tableNameField);
                        String tableCreateTime = jsonObj.getString(tableCreateTimeField);
                        String tableType = jsonObj.getString(tableTypeField);
                        String columnName = jsonObj.getString(columnNameField);
                        String columnType = jsonObj.getString(columnTypeField);
                        String columnComment = jsonObj.getString(columnCommentField);
                        String key = databaseName + tableName;

                        HiveColumn column = new HiveColumn();
                        column.setColumnName(columnName);
                        column.setColumnType(columnType);
                        column.setColumnComment(columnComment);

                        if (tables.containsKey(key)) {
                            tables.get(key).getHiveColumns().add(column);
                        } else {
                            Metadata meta = new Metadata();
                            meta.setDatabaseName(databaseName);
                            meta.setDatabaseOwner(databaseOwner);
                            meta.setTableCreateTime(tableCreateTime);
                            meta.setTableName(tableName);
                            meta.setTableType(tableType);
                            meta.getHiveColumns().add(column);
                            tables.put(key, meta);
                        }
                    }

                    // Serialize the grouped tables back out as a JSON array
                    List<Metadata> tablesAsList = new ArrayList<>(tables.values());
                    Gson gson = new Gson();
                    JsonElement element = gson.toJsonTree(tablesAsList, new TypeToken<List<Metadata>>() {
                    }.getType());
                    JsonArray jsonArray = element.getAsJsonArray();
                    out.write(jsonArray.toString().getBytes());
                } catch (final Exception e) {
                    throw new ProcessException(e);
                }
            }
        });
        logger.info("Completed merge of Hive table metadata for {}", new Object[] { flowFile });
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final Exception e) {
        logger.error("Unable to execute merge hive json job for {}", new Object[] { flowFile }, e);
        session.transfer(flowFile, REL_FAILURE);
    }
}
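To see the merge behavior in isolation, the processor can be exercised with NiFi's mock framework. The sketch below is illustrative only: the sample rows and JSON field names are invented, and it assumes the processor's PropertyDescriptor and Relationship constants (DATABASE_NAME, REL_SUCCESS, and so on) are accessible from the test.

    import java.nio.charset.StandardCharsets;

    import org.apache.nifi.util.TestRunner;
    import org.apache.nifi.util.TestRunners;
    import org.junit.Test;

    public class MergeHiveTableMetadataSketchTest {

        @Test
        public void mergesRowsOfTheSameTableIntoOneEntry() throws Exception {
            final TestRunner runner = TestRunners.newTestRunner(new MergeHiveTableMetadata());
            // Each property names the JSON field to read; these field names are made up.
            runner.setProperty(MergeHiveTableMetadata.DATABASE_NAME, "databaseName");
            runner.setProperty(MergeHiveTableMetadata.DATABASE_OWNER, "owner");
            runner.setProperty(MergeHiveTableMetadata.TABLE_CREATE_TIME, "createTime");
            runner.setProperty(MergeHiveTableMetadata.TABLE_NAME, "tableName");
            runner.setProperty(MergeHiveTableMetadata.TABLE_TYPE, "tableType");
            runner.setProperty(MergeHiveTableMetadata.COLUMN_NAME, "columnName");
            runner.setProperty(MergeHiveTableMetadata.COLUMN_TYPE, "columnType");
            runner.setProperty(MergeHiveTableMetadata.COLUMN_COMMENT, "columnComment");

            // Two rows describe the same database/table but different columns,
            // so the output should collapse to a single table with two HiveColumns.
            final String input = "["
                + "{\"databaseName\":\"web\",\"owner\":\"etl\",\"createTime\":\"0\",\"tableName\":\"clicks\","
                + "\"tableType\":\"MANAGED_TABLE\",\"columnName\":\"id\",\"columnType\":\"bigint\",\"columnComment\":\"\"},"
                + "{\"databaseName\":\"web\",\"owner\":\"etl\",\"createTime\":\"0\",\"tableName\":\"clicks\","
                + "\"tableType\":\"MANAGED_TABLE\",\"columnName\":\"ts\",\"columnType\":\"string\",\"columnComment\":\"\"}"
                + "]";
            runner.enqueue(input.getBytes(StandardCharsets.UTF_8));

            runner.run();

            runner.assertAllFlowFilesTransferred(MergeHiveTableMetadata.REL_SUCCESS, 1);
        }
    }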
Use of org.apache.nifi.processor.exception.ProcessException in project kylo by Teradata.
The class DistCopyHDFS, method onTrigger.
/**
 * onTrigger is called when a FlowFile passes through the processor.
 *
 * @param context passed in by the framework; provides access to the data configured in the processor
 * @param session passed in by the framework; provides access to the FlowFile
 * @throws ProcessException if any framework action fails
 */
@Override
public void onTrigger(@Nonnull final ProcessContext context, @Nonnull final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // Verify that HDFS is reachable before doing any work
    final FileSystem fs = getFileSystem(context);
    if (fs == null) {
        getLog().error("Couldn't initialize HDFS");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    String filesJSON = context.getProperty(FILES).evaluateAttributeExpressions(flowFile).getValue();
    String source = context.getProperty(SOURCE).evaluateAttributeExpressions(flowFile).getValue();
    String destination = context.getProperty(DESTINATION).evaluateAttributeExpressions(flowFile).getValue();
    Gson jsonParser = new Gson();
    File[] filesList;
    ArrayList<Path> pathsList = new ArrayList<>();
    try {
        if (filesJSON != null && !filesJSON.isEmpty()) {
            filesList = jsonParser.fromJson(filesJSON, File[].class);
            if (filesList == null) {
                filesList = new File[0];
            }
            if (source != null && !source.isEmpty()) {
                // Resolve each file name relative to the source directory
                for (File f : filesList) {
                    pathsList.add(new Path(source, f.getName()));
                }
            } else {
                for (File f : filesList) {
                    pathsList.add(new Path(f.getName()));
                }
            }
        } else {
            if (source == null || source.isEmpty()) {
                getLog().error(String.format("At least one of the attributes %s or %s must be set", SOURCE.getName(), FILES.getName()));
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
            // No explicit file list: copy the whole source path
            pathsList.add(new Path(source));
        }
        DistCp distCp = getDistCp(pathsList, new Path(destination));
        Job job = distCp.execute();
        // Block until the DistCp job finishes (no verbose progress logging)
        job.waitForCompletion(false);
    } catch (JsonSyntaxException e) {
        getLog().error("Files list attribute does not contain a proper JSON array");
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (Exception e) {
        getLog().error("Exception during processor execution: " + e.getMessage(), e);
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    session.transfer(flowFile, REL_SUCCESS);
}
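The shape the FILES attribute must have can be read off the Gson call above: a JSON array of objects deserialized into the processor's File helper class, of which only getName() is used. Below is a minimal, assumption-laden sketch; the nested File POJO is a stand-in for that helper, and the paths are invented.

    import java.util.ArrayList;
    import java.util.List;

    import com.google.gson.Gson;
    import org.apache.hadoop.fs.Path;

    public class FilesAttributeSketch {

        // Stand-in for the processor's File helper class; only the name field matters here.
        static class File {
            private String name;
            String getName() {
                return name;
            }
        }

        public static void main(String[] args) {
            // Same shape the processor expects in its FILES attribute
            String filesJSON = "[{\"name\":\"a.csv\"},{\"name\":\"b.csv\"}]";
            File[] filesList = new Gson().fromJson(filesJSON, File[].class);

            // Mirrors the SOURCE branch above: each name is resolved against the source dir
            List<Path> pathsList = new ArrayList<>();
            for (File f : filesList) {
                pathsList.add(new Path("/landing/incoming", f.getName()));
            }
            System.out.println(pathsList); // [/landing/incoming/a.csv, /landing/incoming/b.csv]
        }
    }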
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class TestStandardProcessSession, method testCommitFailureRequeuesFlowFiles.
@Test
public void testCommitFailureRequeuesFlowFiles() {
    final FlowFileRecord flowFileRecord = new StandardFlowFileRecord.Builder()
        .addAttribute("uuid", "12345678-1234-1234-1234-123456789012")
        .entryDate(System.currentTimeMillis())
        .contentClaim(new StandardContentClaim(resourceClaimManager.newResourceClaim("x", "x", "0", true, false), 0L))
        .contentClaimOffset(0L)
        .size(0L)
        .build();
    flowFileQueue.put(flowFileRecord);

    final FlowFile originalFlowFile = session.get();
    assertTrue(flowFileQueue.isActiveQueueEmpty());
    assertEquals(1, flowFileQueue.getUnacknowledgedQueueSize().getObjectCount());

    final FlowFile modified = session.write(originalFlowFile, new OutputStreamCallback() {
        @Override
        public void process(OutputStream out) throws IOException {
            out.write("Hello".getBytes());
        }
    });
    session.transfer(modified);

    // instruct the FlowFile repo to throw IOException on update
    flowFileRepo.setFailOnUpdate(true);
    try {
        session.commit();
        Assert.fail("Session commit completed, even though FlowFile Repo threw IOException");
    } catch (final ProcessException pe) {
        // expected behavior because the FlowFile Repo will throw IOException
    }

    // the failed commit must return the FlowFile to the active queue
    assertFalse(flowFileQueue.isActiveQueueEmpty());
    assertEquals(1, flowFileQueue.size().getObjectCount());
    assertEquals(0, flowFileQueue.getUnacknowledgedQueueSize().getObjectCount());
}
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class TestStandardProcessSession, method testAppendToChildThrowsIOExceptionThenRemove.
@Test
public void testAppendToChildThrowsIOExceptionThenRemove() throws IOException {
    final FlowFileRecord flowFileRecord = new StandardFlowFileRecord.Builder()
        .id(1000L)
        .addAttribute("uuid", "12345678-1234-1234-1234-123456789012")
        .entryDate(System.currentTimeMillis())
        .build();
    flowFileQueue.put(flowFileRecord);

    FlowFile original = session.get();
    assertNotNull(original);
    FlowFile child = session.create(original);
    child = session.append(child, out -> out.write("hello".getBytes()));

    // Force an IOException. This will decrement our claim count for the resource claim.
    try {
        child = session.append(child, out -> {
            throw new IOException();
        });
        Assert.fail("append() callback threw IOException but it was not wrapped in ProcessException");
    } catch (final ProcessException pe) {
        // expected
    }

    session.remove(child);
    session.transfer(original);
    session.commit();

    final int numClaims = contentRepo.getExistingClaims().size();
    assertEquals(0, numClaims);
}
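Both tests document the same contract: an IOException thrown from a read/write/append callback reaches the processor wrapped in an unchecked ProcessException. The sketch below shows how a processor typically reacts to that; it is an illustration, not code from either project, and it assumes the processor defines its own REL_SUCCESS and REL_FAILURE relationships (the trailer content is invented).

    // Inside a processor's onTrigger()
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    try {
        // Any IOException thrown inside the callback surfaces here as ProcessException
        flowFile = session.append(flowFile, out -> out.write("trailer\n".getBytes(StandardCharsets.UTF_8)));
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final ProcessException pe) {
        // flowFile still refers to the pre-append FlowFile, so it can be routed to failure
        session.transfer(flowFile, REL_FAILURE);
    }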
Use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.
The class StandardRootGroupPort, method transferFlowFiles.
@Override
public int transferFlowFiles(final Peer peer, final ServerProtocol serverProtocol) throws NotAuthorizedException, BadRequestException, RequestExpiredException {
    if (getConnectableType() != ConnectableType.OUTPUT_PORT) {
        throw new IllegalStateException("Cannot send FlowFiles because this port is not an Output Port");
    }
    if (!this.isRunning()) {
        throw new IllegalStateException("Port not running");
    }
    try {
        final FlowFileRequest request = new FlowFileRequest(peer, serverProtocol);
        if (!this.requestQueue.offer(request)) {
            throw new RequestExpiredException();
        }
        // Trigger this port to run
        scheduler.registerEvent(this);

        // Wait until the request starts being serviced, but don't wait forever:
        // if the port is stopped, time out before the request expires.
        while (!request.isBeingServiced()) {
            if (request.isExpired()) {
                // Remove the expired request, so that it won't block new requests from being offered.
                this.requestQueue.remove(request);
                throw new SocketTimeoutException("Read timed out");
            } else {
                try {
                    Thread.sleep(100L);
                } catch (final InterruptedException e) {
                    // ignored; the loop re-checks the service/expiry status on each pass
                }
            }
        }

        // we've started to service the request. Now just wait until it's finished
        final ProcessingResult result = request.getResponseQueue().take();
        final Exception problem = result.getProblem();
        if (problem == null) {
            return result.getFileCount();
        } else {
            throw problem;
        }
    } catch (final NotAuthorizedException | BadRequestException | RequestExpiredException e) {
        throw e;
    } catch (final ProtocolException e) {
        throw new BadRequestException(e);
    } catch (final Exception e) {
        throw new ProcessException(e);
    }
}
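Because ProcessException is unchecked, the final catch clause lets transferFlowFiles keep a throws list limited to the three declared protocol exceptions; any other failure still propagates, just outside the checked-exception system. A hypothetical caller therefore only has to handle the declared types. The sketch below is an illustration, not NiFi code; handleTransfer and its error handling are invented.

    // Hypothetical caller, for illustration only. Only the three declared checked
    // exceptions must be handled here; a ProcessException is unchecked and would
    // propagate up to the framework.
    int handleTransfer(final RootGroupPort port, final Peer peer, final ServerProtocol protocol) {
        try {
            return port.transferFlowFiles(peer, protocol);
        } catch (final NotAuthorizedException | BadRequestException | RequestExpiredException e) {
            // map protocol-level failures to "no files transferred"
            return 0;
        }
    }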