
Example 11 with ProcessException

use of org.apache.nifi.processor.exception.ProcessException in project kylo by Teradata.

the class MergeHiveTableMetadata method onTrigger.

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
    final ComponentLog logger = getLog();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    try {
        final String databaseNameField = context.getProperty(DATABASE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String databaseOwnerField = context.getProperty(DATABASE_OWNER).evaluateAttributeExpressions(flowFile).getValue();
        final String tableCreateTimeField = context.getProperty(TABLE_CREATE_TIME).evaluateAttributeExpressions(flowFile).getValue();
        final String tableNameField = context.getProperty(TABLE_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String tableTypeField = context.getProperty(TABLE_TYPE).evaluateAttributeExpressions(flowFile).getValue();
        final String columnNameField = context.getProperty(COLUMN_NAME).evaluateAttributeExpressions(flowFile).getValue();
        final String columnTypeField = context.getProperty(COLUMN_TYPE).evaluateAttributeExpressions(flowFile).getValue();
        final String columnCommentField = context.getProperty(COLUMN_COMMENT).evaluateAttributeExpressions(flowFile).getValue();
        final StringBuffer sb = new StringBuffer();
        session.read(flowFile, new InputStreamCallback() {

            @Override
            public void process(InputStream in) throws IOException {
                sb.append(IOUtils.toString(in, Charset.defaultCharset()));
            }
        });
        logger.debug("The json that was received is: " + sb.toString());
        flowFile = session.write(flowFile, new OutputStreamCallback() {

            @Override
            public void process(final OutputStream out) throws IOException {
                try {
                    JSONArray array = new JSONArray(sb.toString());
                    Map<String, Metadata> tables = new HashMap<>();
                    for (int i = 0; i < array.length(); i++) {
                        JSONObject jsonObj = array.getJSONObject(i);
                        String databaseName = jsonObj.getString(databaseNameField);
                        String databaseOwner = jsonObj.getString(databaseOwnerField);
                        String tableName = jsonObj.getString(tableNameField);
                        String tableCreateTime = jsonObj.getString(tableCreateTimeField);
                        String tableType = jsonObj.getString(tableTypeField);
                        String columnName = jsonObj.getString(columnNameField);
                        String columnType = jsonObj.getString(columnTypeField);
                        String columnComment = jsonObj.getString(columnCommentField);
                        String key = databaseName + "." + tableName; // separator avoids key collisions such as "db1"+"table" vs "db"+"1table"
                        if (tables.containsKey(key)) {
                            Metadata meta = tables.get(key);
                            HiveColumn column = new HiveColumn();
                            column.setColumnName(columnName);
                            column.setColumnType(columnType);
                            column.setColumnComment(columnComment);
                            meta.getHiveColumns().add(column);
                        } else {
                            Metadata meta = new Metadata();
                            meta.setDatabaseName(databaseName);
                            meta.setDatabaseOwner(databaseOwner);
                            meta.setTableCreateTime(tableCreateTime);
                            meta.setTableName(tableName);
                            meta.setTableType(tableType);
                            HiveColumn column = new HiveColumn();
                            column.setColumnName(columnName);
                            column.setColumnType(columnType);
                            column.setColumnComment(columnComment);
                            meta.getHiveColumns().add(column);
                            tables.put(key, meta);
                        }
                    }
                    // Collect the merged metadata records; Map.values() replaces the raw-typed iterator.
                    List<Metadata> tablesAsList = new ArrayList<>(tables.values());
                    Gson gson = new Gson();
                    JsonElement element = gson.toJsonTree(tablesAsList, new TypeToken<List<Metadata>>() {
                    }.getType());
                    JsonArray jsonArray = element.getAsJsonArray();
                    out.write(jsonArray.toString().getBytes());
                } catch (final Exception e) {
                    throw new ProcessException(e);
                }
            }
        });
        logger.info("*** Completed merge of Hive table metadata");
        session.transfer(flowFile, REL_SUCCESS);
    } catch (final Exception e) {
        logger.error("Unable to execute merge hive json job for {}", new Object[] { flowFile }, e);
        session.transfer(flowFile, REL_FAILURE);
    }
}
Also used : HashMap(java.util.HashMap) OutputStream(java.io.OutputStream) ArrayList(java.util.ArrayList) Gson(com.google.gson.Gson) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) FlowFile(org.apache.nifi.flowfile.FlowFile) InputStream(java.io.InputStream) JSONArray(org.codehaus.jettison.json.JSONArray) IOException(java.io.IOException) ComponentLog(org.apache.nifi.logging.ComponentLog) ProcessException(org.apache.nifi.processor.exception.ProcessException) JsonArray(com.google.gson.JsonArray) JSONObject(org.codehaus.jettison.json.JSONObject) JsonElement(com.google.gson.JsonElement) TypeToken(com.google.gson.reflect.TypeToken) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) List(java.util.List) Map(java.util.Map)
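
To make the shape of this example easier to reuse, here is a minimal, hypothetical processor that follows the same read-transform-write pattern: read the FlowFile content into memory, transform it, write it back, and route to failure when anything goes wrong. This is only a sketch of the pattern, not the Kylo class; the name TransformJson and the transform(...) helper are illustrative stand-ins.

import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.apache.nifi.flowfile.FlowFile;
import org.apache.nifi.processor.AbstractProcessor;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.Relationship;
import org.apache.nifi.processor.exception.ProcessException;

public class TransformJson extends AbstractProcessor {

    static final Relationship REL_SUCCESS = new Relationship.Builder().name("success").build();
    static final Relationship REL_FAILURE = new Relationship.Builder().name("failure").build();

    @Override
    public Set<Relationship> getRelationships() {
        final Set<Relationship> rels = new HashSet<>();
        rels.add(REL_SUCCESS);
        rels.add(REL_FAILURE);
        return Collections.unmodifiableSet(rels);
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        FlowFile flowFile = session.get();
        if (flowFile == null) {
            return;
        }
        try {
            // Read the entire content into memory; acceptable for small JSON payloads.
            final StringBuilder content = new StringBuilder();
            session.read(flowFile, in -> content.append(IOUtils.toString(in, StandardCharsets.UTF_8)));

            final String merged = transform(content.toString()); // placeholder transformation

            flowFile = session.write(flowFile, out -> out.write(merged.getBytes(StandardCharsets.UTF_8)));
            session.transfer(flowFile, REL_SUCCESS);
        } catch (final Exception e) {
            // getLogger() is the stock AbstractProcessor accessor (Kylo's base class exposes getLog() instead).
            getLogger().error("Failed to transform {}", new Object[] { flowFile }, e);
            session.transfer(flowFile, REL_FAILURE);
        }
    }

    private String transform(final String json) {
        return json; // identity transform as a stand-in
    }
}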

Example 12 with ProcessException

use of org.apache.nifi.processor.exception.ProcessException in project kylo by Teradata.

the class DistCopyHDFS method onTrigger.

/**
 * onTrigger is called when the flow file proceeds through the processor
 *
 * @param context passed in by the framework and provides access to the data configured in the processor
 * @param session passed in by the framework and provides access to the flow file
 * @throws ProcessException if any framework actions fail
 */
@Override
public void onTrigger(@Nonnull final ProcessContext context, @Nonnull final ProcessSession session) throws ProcessException {
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    final FileSystem fs = getFileSystem(context);
    if (fs == null) {
        getLog().error("Couldn't initialize HDFS");
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    String filesJSON = context.getProperty(FILES).evaluateAttributeExpressions(flowFile).getValue();
    String source = context.getProperty(SOURCE).evaluateAttributeExpressions(flowFile).getValue();
    String destination = context.getProperty(DESTINATION).evaluateAttributeExpressions(flowFile).getValue();
    Gson jsonParser = new Gson();
    File[] filesList;
    ArrayList<Path> pathsList = new ArrayList<>();
    try {
        if (filesJSON != null && !filesJSON.isEmpty()) {
            filesList = jsonParser.fromJson(filesJSON, File[].class);
            if (filesList == null) {
                filesList = new File[0];
            }
            if (source != null && !source.isEmpty()) {
                for (File f : filesList) {
                    pathsList.add(new Path(source, f.getName()));
                }
            } else {
                for (File f : filesList) {
                    pathsList.add(new Path(f.getName()));
                }
            }
        } else {
            if (source == null || source.isEmpty()) {
                getLog().error(String.format("At least one of attributes: %s or %s needs to be set", SOURCE.getName(), FILES.getName()));
                session.transfer(flowFile, REL_FAILURE);
                return;
            }
            pathsList.add(new Path(source));
        }
        DistCp distCp = getDistCp(pathsList, new Path(destination));
        Job job = distCp.execute();
        // Wait for the job and check its result; otherwise a failed copy would still route to success.
        if (!job.waitForCompletion(false)) {
            getLog().error("DistCp job did not complete successfully");
            session.transfer(flowFile, REL_FAILURE);
            return;
        }
    } catch (JsonSyntaxException e) {
        getLog().error("Files list attribute does not contain a proper JSON array");
        session.transfer(flowFile, REL_FAILURE);
        return;
    } catch (Exception e) {
        getLog().error("Exception during processor execution: " + e.getMessage());
        session.transfer(flowFile, REL_FAILURE);
        return;
    }
    session.transfer(flowFile, REL_SUCCESS);
}
Also used : Path(org.apache.hadoop.fs.Path) FlowFile(org.apache.nifi.flowfile.FlowFile) ArrayList(java.util.ArrayList) Gson(com.google.gson.Gson) JsonSyntaxException(com.google.gson.JsonSyntaxException) ProcessException(org.apache.nifi.processor.exception.ProcessException) DistCp(org.apache.hadoop.tools.DistCp) FileSystem(org.apache.hadoop.fs.FileSystem) Job(org.apache.hadoop.mapreduce.Job)
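
The method relies on a getDistCp(...) helper that is not shown. Below is a plausible sketch under the Hadoop 2.x DistCp API; getConfiguration() is assumed to return the processor's Hadoop Configuration, and the real Kylo helper may well differ.

import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.DistCp;
import org.apache.hadoop.tools.DistCpOptions;
import org.apache.nifi.processor.exception.ProcessException;

protected DistCp getDistCp(final List<Path> pathsList, final Path destination) {
    try {
        // Assumed accessor for the Hadoop Configuration held by the processor.
        final Configuration conf = getConfiguration();
        // Hadoop 2.x constructor: DistCpOptions(List<Path> sources, Path target).
        final DistCpOptions options = new DistCpOptions(pathsList, destination);
        return new DistCp(conf, options);
    } catch (final Exception e) {
        // DistCp's constructor declares a checked Exception; wrap it the NiFi way.
        throw new ProcessException("Unable to create DistCp job", e);
    }
}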

Example 13 with ProcessException

use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

the class TestStandardProcessSession method testCommitFailureRequeuesFlowFiles.

@Test
public void testCommitFailureRequeuesFlowFiles() {
    final FlowFileRecord flowFileRecord = new StandardFlowFileRecord.Builder()
        .addAttribute("uuid", "12345678-1234-1234-1234-123456789012")
        .entryDate(System.currentTimeMillis())
        .contentClaim(new StandardContentClaim(resourceClaimManager.newResourceClaim("x", "x", "0", true, false), 0L))
        .contentClaimOffset(0L)
        .size(0L)
        .build();
    flowFileQueue.put(flowFileRecord);
    final FlowFile originalFlowFile = session.get();
    assertTrue(flowFileQueue.isActiveQueueEmpty());
    assertEquals(1, flowFileQueue.getUnacknowledgedQueueSize().getObjectCount());
    final FlowFile modified = session.write(originalFlowFile, new OutputStreamCallback() {

        @Override
        public void process(OutputStream out) throws IOException {
            out.write("Hello".getBytes());
        }
    });
    session.transfer(modified);
    // instruct flowfile repo to throw IOException on update
    flowFileRepo.setFailOnUpdate(true);
    try {
        session.commit();
        Assert.fail("Session commit completed, even though FlowFile Repo threw IOException");
    } catch (final ProcessException pe) {
        // expected behavior because FlowFile Repo will throw IOException
    }
    assertFalse(flowFileQueue.isActiveQueueEmpty());
    assertEquals(1, flowFileQueue.size().getObjectCount());
    assertEquals(0, flowFileQueue.getUnacknowledgedQueueSize().getObjectCount());
}
Also used : FlowFile(org.apache.nifi.flowfile.FlowFile) MockFlowFile(org.apache.nifi.util.MockFlowFile) StandardContentClaim(org.apache.nifi.controller.repository.claim.StandardContentClaim) ProcessException(org.apache.nifi.processor.exception.ProcessException) FilterOutputStream(java.io.FilterOutputStream) ByteArrayOutputStream(java.io.ByteArrayOutputStream) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) IOException(java.io.IOException) Test(org.junit.Test)
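
The behavior this test verifies, that a failed commit leaves FlowFiles in a recoverable state, is what allows processors that manage their own sessions to requeue work on failure. A minimal sketch of that pattern follows; doWork(...) is a hypothetical stand-in for the real processing.

import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processor.ProcessSessionFactory;
import org.apache.nifi.processor.exception.ProcessException;

@Override
public void onTrigger(final ProcessContext context, final ProcessSessionFactory sessionFactory) throws ProcessException {
    final ProcessSession session = sessionFactory.createSession();
    try {
        doWork(session); // hypothetical: get, transform, and transfer FlowFiles
        session.commit();
    } catch (final ProcessException pe) {
        // Roll back so any FlowFiles taken from the queue are requeued (and penalized).
        session.rollback(true);
        throw pe;
    }
}

private void doWork(final ProcessSession session) {
    // hypothetical processing body
}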

Example 14 with ProcessException

use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

the class TestStandardProcessSession method testAppendToChildThrowsIOExceptionThenRemove.

@Test
public void testAppendToChildThrowsIOExceptionThenRemove() throws IOException {
    final FlowFileRecord flowFileRecord = new StandardFlowFileRecord.Builder()
        .id(1000L)
        .addAttribute("uuid", "12345678-1234-1234-1234-123456789012")
        .entryDate(System.currentTimeMillis())
        .build();
    flowFileQueue.put(flowFileRecord);
    FlowFile original = session.get();
    assertNotNull(original);
    FlowFile child = session.create(original);
    child = session.append(child, out -> out.write("hello".getBytes()));
    // Force an IOException. This will decrement our claim count for the resource claim.
    try {
        child = session.append(child, out -> {
            throw new IOException();
        });
        Assert.fail("append() callback threw IOException but it was not wrapped in ProcessException");
    } catch (final ProcessException pe) {
        // expected
    }
    session.remove(child);
    session.transfer(original);
    session.commit();
    final int numClaims = contentRepo.getExistingClaims().size();
    assertEquals(0, numClaims);
}
Also used : OutputStreamCallback(org.apache.nifi.processor.io.OutputStreamCallback) Arrays(java.util.Arrays) FlowFileFilter(org.apache.nifi.processor.FlowFileFilter) ProcessGroup(org.apache.nifi.groups.ProcessGroup) ConnectableType(org.apache.nifi.connectable.ConnectableType) Mockito.doThrow(org.mockito.Mockito.doThrow) ByteArrayInputStream(java.io.ByteArrayInputStream) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) StandardContentClaim(org.apache.nifi.controller.repository.claim.StandardContentClaim) FlowFileFilterResult(org.apache.nifi.processor.FlowFileFilter.FlowFileFilterResult) Map(java.util.Map) After(org.junit.After) MockProvenanceRepository(org.apache.nifi.provenance.MockProvenanceRepository) Connectable(org.apache.nifi.connectable.Connectable) Connection(org.apache.nifi.connectable.Connection) Path(java.nio.file.Path) InputStreamCallback(org.apache.nifi.processor.io.InputStreamCallback) FlowFileAccessException(org.apache.nifi.processor.exception.FlowFileAccessException) FlowFile(org.apache.nifi.flowfile.FlowFile) Collection(java.util.Collection) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) Set(java.util.Set) FilterOutputStream(java.io.FilterOutputStream) MissingFlowFileException(org.apache.nifi.processor.exception.MissingFlowFileException) FileNotFoundException(java.io.FileNotFoundException) Matchers.any(org.mockito.Matchers.any) List(java.util.List) Assert.assertFalse(org.junit.Assert.assertFalse) Pattern(java.util.regex.Pattern) StreamCallback(org.apache.nifi.processor.io.StreamCallback) MockFlowFile(org.apache.nifi.util.MockFlowFile) ContentClaim(org.apache.nifi.controller.repository.claim.ContentClaim) ByteArrayOutputStream(java.io.ByteArrayOutputStream) ProcessScheduler(org.apache.nifi.controller.ProcessScheduler) HashMap(java.util.HashMap) ProvenanceEventRepository(org.apache.nifi.provenance.ProvenanceEventRepository) AtomicReference(java.util.concurrent.atomic.AtomicReference) ProcessException(org.apache.nifi.processor.exception.ProcessException) ArrayList(java.util.ArrayList) ConcurrentMap(java.util.concurrent.ConcurrentMap) HashSet(java.util.HashSet) StandardFlowFileQueue(org.apache.nifi.controller.StandardFlowFileQueue) Answer(org.mockito.stubbing.Answer) InvocationOnMock(org.mockito.invocation.InvocationOnMock) Relationship(org.apache.nifi.processor.Relationship) ResourceClaim(org.apache.nifi.controller.repository.claim.ResourceClaim) ProvenanceEventRecord(org.apache.nifi.provenance.ProvenanceEventRecord) Assert.assertArrayEquals(org.junit.Assert.assertArrayEquals) Before(org.junit.Before) OutputStream(java.io.OutputStream) StandardResourceClaimManager(org.apache.nifi.controller.repository.claim.StandardResourceClaimManager) Files(java.nio.file.Files) Assert.assertNotNull(org.junit.Assert.assertNotNull) ProvenanceEventType(org.apache.nifi.provenance.ProvenanceEventType) Assert.assertTrue(org.junit.Assert.assertTrue) Matchers.notNull(org.mockito.Matchers.notNull) FileOutputStream(java.io.FileOutputStream) Mockito.times(org.mockito.Mockito.times) IOException(java.io.IOException) Test(org.junit.Test) Mockito.when(org.mockito.Mockito.when) FileInputStream(java.io.FileInputStream) File(java.io.File) Mockito.verify(org.mockito.Mockito.verify) AtomicLong(java.util.concurrent.atomic.AtomicLong) ResourceClaimManager(org.apache.nifi.controller.repository.claim.ResourceClaimManager) Mockito(org.mockito.Mockito) NiFiProperties(org.apache.nifi.util.NiFiProperties) Ignore(org.junit.Ignore) Paths(java.nio.file.Paths) StreamUtils(org.apache.nifi.stream.io.StreamUtils) CoreAttributes(org.apache.nifi.flowfile.attributes.CoreAttributes) Assert(org.junit.Assert) Collections(java.util.Collections) FlowFileQueue(org.apache.nifi.controller.queue.FlowFileQueue) Assert.assertEquals(org.junit.Assert.assertEquals) InputStream(java.io.InputStream)
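
The contract exercised above, that an IOException thrown inside a session callback reaches the caller wrapped in a ProcessException, is what lets processor code translate write failures into failure routing. A minimal sketch of the caller's side; payload and REL_FAILURE are assumed to be defined by the surrounding processor.

try {
    flowFile = session.append(flowFile, out -> out.write(payload));
} catch (final ProcessException pe) {
    // The original IOException is preserved as the cause.
    getLogger().error("Unable to append content to {}", new Object[] { flowFile }, pe);
    session.transfer(flowFile, REL_FAILURE);
    return;
}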

Example 15 with ProcessException

use of org.apache.nifi.processor.exception.ProcessException in project nifi by apache.

the class StandardRootGroupPort method transferFlowFiles.

@Override
public int transferFlowFiles(final Peer peer, final ServerProtocol serverProtocol) throws NotAuthorizedException, BadRequestException, RequestExpiredException {
    if (getConnectableType() != ConnectableType.OUTPUT_PORT) {
        throw new IllegalStateException("Cannot send FlowFiles because this port is not an Output Port");
    }
    if (!this.isRunning()) {
        throw new IllegalStateException("Port not running");
    }
    try {
        final FlowFileRequest request = new FlowFileRequest(peer, serverProtocol);
        if (!this.requestQueue.offer(request)) {
            throw new RequestExpiredException();
        }
        // Trigger this port to run
        scheduler.registerEvent(this);
        // Get a response from the response queue but don't wait forever if the port is stopped
        ProcessingResult result = null;
        // Wait for the request to start being serviced, and time out if that does not happen
        // before the request expires
        while (!request.isBeingServiced()) {
            if (request.isExpired()) {
                // Remove expired request, so that it won't block new request to be offered.
                this.requestQueue.remove(request);
                throw new SocketTimeoutException("Read timed out");
            } else {
                try {
                    Thread.sleep(100L);
                } catch (final InterruptedException e) {
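                    // Ignore the interruption and re-check whether the request is being serviced or has expired.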
                }
            }
        }
        // we've started to service the request. Now just wait until it's finished
        result = request.getResponseQueue().take();
        final Exception problem = result.getProblem();
        if (problem == null) {
            return result.getFileCount();
        } else {
            throw problem;
        }
    } catch (final NotAuthorizedException | BadRequestException | RequestExpiredException e) {
        throw e;
    } catch (final ProtocolException e) {
        throw new BadRequestException(e);
    } catch (final Exception e) {
        throw new ProcessException(e);
    }
}
Also used : ProtocolException(org.apache.nifi.remote.exception.ProtocolException) ProcessException(org.apache.nifi.processor.exception.ProcessException) SocketTimeoutException(java.net.SocketTimeoutException) BadRequestException(org.apache.nifi.remote.exception.BadRequestException) RequestExpiredException(org.apache.nifi.remote.exception.RequestExpiredException) NotAuthorizedException(org.apache.nifi.remote.exception.NotAuthorizedException) TransmissionDisabledException(org.apache.nifi.remote.exception.TransmissionDisabledException) IOException(java.io.IOException)
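
The final catch block above shows the idiom that runs through all of these examples: any checked exception without more specific handling is wrapped in a ProcessException before it leaves the component. A generic, hypothetical helper capturing that idiom (not part of NiFi itself):

import java.util.concurrent.Callable;
import org.apache.nifi.processor.exception.ProcessException;

static <T> T wrapChecked(final Callable<T> action) {
    try {
        return action.call();
    } catch (final ProcessException pe) {
        throw pe; // already unchecked and framework-aware; rethrow unchanged
    } catch (final Exception e) {
        throw new ProcessException(e);
    }
}

A caller would use it as, for example, final int count = wrapChecked(() -> doTransfer()); where doTransfer() is whatever checked-exception-throwing work the component performs.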

Aggregations

ProcessException (org.apache.nifi.processor.exception.ProcessException): 274 usages
FlowFile (org.apache.nifi.flowfile.FlowFile): 169 usages
IOException (java.io.IOException): 162 usages
InputStream (java.io.InputStream): 79 usages
HashMap (java.util.HashMap): 78 usages
ComponentLog (org.apache.nifi.logging.ComponentLog): 78 usages
OutputStream (java.io.OutputStream): 62 usages
ArrayList (java.util.ArrayList): 55 usages
Map (java.util.Map): 52 usages
PropertyDescriptor (org.apache.nifi.components.PropertyDescriptor): 39 usages
InputStreamCallback (org.apache.nifi.processor.io.InputStreamCallback): 38 usages
StopWatch (org.apache.nifi.util.StopWatch): 37 usages
HashSet (java.util.HashSet): 36 usages
ProcessSession (org.apache.nifi.processor.ProcessSession): 35 usages
Relationship (org.apache.nifi.processor.Relationship): 33 usages
List (java.util.List): 31 usages
OutputStreamCallback (org.apache.nifi.processor.io.OutputStreamCallback): 29 usages
AtomicReference (java.util.concurrent.atomic.AtomicReference): 28 usages
Set (java.util.Set): 26 usages
ProcessContext (org.apache.nifi.processor.ProcessContext): 25 usages