Use of org.apache.nifi.processor.Relationship in project nifi by apache: class TestConnectWebSocket, method testSuccess.
@Test
public void testSuccess() throws Exception {
    final TestRunner runner = TestRunners.newTestRunner(ConnectWebSocket.class);
    final ConnectWebSocket processor = (ConnectWebSocket) runner.getProcessor();
    final SharedSessionState sharedSessionState = new SharedSessionState(processor, new AtomicLong(0));
    // Use this custom session factory implementation so that createdSessions can be read from the test case,
    // because MockSessionFactory doesn't expose it.
    final Set<MockProcessSession> createdSessions = new HashSet<>();
    final ProcessSessionFactory sessionFactory = () -> {
        final MockProcessSession session = new MockProcessSession(sharedSessionState, processor);
        createdSessions.add(session);
        return session;
    };
    final WebSocketClientService service = mock(WebSocketClientService.class);
    final WebSocketSession webSocketSession = spy(AbstractWebSocketSession.class);
    when(webSocketSession.getSessionId()).thenReturn("ws-session-id");
    when(webSocketSession.getLocalAddress()).thenReturn(new InetSocketAddress("localhost", 12345));
    when(webSocketSession.getRemoteAddress()).thenReturn(new InetSocketAddress("example.com", 80));
    final String serviceId = "ws-service";
    final String endpointId = "client-1";
    final String textMessageFromServer = "message from server.";
    when(service.getIdentifier()).thenReturn(serviceId);
    when(service.getTargetUri()).thenReturn("ws://example.com/web-socket");
    doAnswer(invocation -> {
        processor.connected(webSocketSession);
        // Two text messages.
        processor.consume(webSocketSession, textMessageFromServer);
        processor.consume(webSocketSession, textMessageFromServer);
        // Three binary messages.
        final byte[] binaryMessage = textMessageFromServer.getBytes();
        processor.consume(webSocketSession, binaryMessage, 0, binaryMessage.length);
        processor.consume(webSocketSession, binaryMessage, 0, binaryMessage.length);
        processor.consume(webSocketSession, binaryMessage, 0, binaryMessage.length);
        return null;
    }).when(service).connect(endpointId);
    runner.addControllerService(serviceId, service);
    runner.enableControllerService(service);
    runner.setProperty(ConnectWebSocket.PROP_WEBSOCKET_CLIENT_SERVICE, serviceId);
    runner.setProperty(ConnectWebSocket.PROP_WEBSOCKET_CLIENT_ID, endpointId);
    processor.onTrigger(runner.getProcessContext(), sessionFactory);
    final Map<Relationship, List<MockFlowFile>> transferredFlowFiles = getAllTransferredFlowFiles(createdSessions, processor);
    List<MockFlowFile> connectedFlowFiles = transferredFlowFiles.get(AbstractWebSocketGatewayProcessor.REL_CONNECTED);
    assertEquals(1, connectedFlowFiles.size());
    connectedFlowFiles.forEach(ff -> {
        assertFlowFile(webSocketSession, serviceId, endpointId, ff, null);
    });
    List<MockFlowFile> textFlowFiles = transferredFlowFiles.get(AbstractWebSocketGatewayProcessor.REL_MESSAGE_TEXT);
    assertEquals(2, textFlowFiles.size());
    textFlowFiles.forEach(ff -> {
        assertFlowFile(webSocketSession, serviceId, endpointId, ff, WebSocketMessage.Type.TEXT);
    });
    List<MockFlowFile> binaryFlowFiles = transferredFlowFiles.get(AbstractWebSocketGatewayProcessor.REL_MESSAGE_BINARY);
    assertEquals(3, binaryFlowFiles.size());
    binaryFlowFiles.forEach(ff -> {
        assertFlowFile(webSocketSession, serviceId, endpointId, ff, WebSocketMessage.Type.BINARY);
    });
    final List<ProvenanceEventRecord> provenanceEvents = sharedSessionState.getProvenanceEvents();
    assertEquals(6, provenanceEvents.size());
    assertTrue(provenanceEvents.stream().allMatch(event -> ProvenanceEventType.RECEIVE.equals(event.getEventType())));
}
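The helpers getAllTransferredFlowFiles and assertFlowFile are defined elsewhere in the test class and are not shown here. As a rough, hypothetical sketch (not the original helper, and assuming only the MockProcessSession and Processor APIs already used above), the aggregation helper could look like this:
private Map<Relationship, List<MockFlowFile>> getAllTransferredFlowFiles(final Collection<MockProcessSession> createdSessions, final Processor processor) {
    // Hypothetical reconstruction: group the FlowFiles that each mock session
    // transferred by the processor's declared relationships.
    final Map<Relationship, List<MockFlowFile>> result = new HashMap<>();
    for (final MockProcessSession session : createdSessions) {
        for (final Relationship relationship : processor.getRelationships()) {
            final List<MockFlowFile> flowFiles = session.getFlowFilesForRelationship(relationship);
            if (!flowFiles.isEmpty()) {
                result.computeIfAbsent(relationship, rel -> new ArrayList<>()).addAll(flowFiles);
            }
        }
    }
    return result;
}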
Use of org.apache.nifi.processor.Relationship in project nifi by apache: class TestJoltTransformJSON, method testRelationshipsCreated.
@Test
public void testRelationshipsCreated() throws IOException {
    final Processor processor = new JoltTransformJSON();
    final TestRunner runner = TestRunners.newTestRunner(processor);
    final String spec = new String(Files.readAllBytes(Paths.get("src/test/resources/TestJoltTransformJson/chainrSpec.json")));
    runner.setProperty(JoltTransformJSON.JOLT_SPEC, spec);
    runner.enqueue(JSON_INPUT);
    final Set<Relationship> relationships = processor.getRelationships();
    assertTrue(relationships.contains(JoltTransformJSON.REL_FAILURE));
    assertTrue(relationships.contains(JoltTransformJSON.REL_SUCCESS));
    assertEquals(2, relationships.size());
}
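Note that this test only inspects the relationship set; it never triggers the processor. A hedged continuation, which would sit before the closing brace of the test body above and assumes the chain spec and JSON_INPUT form a valid transform, could exercise the success relationship like this:
    // Hypothetical continuation (not part of the original test): run the processor once
    // and assert the enqueued FlowFile is routed to the success relationship.
    runner.run();
    runner.assertAllFlowFilesTransferred(JoltTransformJSON.REL_SUCCESS, 1);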
Use of org.apache.nifi.processor.Relationship in project nifi by apache: class WriteResourceToStream, method init.
@Override
protected void init(final ProcessorInitializationContext context) {
    final Set<Relationship> relationships = new HashSet<>();
    relationships.add(REL_SUCCESS);
    relationships.add(REL_FAILURE);
    this.relationships = Collections.unmodifiableSet(relationships);
    final InputStream resourceStream = Thread.currentThread().getContextClassLoader().getResourceAsStream("file.txt");
    try {
        this.resourceData = IOUtils.toString(resourceStream);
    } catch (IOException e) {
        throw new RuntimeException("Unable to load resources", e);
    } finally {
        IOUtils.closeQuietly(resourceStream);
    }
}
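The relationships field populated here is normally exposed through the standard accessor that the framework and tests call. A minimal sketch of that companion method, assuming WriteResourceToStream extends AbstractProcessor:
@Override
public Set<Relationship> getRelationships() {
    // Return the immutable set built in init() so the framework can route FlowFiles
    // and tests can discover the processor's relationships.
    return this.relationships;
}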
Use of org.apache.nifi.processor.Relationship in project nifi by apache: class MockProcessSession, method assertAllFlowFilesTransferred.
/**
 * Asserts that all FlowFiles that were transferred were transferred to the
 * given relationship.
 *
 * @param relationship the relationship to validate
 */
public void assertAllFlowFilesTransferred(final Relationship relationship) {
    for (final Map.Entry<Relationship, List<MockFlowFile>> entry : transferMap.entrySet()) {
        final Relationship rel = entry.getKey();
        final List<MockFlowFile> flowFiles = entry.getValue();
        if (!rel.equals(relationship) && flowFiles != null && !flowFiles.isEmpty()) {
            Assert.fail("Expected all Transferred FlowFiles to go to " + relationship + " but " + flowFiles.size() + " were routed to " + rel);
        }
    }
}
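In practice this assertion is usually reached through TestRunner rather than by calling the mock session directly. A short hypothetical usage, assuming a processor such as the WriteResourceToStream shown above that exposes REL_SUCCESS as a public constant:
    // Hypothetical usage: run the processor, then verify every transferred FlowFile
    // ended up on REL_SUCCESS (TestRunner delegates to MockProcessSession).
    final TestRunner runner = TestRunners.newTestRunner(WriteResourceToStream.class);
    runner.enqueue("trigger content".getBytes());
    runner.run();
    runner.assertAllFlowFilesTransferred(WriteResourceToStream.REL_SUCCESS);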
Use of org.apache.nifi.processor.Relationship in project nifi by apache: class MockProcessSession, method migrate.
private void migrate(final MockProcessSession newOwner, final Collection<MockFlowFile> flowFiles) {
    for (final FlowFile flowFile : flowFiles) {
        if (openInputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open InputStream for the FlowFile, created by calling ProcessSession.read(FlowFile)");
        }
        if (openOutputStreams.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " cannot be migrated to a new Process Session because this session currently " + "has an open OutputStream for the FlowFile, created by calling ProcessSession.write(FlowFile)");
        }
        if (readRecursionSet.containsKey(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or InputStream created by ProcessSession.read(FlowFile) has not been closed");
        }
        if (writeRecursionSet.contains(flowFile)) {
            throw new IllegalStateException(flowFile + " already in use for an active callback or OutputStream created by ProcessSession.write(FlowFile) has not been closed");
        }
        final FlowFile currentVersion = currentVersions.get(flowFile.getId());
        if (currentVersion == null) {
            throw new FlowFileHandlingException(flowFile + " is not known in this session");
        }
    }
    for (final Map.Entry<Relationship, List<MockFlowFile>> entry : transferMap.entrySet()) {
        final Relationship relationship = entry.getKey();
        final List<MockFlowFile> transferredFlowFiles = entry.getValue();
        for (final MockFlowFile flowFile : flowFiles) {
            if (transferredFlowFiles.remove(flowFile)) {
                newOwner.transferMap.computeIfAbsent(relationship, rel -> new ArrayList<>()).add(flowFile);
            }
        }
    }
    for (final MockFlowFile flowFile : flowFiles) {
        if (beingProcessed.remove(flowFile.getId())) {
            newOwner.beingProcessed.add(flowFile.getId());
        }
        if (penalized.remove(flowFile)) {
            newOwner.penalized.add(flowFile);
        }
        if (currentVersions.containsKey(flowFile.getId())) {
            newOwner.currentVersions.put(flowFile.getId(), currentVersions.remove(flowFile.getId()));
        }
        if (originalVersions.containsKey(flowFile.getId())) {
            newOwner.originalVersions.put(flowFile.getId(), originalVersions.remove(flowFile.getId()));
        }
        if (removedFlowFiles.remove(flowFile.getId())) {
            newOwner.removedFlowFiles.add(flowFile.getId());
        }
    }
    final Set<String> flowFileIds = flowFiles.stream().map(ff -> ff.getAttribute(CoreAttributes.UUID.key())).collect(Collectors.toSet());
    provenanceReporter.migrate(newOwner.provenanceReporter, flowFileIds);
}
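From a processor's point of view, this behavior is driven through the public ProcessSession.migrate API rather than this private method. A hedged sketch of a typical caller, assuming an originalSession and sessionFactory are in scope and the processor defines a REL_SUCCESS relationship:
    // Hypothetical caller (not part of MockProcessSession): move FlowFiles pulled on one
    // session over to a fresh session so they can be transferred and committed separately.
    final ProcessSession batchSession = sessionFactory.createSession();
    final List<FlowFile> polled = originalSession.get(10);
    originalSession.migrate(batchSession, polled);
    batchSession.transfer(polled, REL_SUCCESS);
    batchSession.commit();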