Use of io.automatiko.engine.services.event.ProcessInstanceDataEvent in project automatiko-engine by automatiko-io.
The class ProcessInstanceEventBatch, method events().
@Override
public Collection<DataEvent<?>> events() {
    Map<String, ProcessInstanceEventBody> processInstances = new LinkedHashMap<>();
    Map<String, UserTaskInstanceEventBody> userTaskInstances = new LinkedHashMap<>();
    Set<VariableInstanceEventBody> variables = new LinkedHashSet<>();
    // aggregate the raw engine events into one body per process instance, user task and variable
    for (ProcessEvent event : rawEvents) {
        ProcessInstanceEventBody body = processInstances.computeIfAbsent(event.getProcessInstance().getId(),
                key -> create(event));
        if (event instanceof ProcessNodeTriggeredEvent) {
            handleProcessNodeTriggeredEvent((ProcessNodeTriggeredEvent) event, body);
        } else if (event instanceof ProcessNodeLeftEvent) {
            handleProcessNodeLeftEvent((ProcessNodeLeftEvent) event, body);
        } else if (event instanceof ProcessCompletedEvent) {
            handleProcessCompletedEvent((ProcessCompletedEvent) event, body);
        } else if (event instanceof ProcessWorkItemTransitionEvent) {
            handleProcessWorkItemTransitionEvent((ProcessWorkItemTransitionEvent) event, userTaskInstances);
        } else if (event instanceof ProcessVariableChangedEvent) {
            handleProcessVariableChangedEvent((ProcessVariableChangedEvent) event, variables);
        }
    }
    // wrap the aggregated bodies into DataEvent instances
    Collection<DataEvent<?>> processedEvents = new ArrayList<>();
    processInstances.values().stream()
            .map(pi -> new ProcessInstanceDataEvent(extractRuntimeSource(pi.metaData()), addons.toString(), pi.metaData(), pi))
            .forEach(processedEvents::add);
    userTaskInstances.values().stream()
            .map(pi -> new UserTaskInstanceDataEvent(extractRuntimeSource(pi.metaData()), addons.toString(), pi.metaData(), pi))
            .forEach(processedEvents::add);
    variables.stream()
            .map(pi -> new VariableInstanceDataEvent(extractRuntimeSource(pi.metaData()), addons.toString(), pi.metaData(), pi))
            .forEach(processedEvents::add);
    return processedEvents;
}
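The collection returned by events() is what gets handed to event publisher implementations such as the ones shown in the following snippets. As a minimal sketch (not automatiko's own code), a publisher that simply logs incoming events could look like the example below; the EventPublisher interface name, the DataEvent package and the collection variant of publish(..) are assumptions inferred from the publishers shown later in this page.

// Minimal sketch of a logging publisher; not part of automatiko-engine itself.
import java.util.Collection;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.automatiko.engine.api.event.DataEvent; // package assumed

public class LoggingEventPublisher /* implements EventPublisher (interface name assumed) */ {

    private static final Logger LOGGER = LoggerFactory.getLogger(LoggingEventPublisher.class);

    public void publish(DataEvent<?> event) {
        // getType() is also used by the websocket publisher below, so it is relied on here
        LOGGER.info("Received event of type {}: {}", event.getType(), event);
    }

    public void publish(Collection<DataEvent<?>> events) {
        // batches such as the one built by ProcessInstanceEventBatch.events() arrive as a collection
        events.forEach(this::publish);
    }
}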
Use of io.automatiko.engine.services.event.ProcessInstanceDataEvent in project automatiko-engine by automatiko-io.
The class WebSocketEventPublisher, method publish().
@Override
public void publish(DataEvent<?> event) {
    if (event instanceof ProcessInstanceDataEvent && !config.instance().orElse(true)) {
        LOGGER.debug("Skipping process instance event as the publisher should not deal with instances");
        return;
    } else if (event instanceof UserTaskInstanceDataEvent && !config.tasks().orElse(true)) {
        LOGGER.debug("Skipping user task event as the publisher should not deal with tasks");
        return;
    }
    String text;
    try {
        text = json.writeValueAsString(event);
        for (Session session : sessions.values()) {
            // apply the per-session event type filter, if one was registered
            String filter = (String) session.getUserProperties().get("atk_filter");
            if (filter != null && !filter.matches(event.getType())) {
                continue;
            }
            // restrict delivery to identities allowed to see the given instance or task
            boolean allowed = true;
            IdentityProvider identityProvider = (IdentityProvider) session.getUserProperties().get("atk_identity");
            if (event instanceof ProcessInstanceDataEvent) {
                List<String> visibleTo = ((ProcessInstanceDataEvent) event).getData().getVisibleTo();
                allowed = visibleTo.isEmpty() || visibleTo.contains(identityProvider.getName())
                        || visibleTo.stream().anyMatch(item -> identityProvider.getRoles().contains(item));
            } else if (event instanceof UserTaskInstanceDataEvent) {
                HumanTaskWorkItem workItem = ((UserTaskInstanceDataEvent) event).getData().sourceInstance();
                allowed = workItem.enforce(SecurityPolicy.of(identityProvider));
            }
            if (allowed) {
                session.getAsyncRemote().sendText(text);
            }
        }
    } catch (Exception e) {
        LOGGER.error("Unexpected error when publishing websocket event", e);
    }
}
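The publisher above reads two user properties from each websocket session: "atk_filter" (an event type filter) and "atk_identity" (the IdentityProvider used for the visibility and security checks). Purely as an illustration of where such properties could come from, and not the actual automatiko endpoint, an @OnOpen handler might populate the filter roughly as sketched below; the endpoint path, the query parameter name and the identity resolution are all assumptions.

// Illustrative sketch only, not automatiko's websocket endpoint. Endpoint path and parameter
// name are assumptions; newer stacks use jakarta.websocket instead of javax.websocket.
import javax.websocket.OnOpen;
import javax.websocket.Session;
import javax.websocket.server.ServerEndpoint;

@ServerEndpoint("/events")
public class EventsSocketSketch {

    @OnOpen
    public void onOpen(Session session) {
        // optional type filter supplied by the client, e.g. ws://host/events?filter=ProcessInstanceEvent
        if (session.getRequestParameterMap().containsKey("filter")) {
            session.getUserProperties().put("atk_filter", session.getRequestParameterMap().get("filter").get(0));
        }
        // "atk_identity" would be populated the same way, with however the application
        // resolves the caller into an IdentityProvider (application specific, omitted here)
    }
}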
Use of io.automatiko.engine.services.event.ProcessInstanceDataEvent in project automatiko-engine by automatiko-io.
The class ElasticEventPublisher, method publish().
@Override
public void publish(DataEvent<?> event) {
    try {
        Request request;
        Map<String, Object> payload;
        if (event instanceof ProcessInstanceDataEvent) {
            ProcessInstanceDataEvent pevent = (ProcessInstanceDataEvent) event;
            if (config.instance().orElse(true)) {
                // index the process instance variables together with a _metadata section
                Map<String, Object> metadata = new LinkedHashMap<>();
                metadata.put("instanceId", pevent.getData().getId());
                metadata.put("processId", pevent.getData().getProcessId());
                metadata.put("rootInstanceId", pevent.getData().getRootInstanceId());
                metadata.put("rootProcessId", pevent.getData().getRootProcessId());
                metadata.put("parentInstanceId", pevent.getData().getParentInstanceId());
                metadata.put("businessKey", pevent.getData().getBusinessKey());
                metadata.put("state", pevent.getData().getState());
                metadata.put("tags", pevent.getData().getTags());
                if (pevent.getData().getRoles() != null) {
                    metadata.put("roles", pevent.getData().getRoles());
                }
                if (pevent.getData().getVisibleTo() != null) {
                    metadata.put("visibleTo", pevent.getData().getVisibleTo());
                }
                metadata.put("startDate", pevent.getData().getStartDate());
                metadata.put("endDate", pevent.getData().getEndDate());
                payload = new LinkedHashMap<>(pevent.getData().getVariables());
                payload.put("_metadata", metadata);
                request = new Request("PUT",
                        "/" + pevent.getData().sourceInstance().process().id() + "/_doc/" + pevent.getData().getId());
                request.setJsonEntity(mapper.writeValueAsString(payload));
                sendRequest(request, event);
            }
            if (config.audit().orElse(false)) {
                // build a bulk request with one audit entry per node instance
                String index = config.auditIndex().orElse("atk_audit");
                StringBuilder bulkRequestBody = new StringBuilder();
                for (NodeInstanceEventBody nevent : pevent.getData().getNodeInstances()) {
                    String actionMetaData = String.format("{ \"index\" : { \"_index\" : \"%s\", \"_id\" : \"%s\" } }%n",
                            index, nevent.getId());
                    Map<String, Object> audit = new LinkedHashMap<>();
                    audit.put("instanceId", pevent.getData().getId());
                    audit.put("processId", pevent.getData().getProcessId());
                    audit.put("rootInstanceId", pevent.getData().getRootInstanceId());
                    audit.put("rootProcessId", pevent.getData().getRootProcessId());
                    audit.put("parentInstanceId", pevent.getData().getParentInstanceId());
                    audit.put("businessKey", pevent.getData().getBusinessKey());
                    audit.put("nodeDefinitionId", nevent.getNodeDefinitionId());
                    audit.put("nodeId", nevent.getNodeId());
                    audit.put("nodeName", nevent.getNodeName());
                    audit.put("nodeType", nevent.getNodeType());
                    audit.put("triggerTime", nevent.getTriggerTime());
                    audit.put("leaveTime", nevent.getLeaveTime());
                    bulkRequestBody.append(actionMetaData);
                    bulkRequestBody.append(mapper.writeValueAsString(audit));
                    bulkRequestBody.append("\n");
                }
                request = new Request("POST", "/" + index + "/_bulk");
                request.setJsonEntity(bulkRequestBody.toString());
                sendRequest(request, event);
            }
        } else if (event instanceof UserTaskInstanceDataEvent && config.tasks().orElse(true)) {
            UserTaskInstanceDataEvent uevent = (UserTaskInstanceDataEvent) event;
            // collect all potential owners of the task: users, groups and administrators
            Set<String> potentialOwners = new LinkedHashSet<String>();
            if (uevent.getData().getPotentialUsers() != null) {
                potentialOwners.addAll(uevent.getData().getPotentialUsers());
            }
            if (uevent.getData().getPotentialGroups() != null) {
                potentialOwners.addAll(uevent.getData().getPotentialGroups());
            }
            if (uevent.getData().getAdminUsers() != null) {
                potentialOwners.addAll(uevent.getData().getAdminUsers());
            }
            if (uevent.getData().getAdminGroups() != null) {
                potentialOwners.addAll(uevent.getData().getAdminGroups());
            }
            // remove any known excluded users
            if (uevent.getData().getExcludedUsers() != null) {
                potentialOwners.removeAll(uevent.getData().getExcludedUsers());
            }
            Map<String, Object> metadata = new LinkedHashMap<>();
            metadata.put("processInstanceId", uevent.getData().getProcessInstanceId());
            metadata.put("processId", uevent.getData().getProcessId());
            metadata.put("rootInstanceId", uevent.getData().getRootProcessInstanceId());
            metadata.put("rootProcessId", uevent.getData().getRootProcessId());
            metadata.put("referenceName", uevent.getData().getReferenceName());
            payload = new LinkedHashMap<>();
            payload.put("instanceId", uevent.getData().getId());
            payload.put("name", uevent.getData().getTaskName());
            payload.put("description", uevent.getData().getTaskDescription());
            payload.put("state", uevent.getData().getState());
            payload.put("owner", uevent.getData().getActualOwner());
            payload.put("potentialOwners", potentialOwners);
            payload.put("excludedUsers", uevent.getData().getExcludedUsers());
            payload.put("startDate", uevent.getData().getStartDate());
            payload.put("endDate", uevent.getData().getCompleteDate());
            payload.put("inputs", uevent.getData().getInputs());
            payload.put("outputs", uevent.getData().getOutputs());
            payload.put("_metadata", metadata);
            request = new Request("PUT", "/tasks/_doc/" + uevent.getData().getId());
            request.setJsonEntity(mapper.writeValueAsString(payload));
            sendRequest(request, event);
        } else {
            return;
        }
    } catch (IOException e) {
        LOGGER.error("Error when publishing event to elastic", e);
    }
}
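Both branches above finish by calling sendRequest(request, event), which is not included in this snippet. A non-blocking implementation on top of the Elasticsearch low-level REST client could look roughly like the sketch below; the client field and the log messages are assumptions, not automatiko's actual helper.

// Sketch of a possible sendRequest(..) helper; the "client" field (org.elasticsearch.client.RestClient)
// and the log messages are assumptions, not the actual automatiko implementation.
private void sendRequest(Request request, DataEvent<?> event) {
    client.performRequestAsync(request, new ResponseListener() {

        @Override
        public void onSuccess(Response response) {
            LOGGER.debug("Successfully indexed event of type {}", event.getType());
        }

        @Override
        public void onFailure(Exception exception) {
            LOGGER.error("Failed to index event of type {}", event.getType(), exception);
        }
    });
}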
Use of io.automatiko.engine.services.event.ProcessInstanceDataEvent in project automatiko-engine by automatiko-io.
The class VerificationTest, method testProcessAsyncCallFailure().
@Test
public void testProcessAsyncCallFailure() {
    String addPayload = "{\"name\" : null}";
    given().contentType(ContentType.JSON).accept(ContentType.JSON).body(addPayload)
            .when().post("/async")
            .then().statusCode(200).body("id", notNullValue(), "name", nullValue());

    List<DataEvent<?>> received = publisher.events();
    assertEquals(2, received.size());

    ProcessInstanceDataEvent piEvent = (ProcessInstanceDataEvent) received.stream()
            .filter(pi -> ((ProcessInstanceDataEvent) pi).getData().getState().equals(ProcessInstance.STATE_COMPLETED))
            .findFirst().get();
    assertEquals(null, piEvent.getData().getVariables().get("name"));

    given().accept(ContentType.JSON).when().get("/async").then().statusCode(200).body("$.size()", is(0));
}
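The publisher field used in this test collects every event it receives so the test can inspect them via events(). The actual helper in the automatiko-engine test suite may differ; a minimal in-memory version could look like this.

// Minimal in-memory publisher for tests; the real helper in automatiko-engine may differ.
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;

import io.automatiko.engine.api.event.DataEvent; // package assumed

public class CollectingEventPublisher {

    private final List<DataEvent<?>> collected = new CopyOnWriteArrayList<>();

    public void publish(DataEvent<?> event) {
        collected.add(event);
    }

    public void publish(Collection<DataEvent<?>> events) {
        collected.addAll(events);
    }

    // everything published so far, used by assertions such as assertEquals(2, received.size())
    public List<DataEvent<?>> events() {
        return new ArrayList<>(collected);
    }
}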