
Example 1 with ProcessInstanceJobDescription

Use of io.automatiko.engine.api.jobs.ProcessInstanceJobDescription in project automatiko-engine by automatiko-io.

From the class TestJobService, method triggerProcessInstanceJob:

public void triggerProcessInstanceJob(String jobId) {
    LOGGER.debug("Job {} started", jobId);
    ProcessInstanceJobDescription job = (ProcessInstanceJobDescription) jobs.remove(jobId);
    if (job == null) {
        throw new IllegalArgumentException("Job with id " + jobId + " not found");
    }
    try {
        Process<?> process = mappedProcesses.get(job.processId());
        if (process == null) {
            LOGGER.warn("No process found for process id {}", job.processId());
            return;
        }
        IdentityProvider.set(new TrustedIdentityProvider("System<timer>"));
        UnitOfWorkExecutor.executeInUnitOfWork(unitOfWorkManager, () -> {
            Optional<? extends ProcessInstance<?>> processInstanceFound = process.instances().findById(job.processInstanceId());
            if (processInstanceFound.isPresent()) {
                ProcessInstance<?> processInstance = processInstanceFound.get();
                String[] ids = job.id().split("_");
                // the job id encodes the timer instance id after the underscore
                processInstance.send(Sig.of(job.triggerType(),
                        TimerInstance.with(Long.parseLong(ids[1]), job.id(), job.expirationTime().repeatLimit())));
                if (job.expirationTime().repeatLimit() == 0) {
                    // no repeats left for this timer, drop the job entry
                    jobs.remove(jobId);
                }
            } else {
                // since owning process instance does not exist cancel timers
                jobs.remove(jobId);
            }
            return null;
        });
        LOGGER.debug("Job {} completed", job.id());
    } finally {
        if (job.expirationTime().next() != null) {
            // another expiration is due, re-register the job and schedule its next fire
            jobs.remove(jobId);
            scheduleProcessInstanceJob(job);
        } else {
            // no further expirations, make sure the job entry is removed
            jobs.remove(jobId);
        }
    }
}
Also used: ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription), TrustedIdentityProvider (io.automatiko.engine.api.auth.TrustedIdentityProvider)
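
Whether the timer fires again is driven entirely by the job's expiration time: repeatLimit() says how many fires remain and next() says whether another expiration is due. The snippet below is a minimal, JDK-only sketch of that "fire, then decide whether to re-schedule" pattern; the class and method names are illustrative and not part of the automatiko API.

import java.time.Duration;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Illustrative sketch only: mirrors the repeatLimit()/next() bookkeeping of
// triggerProcessInstanceJob with plain JDK scheduling.
public class RepeatingTimerSketch {

    static final ScheduledExecutorService SCHEDULER = Executors.newScheduledThreadPool(1);

    static void schedule(String jobId, Duration delay, Duration period, int remainingFires) {
        SCHEDULER.schedule(() -> {
            System.out.println("Job " + jobId + " fired, remaining fires: " + remainingFires);
            // re-schedule only while fires remain, the analogue of checking next() != null
            if (remainingFires > 1) {
                schedule(jobId, period, period, remainingFires - 1);
            }
        }, delay.toMillis(), TimeUnit.MILLISECONDS);
    }

    public static void main(String[] args) throws InterruptedException {
        schedule("job-1", Duration.ofMillis(200), Duration.ofMillis(200), 3);
        Thread.sleep(1000);
        SCHEDULER.shutdown();
    }
}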

Example 2 with ProcessInstanceJobDescription

Use of io.automatiko.engine.api.jobs.ProcessInstanceJobDescription in project automatiko-engine by automatiko-io.

From the class WorkflowProcessInstanceImpl, method configureSLATimer:

public TimerInstance configureSLATimer(String slaDueDateExpression) {
    // setup SLA if provided
    slaDueDateExpression = resolveVariable(slaDueDateExpression);
    if (slaDueDateExpression == null || slaDueDateExpression.trim().isEmpty()) {
        logger.debug("Sla due date expression resolved to no value '{}'", slaDueDateExpression);
        return null;
    }
    logger.debug("SLA due date is set to {}", slaDueDateExpression);
    long duration = DateTimeUtils.parseDuration(slaDueDateExpression);
    TimerInstance timerInstance = new TimerInstance();
    timerInstance.setTimerId(-1);
    timerInstance.setDelay(duration);
    timerInstance.setPeriod(0);
    if (useTimerSLATracking()) {
        // the timer owner id is prefixed with the parent process instance id (parent:child) for sub process instances
        String parentProcessInstanceId = getParentProcessInstanceId();
        if (parentProcessInstanceId != null && !parentProcessInstanceId.isEmpty()) {
            parentProcessInstanceId += ":";
        } else {
            parentProcessInstanceId = "";
        }
        String id = parentProcessInstanceId + getId();
        ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(-1L,
                DurationExpirationTime.after(duration), id, getProcessId(), getProcess().getVersion());
        timerInstance.setId(getProcessRuntime().getJobsService().scheduleProcessInstanceJob(description));
    }
    return timerInstance;
}
Also used: TimerInstance (io.automatiko.engine.services.time.TimerInstance), ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription)
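
DateTimeUtils.parseDuration turns the resolved SLA expression into a delay in milliseconds. As a rough approximation, assuming an ISO-8601 duration expression such as PT30M, the same value can be obtained with the JDK alone; this sketch does not cover the other formats the automatiko utility may accept.

import java.time.Duration;
import java.time.format.DateTimeParseException;

// Sketch: approximate the SLA delay for ISO-8601 duration expressions.
// automatiko's DateTimeUtils.parseDuration is the real implementation.
public class SlaDelaySketch {

    static long parseDelayMillis(String slaDueDateExpression) {
        try {
            return Duration.parse(slaDueDateExpression).toMillis();
        } catch (DateTimeParseException e) {
            // fall back to treating the expression as a plain millisecond value
            return Long.parseLong(slaDueDateExpression);
        }
    }

    public static void main(String[] args) {
        System.out.println(parseDelayMillis("PT30M")); // 1800000
        System.out.println(parseDelayMillis("5000"));  // 5000
    }
}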

Example 3 with ProcessInstanceJobDescription

Use of io.automatiko.engine.api.jobs.ProcessInstanceJobDescription in project automatiko-engine by automatiko-io.

From the class CassandraJobService, method start:

public void start(@Observes StartupEvent event) {
    loadScheduler.scheduleAtFixedRate(() -> {
        try {
            long next = LocalDateTime.now().plus(Duration.ofMinutes(config.interval().orElse(10L))).atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
            Select select = selectFrom(config.keyspace().orElse("automatiko"), tableName).all()
                    .whereColumn(FIRE_AT_FIELD).isLessThan(literal(next)).allowFiltering();
            ResultSet rs = cqlSession.execute(select.build());
            List<Row> jobs = rs.all();
            LOGGER.debug("Loaded jobs ({}) to be executed before {}", jobs.size(), next);
            for (Row job : jobs) {
                if (job.getString(OWNER_INSTANCE_ID_FIELD) == null) {
                    // no owning instance: the timer starts a new process instance when it expires
                    ProcessJobDescription description = ProcessJobDescription.of(
                            build(job.getString(EXPRESSION_FIELD)), null, job.getString(OWNER_DEF_ID_FIELD));
                    scheduledJobs.computeIfAbsent(job.getString(INSTANCE_ID_FIELD), k -> {
                        return log(job.getString(INSTANCE_ID_FIELD), scheduler.schedule(
                                new StartProcessOnExpiredTimer(job.getString(INSTANCE_ID_FIELD),
                                        job.getString(OWNER_DEF_ID_FIELD), -1, description),
                                Duration.between(LocalDateTime.now(),
                                        ZonedDateTime.ofInstant(Instant.ofEpochMilli(job.getLong(FIRE_AT_FIELD)),
                                                ZoneId.systemDefault()))
                                        .toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                } else {
                    // owned by an existing instance: the timer signals that instance when it expires
                    ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(
                            job.getString(INSTANCE_ID_FIELD), job.getString(TRIGGER_TYPE_FIELD),
                            build(job.getString(EXPRESSION_FIELD)), job.getString(OWNER_INSTANCE_ID_FIELD),
                            job.getString(OWNER_DEF_ID_FIELD), null);
                    scheduledJobs.computeIfAbsent(job.getString(INSTANCE_ID_FIELD), k -> {
                        return log(job.getString(INSTANCE_ID_FIELD), scheduler.schedule(
                                new SignalProcessInstanceOnExpiredTimer(job.getString(INSTANCE_ID_FIELD),
                                        job.getString(TRIGGER_TYPE_FIELD), job.getString(OWNER_DEF_ID_FIELD),
                                        job.getString(OWNER_INSTANCE_ID_FIELD), job.getInt(FIRE_LIMIT_FIELD),
                                        description),
                                Duration.between(LocalDateTime.now(),
                                        ZonedDateTime.ofInstant(Instant.ofEpochMilli(job.getLong(FIRE_AT_FIELD)),
                                                ZoneId.systemDefault()))
                                        .toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                }
            }
        } catch (Exception e) {
            LOGGER.error("Error while loading jobs from cassandra", e);
        }
    }, 1, config.interval().orElse(10L) * 60, TimeUnit.SECONDS);
}
Also used: ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription), Select (com.datastax.oss.driver.api.querybuilder.select.Select), ResultSet (com.datastax.oss.driver.api.core.cql.ResultSet), Row (com.datastax.oss.driver.api.core.cql.Row), QueryExecutionException (com.datastax.oss.driver.api.core.servererrors.QueryExecutionException), ProcessJobDescription (io.automatiko.engine.api.jobs.ProcessJobDescription)
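
The loader computes two values with java.time: an epoch-millisecond cutoff ("everything that fires within the next interval") used in the Cassandra query, and a per-job delay between now and the stored fire time handed to the in-memory scheduler. A self-contained sketch of that arithmetic, with illustrative names:

import java.time.Duration;
import java.time.Instant;
import java.time.LocalDateTime;
import java.time.ZoneId;
import java.time.ZonedDateTime;

// Sketch of the time arithmetic used when loading jobs from the store.
public class JobWindowSketch {

    // upper bound (epoch millis) of the load window: now + intervalMinutes
    static long cutoff(long intervalMinutes) {
        return LocalDateTime.now().plus(Duration.ofMinutes(intervalMinutes))
                .atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
    }

    // delay in millis between now and a stored fire-at timestamp
    static long delayMillis(long fireAtEpochMilli) {
        ZonedDateTime fireAt = ZonedDateTime.ofInstant(Instant.ofEpochMilli(fireAtEpochMilli),
                ZoneId.systemDefault());
        return Duration.between(LocalDateTime.now(), fireAt).toMillis();
    }

    public static void main(String[] args) {
        long fireAt = cutoff(5); // a job due in roughly five minutes
        System.out.println("cutoff=" + cutoff(10) + " delayMillis=" + delayMillis(fireAt));
    }
}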

Example 4 with ProcessInstanceJobDescription

Use of io.automatiko.engine.api.jobs.ProcessInstanceJobDescription in project automatiko-engine by automatiko-io.

From the class DynamoDBJobService, method start:

public void start(@Observes StartupEvent event) {
    loadScheduler.scheduleAtFixedRate(() -> {
        try {
            long next = LocalDateTime.now().plus(Duration.ofMinutes(config.interval().orElse(10L))).atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
            Map<String, AttributeValue> attrValues = new HashMap<String, AttributeValue>();
            attrValues.put(":value", AttributeValue.builder().n(Long.toString(next)).build());
            ScanRequest query = ScanRequest.builder().tableName(tableName)
                    .projectionExpression(INSTANCE_ID_FIELD + "," + FIRE_AT_FIELD + "," + OWNER_INSTANCE_ID_FIELD + ","
                            + OWNER_DEF_ID_FIELD + "," + TRIGGER_TYPE_FIELD + "," + FIRE_LIMIT_FIELD + ","
                            + REPEAT_INTERVAL_FIELD)
                    .filterExpression(FIRE_AT_FIELD + " < :value")
                    .expressionAttributeValues(attrValues).build();
            List<Map<String, AttributeValue>> jobs = dynamodb.scan(query).items();
            LOGGER.debug("Loaded jobs ({}) to be executed before {}", jobs.size(), next);
            for (Map<String, AttributeValue> job : jobs) {
                if (job.get(OWNER_INSTANCE_ID_FIELD) == null) {
                    // no owning instance: the timer starts a new process instance when it expires
                    ProcessJobDescription description = ProcessJobDescription.of(build(job.get(EXPRESSION_FIELD).s()),
                            null, job.get(OWNER_DEF_ID_FIELD).s());
                    scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
                        return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.schedule(
                                new StartProcessOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(),
                                        job.get(OWNER_DEF_ID_FIELD).s(), -1, description),
                                Duration.between(LocalDateTime.now(),
                                        ZonedDateTime.ofInstant(
                                                Instant.ofEpochMilli(Long.parseLong(job.get(FIRE_AT_FIELD).n())),
                                                ZoneId.systemDefault()))
                                        .toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                } else {
                    // owned by an existing instance: the timer signals that instance when it expires
                    ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(
                            job.get(INSTANCE_ID_FIELD).s(), job.get(TRIGGER_TYPE_FIELD).s(),
                            build(job.get(EXPRESSION_FIELD).s()), job.get(OWNER_INSTANCE_ID_FIELD).s(),
                            job.get(OWNER_DEF_ID_FIELD).s(), null);
                    scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
                        return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.schedule(
                                new SignalProcessInstanceOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(),
                                        job.get(TRIGGER_TYPE_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(),
                                        job.get(OWNER_INSTANCE_ID_FIELD).s(),
                                        Integer.parseInt(job.get(FIRE_LIMIT_FIELD).n()), description),
                                Duration.between(LocalDateTime.now(),
                                        ZonedDateTime.ofInstant(
                                                Instant.ofEpochMilli(Long.parseLong(job.get(FIRE_AT_FIELD).n())),
                                                ZoneId.systemDefault()))
                                        .toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                }
            }
        } catch (Exception e) {
            LOGGER.error("Error while loading jobs from dynamodb", e);
        }
    }, 1, config.interval().orElse(10L) * 60, TimeUnit.SECONDS);
}
Also used: AttributeValue (software.amazon.awssdk.services.dynamodb.model.AttributeValue), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap), ResourceInUseException (software.amazon.awssdk.services.dynamodb.model.ResourceInUseException), TransactionCanceledException (software.amazon.awssdk.services.dynamodb.model.TransactionCanceledException), ResourceNotFoundException (software.amazon.awssdk.services.dynamodb.model.ResourceNotFoundException), DynamoDbException (software.amazon.awssdk.services.dynamodb.model.DynamoDbException), ScanRequest (software.amazon.awssdk.services.dynamodb.model.ScanRequest), ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription), Map (java.util.Map), ProcessJobDescription (io.automatiko.engine.api.jobs.ProcessJobDescription)
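
DynamoDB carries numbers as strings inside AttributeValue, which is why the scan results above go through Long.parseLong(...) and Integer.parseInt(...) on the way back in. A small sketch of that round trip; the attribute names here are illustrative, the real code uses the *_FIELD constants shown above.

import java.util.HashMap;
import java.util.Map;

import software.amazon.awssdk.services.dynamodb.model.AttributeValue;

// Sketch: numeric DynamoDB attributes are written and read as strings via n().
public class DynamoNumberSketch {

    public static void main(String[] args) {
        Map<String, AttributeValue> item = new HashMap<>();
        // writing: numbers are passed as strings
        item.put("fireAt", AttributeValue.builder().n(Long.toString(System.currentTimeMillis())).build());
        item.put("fireLimit", AttributeValue.builder().n("3").build());

        // reading: parse the string form back into the numeric type needed
        long fireAt = Long.parseLong(item.get("fireAt").n());
        int fireLimit = Integer.parseInt(item.get("fireLimit").n());
        System.out.println("fireAt=" + fireAt + " fireLimit=" + fireLimit);
    }
}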

Example 5 with ProcessInstanceJobDescription

Use of io.automatiko.engine.api.jobs.ProcessInstanceJobDescription in project automatiko-engine by automatiko-io.

From the class DynamoDBJobService, method updateRepeatableJob:

protected void updateRepeatableJob(String id) {
    HashMap<String, AttributeValue> itemKey = new HashMap<String, AttributeValue>();
    itemKey.put(INSTANCE_ID_FIELD, AttributeValue.builder().s(id).build());
    GetItemRequest getrequest = GetItemRequest.builder().key(itemKey).tableName(tableName).build();
    Map<String, AttributeValue> job = dynamodb.getItem(getrequest).item();
    // one fire just happened: decrement the remaining fire count and move the
    // fire time forward by the repeat interval (stored in milliseconds)
    Integer limit = Integer.parseInt(job.get(FIRE_LIMIT_FIELD).n()) - 1;
    Long repeat = Long.parseLong(job.get(REPEAT_INTERVAL_FIELD).n());
    ZonedDateTime fireTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(job.get(FIRE_AT_FIELD).n())),
            ZoneId.systemDefault());
    Map<String, AttributeValueUpdate> updatedValues = new HashMap<String, AttributeValueUpdate>();
    updatedValues.put(FIRE_AT_FIELD, AttributeValueUpdate.builder()
            .value(AttributeValue.builder()
                    .n(Long.toString(fireTime.plus(repeat, ChronoUnit.MILLIS).toInstant().toEpochMilli())).build())
            .action(AttributeAction.PUT).build());
    updatedValues.put(FIRE_LIMIT_FIELD, AttributeValueUpdate.builder()
            .value(AttributeValue.builder().n(Integer.toString(limit)).build())
            .action(AttributeAction.PUT).build());
    updatedValues.put(STATUS_FIELD, AttributeValueUpdate.builder()
            .value(AttributeValue.builder().s("scheduled").build())
            .action(AttributeAction.PUT).build());
    UpdateItemRequest request = UpdateItemRequest.builder().tableName(tableName).key(itemKey).attributeUpdates(updatedValues).build();
    dynamodb.updateItem(request);
    if (job.get(OWNER_INSTANCE_ID_FIELD) == null) {
        // no owning instance: re-arm a timer that starts a new process instance on expiration
        ProcessJobDescription description = ProcessJobDescription.of(build(job.get(EXPRESSION_FIELD).s()), null,
                job.get(OWNER_DEF_ID_FIELD).s());
        scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
            return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.schedule(
                    new StartProcessOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(),
                            limit, description),
                    Duration.between(LocalDateTime.now(), fireTime).toMillis(), TimeUnit.MILLISECONDS));
        });
    } else {
        // owned by an existing instance: re-arm a repeating timer that signals that instance
        ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(job.get(INSTANCE_ID_FIELD).s(),
                job.get(TRIGGER_TYPE_FIELD).s(), build(job.get(EXPRESSION_FIELD).s()),
                job.get(OWNER_INSTANCE_ID_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(), null);
        scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
            return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.scheduleAtFixedRate(
                    new SignalProcessInstanceOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(),
                            job.get(TRIGGER_TYPE_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(),
                            job.get(OWNER_INSTANCE_ID_FIELD).s(), limit, description),
                    Duration.between(LocalDateTime.now(), fireTime).toMillis(), repeat, TimeUnit.MILLISECONDS));
        });
    }
}
Also used: AttributeValue (software.amazon.awssdk.services.dynamodb.model.AttributeValue), UpdateItemRequest (software.amazon.awssdk.services.dynamodb.model.UpdateItemRequest), AttributeValueUpdate (software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate), ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap), HashMap (java.util.HashMap), ZonedDateTime (java.time.ZonedDateTime), ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription), GetItemRequest (software.amazon.awssdk.services.dynamodb.model.GetItemRequest), ProcessJobDescription (io.automatiko.engine.api.jobs.ProcessJobDescription)
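
The update itself is plain arithmetic: the remaining fire count drops by one and the stored fire time moves forward by the repeat interval, which is kept in milliseconds. A self-contained sketch of that calculation, with illustrative values:

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

// Sketch of the repeatable-job bookkeeping performed in updateRepeatableJob.
public class RepeatUpdateSketch {

    public static void main(String[] args) {
        int fireLimit = 3;          // remaining fires as stored on the job record
        long repeatMillis = 60_000; // repeat interval in milliseconds
        long fireAtMillis = System.currentTimeMillis();

        int newLimit = fireLimit - 1;
        ZonedDateTime fireTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(fireAtMillis), ZoneId.systemDefault());
        long nextFireAtMillis = fireTime.plus(repeatMillis, ChronoUnit.MILLIS).toInstant().toEpochMilli();

        System.out.println("newLimit=" + newLimit + " nextFireAt=" + nextFireAtMillis);
    }
}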

Aggregations

ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription): 15
ProcessJobDescription (io.automatiko.engine.api.jobs.ProcessJobDescription): 9
Test (org.junit.jupiter.api.Test): 4
FileSystemBasedJobService (io.automatiko.engine.addons.persistence.filesystem.job.FileSystemBasedJobService): 3
CollectingUnitOfWorkFactory (io.automatiko.engine.services.uow.CollectingUnitOfWorkFactory): 3
DefaultUnitOfWorkManager (io.automatiko.engine.services.uow.DefaultUnitOfWorkManager): 3
ZonedDateTime (java.time.ZonedDateTime): 3
CountDownLatch (java.util.concurrent.CountDownLatch): 3
ResultSet (com.datastax.oss.driver.api.core.cql.ResultSet): 2
Row (com.datastax.oss.driver.api.core.cql.Row): 2
Select (com.datastax.oss.driver.api.querybuilder.select.Select): 2
JobInstanceEntity (io.automatiko.engine.addons.persistence.db.model.JobInstanceEntity): 2
HashMap (java.util.HashMap): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
Document (org.bson.Document): 2
AttributeValue (software.amazon.awssdk.services.dynamodb.model.AttributeValue): 2
SimpleStatement (com.datastax.oss.driver.api.core.cql.SimpleStatement): 1
QueryExecutionException (com.datastax.oss.driver.api.core.servererrors.QueryExecutionException): 1
MongoWriteConcernException (com.mongodb.MongoWriteConcernException): 1
MongoWriteException (com.mongodb.MongoWriteException): 1