
Example 1 with ProcessJobDescription

Use of io.automatiko.engine.api.jobs.ProcessJobDescription in the project automatiko-engine by automatiko-io.

From the class CassandraJobService, method start:

public void start(@Observes StartupEvent event) {
    loadScheduler.scheduleAtFixedRate(() -> {
        try {
            // load jobs that fire before the end of the next interval (default 10 minutes)
            long next = LocalDateTime.now().plus(Duration.ofMinutes(config.interval().orElse(10L)))
                    .atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
            Select select = selectFrom(config.keyspace().orElse("automatiko"), tableName).all()
                    .whereColumn(FIRE_AT_FIELD).isLessThan(literal(next)).allowFiltering();
            ResultSet rs = cqlSession.execute(select.build());
            List<Row> jobs = rs.all();
            LOGGER.debug("Loaded jobs ({}) to be executed before {}", jobs.size(), next);
            for (Row job : jobs) {
                if (job.getString(OWNER_INSTANCE_ID_FIELD) == null) {
                    // no owning instance: the timer starts a new process instance when it expires
                    ProcessJobDescription description = ProcessJobDescription.of(
                            build(job.getString(EXPRESSION_FIELD)), null, job.getString(OWNER_DEF_ID_FIELD));
                    scheduledJobs.computeIfAbsent(job.getString(INSTANCE_ID_FIELD), k -> {
                        return log(job.getString(INSTANCE_ID_FIELD), scheduler.schedule(
                                new StartProcessOnExpiredTimer(job.getString(INSTANCE_ID_FIELD),
                                        job.getString(OWNER_DEF_ID_FIELD), -1, description),
                                Duration.between(LocalDateTime.now(), ZonedDateTime.ofInstant(
                                        Instant.ofEpochMilli(job.getLong(FIRE_AT_FIELD)), ZoneId.systemDefault())).toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                } else {
                    // timer owned by an existing process instance: signal that instance when it expires
                    ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(
                            job.getString(INSTANCE_ID_FIELD), job.getString(TRIGGER_TYPE_FIELD),
                            build(job.getString(EXPRESSION_FIELD)), job.getString(OWNER_INSTANCE_ID_FIELD),
                            job.getString(OWNER_DEF_ID_FIELD), null);
                    scheduledJobs.computeIfAbsent(job.getString(INSTANCE_ID_FIELD), k -> {
                        return log(job.getString(INSTANCE_ID_FIELD), scheduler.schedule(
                                new SignalProcessInstanceOnExpiredTimer(job.getString(INSTANCE_ID_FIELD),
                                        job.getString(TRIGGER_TYPE_FIELD), job.getString(OWNER_DEF_ID_FIELD),
                                        job.getString(OWNER_INSTANCE_ID_FIELD), job.getInt(FIRE_LIMIT_FIELD), description),
                                Duration.between(LocalDateTime.now(), ZonedDateTime.ofInstant(
                                        Instant.ofEpochMilli(job.getLong(FIRE_AT_FIELD)), ZoneId.systemDefault())).toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                }
            }
        } catch (Exception e) {
            LOGGER.error("Error while loading jobs from cassandra", e);
        }
    }, 1, config.interval().orElse(10L) * 60, TimeUnit.SECONDS);
}
Also used : ProcessInstanceJobDescription(io.automatiko.engine.api.jobs.ProcessInstanceJobDescription) Select(com.datastax.oss.driver.api.querybuilder.select.Select) ResultSet(com.datastax.oss.driver.api.core.cql.ResultSet) Row(com.datastax.oss.driver.api.core.cql.Row) QueryExecutionException(com.datastax.oss.driver.api.core.servererrors.QueryExecutionException) ProcessJobDescription(io.automatiko.engine.api.jobs.ProcessJobDescription)
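
The query-builder calls above (selectFrom, literal) are static imports from the DataStax Java driver's QueryBuilder. Below is a minimal, self-contained sketch of the same overdue-job lookup; the keyspace, table, and column names are assumptions for illustration, not the values used by CassandraJobService.

import java.util.List;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.Row;
import com.datastax.oss.driver.api.querybuilder.select.Select;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.literal;
import static com.datastax.oss.driver.api.querybuilder.QueryBuilder.selectFrom;

class OverdueJobQuery {

    // returns all rows whose fire-at column is earlier than the given epoch-millis cutoff
    static List<Row> overdueJobs(CqlSession session, long cutoffEpochMillis) {
        Select select = selectFrom("automatiko", "jobs")    // keyspace and table names are assumptions
                .all()
                .whereColumn("fire_at")                     // assumed column name standing in for FIRE_AT_FIELD
                .isLessThan(literal(cutoffEpochMillis))
                .allowFiltering();                          // needed because the fire-at column is not part of the primary key
        return session.execute(select.build()).all();
    }
}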

Example 2 with ProcessJobDescription

Use of io.automatiko.engine.api.jobs.ProcessJobDescription in the project automatiko-engine by automatiko-io.

From the class DynamoDBJobService, method start:

public void start(@Observes StartupEvent event) {
    loadScheduler.scheduleAtFixedRate(() -> {
        try {
            long next = LocalDateTime.now().plus(Duration.ofMinutes(config.interval().orElse(10L)))
                    .atZone(ZoneId.systemDefault()).toInstant().toEpochMilli();
            Map<String, AttributeValue> attrValues = new HashMap<String, AttributeValue>();
            attrValues.put(":value", AttributeValue.builder().n(Long.toString(next)).build());
            ScanRequest query = ScanRequest.builder().tableName(tableName)
                    .projectionExpression(INSTANCE_ID_FIELD + "," + FIRE_AT_FIELD + "," + OWNER_INSTANCE_ID_FIELD + ","
                            + OWNER_DEF_ID_FIELD + "," + TRIGGER_TYPE_FIELD + "," + FIRE_LIMIT_FIELD + "," + REPEAT_INTERVAL_FIELD)
                    .filterExpression(FIRE_AT_FIELD + " < :value").expressionAttributeValues(attrValues).build();
            List<Map<String, AttributeValue>> jobs = dynamodb.scan(query).items();
            LOGGER.debug("Loaded jobs ({}) to be executed before {}", jobs.size(), next);
            for (Map<String, AttributeValue> job : jobs) {
                if (job.get(OWNER_INSTANCE_ID_FIELD) == null) {
                    ProcessJobDescription description = ProcessJobDescription.of(
                            build(job.get(EXPRESSION_FIELD).s()), null, job.get(OWNER_DEF_ID_FIELD).s());
                    scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
                        return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.schedule(
                                new StartProcessOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(),
                                        job.get(OWNER_DEF_ID_FIELD).s(), -1, description),
                                Duration.between(LocalDateTime.now(), ZonedDateTime.ofInstant(
                                        Instant.ofEpochMilli(Long.parseLong(job.get(FIRE_AT_FIELD).n())),
                                        ZoneId.systemDefault())).toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                } else {
                    ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(
                            job.get(INSTANCE_ID_FIELD).s(), job.get(TRIGGER_TYPE_FIELD).s(),
                            build(job.get(EXPRESSION_FIELD).s()), job.get(OWNER_INSTANCE_ID_FIELD).s(),
                            job.get(OWNER_DEF_ID_FIELD).s(), null);
                    scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
                        return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.schedule(
                                new SignalProcessInstanceOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(),
                                        job.get(TRIGGER_TYPE_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(),
                                        job.get(OWNER_INSTANCE_ID_FIELD).s(),
                                        Integer.parseInt(job.get(FIRE_LIMIT_FIELD).n()), description),
                                Duration.between(LocalDateTime.now(), ZonedDateTime.ofInstant(
                                        Instant.ofEpochMilli(Long.parseLong(job.get(FIRE_AT_FIELD).n())),
                                        ZoneId.systemDefault())).toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                }
            }
        } catch (Exception e) {
            LOGGER.error("Error while loading jobs from dynamodb", e);
        }
    }, 1, config.interval().orElse(10L) * 60, TimeUnit.SECONDS);
}
Also used : AttributeValue(software.amazon.awssdk.services.dynamodb.model.AttributeValue) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ResourceInUseException(software.amazon.awssdk.services.dynamodb.model.ResourceInUseException) TransactionCanceledException(software.amazon.awssdk.services.dynamodb.model.TransactionCanceledException) ResourceNotFoundException(software.amazon.awssdk.services.dynamodb.model.ResourceNotFoundException) DynamoDbException(software.amazon.awssdk.services.dynamodb.model.DynamoDbException) ScanRequest(software.amazon.awssdk.services.dynamodb.model.ScanRequest) ProcessInstanceJobDescription(io.automatiko.engine.api.jobs.ProcessInstanceJobDescription) Map(java.util.Map) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ProcessJobDescription(io.automatiko.engine.api.jobs.ProcessJobDescription)
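
In the DynamoDB variant the overdue-job lookup is a Scan with a filter expression, with the numeric cutoff bound through expressionAttributeValues. A minimal sketch of that call, with assumed table and attribute names standing in for the *_FIELD constants used above:

import java.util.HashMap;
import java.util.List;
import java.util.Map;
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeValue;
import software.amazon.awssdk.services.dynamodb.model.ScanRequest;

class OverdueJobScan {

    // returns all items whose fire-at attribute is earlier than the given epoch-millis cutoff
    static List<Map<String, AttributeValue>> overdueJobs(DynamoDbClient dynamodb, long cutoffEpochMillis) {
        Map<String, AttributeValue> values = new HashMap<>();
        values.put(":value", AttributeValue.builder().n(Long.toString(cutoffEpochMillis)).build());
        ScanRequest request = ScanRequest.builder()
                .tableName("ATK_JOBS")                    // table name is an assumption
                .filterExpression("fireAt < :value")      // assumed attribute name standing in for FIRE_AT_FIELD
                .expressionAttributeValues(values)
                .build();
        return dynamodb.scan(request).items();
    }
}

A Scan with a filter expression still reads the whole table; the filter only limits what is returned.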

Example 3 with ProcessJobDescription

Use of io.automatiko.engine.api.jobs.ProcessJobDescription in the project automatiko-engine by automatiko-io.

From the class DynamoDBJobService, method updateRepeatableJob:

protected void updateRepeatableJob(String id) {
    HashMap<String, AttributeValue> itemKey = new HashMap<String, AttributeValue>();
    itemKey.put(INSTANCE_ID_FIELD, AttributeValue.builder().s(id).build());
    GetItemRequest getrequest = GetItemRequest.builder().key(itemKey).tableName(tableName).build();
    Map<String, AttributeValue> job = dynamodb.getItem(getrequest).item();
    // one firing has been consumed: decrement the remaining limit and push the fire time forward by the repeat interval
    Integer limit = Integer.parseInt(job.get(FIRE_LIMIT_FIELD).n()) - 1;
    Long repeat = Long.parseLong(job.get(REPEAT_INTERVAL_FIELD).n());
    ZonedDateTime fireTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(Long.parseLong(job.get(FIRE_AT_FIELD).n())),
            ZoneId.systemDefault());
    Map<String, AttributeValueUpdate> updatedValues = new HashMap<String, AttributeValueUpdate>();
    updatedValues.put(FIRE_AT_FIELD, AttributeValueUpdate.builder()
            .value(AttributeValue.builder()
                    .n(Long.toString(fireTime.plus(repeat, ChronoUnit.MILLIS).toInstant().toEpochMilli())).build())
            .action(AttributeAction.PUT).build());
    updatedValues.put(FIRE_LIMIT_FIELD, AttributeValueUpdate.builder()
            .value(AttributeValue.builder().n(Integer.toString(limit)).build()).action(AttributeAction.PUT).build());
    updatedValues.put(STATUS_FIELD, AttributeValueUpdate.builder()
            .value(AttributeValue.builder().s("scheduled").build()).action(AttributeAction.PUT).build());
    UpdateItemRequest request = UpdateItemRequest.builder().tableName(tableName).key(itemKey)
            .attributeUpdates(updatedValues).build();
    dynamodb.updateItem(request);
    if (job.get(OWNER_INSTANCE_ID_FIELD) == null) {
        ProcessJobDescription description = ProcessJobDescription.of(build(job.get(EXPRESSION_FIELD).s()), null,
                job.get(OWNER_DEF_ID_FIELD).s());
        scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
            return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.schedule(
                    new StartProcessOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(),
                            limit, description),
                    Duration.between(LocalDateTime.now(), fireTime).toMillis(), TimeUnit.MILLISECONDS));
        });
    } else {
        ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(job.get(INSTANCE_ID_FIELD).s(),
                job.get(TRIGGER_TYPE_FIELD).s(), build(job.get(EXPRESSION_FIELD).s()),
                job.get(OWNER_INSTANCE_ID_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(), null);
        scheduledJobs.computeIfAbsent(job.get(INSTANCE_ID_FIELD).s(), k -> {
            // repeatable instance timers are re-armed with scheduleAtFixedRate using the repeat interval
            return log(job.get(INSTANCE_ID_FIELD).s(), scheduler.scheduleAtFixedRate(
                    new SignalProcessInstanceOnExpiredTimer(job.get(INSTANCE_ID_FIELD).s(),
                            job.get(TRIGGER_TYPE_FIELD).s(), job.get(OWNER_DEF_ID_FIELD).s(),
                            job.get(OWNER_INSTANCE_ID_FIELD).s(), limit, description),
                    Duration.between(LocalDateTime.now(), fireTime).toMillis(), repeat, TimeUnit.MILLISECONDS));
        });
    }
}
Also used : AttributeValue(software.amazon.awssdk.services.dynamodb.model.AttributeValue) UpdateItemRequest(software.amazon.awssdk.services.dynamodb.model.UpdateItemRequest) AttributeValueUpdate(software.amazon.awssdk.services.dynamodb.model.AttributeValueUpdate) ConcurrentHashMap(java.util.concurrent.ConcurrentHashMap) HashMap(java.util.HashMap) ZonedDateTime(java.time.ZonedDateTime) ProcessInstanceJobDescription(io.automatiko.engine.api.jobs.ProcessInstanceJobDescription) GetItemRequest(software.amazon.awssdk.services.dynamodb.model.GetItemRequest) ProcessJobDescription(io.automatiko.engine.api.jobs.ProcessJobDescription)
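
The bookkeeping for a repeatable job boils down to two values: the decremented fire limit and the stored fire time advanced by the repeat interval. A minimal sketch of that arithmetic using only java.time (class, method, and parameter names are illustrative):

import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.time.temporal.ChronoUnit;

class RepeatBookkeeping {

    // computes the value written back to the fire-at attribute after one firing
    static long nextFireAt(long storedFireAtMillis, long repeatIntervalMillis) {
        ZonedDateTime fireTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(storedFireAtMillis),
                ZoneId.systemDefault());
        return fireTime.plus(repeatIntervalMillis, ChronoUnit.MILLIS).toInstant().toEpochMilli();
    }

    // computes the value written back to the fire-limit attribute after one firing
    static int remainingLimit(int storedLimit) {
        return storedLimit - 1;
    }
}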

Example 4 with ProcessJobDescription

Use of io.automatiko.engine.api.jobs.ProcessJobDescription in the project automatiko-engine by automatiko-io.

From the class DatabaseJobService, method start:

public void start(@Observes StartupEvent event) {
    loadScheduler.scheduleAtFixedRate(() -> {
        UnitOfWorkExecutor.executeInUnitOfWork(unitOfWorkManager, () -> {
            LocalDateTime next = LocalDateTime.now().plus(Duration.ofMinutes(interval));
            List<JobInstanceEntity> jobs = JobInstanceEntity.loadJobs(next);
            LOGGER.debug("Loaded jobs ({}) to be executed before {}", jobs.size(), next);
            for (JobInstanceEntity job : jobs) {
                if (job.ownerInstanceId == null) {
                    ProcessJobDescription description = ProcessJobDescription.of(build(job), null, job.ownerDefinitionId);
                    scheduledJobs.computeIfAbsent(job.id, k -> {
                        return log(job.id, scheduler.schedule(
                                new StartProcessOnExpiredTimer(job.id, job.ownerDefinitionId, -1, description),
                                Duration.between(LocalDateTime.now(), job.expirationTime).toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                } else {
                    ProcessInstanceJobDescription description = ProcessInstanceJobDescription.of(job.id, job.triggerType,
                            build(job), job.ownerInstanceId, job.ownerDefinitionId, null);
                    scheduledJobs.computeIfAbsent(job.id, k -> {
                        return log(job.id, scheduler.schedule(
                                new SignalProcessInstanceOnExpiredTimer(job.id, job.triggerType, job.ownerDefinitionId,
                                        job.ownerInstanceId, job.limit, description),
                                Duration.between(LocalDateTime.now(), job.expirationTime).toMillis(),
                                TimeUnit.MILLISECONDS));
                    });
                }
            }
            return null;
        });
    }, 1, interval * 60, TimeUnit.SECONDS);
}
Also used : LocalDateTime(java.time.LocalDateTime) ProcessInstanceJobDescription(io.automatiko.engine.api.jobs.ProcessInstanceJobDescription) JobInstanceEntity(io.automatiko.engine.addons.persistence.db.model.JobInstanceEntity) ProcessJobDescription(io.automatiko.engine.api.jobs.ProcessJobDescription)
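
All of these services guard against double scheduling with the same idiom: the job id is the key of a concurrent map, and the executor submission happens inside computeIfAbsent, which is atomic per key, so an id that is already tracked is simply skipped on the next load cycle. A minimal, generic sketch of that pattern (class and method names are illustrative, not automatiko API):

import java.time.Duration;
import java.time.LocalDateTime;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;

class IdempotentScheduling {

    private final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
    private final ConcurrentHashMap<String, ScheduledFuture<?>> scheduledJobs = new ConcurrentHashMap<>();

    // schedules the task only if no entry for this job id is present yet
    void scheduleOnce(String jobId, Runnable task, LocalDateTime expirationTime) {
        scheduledJobs.computeIfAbsent(jobId, k -> scheduler.schedule(task,
                Duration.between(LocalDateTime.now(), expirationTime).toMillis(), TimeUnit.MILLISECONDS));
    }
}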

Example 5 with ProcessJobDescription

Use of io.automatiko.engine.api.jobs.ProcessJobDescription in the project automatiko-engine by automatiko-io.

From the class TestJobService, method triggerProcessJob:

@SuppressWarnings({ "unchecked", "rawtypes" })
public void triggerProcessJob(String jobId) {
    ProcessJobDescription job = (ProcessJobDescription) jobs.remove(jobId);
    if (job == null) {
        throw new IllegalArgumentException("Job with id " + jobId + " not found");
    }
    int limit = job.expirationTime().repeatLimit();
    try {
        LOGGER.debug("Job {} started", job.id());
        Process process = mappedProcesses.get(job.processId());
        if (process == null) {
            LOGGER.warn("No process found for process id {}", job.processId());
            return;
        }
        IdentityProvider.set(new TrustedIdentityProvider("System<timer>"));
        UnitOfWorkExecutor.executeInUnitOfWork(unitOfWorkManager, () -> {
            ProcessInstance<?> pi = process.createInstance(process.createModel());
            if (pi != null) {
                pi.start(TRIGGER, null, null);
            }
            return null;
        });
        limit--;
        if (limit == 0) {
            jobs.remove(jobId);
        }
        LOGGER.debug("Job {} completed", job.id());
    } finally {
        if (job.expirationTime().next() != null) {
            jobs.remove(jobId);
            scheduleProcessJob(job);
        } else {
            jobs.remove(jobId);
        }
    }
}
Also used : TrustedIdentityProvider(io.automatiko.engine.api.auth.TrustedIdentityProvider) Process(io.automatiko.engine.api.workflow.Process) ProcessJobDescription(io.automatiko.engine.api.jobs.ProcessJobDescription)
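
The notable control flow here is the bookkeeping around the trigger: the job entry is removed up front, the process is started, and the finally block re-registers the job only when its expiration time still has a next firing. A minimal stand-in sketch of that flow (JobEntry and hasNextFiring are hypothetical, not automatiko API):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class TriggerBookkeeping {

    // hypothetical stand-in for a scheduled job entry; not automatiko API
    static final class JobEntry {
        final Runnable work;           // stands in for starting the process instance
        final boolean hasNextFiring;   // stands in for expirationTime().next() != null

        JobEntry(Runnable work, boolean hasNextFiring) {
            this.work = work;
            this.hasNextFiring = hasNextFiring;
        }
    }

    private final Map<String, JobEntry> jobs = new ConcurrentHashMap<>();

    void trigger(String jobId) {
        JobEntry job = jobs.remove(jobId);   // removed up front, as in triggerProcessJob
        if (job == null) {
            throw new IllegalArgumentException("Job with id " + jobId + " not found");
        }
        try {
            job.work.run();
        } finally {
            if (job.hasNextFiring) {
                jobs.put(jobId, job);        // stands in for scheduleProcessJob(job)
            }
        }
    }
}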

Aggregations

ProcessJobDescription (io.automatiko.engine.api.jobs.ProcessJobDescription): 11
ProcessInstanceJobDescription (io.automatiko.engine.api.jobs.ProcessInstanceJobDescription): 9
ZonedDateTime (java.time.ZonedDateTime): 3
ResultSet (com.datastax.oss.driver.api.core.cql.ResultSet): 2
Row (com.datastax.oss.driver.api.core.cql.Row): 2
Select (com.datastax.oss.driver.api.querybuilder.select.Select): 2
JobInstanceEntity (io.automatiko.engine.addons.persistence.db.model.JobInstanceEntity): 2
HashMap (java.util.HashMap): 2
ConcurrentHashMap (java.util.concurrent.ConcurrentHashMap): 2
Document (org.bson.Document): 2
AttributeValue (software.amazon.awssdk.services.dynamodb.model.AttributeValue): 2
SimpleStatement (com.datastax.oss.driver.api.core.cql.SimpleStatement): 1
QueryExecutionException (com.datastax.oss.driver.api.core.servererrors.QueryExecutionException): 1
MongoWriteConcernException (com.mongodb.MongoWriteConcernException): 1
MongoWriteException (com.mongodb.MongoWriteException): 1
TrustedIdentityProvider (io.automatiko.engine.api.auth.TrustedIdentityProvider): 1
Process (io.automatiko.engine.api.workflow.Process): 1
IOException (java.io.IOException): 1
UncheckedIOException (java.io.UncheckedIOException): 1
LocalDateTime (java.time.LocalDateTime): 1