Search in sources :

Example 56 with Job

use of io.fabric8.kubernetes.api.model.batch.v1.Job in project strimzi-kafka-operator by strimzi.

the class OauthAbstractST method tearDownEach.

@AfterEach
void tearDownEach(ExtensionContext extensionContext) throws Exception {
    // Resolve the cluster name once up front: the original looked it up in
    // mapWithClusterNames inside the stream filter, i.e. once per Job in the
    // namespace, even though the key (the test display name) never changes.
    String clusterName = mapWithClusterNames.get(extensionContext.getDisplayName());
    // Collect every Job whose name references the cluster created for this test.
    List<Job> clusterJobList = kubeClient().getJobList().getItems().stream()
            .filter(job -> job.getMetadata().getName().contains(clusterName))
            .collect(Collectors.toList());
    for (Job job : clusterJobList) {
        LOGGER.info("Deleting {} job", job.getMetadata().getName());
        // Blocks until the Job is actually gone, so the next test starts clean.
        JobUtils.deleteJobWithWait(job.getMetadata().getNamespace(), job.getMetadata().getName());
    }
}
Also used : AbstractST(io.strimzi.systemtest.AbstractST) CoreMatchers(org.hamcrest.CoreMatchers) GenericKafkaListener(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListener) HashMap(java.util.HashMap) ExtensionContext(org.junit.jupiter.api.extension.ExtensionContext) Function(java.util.function.Function) KeycloakInstance(io.strimzi.systemtest.keycloak.KeycloakInstance) SecretUtils(io.strimzi.systemtest.utils.kubeUtils.objects.SecretUtils) Map(java.util.Map) DefaultNetworkPolicy(io.strimzi.systemtest.enums.DefaultNetworkPolicy) Tag(org.junit.jupiter.api.Tag) MatcherAssert.assertThat(org.hamcrest.MatcherAssert.assertThat) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) JobUtils(io.strimzi.systemtest.utils.kubeUtils.controllers.JobUtils) Constants(io.strimzi.systemtest.Constants) GenericKafkaListenerBuilder(io.strimzi.api.kafka.model.listener.arraylistener.GenericKafkaListenerBuilder) OAUTH(io.strimzi.systemtest.Constants.OAUTH) Collectors(java.util.stream.Collectors) NetworkPolicyTemplates(io.strimzi.systemtest.templates.kubernetes.NetworkPolicyTemplates) KubeClusterResource.kubeClient(io.strimzi.test.k8s.KubeClusterResource.kubeClient) KeycloakUtils(io.strimzi.systemtest.utils.specific.KeycloakUtils) AfterEach(org.junit.jupiter.api.AfterEach) Base64(java.util.Base64) List(java.util.List) CertSecretSourceBuilder(io.strimzi.api.kafka.model.CertSecretSourceBuilder) Logger(org.apache.logging.log4j.Logger) KafkaListenerType(io.strimzi.api.kafka.model.listener.arraylistener.KafkaListenerType) LogManager(org.apache.logging.log4j.LogManager) REGRESSION(io.strimzi.systemtest.Constants.REGRESSION) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) AfterEach(org.junit.jupiter.api.AfterEach)

Example 57 with Job

use of io.fabric8.kubernetes.api.model.batch.v1.Job in project hugegraph-computer by hugegraph.

the class ComputerJobController method observeComponent.

private ComputerJobComponent observeComponent(HugeGraphComputerJob computerJob) {
    /*
     * Builds a snapshot of the cluster-side resources owned by this custom
     * resource: the master and worker Jobs (plus their Pods, when the Job
     * exists) and the shared ConfigMap.
     */
    ComputerJobComponent snapshot = new ComputerJobComponent();
    snapshot.computerJob(computerJob);

    String ns = computerJob.getMetadata().getNamespace();
    String cr = computerJob.getMetadata().getName();

    Job masterJob = this.getResourceByName(ns, KubeUtil.masterJobName(cr), Job.class);
    snapshot.masterJob(masterJob);
    if (masterJob != null) {
        List<Pod> masterPods = this.getPodsByJob(masterJob);
        snapshot.masterPods(masterPods);
    }

    Job workerJob = this.getResourceByName(ns, KubeUtil.workerJobName(cr), Job.class);
    snapshot.workerJob(workerJob);
    if (workerJob != null) {
        List<Pod> workerPods = this.getPodsByJob(workerJob);
        snapshot.workerPods(workerPods);
    }

    ConfigMap configMap = this.getResourceByName(ns, KubeUtil.configMapName(cr), ConfigMap.class);
    snapshot.configMap(configMap);
    return snapshot;
}
Also used : Pod(io.fabric8.kubernetes.api.model.Pod) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) HugeGraphComputerJob(com.baidu.hugegraph.computer.k8s.crd.model.HugeGraphComputerJob)

Example 58 with Job

use of io.fabric8.kubernetes.api.model.batch.v1.Job in project hugegraph-computer by hugegraph.

the class ComputerJobDeployer method reconcileComponent.

private void reconcileComponent(String namespace, ComputerJobComponent desired, ComputerJobComponent observed) {
    // Pick a client scoped to the target namespace; reuse the injected
    // client when it already points at that namespace.
    final KubernetesClient client;
    if (!Objects.equals(this.kubeClient.getNamespace(), namespace)) {
        client = this.kubeClient.inNamespace(namespace);
    } else {
        client = this.kubeClient;
    }
    // The identical desired-vs-observed rule was copy-pasted three times in
    // the original; it is factored into reconcileResource() below.
    this.reconcileResource("ConfigMap", desired.configMap(), observed.configMap(),
                           cm -> client.configMaps().delete(cm),
                           cm -> client.configMaps().create(cm));
    this.reconcileResource("MasterJob", desired.masterJob(), observed.masterJob(),
                           job -> client.batch().v1().jobs().delete(job),
                           job -> client.batch().v1().jobs().create(job));
    this.reconcileResource("WorkerJob", desired.workerJob(), observed.workerJob(),
                           job -> client.batch().v1().jobs().delete(job),
                           job -> client.batch().v1().jobs().create(job));
}

/**
 * Reconciles one resource kind: deletes the observed resource when it is no
 * longer desired, creates the desired resource when it is missing, and logs
 * a no-op when both sides already exist.
 *
 * @param label    resource-kind name used in the debug log message
 * @param desired  the resource the spec asks for, or {@code null}
 * @param observed the resource currently in the cluster, or {@code null}
 * @param deleter  removes the observed resource from the cluster
 * @param creator  creates the desired resource in the cluster
 */
private <T> void reconcileResource(String label, T desired, T observed,
                                   java.util.function.Consumer<T> deleter,
                                   java.util.function.UnaryOperator<T> creator) {
    if (desired == null && observed != null) {
        deleter.accept(observed);
    } else if (desired != null && observed == null) {
        // ignoreExists() tolerates a concurrent create by another actor.
        KubeUtil.ignoreExists(() -> creator.apply(desired));
    }
    if (desired != null && observed != null) {
        LOG.debug("{} already exists, no action", label);
    }
}
Also used : NamespacedKubernetesClient(io.fabric8.kubernetes.client.NamespacedKubernetesClient) KubernetesClient(io.fabric8.kubernetes.client.KubernetesClient) ConfigMap(io.fabric8.kubernetes.api.model.ConfigMap) Job(io.fabric8.kubernetes.api.model.batch.v1.Job) HugeGraphComputerJob(com.baidu.hugegraph.computer.k8s.crd.model.HugeGraphComputerJob)

Example 59 with Job

use of io.pravega.test.system.framework.metronome.model.v1.Job in project pravega by pravega.

the class RemoteSequential method isTestRunning.

private boolean isTestRunning(final String jobId, final Metronome client) {
    // A job counts as running until its run history records at least one
    // completed attempt, successful or failed.
    final Job jobStatus = client.getJob(jobId);
    if (jobStatus.getHistory() == null) {
        return true;
    }
    return jobStatus.getHistory().getSuccessCount() == 0
            && jobStatus.getHistory().getFailureCount() == 0;
}
Also used : Job(io.pravega.test.system.framework.metronome.model.v1.Job)

Example 60 with Job

use of io.pravega.test.system.framework.metronome.model.v1.Job in project pravega by pravega.

the class RemoteSequential method startTestExecution.

@Override
public CompletableFuture<Void> startTestExecution(Method testMethod) {
    // Fixed pause before scheduling the Metronome job; the sleep is
    // interruption-safe via handleInterrupted.
    Exceptions.handleInterrupted(() -> TimeUnit.SECONDS.sleep(60));
    // This will be removed once issue https://github.com/pravega/pravega/issues/1665 is resolved.
    log.debug("Starting test execution for method: {}", testMethod);
    final Metronome client = AuthEnabledMetronomeClient.getClient();
    String className = testMethod.getDeclaringClass().getName();
    String methodName = testMethod.getName();
    // All jobIds should have lowercase for metronome.
    String jobId = (methodName + ".testJob").toLowerCase();
    return CompletableFuture.runAsync(() -> {
        // Register the job definition, then trigger a single run; anything
        // other than HTTP 201 is treated as a framework-level failure.
        client.createJob(newJob(jobId, className, methodName));
        Response response = client.triggerJobRun(jobId);
        if (response.status() != CREATED.getStatusCode()) {
            throw new TestFrameworkException(TestFrameworkException.Type.ConnectionFailed, "Error while starting " + "test " + testMethod);
        } else {
            log.info("Created job succeeded with: " + response.toString());
        }
    }).thenCompose(v2 -> waitForJobCompletion(jobId, client)).<Void>thenApply(v1 -> {
        // After the job completes, any recorded failed run marks the test
        // itself as failed.
        if (client.getJob(jobId).getHistory().getFailureCount() != 0) {
            throw new AssertionError("Test failed, detailed logs can be found at " + "https://MasterIP/mesos, under metronome framework tasks. MethodName: " + methodName);
        }
        return null;
    }).whenComplete((v, ex) -> {
        // deletejob once execution is complete.
        // Runs on both success and failure, so the Metronome job is always
        // cleaned up before the error (if any) propagates to the caller.
        deleteJob(jobId, client);
        if (ex != null) {
            log.error("Error while executing the test. ClassName: {}, MethodName: {}", className, methodName);
        }
    });
}
Also used : Response(feign.Response) NotImplementedException(org.apache.commons.lang3.NotImplementedException) Response(feign.Response) Job(io.pravega.test.system.framework.metronome.model.v1.Job) Exceptions(io.pravega.common.Exceptions) Restart(io.pravega.test.system.framework.metronome.model.v1.Restart) HashMap(java.util.HashMap) CompletableFuture(java.util.concurrent.CompletableFuture) Metronome(io.pravega.test.system.framework.metronome.Metronome) Run(io.pravega.test.system.framework.metronome.model.v1.Run) TimeUnit(java.util.concurrent.TimeUnit) Slf4j(lombok.extern.slf4j.Slf4j) MetronomeException(io.pravega.test.system.framework.metronome.MetronomeException) Duration(java.time.Duration) Map(java.util.Map) Artifact(io.pravega.test.system.framework.metronome.model.v1.Artifact) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) ExecutorServiceHelpers(io.pravega.common.concurrent.ExecutorServiceHelpers) Method(java.lang.reflect.Method) Collections(java.util.Collections) Futures(io.pravega.common.concurrent.Futures) AuthEnabledMetronomeClient(io.pravega.test.system.framework.metronome.AuthEnabledMetronomeClient) CREATED(javax.ws.rs.core.Response.Status.CREATED) Metronome(io.pravega.test.system.framework.metronome.Metronome)

Aggregations

Job (org.pentaho.platform.api.scheduler2.Job)94 Test (org.junit.Test)69 Serializable (java.io.Serializable)25 SimpleJobTrigger (org.pentaho.platform.api.scheduler2.SimpleJobTrigger)21 HashMap (java.util.HashMap)20 ArrayList (java.util.ArrayList)19 JobScheduleRequest (org.pentaho.platform.web.http.api.resources.JobScheduleRequest)19 ComplexJobTrigger (org.pentaho.platform.api.scheduler2.ComplexJobTrigger)18 SchedulerException (org.pentaho.platform.api.scheduler2.SchedulerException)17 Date (java.util.Date)14 IJobFilter (org.pentaho.platform.api.scheduler2.IJobFilter)14 Job (com.google.cloud.video.transcoder.v1.Job)13 TranscoderServiceClient (com.google.cloud.video.transcoder.v1.TranscoderServiceClient)13 IPentahoSession (org.pentaho.platform.api.engine.IPentahoSession)12 IJobTrigger (org.pentaho.platform.api.scheduler2.IJobTrigger)12 AudioStream (com.google.cloud.video.transcoder.v1.AudioStream)8 JobConfig (com.google.cloud.video.transcoder.v1.JobConfig)8 VideoStream (com.google.cloud.video.transcoder.v1.VideoStream)8 Map (java.util.Map)8 CronJobTrigger (org.pentaho.platform.api.scheduler2.CronJobTrigger)8