
Example 31 with Notification

Use of io.cdap.cdap.proto.Notification in project cdap by cdapio.

From class MapReduceRunnerTestBase, method getDataNotifications.

/**
 * Returns a list of {@link Notification} objects fetched from the data event topic in TMS,
 * containing notifications published at or after the given start time.
 */
protected List<Notification> getDataNotifications(long startTime) throws Exception {
    // Get data notifications from TMS
    List<Notification> notifications = new ArrayList<>();
    MessagingContext messagingContext = new MultiThreadMessagingContext(injector.getInstance(MessagingService.class));
    try (CloseableIterator<Message> messages = messagingContext.getMessageFetcher().fetch(
            NamespaceId.SYSTEM.getNamespace(),
            injector.getInstance(CConfiguration.class).get(Constants.Dataset.DATA_EVENT_TOPIC),
            10, startTime)) {
        while (messages.hasNext()) {
            notifications.add(GSON.fromJson(
                    new String(messages.next().getPayload(), StandardCharsets.UTF_8), Notification.class));
        }
    }
    return notifications;
}
Also used : Message(io.cdap.cdap.api.messaging.Message) ArrayList(java.util.ArrayList) MessagingContext(io.cdap.cdap.api.messaging.MessagingContext) MultiThreadMessagingContext(io.cdap.cdap.messaging.context.MultiThreadMessagingContext) Notification(io.cdap.cdap.proto.Notification) MessagingService(io.cdap.cdap.messaging.MessagingService)
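A minimal sketch of how a test built on this helper might assert on a published data event, mirroring the usage in Example 33 below; the dataset name "output" and the single-notification expectation are illustrative assumptions:

long startTime = System.currentTimeMillis();
// ... run the MapReduce program under test here ...
List<Notification> notifications = getDataNotifications(startTime);
Assert.assertEquals(1, notifications.size());
// The "datasetId" property names the dataset the run wrote to.
Assert.assertEquals(NamespaceId.DEFAULT.dataset("output"),
    DatasetId.fromString(notifications.get(0).getProperties().get("datasetId")));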

Example 32 with Notification

Use of io.cdap.cdap.proto.Notification in project cdap by caskdata.

From class JobQueueTable, method addNotification.

@Override
public void addNotification(ProgramScheduleRecord record, Notification notification) throws IOException {
    boolean jobExists = false;
    ProgramSchedule schedule = record.getSchedule();
    // Only add notifications for enabled schedules
    if (record.getMeta().getStatus() != ProgramScheduleStatus.SCHEDULED) {
        return;
    }
    int nextGenerationId = 0;
    try (CloseableIterator<Job> jobs = getJobsForSchedule(schedule.getScheduleId())) {
        while (jobs.hasNext()) {
            Job job = jobs.next();
            if (job.getGenerationId() >= nextGenerationId) {
                nextGenerationId = job.getGenerationId() + 1;
            }
            if (job.getState() == Job.State.PENDING_TRIGGER) {
                if (job.isToBeDeleted()) {
                    // Ignore; it will be deleted by the ConstraintCheckerService.
                    continue;
                }
                long scheduleLastUpdated = record.getMeta().getLastUpdated();
                if (job.getScheduleLastUpdatedTime() != scheduleLastUpdated) {
                    // schedule has changed: this job is obsolete
                    writeJobObsolete(job, System.currentTimeMillis());
                } else if (System.currentTimeMillis() - job.getCreationTime() > job.getSchedule().getTimeoutMillis()) {
                    // job has timed out; mark it obsolete
                    writeJobObsolete(job, System.currentTimeMillis());
                } else {
                    jobExists = true;
                    addNotification(job, notification);
                    break;
                }
            }
        }
    }
    // if no job exists for the scheduleId, add a new job with the first notification
    if (!jobExists) {
        List<Notification> notifications = Collections.singletonList(notification);
        Job.State jobState = isTriggerSatisfied(schedule, notifications) ? Job.State.PENDING_CONSTRAINT : Job.State.PENDING_TRIGGER;
        writeJob(new SimpleJob(schedule, nextGenerationId, System.currentTimeMillis(), notifications, jobState, record.getMeta().getLastUpdated()));
    }
}
Also used : ProgramSchedule(io.cdap.cdap.internal.app.runtime.schedule.ProgramSchedule) Constraint(io.cdap.cdap.internal.schedule.constraint.Constraint) Notification(io.cdap.cdap.proto.Notification)
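A hedged sketch of a caller feeding schedule events into this queue; the payload decoding mirrors Example 31, while the findSchedulesTriggeredBy lookup and the jobQueue variable are hypothetical:

// Hypothetical subscriber loop: turn an incoming TMS message into queued job notifications.
Notification notification = GSON.fromJson(
    new String(message.getPayload(), StandardCharsets.UTF_8), Notification.class);
for (ProgramScheduleRecord record : findSchedulesTriggeredBy(notification)) {  // hypothetical lookup
    // Disabled schedules are skipped and obsolete or timed-out jobs are marked inside addNotification itself.
    jobQueue.addNotification(record, notification);
}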

Example 33 with Notification

Use of io.cdap.cdap.proto.Notification in project cdap by caskdata.

From class DynamicPartitionerWithAvroTest, method runDynamicPartitionerMR.

private void runDynamicPartitionerMR(final List<? extends GenericRecord> records, boolean allowConcurrentWriters, final boolean precreatePartitions, @Nullable final DynamicPartitioner.PartitionWriteOption partitionWriteOption, boolean expectedStatus) throws Exception {
    ApplicationWithPrograms app = deployApp(AppWithMapReduceUsingAvroDynamicPartitioner.class);
    final long now = System.currentTimeMillis();
    final Multimap<PartitionKey, GenericRecord> keyToRecordsMap = groupByPartitionKey(records, now);
    // write values to the input kvTable
    final KeyValueTable kvTable = datasetCache.getDataset(INPUT_DATASET);
    Transactions.createTransactionExecutor(txExecutorFactory, kvTable).execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() {
            // the keys are not used; it matters that they're unique though
            for (int i = 0; i < records.size(); i++) {
                kvTable.write(Integer.toString(i), records.get(i).toString());
            }
        }
    });
    final PartitionedFileSet pfs = datasetCache.getDataset(OUTPUT_DATASET);
    if (precreatePartitions) {
        Transactions.createTransactionExecutor(txExecutorFactory, (TransactionAware) pfs).execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws IOException {
                writeFile(pfs, createKey(now, 95111));
                writeFile(pfs, createKey(now, 98123));
                writeFile(pfs, createKey(now, 84125));
            }
        });
    }
    String allowConcurrencyKey = "dataset." + OUTPUT_DATASET + "." + PartitionedFileSetArguments.DYNAMIC_PARTITIONER_ALLOW_CONCURRENCY;
    // run the partition writer m/r with this output partition time
    Map<String, String> arguments = new HashMap<>();
    arguments.put(OUTPUT_PARTITION_KEY, Long.toString(now));
    arguments.put(allowConcurrencyKey, Boolean.toString(allowConcurrentWriters));
    if (partitionWriteOption != null) {
        arguments.put("partitionWriteOption", partitionWriteOption.name());
    }
    long startTime = System.currentTimeMillis();
    boolean status = runProgram(app, AppWithMapReduceUsingAvroDynamicPartitioner.DynamicPartitioningMapReduce.class, new BasicArguments(arguments));
    Assert.assertEquals(expectedStatus, status);
    if (!expectedStatus) {
        // if we expect the program to fail, no need to check the output data for expected results
        return;
    }
    // Verify notifications
    List<Notification> notifications = getDataNotifications(startTime);
    Assert.assertEquals(1, notifications.size());
    Assert.assertEquals(NamespaceId.DEFAULT.dataset(OUTPUT_DATASET), DatasetId.fromString(notifications.get(0).getProperties().get("datasetId")));
    // this should have created a partition in the pfs
    final Location pfsBaseLocation = pfs.getEmbeddedFileSet().getBaseLocation();
    Transactions.createTransactionExecutor(txExecutorFactory, (TransactionAware) pfs).execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws IOException {
            Map<PartitionKey, PartitionDetail> partitions = new HashMap<>();
            for (PartitionDetail partition : pfs.getPartitions(null)) {
                partitions.put(partition.getPartitionKey(), partition);
                // check that the mapreduce wrote the output partition metadata to all the output partitions
                Assert.assertEquals(getExpectedMetadata(precreatePartitions, partitionWriteOption), partition.getMetadata().asMap());
                // if files were precreated, and the option is to append, expect the empty file to exist
                // if partition write option is configured to overwrite, then the file is expected to not exist
                Location preexistingFile = partition.getLocation().append("file");
                if (precreatePartitions && partitionWriteOption == DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND) {
                    Assert.assertTrue(preexistingFile.exists());
                    try (InputStream inputStream = preexistingFile.getInputStream()) {
                        Assert.assertEquals(-1, inputStream.read());
                    }
                } else {
                    Assert.assertFalse(preexistingFile.exists());
                }
            }
            Assert.assertEquals(3, partitions.size());
            Assert.assertEquals(keyToRecordsMap.keySet(), partitions.keySet());
            // Check relative paths of the partitions. Also check that their location = pfs baseLocation + relativePath
            for (Map.Entry<PartitionKey, PartitionDetail> partitionKeyEntry : partitions.entrySet()) {
                PartitionDetail partitionDetail = partitionKeyEntry.getValue();
                String relativePath = partitionDetail.getRelativePath();
                int zip = (int) partitionKeyEntry.getKey().getField("zip");
                Assert.assertEquals(Long.toString(now) + Path.SEPARATOR + zip, relativePath);
                Assert.assertEquals(pfsBaseLocation.append(relativePath), partitionDetail.getLocation());
            }
            for (Map.Entry<PartitionKey, Collection<GenericRecord>> keyToRecordsEntry : keyToRecordsMap.asMap().entrySet()) {
                Set<GenericRecord> genericRecords = new HashSet<>(keyToRecordsEntry.getValue());
                Assert.assertEquals(genericRecords, readOutput(partitions.get(keyToRecordsEntry.getKey()).getLocation()));
            }
        }
    });
}
Also used : HashSet(java.util.HashSet) PartitionedFileSet(io.cdap.cdap.api.dataset.lib.PartitionedFileSet) Set(java.util.Set) HashMap(java.util.HashMap) PartitionDetail(io.cdap.cdap.api.dataset.lib.PartitionDetail) Notification(io.cdap.cdap.proto.Notification) ApplicationWithPrograms(io.cdap.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms) BasicArguments(io.cdap.cdap.internal.app.runtime.BasicArguments) GenericRecord(org.apache.avro.generic.GenericRecord) InputStream(java.io.InputStream) TransactionExecutor(org.apache.tephra.TransactionExecutor) IOException(java.io.IOException) KeyValueTable(io.cdap.cdap.api.dataset.lib.KeyValueTable) TransactionAware(org.apache.tephra.TransactionAware) PartitionKey(io.cdap.cdap.api.dataset.lib.PartitionKey) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) Location(org.apache.twill.filesystem.Location)
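For context, a hypothetical test method that drives this helper; the createTestRecords factory and the chosen option values are illustrative, not taken from the project:

@Test
public void testAppendToPrecreatedPartitions() throws Exception {
    List<GenericRecord> records = createTestRecords(3);  // hypothetical record factory
    // Pre-create the partitions and append to them: the run is expected to succeed and the
    // pre-existing empty "file" in each partition is expected to survive.
    runDynamicPartitionerMR(records, false, true,
        DynamicPartitioner.PartitionWriteOption.CREATE_OR_APPEND, true);
}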

Example 34 with Notification

Use of io.cdap.cdap.proto.Notification in project cdap by caskdata.

From class TimeTrigger, method updateLaunchArguments.

@Override
public void updateLaunchArguments(ProgramSchedule schedule, List<Notification> notifications, Map<String, String> systemArgs, Map<String, String> userArgs) {
    for (Notification notification : notifications) {
        if (!isSatisfied(schedule, notification)) {
            continue;
        }
        String systemOverridesJson = notification.getProperties().get(ProgramOptionConstants.SYSTEM_OVERRIDES);
        String userOverridesJson = notification.getProperties().get(ProgramOptionConstants.USER_OVERRIDES);
        if (userOverridesJson == null || systemOverridesJson == null) {
            // Ignore the malformed notification
            continue;
        }
        systemArgs.putAll(GSON.<Map<String, String>>fromJson(systemOverridesJson, STRING_STRING_MAP));
        userArgs.putAll(GSON.<Map<String, String>>fromJson(userOverridesJson, STRING_STRING_MAP));
        return;
    }
}
Also used : Notification(io.cdap.cdap.proto.Notification)
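A rough sketch of the notification shape this trigger consumes; the property values and the Notification.Type.TIME constant are assumptions here, only the two override property keys come from the method above:

// Hypothetical time-schedule notification: both overrides are JSON-encoded string maps that
// updateLaunchArguments merges into the system and user launch arguments.
Map<String, String> properties = new HashMap<>();
properties.put(ProgramOptionConstants.SYSTEM_OVERRIDES,
    GSON.toJson(Collections.singletonMap("system.key", "system.value")));  // illustrative entries
properties.put(ProgramOptionConstants.USER_OVERRIDES,
    GSON.toJson(Collections.singletonMap("user.key", "user.value")));
Notification notification = new Notification(Notification.Type.TIME, properties);

Map<String, String> systemArgs = new HashMap<>();
Map<String, String> userArgs = new HashMap<>();
timeTrigger.updateLaunchArguments(schedule, Collections.singletonList(notification), systemArgs, userArgs);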

Example 35 with Notification

Use of io.cdap.cdap.proto.Notification in project cdap by caskdata.

From class ProgramNotificationSubscriberService, method processWorkflowOnStop.

/**
 * On workflow program stop, inspects the inner program states and adjusts them if they are not already in an end state.
 *
 * @param appMetadataStore the {@link AppMetadataStore} to write the status to
 * @param programHeartbeatTable the {@link ProgramHeartbeatTable} to write the status to
 * @param programRunId the program run of the completed program
 * @param programRunStatus the status of the completion
 * @param notification the {@link Notification} that carries information about the workflow completion
 * @param sourceId the source message id of the notification
 * @param runnables a {@link List} to which {@link Runnable}s to be executed after event handling completes are added
 * @throws Exception if the program status could not be updated
 */
private void processWorkflowOnStop(AppMetadataStore appMetadataStore, ProgramHeartbeatTable programHeartbeatTable, ProgramRunId programRunId, ProgramRunStatus programRunStatus, Notification notification, byte[] sourceId, List<Runnable> runnables) throws Exception {
    ApplicationId appId = programRunId.getParent().getParent();
    WorkflowSpecification workflowSpec = Optional.ofNullable(appMetadataStore.getApplication(appId)).map(appMeta -> appMeta.getSpec().getWorkflows().get(programRunId.getProgram())).orElse(null);
    // If cannot find the workflow spec (e.g. app deleted), then there is nothing we can do.
    if (workflowSpec == null) {
        return;
    }
    // For all MR and Spark nodes, we need to update the inner program run status if they are not in end state yet.
    for (WorkflowNode workflowNode : workflowSpec.getNodeIdMap().values()) {
        if (!(workflowNode instanceof WorkflowActionNode)) {
            continue;
        }
        ScheduleProgramInfo programInfo = ((WorkflowActionNode) workflowNode).getProgram();
        if (!WORKFLOW_INNER_PROGRAM_TYPES.containsKey(programInfo.getProgramType())) {
            continue;
        }
        // Get all active runs of the inner program. If the parent workflow runId is the same as this one,
        // set a terminal state for the inner program run.
        ProgramId innerProgramId = appId.program(WORKFLOW_INNER_PROGRAM_TYPES.get(programInfo.getProgramType()), programInfo.getProgramName());
        Map<ProgramRunId, Notification> innerProgramNotifications = new LinkedHashMap<>();
        appMetadataStore.scanActiveRuns(innerProgramId, runRecord -> {
            Map<String, String> systemArgs = runRecord.getSystemArgs();
            String workflowName = systemArgs.get(ProgramOptionConstants.WORKFLOW_NAME);
            String workflowRun = systemArgs.get(ProgramOptionConstants.WORKFLOW_RUN_ID);
            if (workflowName == null || workflowRun == null) {
                return;
            }
            ProgramRunId workflowRunId = appId.program(ProgramType.WORKFLOW, workflowName).run(workflowRun);
            if (!programRunId.equals(workflowRunId)) {
                return;
            }
            Map<String, String> notificationProps = new HashMap<>(notification.getProperties());
            notificationProps.put(ProgramOptionConstants.PROGRAM_RUN_ID, GSON.toJson(runRecord.getProgramRunId()));
            innerProgramNotifications.put(runRecord.getProgramRunId(), new Notification(Notification.Type.PROGRAM_STATUS, notificationProps));
        });
        for (Map.Entry<ProgramRunId, Notification> entry : innerProgramNotifications.entrySet()) {
            handleProgramEvent(entry.getKey(), programRunStatus, entry.getValue(), sourceId, appMetadataStore, programHeartbeatTable, runnables);
        }
    }
}
Also used : RunRecordDetail(io.cdap.cdap.internal.app.store.RunRecordDetail) ProvisionRequest(io.cdap.cdap.internal.provision.ProvisionRequest) ProvisionerNotifier(io.cdap.cdap.internal.provision.ProvisionerNotifier) TypeToken(com.google.gson.reflect.TypeToken) ImmutablePair(io.cdap.cdap.common.utils.ImmutablePair) NamespaceId(io.cdap.cdap.proto.id.NamespaceId) Notification(io.cdap.cdap.proto.Notification) Inject(com.google.inject.Inject) LoggerFactory(org.slf4j.LoggerFactory) RetryStrategies(io.cdap.cdap.common.service.RetryStrategies) GsonBuilder(com.google.gson.GsonBuilder) WorkflowNode(io.cdap.cdap.api.workflow.WorkflowNode) ProgramStateWriter(io.cdap.cdap.app.runtime.ProgramStateWriter) SimpleProgramOptions(io.cdap.cdap.internal.app.runtime.SimpleProgramOptions) Gson(com.google.gson.Gson) Map(java.util.Map) BasicThrowable(io.cdap.cdap.proto.BasicThrowable) ImmutableMap(com.google.common.collect.ImmutableMap) MessagingService(io.cdap.cdap.messaging.MessagingService) Set(java.util.Set) ProgramRunStatus(io.cdap.cdap.proto.ProgramRunStatus) StructuredTableContext(io.cdap.cdap.spi.data.StructuredTableContext) SchedulableProgramType(io.cdap.cdap.api.schedule.SchedulableProgramType) StandardCharsets(java.nio.charset.StandardCharsets) ApplicationSpecificationAdapter(io.cdap.cdap.internal.app.ApplicationSpecificationAdapter) ScheduleProgramInfo(io.cdap.cdap.api.workflow.ScheduleProgramInfo) MetricsContext(io.cdap.cdap.api.metrics.MetricsContext) List(java.util.List) SecurityRequestContext(io.cdap.cdap.security.spi.authentication.SecurityRequestContext) Type(java.lang.reflect.Type) TransactionRunner(io.cdap.cdap.spi.data.transaction.TransactionRunner) Optional(java.util.Optional) Constants(io.cdap.cdap.common.conf.Constants) ProfileId(io.cdap.cdap.proto.id.ProfileId) Queue(java.util.Queue) ProgramOptionConstants(io.cdap.cdap.internal.app.runtime.ProgramOptionConstants) ApplicationId(io.cdap.cdap.proto.id.ApplicationId) RunRecordDetailWithExistingStatus(io.cdap.cdap.internal.app.store.RunRecordDetailWithExistingStatus) ProgramRunners(io.cdap.cdap.internal.app.runtime.ProgramRunners) Retries(io.cdap.cdap.common.service.Retries) RetryStrategy(io.cdap.cdap.common.service.RetryStrategy) Cluster(io.cdap.cdap.runtime.spi.provisioner.Cluster) WorkflowSpecification(io.cdap.cdap.api.workflow.WorkflowSpecification) HashMap(java.util.HashMap) ProgramType(io.cdap.cdap.proto.ProgramType) ArrayList(java.util.ArrayList) LinkedHashMap(java.util.LinkedHashMap) ProvisioningService(io.cdap.cdap.internal.provision.ProvisioningService) ProgramRunId(io.cdap.cdap.proto.id.ProgramRunId) ProgramHeartbeatTable(io.cdap.cdap.reporting.ProgramHeartbeatTable) ProgramOptions(io.cdap.cdap.app.runtime.ProgramOptions) SystemArguments(io.cdap.cdap.internal.app.runtime.SystemArguments) WorkflowActionNode(io.cdap.cdap.api.workflow.WorkflowActionNode) LinkedList(java.util.LinkedList) Nullable(javax.annotation.Nullable) AppMetadataStore(io.cdap.cdap.internal.app.store.AppMetadataStore) Logger(org.slf4j.Logger) Iterator(java.util.Iterator) JsonSyntaxException(com.google.gson.JsonSyntaxException) RunIds(io.cdap.cdap.common.app.RunIds) ProgramId(io.cdap.cdap.proto.id.ProgramId) ProgramDescriptor(io.cdap.cdap.app.program.ProgramDescriptor) IOException(java.io.IOException) ProjectInfo(io.cdap.cdap.common.utils.ProjectInfo) ProgramRunClusterStatus(io.cdap.cdap.proto.ProgramRunClusterStatus) TableNotFoundException(io.cdap.cdap.spi.data.TableNotFoundException) MetricsCollectionService(io.cdap.cdap.api.metrics.MetricsCollectionService) Store(io.cdap.cdap.app.store.Store) TimeUnit(java.util.concurrent.TimeUnit) CConfiguration(io.cdap.cdap.common.conf.CConfiguration) Collections(java.util.Collections) BasicArguments(io.cdap.cdap.internal.app.runtime.BasicArguments)
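A hedged sketch of where this handler might sit in the subscriber's event loop; the dispatch condition and the getType/isEndState calls are assumptions, only the processWorkflowOnStop invocation itself reflects the method above:

// Hypothetical dispatch: when a workflow run reports a terminal status, cascade that status
// to any inner MapReduce/Spark runs that have not yet reached an end state themselves.
if (programRunId.getType() == ProgramType.WORKFLOW && programRunStatus.isEndState()) {
    processWorkflowOnStop(appMetadataStore, programHeartbeatTable, programRunId,
        programRunStatus, notification, sourceId, runnables);
}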

Aggregations

Notification (io.cdap.cdap.proto.Notification): 52
ProgramRunId (io.cdap.cdap.proto.id.ProgramRunId): 14
IOException (java.io.IOException): 14
Map (java.util.Map): 14
Test (org.junit.Test): 14
MessagingService (io.cdap.cdap.messaging.MessagingService): 12
ProgramRunStatus (io.cdap.cdap.proto.ProgramRunStatus): 12
Gson (com.google.gson.Gson): 10
ProgramStateWriter (io.cdap.cdap.app.runtime.ProgramStateWriter): 10
RunIds (io.cdap.cdap.common.app.RunIds): 10
CConfiguration (io.cdap.cdap.common.conf.CConfiguration): 10
Constants (io.cdap.cdap.common.conf.Constants): 10
MessagingProgramStateWriter (io.cdap.cdap.internal.app.program.MessagingProgramStateWriter): 10
ProgramOptionConstants (io.cdap.cdap.internal.app.runtime.ProgramOptionConstants): 10
NamespaceId (io.cdap.cdap.proto.id.NamespaceId): 10
ArrayList (java.util.ArrayList): 10
Collections (java.util.Collections): 10
List (java.util.List): 10
TimeUnit (java.util.concurrent.TimeUnit): 10
Nullable (javax.annotation.Nullable): 10