
Example 26 with RunId

Use of org.apache.twill.api.RunId in project cdap by caskdata.

The class SparkProgramRunner, method run:

@Override
public ProgramController run(Program program, ProgramOptions options) {
    // Get the RunId first. It is used for the creation of the ClassLoader closing thread.
    Arguments arguments = options.getArguments();
    RunId runId = ProgramRunners.getRunId(options);
    Deque<Closeable> closeables = new LinkedList<>();
    try {
        // Extract and verify parameters
        ApplicationSpecification appSpec = program.getApplicationSpecification();
        Preconditions.checkNotNull(appSpec, "Missing application specification.");
        ProgramType processorType = program.getType();
        Preconditions.checkNotNull(processorType, "Missing processor type.");
        Preconditions.checkArgument(processorType == ProgramType.SPARK, "Only Spark process type is supported.");
        SparkSpecification spec = appSpec.getSpark().get(program.getName());
        Preconditions.checkNotNull(spec, "Missing SparkSpecification for %s", program.getName());
        String host = options.getArguments().getOption(ProgramOptionConstants.HOST);
        Preconditions.checkArgument(host != null, "No hostname is provided");
        // Get the WorkflowProgramInfo if it is started by Workflow
        WorkflowProgramInfo workflowInfo = WorkflowProgramInfo.create(arguments);
        DatasetFramework programDatasetFramework = workflowInfo == null ? datasetFramework : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo, appSpec);
        // Setup dataset framework context, if required
        if (programDatasetFramework instanceof ProgramContextAware) {
            ProgramId programId = program.getId();
            ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programId.run(runId)));
        }
        PluginInstantiator pluginInstantiator = createPluginInstantiator(options, program.getClassLoader());
        if (pluginInstantiator != null) {
            closeables.addFirst(pluginInstantiator);
        }
        SparkRuntimeContext runtimeContext = new SparkRuntimeContext(new Configuration(hConf), program, options, cConf, host, txClient, programDatasetFramework, discoveryServiceClient, metricsCollectionService, streamAdmin, workflowInfo, pluginInstantiator, secureStore, secureStoreManager, authorizationEnforcer, authenticationContext, messagingService);
        closeables.addFirst(runtimeContext);
        Spark spark;
        try {
            spark = new InstantiatorFactory(false).get(TypeToken.of(program.<Spark>getMainClass())).create();
        } catch (Exception e) {
            LOG.error("Failed to instantiate Spark class for {}", spec.getClassName(), e);
            throw Throwables.propagate(e);
        }
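        // Pick a submitter for the execution mode: in-process for local runs,
        // cluster-based (with an optional scheduler queue) for distributed runs.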
        SparkSubmitter submitter = SparkRuntimeContextConfig.isLocal(hConf) ? new LocalSparkSubmitter() : new DistributedSparkSubmitter(hConf, locationFactory, host, runtimeContext, options.getArguments().getOption(Constants.AppFabric.APP_SCHEDULER_QUEUE));
        Service sparkRuntimeService = new SparkRuntimeService(cConf, spark, getPluginArchive(options), runtimeContext, submitter);
        sparkRuntimeService.addListener(createRuntimeServiceListener(program.getId(), runId, arguments, options.getUserArguments(), closeables, runtimeStore), Threads.SAME_THREAD_EXECUTOR);
        ProgramController controller = new SparkProgramController(sparkRuntimeService, runtimeContext);
        LOG.debug("Starting Spark Job. Context: {}", runtimeContext);
        if (SparkRuntimeContextConfig.isLocal(hConf) || UserGroupInformation.isSecurityEnabled()) {
            sparkRuntimeService.start();
        } else {
            ProgramRunners.startAsUser(cConf.get(Constants.CFG_HDFS_USER), sparkRuntimeService);
        }
        return controller;
    } catch (Throwable t) {
        closeAll(closeables);
        throw Throwables.propagate(t);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) SparkSubmitter(co.cask.cdap.app.runtime.spark.submit.SparkSubmitter) DistributedSparkSubmitter(co.cask.cdap.app.runtime.spark.submit.DistributedSparkSubmitter) LocalSparkSubmitter(co.cask.cdap.app.runtime.spark.submit.LocalSparkSubmitter) CConfiguration(co.cask.cdap.common.conf.CConfiguration) Configuration(org.apache.hadoop.conf.Configuration) Closeable(java.io.Closeable) DistributedSparkSubmitter(co.cask.cdap.app.runtime.spark.submit.DistributedSparkSubmitter) NameMappedDatasetFramework(co.cask.cdap.internal.app.runtime.workflow.NameMappedDatasetFramework) DatasetFramework(co.cask.cdap.data2.dataset2.DatasetFramework) InstantiatorFactory(co.cask.cdap.common.lang.InstantiatorFactory) SparkSpecification(co.cask.cdap.api.spark.SparkSpecification) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramController(co.cask.cdap.app.runtime.ProgramController) Arguments(co.cask.cdap.app.runtime.Arguments) MessagingService(co.cask.cdap.messaging.MessagingService) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) LinkedList(java.util.LinkedList) IOException(java.io.IOException) WorkflowProgramInfo(co.cask.cdap.internal.app.runtime.workflow.WorkflowProgramInfo) BasicThrowable(co.cask.cdap.proto.BasicThrowable) PluginInstantiator(co.cask.cdap.internal.app.runtime.plugin.PluginInstantiator) Spark(co.cask.cdap.api.spark.Spark) LocalSparkSubmitter(co.cask.cdap.app.runtime.spark.submit.LocalSparkSubmitter) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
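
The snippet above takes the RunId from the program options rather than minting one. For reference, here is a minimal sketch of creating and round-tripping a Twill RunId directly, using Twill's RunIds helper (org.apache.twill.internal.RunIds; note this helper lives in an internal package and is not part of the snippet above):

import org.apache.twill.api.RunId;
import org.apache.twill.internal.RunIds;

// Minimal sketch: minting and round-tripping a RunId outside of CDAP.
RunId runId = RunIds.generate();          // time-based, globally unique id
String id = runId.getId();                // string form, e.g. for log lines
RunId parsed = RunIds.fromString(id);     // reconstructs an equivalent RunId
assert parsed.getId().equals(id);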

Example 27 with RunId

Use of org.apache.twill.api.RunId in project cdap by caskdata.

The class FlowQueuePendingCorrector, method run:

/**
   * Corrects the queue.pending metric for a flowlet.
   */
public void run(FlowId flowId, String producerFlowlet, String consumerFlowlet, String flowletQueue, FlowSpecification flow) throws Exception {
    System.out.println("Running queue.pending correction on flow '" + flowId + "' producerFlowlet '" + producerFlowlet + "' consumerFlowlet '" + consumerFlowlet + "' flowletQueue '" + flowletQueue + "'");
    Map<RunId, ProgramRuntimeService.RuntimeInfo> runtimeInfos = programRuntimeService.list(flowId);
    Preconditions.checkState(runtimeInfos.isEmpty(), "Cannot run tool when flow " + flowId + " is still running");
    SimpleQueueSpecificationGenerator queueSpecGenerator = new SimpleQueueSpecificationGenerator(flowId.getParent());
    Table<QueueSpecificationGenerator.Node, String, Set<QueueSpecification>> table = queueSpecGenerator.create(flow);
    Preconditions.checkArgument(table.contains(QueueSpecificationGenerator.Node.flowlet(producerFlowlet), consumerFlowlet), "Flowlet " + producerFlowlet + " is not emitting to " + consumerFlowlet);
    Set<QueueSpecification> queueSpecs = table.get(QueueSpecificationGenerator.Node.flowlet(producerFlowlet), consumerFlowlet);
    boolean validQueue = false;
    for (QueueSpecification queueSpec : queueSpecs) {
        if (queueSpec.getQueueName().getSimpleName().equals(flowletQueue)) {
            validQueue = true;
            break;
        }
    }
    Preconditions.checkArgument(validQueue, "Queue " + flowletQueue + " does not exist for the given flowlets");
    QueueName queueName = QueueName.fromFlowlet(flowId, producerFlowlet, flowletQueue);
    long consumerGroupId = FlowUtils.generateConsumerGroupId(flowId, consumerFlowlet);
    long correctQueuePendingValue;
    try {
        HBaseQueueDebugger.QueueStatistics stats = queueDebugger.scanQueue(queueName, consumerGroupId);
        correctQueuePendingValue = stats.getUnprocessed() + stats.getProcessedAndNotVisible();
    } catch (NotFoundException e) {
        // OK since flowlet queue exists, but actual queue doesn't exist
        // (e.g. when running upgrade tool from 2.8 to 3.0)
        correctQueuePendingValue = 0;
    }
    Map<String, String> tags = ImmutableMap.<String, String>builder().put(Constants.Metrics.Tag.NAMESPACE, flowId.getNamespace()).put(Constants.Metrics.Tag.APP, flowId.getApplication()).put(Constants.Metrics.Tag.FLOW, flowId.getProgram()).put(Constants.Metrics.Tag.CONSUMER, consumerFlowlet).put(Constants.Metrics.Tag.PRODUCER, producerFlowlet).put(Constants.Metrics.Tag.FLOWLET_QUEUE, flowletQueue).build();
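    // Query the aggregated system.queue.pending value recorded for this queue's tags.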
    MetricDataQuery query = new MetricDataQuery(0, 0, Integer.MAX_VALUE, 1, ImmutableMap.of("system.queue.pending", AggregationFunction.SUM), tags, ImmutableList.<String>of(), null);
    Collection<MetricTimeSeries> results = metricStore.query(query);
    long queuePending;
    if (results.isEmpty()) {
        queuePending = 0;
    } else {
        System.out.println("Got results: " + GSON.toJson(results));
        Preconditions.checkState(results.size() == 1);
        List<TimeValue> timeValues = results.iterator().next().getTimeValues();
        Preconditions.checkState(timeValues.size() == 1);
        TimeValue timeValue = timeValues.get(0);
        queuePending = timeValue.getValue();
    }
    metricsCollectionService.startAndWait();
    MetricsContext collector = metricsCollectionService.getContext(tags);
    collector.gauge("queue.pending", correctQueuePendingValue);
    System.out.printf("Adjusted system.queue.pending metric from %d to %d (tags %s)\n", queuePending, correctQueuePendingValue, GSON.toJson(tags));
    // stop will flush the metrics
    metricsCollectionService.stopAndWait();
}
Also used : HBaseQueueDebugger(co.cask.cdap.data.tools.HBaseQueueDebugger) Set(java.util.Set) MetricsContext(co.cask.cdap.api.metrics.MetricsContext) NotFoundException(co.cask.cdap.common.NotFoundException) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) SimpleQueueSpecificationGenerator(co.cask.cdap.internal.app.queue.SimpleQueueSpecificationGenerator) QueueSpecification(co.cask.cdap.app.queue.QueueSpecification) RunId(org.apache.twill.api.RunId) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery) QueueName(co.cask.cdap.common.queue.QueueName) TimeValue(co.cask.cdap.api.dataset.lib.cube.TimeValue)
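
The corrected value is computed directly from the queue scan: entries not yet processed plus entries processed but whose processing is not yet visible. As a hypothetical helper capturing that rule (names are illustrative, not CDAP API):

// Hypothetical helper (illustrative only): the corrected queue.pending value
// is the unprocessed entries plus the processed-but-not-yet-visible entries.
static long correctedQueuePending(long unprocessed, long processedAndNotVisible) {
    return unprocessed + processedAndNotVisible;
}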

Example 28 with RunId

Use of org.apache.twill.api.RunId in project cdap by caskdata.

The class FlowletProgramRunner, method run:

@SuppressWarnings("unchecked")
@Override
public ProgramController run(Program program, ProgramOptions options) {
    BasicFlowletContext flowletContext = null;
    try {
        // Extract and verify parameters
        String flowletName = options.getName();
        int instanceId = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCE_ID, "-1"));
        Preconditions.checkArgument(instanceId >= 0, "Missing instance Id");
        int instanceCount = Integer.parseInt(options.getArguments().getOption(ProgramOptionConstants.INSTANCES, "0"));
        Preconditions.checkArgument(instanceCount > 0, "Invalid or missing instance count");
        RunId runId = ProgramRunners.getRunId(options);
        ApplicationSpecification appSpec = program.getApplicationSpecification();
        Preconditions.checkNotNull(appSpec, "Missing application specification.");
        ProgramType processorType = program.getType();
        Preconditions.checkNotNull(processorType, "Missing processor type.");
        Preconditions.checkArgument(processorType == ProgramType.FLOW, "Only FLOW process type is supported.");
        String processorName = program.getName();
        Preconditions.checkNotNull(processorName, "Missing processor name.");
        FlowSpecification flowSpec = appSpec.getFlows().get(processorName);
        FlowletDefinition flowletDef = flowSpec.getFlowlets().get(flowletName);
        Preconditions.checkNotNull(flowletDef, "Definition missing for flowlet \"%s\"", flowletName);
        Class<?> clz = Class.forName(flowletDef.getFlowletSpec().getClassName(), true, program.getClassLoader());
        Preconditions.checkArgument(Flowlet.class.isAssignableFrom(clz), "%s is not a Flowlet.", clz);
        // Setup dataset framework context, if required
        ProgramId programId = program.getId();
        FlowletId flowletId = programId.flowlet(flowletName);
        ProgramRunId run = programId.run(runId);
        ProgramContext programContext = new BasicProgramContext(run, flowletId);
        if (dsFramework instanceof ProgramContextAware) {
            ((ProgramContextAware) dsFramework).setContext(programContext);
        }
        Class<? extends Flowlet> flowletClass = (Class<? extends Flowlet>) clz;
        // Creates flowlet context
        flowletContext = new BasicFlowletContext(program, options, flowletId, instanceId, instanceCount, flowletDef.getDatasets(), flowletDef.getFlowletSpec(), metricsCollectionService, discoveryServiceClient, txClient, dsFramework, secureStore, secureStoreManager, messageService, cConf);
        // Creates tx related objects
        DataFabricFacade dataFabricFacade = dataFabricFacadeFactory.create(program, flowletContext.getDatasetCache());
        if (dataFabricFacade instanceof ProgramContextAware) {
            ((ProgramContextAware) dataFabricFacade).setContext(programContext);
        }
        // Creates QueueSpecification
        Table<Node, String, Set<QueueSpecification>> queueSpecs = new SimpleQueueSpecificationGenerator(new ApplicationId(program.getNamespaceId(), program.getApplicationId())).create(flowSpec);
        Flowlet flowlet = new InstantiatorFactory(false).get(TypeToken.of(flowletClass)).create();
        TypeToken<? extends Flowlet> flowletType = TypeToken.of(flowletClass);
        // Set the context classloader to the CDAP classloader. The DatumWriterFactory
        // needs it to be able to load CDAP classes.
        Thread.currentThread().setContextClassLoader(FlowletProgramRunner.class.getClassLoader());
        // Inject DataSet, OutputEmitter, Metric fields
        ImmutableList.Builder<ProducerSupplier> queueProducerSupplierBuilder = ImmutableList.builder();
        Reflections.visit(flowlet, flowlet.getClass(), new PropertyFieldSetter(flowletDef.getFlowletSpec().getProperties()), new DataSetFieldSetter(flowletContext), new MetricsFieldSetter(flowletContext.getMetrics()), new OutputEmitterFieldSetter(outputEmitterFactory(flowletContext, flowletName, dataFabricFacade, queueProducerSupplierBuilder, queueSpecs)));
        ImmutableList.Builder<ConsumerSupplier<?>> queueConsumerSupplierBuilder = ImmutableList.builder();
        Collection<ProcessSpecification<?>> processSpecs = createProcessSpecification(flowletContext, flowletType, processMethodFactory(flowlet), processSpecificationFactory(flowletContext, dataFabricFacade, queueReaderFactory, flowletName, queueSpecs, queueConsumerSupplierBuilder, createSchemaCache(program)), Lists.<ProcessSpecification<?>>newLinkedList());
        List<ConsumerSupplier<?>> consumerSuppliers = queueConsumerSupplierBuilder.build();
        // Create the flowlet driver
        AtomicReference<FlowletProgramController> controllerRef = new AtomicReference<>();
        Service serviceHook = createServiceHook(flowletName, consumerSuppliers, controllerRef);
        FlowletRuntimeService driver = new FlowletRuntimeService(flowlet, flowletContext, processSpecs, createCallback(flowlet, flowletDef.getFlowletSpec()), dataFabricFacade, serviceHook);
        FlowletProgramController controller = new FlowletProgramController(program.getId(), flowletName, flowletContext, driver, queueProducerSupplierBuilder.build(), consumerSuppliers);
        controllerRef.set(controller);
        LOG.info("Starting flowlet: {}", flowletContext);
        driver.start();
        LOG.info("Flowlet started: {}", flowletContext);
        return controller;
    } catch (Exception e) {
        // Something went wrong before the flowlet started running; release the resources
        // of the flowlet context, if it was created.
        if (flowletContext != null) {
            flowletContext.close();
        }
        throw Throwables.propagate(e);
    }
}
Also used : ApplicationSpecification(co.cask.cdap.api.app.ApplicationSpecification) Set(java.util.Set) ImmutableSet(com.google.common.collect.ImmutableSet) ImmutableList(com.google.common.collect.ImmutableList) Node(co.cask.cdap.app.queue.QueueSpecificationGenerator.Node) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) ProgramContext(co.cask.cdap.data.ProgramContext) InstantiatorFactory(co.cask.cdap.common.lang.InstantiatorFactory) FlowletDefinition(co.cask.cdap.api.flow.FlowletDefinition) FlowSpecification(co.cask.cdap.api.flow.FlowSpecification) MetricsFieldSetter(co.cask.cdap.internal.app.runtime.MetricsFieldSetter) ProgramType(co.cask.cdap.proto.ProgramType) RunId(org.apache.twill.api.RunId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) Flowlet(co.cask.cdap.api.flow.flowlet.Flowlet) MetricsCollectionService(co.cask.cdap.api.metrics.MetricsCollectionService) Service(com.google.common.util.concurrent.Service) MessagingService(co.cask.cdap.messaging.MessagingService) AbstractService(com.google.common.util.concurrent.AbstractService) AtomicReference(java.util.concurrent.atomic.AtomicReference) ProgramId(co.cask.cdap.proto.id.ProgramId) BasicProgramContext(co.cask.cdap.internal.app.runtime.BasicProgramContext) DataSetFieldSetter(co.cask.cdap.internal.app.runtime.DataSetFieldSetter) UnsupportedTypeException(co.cask.cdap.api.data.schema.UnsupportedTypeException) IOException(java.io.IOException) SimpleQueueSpecificationGenerator(co.cask.cdap.internal.app.queue.SimpleQueueSpecificationGenerator) PropertyFieldSetter(co.cask.cdap.common.lang.PropertyFieldSetter) FlowletId(co.cask.cdap.proto.id.FlowletId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) DataFabricFacade(co.cask.cdap.internal.app.runtime.DataFabricFacade) ApplicationId(co.cask.cdap.proto.id.ApplicationId) ProgramContextAware(co.cask.cdap.data.ProgramContextAware)
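
The id plumbing above composes CDAP entity ids from the Twill RunId. A condensed sketch of just that composition, using the same APIs that appear in the snippet (program and options are as in the runner; "myFlowlet" is a placeholder name):

// Composing run- and flowlet-scoped ids from a program id plus a RunId,
// as the runner above does.
ProgramId programId = program.getId();
RunId runId = ProgramRunners.getRunId(options);
ProgramRunId run = programId.run(runId);               // identifies this particular run
FlowletId flowletId = programId.flowlet("myFlowlet");  // placeholder flowlet name
ProgramContext context = new BasicProgramContext(run, flowletId);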

Example 29 with RunId

Use of org.apache.twill.api.RunId in project cdap by caskdata.

The class LineageTestRun, method testAllProgramsLineage:

@Test
public void testAllProgramsLineage() throws Exception {
    NamespaceId namespace = new NamespaceId("testAllProgramsLineage");
    ApplicationId app = namespace.app(AllProgramsApp.NAME);
    ProgramId flow = app.flow(AllProgramsApp.NoOpFlow.NAME);
    ProgramId mapreduce = app.mr(AllProgramsApp.NoOpMR.NAME);
    ProgramId mapreduce2 = app.mr(AllProgramsApp.NoOpMR2.NAME);
    ProgramId spark = app.spark(AllProgramsApp.NoOpSpark.NAME);
    ProgramId service = app.service(AllProgramsApp.NoOpService.NAME);
    ProgramId worker = app.worker(AllProgramsApp.NoOpWorker.NAME);
    ProgramId workflow = app.workflow(AllProgramsApp.NoOpWorkflow.NAME);
    DatasetId dataset = namespace.dataset(AllProgramsApp.DATASET_NAME);
    DatasetId dataset2 = namespace.dataset(AllProgramsApp.DATASET_NAME2);
    DatasetId dataset3 = namespace.dataset(AllProgramsApp.DATASET_NAME3);
    StreamId stream = namespace.stream(AllProgramsApp.STREAM_NAME);
    namespaceClient.create(new NamespaceMeta.Builder().setName(namespace.getNamespace()).build());
    try {
        appClient.deploy(namespace, createAppJarFile(AllProgramsApp.class));
        // Add metadata
        ImmutableSet<String> sparkTags = ImmutableSet.of("spark-tag1", "spark-tag2");
        addTags(spark, sparkTags);
        Assert.assertEquals(sparkTags, getTags(spark, MetadataScope.USER));
        ImmutableSet<String> workerTags = ImmutableSet.of("worker-tag1");
        addTags(worker, workerTags);
        Assert.assertEquals(workerTags, getTags(worker, MetadataScope.USER));
        ImmutableMap<String, String> datasetProperties = ImmutableMap.of("data-key1", "data-value1");
        addProperties(dataset, datasetProperties);
        Assert.assertEquals(datasetProperties, getProperties(dataset, MetadataScope.USER));
        // Start all programs
        RunId flowRunId = runAndWait(flow);
        RunId mrRunId = runAndWait(mapreduce);
        RunId mrRunId2 = runAndWait(mapreduce2);
        RunId sparkRunId = runAndWait(spark);
        runAndWait(workflow);
        RunId workflowMrRunId = getRunId(mapreduce, mrRunId);
        RunId serviceRunId = runAndWait(service);
        // The worker calls the service to make it access datasets, so the service
        // must start before the worker and stop after it.
        RunId workerRunId = runAndWait(worker);
        // Wait for programs to finish
        waitForStop(flow, true);
        waitForStop(mapreduce, false);
        waitForStop(mapreduce2, false);
        waitForStop(spark, false);
        waitForStop(workflow, false);
        waitForStop(worker, false);
        waitForStop(service, true);
        long now = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
        long oneHour = TimeUnit.HOURS.toSeconds(1);
        // Fetch dataset lineage
        LineageRecord lineage = fetchLineage(dataset, now - oneHour, now + oneHour, toSet(CollapseType.ACCESS), 10);
        // dataset is accessed by all programs
        LineageRecord expected = LineageSerializer.toLineageRecord(now - oneHour, now + oneHour, new Lineage(ImmutableSet.of(// Dataset access
        new Relation(dataset, flow, AccessType.UNKNOWN, flowRunId, toSet(flow.flowlet(AllProgramsApp.A.NAME))), new Relation(dataset, mapreduce, AccessType.WRITE, mrRunId), new Relation(dataset, mapreduce2, AccessType.WRITE, mrRunId2), new Relation(dataset2, mapreduce2, AccessType.READ, mrRunId2), new Relation(dataset, spark, AccessType.READ, sparkRunId), new Relation(dataset2, spark, AccessType.WRITE, sparkRunId), new Relation(dataset3, spark, AccessType.READ, sparkRunId), new Relation(dataset3, spark, AccessType.WRITE, sparkRunId), new Relation(dataset, mapreduce, AccessType.WRITE, workflowMrRunId), new Relation(dataset, service, AccessType.WRITE, serviceRunId), new Relation(dataset, worker, AccessType.WRITE, workerRunId), // Stream access
        new Relation(stream, flow, AccessType.READ, flowRunId, ImmutableSet.of(flow.flowlet(AllProgramsApp.A.NAME))), new Relation(stream, mapreduce, AccessType.READ, mrRunId), new Relation(stream, spark, AccessType.READ, sparkRunId), new Relation(stream, mapreduce, AccessType.READ, workflowMrRunId), new Relation(stream, worker, AccessType.WRITE, workerRunId))), toSet(CollapseType.ACCESS));
        Assert.assertEquals(expected, lineage);
        // Fetch stream lineage
        lineage = fetchLineage(stream, now - oneHour, now + oneHour, toSet(CollapseType.ACCESS), 10);
        // stream too is accessed by all programs
        Assert.assertEquals(expected, lineage);
        // Assert metadata
        // Id.Flow needs conversion to Id.Program JIRA - CDAP-3658
        Assert.assertEquals(toSet(new MetadataRecord(app, MetadataScope.USER, emptyMap(), emptySet()), new MetadataRecord(flow, MetadataScope.USER, emptyMap(), emptySet()), new MetadataRecord(dataset, MetadataScope.USER, datasetProperties, emptySet()), new MetadataRecord(stream, MetadataScope.USER, emptyMap(), emptySet())), fetchRunMetadata(flow.run(flowRunId.getId())));
        // Id.Worker needs conversion to Id.Program JIRA - CDAP-3658
        ProgramId programForWorker = new ProgramId(worker.getNamespace(), worker.getApplication(), worker.getType(), worker.getEntityName());
        Assert.assertEquals(toSet(new MetadataRecord(app, MetadataScope.USER, emptyMap(), emptySet()), new MetadataRecord(programForWorker, MetadataScope.USER, emptyMap(), workerTags), new MetadataRecord(dataset, MetadataScope.USER, datasetProperties, emptySet()), new MetadataRecord(stream, MetadataScope.USER, emptyMap(), emptySet())), fetchRunMetadata(worker.run(workerRunId.getId())));
        // Id.Spark needs conversion to Id.Program JIRA - CDAP-3658
        ProgramId programForSpark = new ProgramId(spark.getNamespace(), spark.getApplication(), spark.getType(), spark.getEntityName());
        Assert.assertEquals(toSet(new MetadataRecord(app, MetadataScope.USER, emptyMap(), emptySet()), new MetadataRecord(programForSpark, MetadataScope.USER, emptyMap(), sparkTags), new MetadataRecord(dataset, MetadataScope.USER, datasetProperties, emptySet()), new MetadataRecord(dataset2, MetadataScope.USER, emptyMap(), emptySet()), new MetadataRecord(dataset3, MetadataScope.USER, emptyMap(), emptySet()), new MetadataRecord(stream, MetadataScope.USER, emptyMap(), emptySet())), fetchRunMetadata(spark.run(sparkRunId.getId())));
    } finally {
        namespaceClient.delete(namespace);
    }
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) Lineage(co.cask.cdap.data2.metadata.lineage.Lineage) AllProgramsApp(co.cask.cdap.client.app.AllProgramsApp) ProgramId(co.cask.cdap.proto.id.ProgramId) DatasetId(co.cask.cdap.proto.id.DatasetId) Relation(co.cask.cdap.data2.metadata.lineage.Relation) LineageRecord(co.cask.cdap.proto.metadata.lineage.LineageRecord) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) RunId(org.apache.twill.api.RunId) MetadataRecord(co.cask.cdap.proto.metadata.MetadataRecord) Test(org.junit.Test)

Example 30 with RunId

Use of org.apache.twill.api.RunId in project cdap by caskdata.

The class LineageTestRun, method testFlowLineage:

@Test
public void testFlowLineage() throws Exception {
    NamespaceId namespace = new NamespaceId("testFlowLineage");
    ApplicationId app = namespace.app(AllProgramsApp.NAME);
    ProgramId flow = app.flow(AllProgramsApp.NoOpFlow.NAME);
    DatasetId dataset = namespace.dataset(AllProgramsApp.DATASET_NAME);
    StreamId stream = namespace.stream(AllProgramsApp.STREAM_NAME);
    namespaceClient.create(new NamespaceMeta.Builder().setName(namespace.toId()).build());
    try {
        appClient.deploy(namespace, createAppJarFile(AllProgramsApp.class));
        // Add metadata to the application
        ImmutableMap<String, String> appProperties = ImmutableMap.of("app-key1", "app-value1");
        addProperties(app, appProperties);
        Assert.assertEquals(appProperties, getProperties(app, MetadataScope.USER));
        ImmutableSet<String> appTags = ImmutableSet.of("app-tag1");
        addTags(app, appTags);
        Assert.assertEquals(appTags, getTags(app, MetadataScope.USER));
        // Add metadata to flow
        ImmutableMap<String, String> flowProperties = ImmutableMap.of("flow-key1", "flow-value1");
        addProperties(flow, flowProperties);
        Assert.assertEquals(flowProperties, getProperties(flow, MetadataScope.USER));
        ImmutableSet<String> flowTags = ImmutableSet.of("flow-tag1", "flow-tag2");
        addTags(flow, flowTags);
        Assert.assertEquals(flowTags, getTags(flow, MetadataScope.USER));
        // Add metadata to dataset
        ImmutableMap<String, String> dataProperties = ImmutableMap.of("data-key1", "data-value1");
        addProperties(dataset, dataProperties);
        Assert.assertEquals(dataProperties, getProperties(dataset, MetadataScope.USER));
        ImmutableSet<String> dataTags = ImmutableSet.of("data-tag1", "data-tag2");
        addTags(dataset, dataTags);
        Assert.assertEquals(dataTags, getTags(dataset, MetadataScope.USER));
        // Add metadata to stream
        ImmutableMap<String, String> streamProperties = ImmutableMap.of("stream-key1", "stream-value1");
        addProperties(stream, streamProperties);
        Assert.assertEquals(streamProperties, getProperties(stream, MetadataScope.USER));
        ImmutableSet<String> streamTags = ImmutableSet.of("stream-tag1", "stream-tag2");
        addTags(stream, streamTags);
        Assert.assertEquals(streamTags, getTags(stream, MetadataScope.USER));
        long startTime = TimeMathParser.nowInSeconds();
        RunId flowRunId = runAndWait(flow);
        // Wait a few seconds so that the stop time (in seconds) is greater than the start time.
        TimeUnit.SECONDS.sleep(2);
        waitForStop(flow, true);
        long stopTime = TimeMathParser.nowInSeconds();
        // Fetch dataset lineage
        LineageRecord lineage = fetchLineage(dataset, startTime, stopTime, 10);
        LineageRecord expected = LineageSerializer.toLineageRecord(startTime, stopTime, new Lineage(ImmutableSet.of(new Relation(dataset, flow, AccessType.UNKNOWN, flowRunId, ImmutableSet.of(flow.flowlet(AllProgramsApp.A.NAME))), new Relation(stream, flow, AccessType.READ, flowRunId, ImmutableSet.of(flow.flowlet(AllProgramsApp.A.NAME))))), Collections.<CollapseType>emptySet());
        Assert.assertEquals(expected, lineage);
        // Fetch dataset lineage with time strings
        lineage = fetchLineage(dataset, "now-1h", "now+1h", 10);
        Assert.assertEquals(expected.getRelations(), lineage.getRelations());
        // Fetch stream lineage
        lineage = fetchLineage(stream, startTime, stopTime, 10);
        // same as dataset's lineage
        Assert.assertEquals(expected, lineage);
        // Fetch stream lineage with time strings
        lineage = fetchLineage(stream, "now-1h", "now+1h", 10);
        // same as dataset's lineage
        Assert.assertEquals(expected.getRelations(), lineage.getRelations());
        // Assert metadata
        // Id.Flow needs conversion to Id.Program JIRA - CDAP-3658
        Assert.assertEquals(toSet(new MetadataRecord(app, MetadataScope.USER, appProperties, appTags), new MetadataRecord(flow, MetadataScope.USER, flowProperties, flowTags), new MetadataRecord(dataset, MetadataScope.USER, dataProperties, dataTags), new MetadataRecord(stream, MetadataScope.USER, streamProperties, streamTags)), fetchRunMetadata(flow.run(flowRunId.getId())));
        // A time range entirely after the flow run should return no results
        long laterStartTime = stopTime + 1000;
        long laterEndTime = stopTime + 5000;
        // Fetch stream lineage
        lineage = fetchLineage(stream, laterStartTime, laterEndTime, 10);
        Assert.assertEquals(LineageSerializer.toLineageRecord(laterStartTime, laterEndTime, new Lineage(ImmutableSet.<Relation>of()), Collections.<CollapseType>emptySet()), lineage);
        // A time range entirely before the flow run should return no results
        long earlierStartTime = startTime - 5000;
        long earlierEndTime = startTime - 1000;
        // Fetch stream lineage
        lineage = fetchLineage(stream, earlierStartTime, earlierEndTime, 10);
        Assert.assertEquals(LineageSerializer.toLineageRecord(earlierStartTime, earlierEndTime, new Lineage(ImmutableSet.<Relation>of()), Collections.<CollapseType>emptySet()), lineage);
        // Test bad time ranges
        fetchLineage(dataset, "sometime", "sometime", 10, BadRequestException.class);
        fetchLineage(dataset, "now+1h", "now-1h", 10, BadRequestException.class);
        // Test non-existent run
        assertRunMetadataNotFound(flow.run(RunIds.generate(1000).getId()));
    } finally {
        namespaceClient.delete(namespace);
    }
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) CollapseType(co.cask.cdap.proto.metadata.lineage.CollapseType) Lineage(co.cask.cdap.data2.metadata.lineage.Lineage) AllProgramsApp(co.cask.cdap.client.app.AllProgramsApp) ProgramId(co.cask.cdap.proto.id.ProgramId) DatasetId(co.cask.cdap.proto.id.DatasetId) Relation(co.cask.cdap.data2.metadata.lineage.Relation) LineageRecord(co.cask.cdap.proto.metadata.lineage.LineageRecord) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) RunId(org.apache.twill.api.RunId) MetadataRecord(co.cask.cdap.proto.metadata.MetadataRecord) Test(org.junit.Test)
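
For the non-existent-run check, the test mints a RunId for an arbitrary early timestamp and asserts that no metadata exists for it. A sketch of that pattern, with RunIds as imported by the test (CDAP's time-based RunIds helper):

// Mint a RunId for an arbitrary (epoch-millis) timestamp; no program ever ran
// under this id, so metadata lookups for it should come back empty.
RunId neverRan = RunIds.generate(1000);
assertRunMetadataNotFound(flow.run(neverRan.getId()));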

Aggregations

RunId (org.apache.twill.api.RunId): 49
ProgramId (co.cask.cdap.proto.id.ProgramId): 35
ProgramRunId (co.cask.cdap.proto.id.ProgramRunId): 21
Test (org.junit.Test): 19
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 13
ProgramType (co.cask.cdap.proto.ProgramType): 12
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 10
DatasetId (co.cask.cdap.proto.id.DatasetId): 9
ProgramContextAware (co.cask.cdap.data.ProgramContextAware): 6
BasicProgramContext (co.cask.cdap.internal.app.runtime.BasicProgramContext): 6
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 6
Service (com.google.common.util.concurrent.Service): 6
HashSet (java.util.HashSet): 6
MetricsCollectionService (co.cask.cdap.api.metrics.MetricsCollectionService): 5
ProgramController (co.cask.cdap.app.runtime.ProgramController): 5
MessagingService (co.cask.cdap.messaging.MessagingService): 5
NamespacedEntityId (co.cask.cdap.proto.id.NamespacedEntityId): 5
StreamId (co.cask.cdap.proto.id.StreamId): 5
ArrayList (java.util.ArrayList): 5
Map (java.util.Map): 5