
Example 96 with StreamId

Use of co.cask.cdap.proto.id.StreamId in project cdap by caskdata.

The class LineageTestRun, method testAllProgramsLineage.

@Test
public void testAllProgramsLineage() throws Exception {
    NamespaceId namespace = new NamespaceId("testAllProgramsLineage");
    ApplicationId app = namespace.app(AllProgramsApp.NAME);
    ProgramId flow = app.flow(AllProgramsApp.NoOpFlow.NAME);
    ProgramId mapreduce = app.mr(AllProgramsApp.NoOpMR.NAME);
    ProgramId mapreduce2 = app.mr(AllProgramsApp.NoOpMR2.NAME);
    ProgramId spark = app.spark(AllProgramsApp.NoOpSpark.NAME);
    ProgramId service = app.service(AllProgramsApp.NoOpService.NAME);
    ProgramId worker = app.worker(AllProgramsApp.NoOpWorker.NAME);
    ProgramId workflow = app.workflow(AllProgramsApp.NoOpWorkflow.NAME);
    DatasetId dataset = namespace.dataset(AllProgramsApp.DATASET_NAME);
    DatasetId dataset2 = namespace.dataset(AllProgramsApp.DATASET_NAME2);
    DatasetId dataset3 = namespace.dataset(AllProgramsApp.DATASET_NAME3);
    StreamId stream = namespace.stream(AllProgramsApp.STREAM_NAME);
    namespaceClient.create(new NamespaceMeta.Builder().setName(namespace.getNamespace()).build());
    try {
        appClient.deploy(namespace, createAppJarFile(AllProgramsApp.class));
        // Add metadata
        ImmutableSet<String> sparkTags = ImmutableSet.of("spark-tag1", "spark-tag2");
        addTags(spark, sparkTags);
        Assert.assertEquals(sparkTags, getTags(spark, MetadataScope.USER));
        ImmutableSet<String> workerTags = ImmutableSet.of("worker-tag1");
        addTags(worker, workerTags);
        Assert.assertEquals(workerTags, getTags(worker, MetadataScope.USER));
        ImmutableMap<String, String> datasetProperties = ImmutableMap.of("data-key1", "data-value1");
        addProperties(dataset, datasetProperties);
        Assert.assertEquals(datasetProperties, getProperties(dataset, MetadataScope.USER));
        // Start all programs
        RunId flowRunId = runAndWait(flow);
        RunId mrRunId = runAndWait(mapreduce);
        RunId mrRunId2 = runAndWait(mapreduce2);
        RunId sparkRunId = runAndWait(spark);
        runAndWait(workflow);
        RunId workflowMrRunId = getRunId(mapreduce, mrRunId);
        RunId serviceRunId = runAndWait(service);
        // The worker makes a call to the service to make it access datasets,
        // so the service must start before the worker and stop after it.
        RunId workerRunId = runAndWait(worker);
        // Wait for programs to finish
        waitForStop(flow, true);
        waitForStop(mapreduce, false);
        waitForStop(mapreduce2, false);
        waitForStop(spark, false);
        waitForStop(workflow, false);
        waitForStop(worker, false);
        waitForStop(service, true);
        long now = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
        long oneHour = TimeUnit.HOURS.toSeconds(1);
        // Fetch dataset lineage
        LineageRecord lineage = fetchLineage(dataset, now - oneHour, now + oneHour, toSet(CollapseType.ACCESS), 10);
        // dataset is accessed by all programs
        LineageRecord expected = LineageSerializer.toLineageRecord(
            now - oneHour, now + oneHour,
            new Lineage(ImmutableSet.of(
                // Dataset access
                new Relation(dataset, flow, AccessType.UNKNOWN, flowRunId, toSet(flow.flowlet(AllProgramsApp.A.NAME))),
                new Relation(dataset, mapreduce, AccessType.WRITE, mrRunId),
                new Relation(dataset, mapreduce2, AccessType.WRITE, mrRunId2),
                new Relation(dataset2, mapreduce2, AccessType.READ, mrRunId2),
                new Relation(dataset, spark, AccessType.READ, sparkRunId),
                new Relation(dataset2, spark, AccessType.WRITE, sparkRunId),
                new Relation(dataset3, spark, AccessType.READ, sparkRunId),
                new Relation(dataset3, spark, AccessType.WRITE, sparkRunId),
                new Relation(dataset, mapreduce, AccessType.WRITE, workflowMrRunId),
                new Relation(dataset, service, AccessType.WRITE, serviceRunId),
                new Relation(dataset, worker, AccessType.WRITE, workerRunId),
                // Stream access
                new Relation(stream, flow, AccessType.READ, flowRunId, ImmutableSet.of(flow.flowlet(AllProgramsApp.A.NAME))),
                new Relation(stream, mapreduce, AccessType.READ, mrRunId),
                new Relation(stream, spark, AccessType.READ, sparkRunId),
                new Relation(stream, mapreduce, AccessType.READ, workflowMrRunId),
                new Relation(stream, worker, AccessType.WRITE, workerRunId))),
            toSet(CollapseType.ACCESS));
        Assert.assertEquals(expected, lineage);
        // Fetch stream lineage
        lineage = fetchLineage(stream, now - oneHour, now + oneHour, toSet(CollapseType.ACCESS), 10);
        // stream too is accessed by all programs
        Assert.assertEquals(expected, lineage);
        // Assert metadata
        // Id.Flow needs conversion to Id.Program JIRA - CDAP-3658
        Assert.assertEquals(
            toSet(new MetadataRecord(app, MetadataScope.USER, emptyMap(), emptySet()),
                  new MetadataRecord(flow, MetadataScope.USER, emptyMap(), emptySet()),
                  new MetadataRecord(dataset, MetadataScope.USER, datasetProperties, emptySet()),
                  new MetadataRecord(stream, MetadataScope.USER, emptyMap(), emptySet())),
            fetchRunMetadata(flow.run(flowRunId.getId())));
        // Id.Worker needs conversion to Id.Program JIRA - CDAP-3658
        ProgramId programForWorker = new ProgramId(worker.getNamespace(), worker.getApplication(), worker.getType(), worker.getEntityName());
        Assert.assertEquals(
            toSet(new MetadataRecord(app, MetadataScope.USER, emptyMap(), emptySet()),
                  new MetadataRecord(programForWorker, MetadataScope.USER, emptyMap(), workerTags),
                  new MetadataRecord(dataset, MetadataScope.USER, datasetProperties, emptySet()),
                  new MetadataRecord(stream, MetadataScope.USER, emptyMap(), emptySet())),
            fetchRunMetadata(worker.run(workerRunId.getId())));
        // Id.Spark needs conversion to Id.Program JIRA - CDAP-3658
        ProgramId programForSpark = new ProgramId(spark.getNamespace(), spark.getApplication(), spark.getType(), spark.getEntityName());
        Assert.assertEquals(
            toSet(new MetadataRecord(app, MetadataScope.USER, emptyMap(), emptySet()),
                  new MetadataRecord(programForSpark, MetadataScope.USER, emptyMap(), sparkTags),
                  new MetadataRecord(dataset, MetadataScope.USER, datasetProperties, emptySet()),
                  new MetadataRecord(dataset2, MetadataScope.USER, emptyMap(), emptySet()),
                  new MetadataRecord(dataset3, MetadataScope.USER, emptyMap(), emptySet()),
                  new MetadataRecord(stream, MetadataScope.USER, emptyMap(), emptySet())),
            fetchRunMetadata(spark.run(sparkRunId.getId())));
    } finally {
        namespaceClient.delete(namespace);
    }
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) Lineage(co.cask.cdap.data2.metadata.lineage.Lineage) AllProgramsApp(co.cask.cdap.client.app.AllProgramsApp) ProgramId(co.cask.cdap.proto.id.ProgramId) DatasetId(co.cask.cdap.proto.id.DatasetId) Relation(co.cask.cdap.data2.metadata.lineage.Relation) LineageRecord(co.cask.cdap.proto.metadata.lineage.LineageRecord) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) RunId(org.apache.twill.api.RunId) MetadataRecord(co.cask.cdap.proto.metadata.MetadataRecord) Test(org.junit.Test)
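
A minimal sketch of the entity-ID construction this test relies on, using only the factory calls that appear above (namespace.app, app.spark, namespace.dataset, namespace.stream); the names used here are illustrative and not part of the original test.

import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.DatasetId;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.proto.id.ProgramId;
import co.cask.cdap.proto.id.StreamId;

public final class EntityIdSketch {
    public static void main(String[] args) {
        // Every entity ID used in the test hangs off a NamespaceId.
        NamespaceId namespace = new NamespaceId("demo");      // illustrative namespace name
        ApplicationId app = namespace.app("AllProgramsApp");  // illustrative app name
        ProgramId spark = app.spark("NoOpSpark");             // illustrative program name
        DatasetId dataset = namespace.dataset("kvTable");     // illustrative dataset name
        StreamId stream = namespace.stream("who");            // illustrative stream name
        // IDs print as their fully-qualified form (namespace plus entity name).
        System.out.println(spark);
        System.out.println(dataset);
        System.out.println(stream);
    }
}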

Example 97 with StreamId

Use of co.cask.cdap.proto.id.StreamId in project cdap by caskdata.

The class LineageTestRun, method testFlowLineage.

@Test
public void testFlowLineage() throws Exception {
    NamespaceId namespace = new NamespaceId("testFlowLineage");
    ApplicationId app = namespace.app(AllProgramsApp.NAME);
    ProgramId flow = app.flow(AllProgramsApp.NoOpFlow.NAME);
    DatasetId dataset = namespace.dataset(AllProgramsApp.DATASET_NAME);
    StreamId stream = namespace.stream(AllProgramsApp.STREAM_NAME);
    namespaceClient.create(new NamespaceMeta.Builder().setName(namespace.toId()).build());
    try {
        appClient.deploy(namespace, createAppJarFile(AllProgramsApp.class));
        // Add metadata to application
        ImmutableMap<String, String> appProperties = ImmutableMap.of("app-key1", "app-value1");
        addProperties(app, appProperties);
        Assert.assertEquals(appProperties, getProperties(app, MetadataScope.USER));
        ImmutableSet<String> appTags = ImmutableSet.of("app-tag1");
        addTags(app, appTags);
        Assert.assertEquals(appTags, getTags(app, MetadataScope.USER));
        // Add metadata to flow
        ImmutableMap<String, String> flowProperties = ImmutableMap.of("flow-key1", "flow-value1");
        addProperties(flow, flowProperties);
        Assert.assertEquals(flowProperties, getProperties(flow, MetadataScope.USER));
        ImmutableSet<String> flowTags = ImmutableSet.of("flow-tag1", "flow-tag2");
        addTags(flow, flowTags);
        Assert.assertEquals(flowTags, getTags(flow, MetadataScope.USER));
        // Add metadata to dataset
        ImmutableMap<String, String> dataProperties = ImmutableMap.of("data-key1", "data-value1");
        addProperties(dataset, dataProperties);
        Assert.assertEquals(dataProperties, getProperties(dataset, MetadataScope.USER));
        ImmutableSet<String> dataTags = ImmutableSet.of("data-tag1", "data-tag2");
        addTags(dataset, dataTags);
        Assert.assertEquals(dataTags, getTags(dataset, MetadataScope.USER));
        // Add metadata to stream
        ImmutableMap<String, String> streamProperties = ImmutableMap.of("stream-key1", "stream-value1");
        addProperties(stream, streamProperties);
        Assert.assertEquals(streamProperties, getProperties(stream, MetadataScope.USER));
        ImmutableSet<String> streamTags = ImmutableSet.of("stream-tag1", "stream-tag2");
        addTags(stream, streamTags);
        Assert.assertEquals(streamTags, getTags(stream, MetadataScope.USER));
        long startTime = TimeMathParser.nowInSeconds();
        RunId flowRunId = runAndWait(flow);
        // Wait a few seconds so that the stop time (in seconds) is greater than the start time.
        TimeUnit.SECONDS.sleep(2);
        waitForStop(flow, true);
        long stopTime = TimeMathParser.nowInSeconds();
        // Fetch dataset lineage
        LineageRecord lineage = fetchLineage(dataset, startTime, stopTime, 10);
        LineageRecord expected = LineageSerializer.toLineageRecord(
            startTime, stopTime,
            new Lineage(ImmutableSet.of(
                new Relation(dataset, flow, AccessType.UNKNOWN, flowRunId, ImmutableSet.of(flow.flowlet(AllProgramsApp.A.NAME))),
                new Relation(stream, flow, AccessType.READ, flowRunId, ImmutableSet.of(flow.flowlet(AllProgramsApp.A.NAME))))),
            Collections.<CollapseType>emptySet());
        Assert.assertEquals(expected, lineage);
        // Fetch dataset lineage with time strings
        lineage = fetchLineage(dataset, "now-1h", "now+1h", 10);
        Assert.assertEquals(expected.getRelations(), lineage.getRelations());
        // Fetch stream lineage
        lineage = fetchLineage(stream, startTime, stopTime, 10);
        // same as dataset's lineage
        Assert.assertEquals(expected, lineage);
        // Fetch stream lineage with time strings
        lineage = fetchLineage(stream, "now-1h", "now+1h", 10);
        // same as dataset's lineage
        Assert.assertEquals(expected.getRelations(), lineage.getRelations());
        // Assert metadata
        // Id.Flow needs conversion to Id.Program JIRA - CDAP-3658
        Assert.assertEquals(
            toSet(new MetadataRecord(app, MetadataScope.USER, appProperties, appTags),
                  new MetadataRecord(flow, MetadataScope.USER, flowProperties, flowTags),
                  new MetadataRecord(dataset, MetadataScope.USER, dataProperties, dataTags),
                  new MetadataRecord(stream, MetadataScope.USER, streamProperties, streamTags)),
            fetchRunMetadata(flow.run(flowRunId.getId())));
        // A time range entirely after the flow run should return no results
        long laterStartTime = stopTime + 1000;
        long laterEndTime = stopTime + 5000;
        // Fetch stream lineage
        lineage = fetchLineage(stream, laterStartTime, laterEndTime, 10);
        Assert.assertEquals(LineageSerializer.toLineageRecord(laterStartTime, laterEndTime, new Lineage(ImmutableSet.<Relation>of()), Collections.<CollapseType>emptySet()), lineage);
        // A time range entirely before the flow run should return no results
        long earlierStartTime = startTime - 5000;
        long earlierEndTime = startTime - 1000;
        // Fetch stream lineage
        lineage = fetchLineage(stream, earlierStartTime, earlierEndTime, 10);
        Assert.assertEquals(LineageSerializer.toLineageRecord(earlierStartTime, earlierEndTime, new Lineage(ImmutableSet.<Relation>of()), Collections.<CollapseType>emptySet()), lineage);
        // Test bad time ranges
        fetchLineage(dataset, "sometime", "sometime", 10, BadRequestException.class);
        fetchLineage(dataset, "now+1h", "now-1h", 10, BadRequestException.class);
        // Test non-existent run
        assertRunMetadataNotFound(flow.run(RunIds.generate(1000).getId()));
    } finally {
        namespaceClient.delete(namespace);
    }
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) CollapseType(co.cask.cdap.proto.metadata.lineage.CollapseType) Lineage(co.cask.cdap.data2.metadata.lineage.Lineage) AllProgramsApp(co.cask.cdap.client.app.AllProgramsApp) ProgramId(co.cask.cdap.proto.id.ProgramId) DatasetId(co.cask.cdap.proto.id.DatasetId) Relation(co.cask.cdap.data2.metadata.lineage.Relation) LineageRecord(co.cask.cdap.proto.metadata.lineage.LineageRecord) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) NamespaceId(co.cask.cdap.proto.id.NamespaceId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) RunId(org.apache.twill.api.RunId) MetadataRecord(co.cask.cdap.proto.metadata.MetadataRecord) Test(org.junit.Test)
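
The lineage queries above bracket the run with a window of now minus one hour to now plus one hour, expressed in seconds, which the time strings "now-1h"/"now+1h" mirror. Below is a minimal sketch of that window arithmetic in plain Java; fetchLineage itself is a test helper and is not reproduced here.

import java.util.concurrent.TimeUnit;

public final class LineageWindowSketch {
    public static void main(String[] args) {
        // Lineage is queried over [now - 1h, now + 1h], in seconds since the epoch.
        long now = TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis());
        long oneHour = TimeUnit.HOURS.toSeconds(1);
        long startTime = now - oneHour;  // numeric equivalent of the "now-1h" time string
        long endTime = now + oneHour;    // numeric equivalent of the "now+1h" time string
        System.out.printf("querying lineage from %d to %d%n", startTime, endTime);
    }
}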

Example 98 with StreamId

Use of co.cask.cdap.proto.id.StreamId in project cdap by caskdata.

The class MetadataHttpHandlerTestRun, method testInvalidEntities.

@Test
public void testInvalidEntities() throws IOException {
    ProgramId nonExistingProgram = application.service("NonExistingService");
    DatasetId nonExistingDataset = new DatasetId(NamespaceId.DEFAULT.getNamespace(), "NonExistingDataset");
    StreamId nonExistingStream = NamespaceId.DEFAULT.stream("NonExistingStream");
    StreamViewId nonExistingView = nonExistingStream.view("NonExistingView");
    ApplicationId nonExistingApp = NamespaceId.DEFAULT.app("NonExistingApp");
    Map<String, String> properties = ImmutableMap.of("aKey", "aValue", "aK", "aV");
    addProperties(nonExistingApp, properties, NotFoundException.class);
    addProperties(nonExistingProgram, properties, NotFoundException.class);
    addProperties(nonExistingDataset, properties, NotFoundException.class);
    addProperties(nonExistingView, properties, NotFoundException.class);
    addProperties(nonExistingStream, properties, NotFoundException.class);
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) ProgramId(co.cask.cdap.proto.id.ProgramId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) DatasetId(co.cask.cdap.proto.id.DatasetId) StreamViewId(co.cask.cdap.proto.id.StreamViewId) Test(org.junit.Test)
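
Entity IDs are plain value objects, so they can be built for entities that do not exist; the NotFoundException only surfaces when the metadata call is made. A small sketch, assuming nothing beyond the constructors and factory methods used above:

import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.DatasetId;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.proto.id.StreamId;
import co.cask.cdap.proto.id.StreamViewId;

public final class MissingEntityIdSketch {
    public static void main(String[] args) {
        // Building an ID does not check existence; the metadata call is what fails.
        StreamId stream = NamespaceId.DEFAULT.stream("NonExistingStream");
        StreamViewId view = stream.view("NonExistingView");
        DatasetId dataset = new DatasetId(NamespaceId.DEFAULT.getNamespace(), "NonExistingDataset");
        ApplicationId app = NamespaceId.DEFAULT.app("NonExistingApp");
        System.out.println(view);     // well-formed ID for an entity that does not exist
        System.out.println(dataset);
        System.out.println(app);
    }
}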

Example 99 with StreamId

Use of co.cask.cdap.proto.id.StreamId in project cdap by caskdata.

The class MetadataHttpHandlerTestRun, method testSearchResultPagination.

@Test
public void testSearchResultPagination() throws Exception {
    NamespaceId namespace = new NamespaceId("pagination");
    namespaceClient.create(new NamespaceMeta.Builder().setName(namespace).build());
    StreamId stream = namespace.stream("text");
    DatasetId dataset = namespace.dataset("mydataset");
    StreamViewId view = stream.view("view");
    DatasetId trackerDataset = namespace.dataset("_auditLog");
    // create entities so system metadata is annotated
    streamClient.create(stream);
    streamViewClient.createOrUpdate(view, new ViewSpecification(new FormatSpecification("csv", null, null)));
    datasetClient.create(dataset, new DatasetInstanceConfiguration(Table.class.getName(), Collections.<String, String>emptyMap()));
    datasetClient.create(trackerDataset, new DatasetInstanceConfiguration(Table.class.getName(), Collections.<String, String>emptyMap()));
    // search with showHidden set to true
    EnumSet<EntityTypeSimpleName> targets = EnumSet.allOf(EntityTypeSimpleName.class);
    String sort = AbstractSystemMetadataWriter.ENTITY_NAME_KEY + " asc";
    // search to get all the above entities: offset 0, limit Integer.MAX_VALUE, and 0 cursors
    MetadataSearchResponse searchResponse = searchMetadata(namespace, "*", targets, sort, 0, Integer.MAX_VALUE, 0, null, true);
    List<MetadataSearchResultRecord> expectedResults =
        ImmutableList.of(new MetadataSearchResultRecord(trackerDataset),
                         new MetadataSearchResultRecord(dataset),
                         new MetadataSearchResultRecord(stream),
                         new MetadataSearchResultRecord(view));
    List<String> expectedCursors = ImmutableList.of();
    Assert.assertEquals(expectedResults, new ArrayList<>(searchResponse.getResults()));
    Assert.assertEquals(expectedCursors, searchResponse.getCursors());
    // no offset, limit 1, no cursors
    searchResponse = searchMetadata(namespace, "*", targets, sort, 0, 1, 0, null);
    expectedResults = ImmutableList.of(new MetadataSearchResultRecord(dataset));
    expectedCursors = ImmutableList.of();
    Assert.assertEquals(expectedResults, new ArrayList<>(searchResponse.getResults()));
    Assert.assertEquals(expectedCursors, searchResponse.getCursors());
    // no offset, limit 1, 2 cursors, should return 1st result, with 2 cursors
    searchResponse = searchMetadata(namespace, "*", targets, sort, 0, 1, 2, null);
    expectedResults = ImmutableList.of(new MetadataSearchResultRecord(dataset));
    expectedCursors = ImmutableList.of(stream.getEntityName(), view.getEntityName());
    Assert.assertEquals(expectedResults, new ArrayList<>(searchResponse.getResults()));
    Assert.assertEquals(expectedCursors, searchResponse.getCursors());
    // offset 1, limit 1, 2 cursors, should return 2nd result, with only 1 cursor since we don't have enough data
    searchResponse = searchMetadata(namespace, "*", targets, sort, 1, 1, 2, null);
    expectedResults = ImmutableList.of(new MetadataSearchResultRecord(stream));
    expectedCursors = ImmutableList.of(view.getEntityName());
    Assert.assertEquals(expectedResults, new ArrayList<>(searchResponse.getResults()));
    Assert.assertEquals(expectedCursors, searchResponse.getCursors());
    // offset 2, limit 1, 2 cursors, should return 3rd result, with 0 cursors since we don't have enough data
    searchResponse = searchMetadata(namespace, "*", targets, sort, 2, 1, 2, null);
    expectedResults = ImmutableList.of(new MetadataSearchResultRecord(view));
    Assert.assertEquals(expectedResults, new ArrayList<>(searchResponse.getResults()));
    Assert.assertTrue(searchResponse.getCursors().isEmpty());
    // offset 3, limit 1, 2 cursors, should return 0 results, with 0 cursors since we don't have enough data
    searchResponse = searchMetadata(namespace, "*", targets, sort, 3, 1, 2, null);
    Assert.assertTrue(searchResponse.getResults().isEmpty());
    Assert.assertTrue(searchResponse.getCursors().isEmpty());
    // no offset, effectively no limit (Integer.MAX_VALUE), should return everything
    searchResponse = searchMetadata(namespace, "*", targets, sort, 0, Integer.MAX_VALUE, 4, null);
    expectedResults = ImmutableList.of(new MetadataSearchResultRecord(dataset),
                                       new MetadataSearchResultRecord(stream),
                                       new MetadataSearchResultRecord(view));
    Assert.assertEquals(expectedResults, new ArrayList<>(searchResponse.getResults()));
    Assert.assertTrue(searchResponse.getCursors().isEmpty());
    // cleanup
    namespaceClient.delete(namespace);
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) FormatSpecification(co.cask.cdap.api.data.format.FormatSpecification) ViewSpecification(co.cask.cdap.proto.ViewSpecification) MetadataSearchResponse(co.cask.cdap.proto.metadata.MetadataSearchResponse) DatasetInstanceConfiguration(co.cask.cdap.proto.DatasetInstanceConfiguration) DatasetId(co.cask.cdap.proto.id.DatasetId) EntityTypeSimpleName(co.cask.cdap.proto.element.EntityTypeSimpleName) MetadataSearchResultRecord(co.cask.cdap.proto.metadata.MetadataSearchResultRecord) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) NamespaceId(co.cask.cdap.proto.id.NamespaceId) StreamViewId(co.cask.cdap.proto.id.StreamViewId) Test(org.junit.Test)
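
The pagination assertions above boil down to slicing a name-sorted result list by offset and limit. The following sketch is illustrative only and is not the CDAP search implementation; it just mirrors the pages the test expects for the visible entities.

import java.util.Arrays;
import java.util.List;

public final class PaginationSketch {

    // Illustrative offset/limit slicing over an already-sorted result list;
    // this is NOT the CDAP search implementation.
    static List<String> page(List<String> sortedNames, int offset, int limit) {
        int from = Math.min(offset, sortedNames.size());
        int to = Math.min(from + limit, sortedNames.size());
        return sortedNames.subList(from, to);
    }

    public static void main(String[] args) {
        // Visible entity names from the test, sorted ascending by entity name.
        List<String> byName = Arrays.asList("mydataset", "text", "view");
        System.out.println(page(byName, 0, 1));  // [mydataset] -> the dataset
        System.out.println(page(byName, 1, 1));  // [text]      -> the stream
        System.out.println(page(byName, 2, 1));  // [view]      -> the view
        System.out.println(page(byName, 3, 1));  // []          -> past the end, no results
    }
}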

Example 100 with StreamId

Use of co.cask.cdap.proto.id.StreamId in project cdap by caskdata.

The class MetadataHttpHandlerTestRun, method testSearchResultSorting.

@Test
public void testSearchResultSorting() throws Exception {
    NamespaceId namespace = new NamespaceId("sorting");
    namespaceClient.create(new NamespaceMeta.Builder().setName(namespace).build());
    StreamId stream = namespace.stream("text");
    DatasetId dataset = namespace.dataset("mydataset");
    StreamViewId view = stream.view("view");
    // create entities so system metadata is annotated
    // also ensure that they are created at least 1 ms apart
    streamClient.create(stream);
    TimeUnit.MILLISECONDS.sleep(1);
    streamViewClient.createOrUpdate(view, new ViewSpecification(new FormatSpecification("csv", null, null)));
    TimeUnit.MILLISECONDS.sleep(1);
    datasetClient.create(dataset, new DatasetInstanceConfiguration(Table.class.getName(), Collections.<String, String>emptyMap()));
    // search across all entity types with the given sort param
    EnumSet<EntityTypeSimpleName> targets = EnumSet.allOf(EntityTypeSimpleName.class);
    // test ascending order of entity name
    Set<MetadataSearchResultRecord> searchResults = searchMetadata(namespace, "*", targets, AbstractSystemMetadataWriter.ENTITY_NAME_KEY + " asc");
    List<MetadataSearchResultRecord> expected = ImmutableList.of(new MetadataSearchResultRecord(dataset), new MetadataSearchResultRecord(stream), new MetadataSearchResultRecord(view));
    Assert.assertEquals(expected, new ArrayList<>(searchResults));
    // test descending order of entity name
    searchResults = searchMetadata(namespace, "*", targets, AbstractSystemMetadataWriter.ENTITY_NAME_KEY + " desc");
    expected = ImmutableList.of(new MetadataSearchResultRecord(view), new MetadataSearchResultRecord(stream), new MetadataSearchResultRecord(dataset));
    Assert.assertEquals(expected, new ArrayList<>(searchResults));
    // test ascending order of creation time
    searchResults = searchMetadata(namespace, "*", targets, AbstractSystemMetadataWriter.CREATION_TIME_KEY + " asc");
    expected = ImmutableList.of(new MetadataSearchResultRecord(stream), new MetadataSearchResultRecord(view), new MetadataSearchResultRecord(dataset));
    Assert.assertEquals(expected, new ArrayList<>(searchResults));
    // test descending order of creation time
    searchResults = searchMetadata(namespace, "*", targets, AbstractSystemMetadataWriter.CREATION_TIME_KEY + " desc");
    expected = ImmutableList.of(new MetadataSearchResultRecord(dataset), new MetadataSearchResultRecord(view), new MetadataSearchResultRecord(stream));
    Assert.assertEquals(expected, new ArrayList<>(searchResults));
    // cleanup
    namespaceClient.delete(namespace);
}
Also used : StreamId(co.cask.cdap.proto.id.StreamId) FormatSpecification(co.cask.cdap.api.data.format.FormatSpecification) ViewSpecification(co.cask.cdap.proto.ViewSpecification) DatasetInstanceConfiguration(co.cask.cdap.proto.DatasetInstanceConfiguration) DatasetId(co.cask.cdap.proto.id.DatasetId) EntityTypeSimpleName(co.cask.cdap.proto.element.EntityTypeSimpleName) MetadataSearchResultRecord(co.cask.cdap.proto.metadata.MetadataSearchResultRecord) NamespaceMeta(co.cask.cdap.proto.NamespaceMeta) NamespaceId(co.cask.cdap.proto.id.NamespaceId) StreamViewId(co.cask.cdap.proto.id.StreamViewId) Test(org.junit.Test)
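
The sort assertions reduce to ordering the three entity names ascending or descending (the creation-time ordering follows the create calls, which are spaced at least 1 ms apart). This is an illustrative sketch of the name orderings the test expects, not the CDAP sorting implementation.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;

public final class SortOrderSketch {
    public static void main(String[] args) {
        // The real request passes a sort string such as
        // AbstractSystemMetadataWriter.ENTITY_NAME_KEY + " asc" to searchMetadata.
        List<String> names = new ArrayList<>(Arrays.asList("text", "view", "mydataset"));
        names.sort(Comparator.<String>naturalOrder());
        System.out.println(names);  // [mydataset, text, view]  -> "entity name asc"
        names.sort(Comparator.<String>reverseOrder());
        System.out.println(names);  // [view, text, mydataset]  -> "entity name desc"
    }
}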

Aggregations

StreamId (co.cask.cdap.proto.id.StreamId): 166
Test (org.junit.Test): 88
DatasetId (co.cask.cdap.proto.id.DatasetId): 33
ProgramId (co.cask.cdap.proto.id.ProgramId): 30
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 27
Path (javax.ws.rs.Path): 27
StreamEvent (co.cask.cdap.api.flow.flowlet.StreamEvent): 24
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 22
IOException (java.io.IOException): 20
StreamProperties (co.cask.cdap.proto.StreamProperties): 17
FormatSpecification (co.cask.cdap.api.data.format.FormatSpecification): 16
StreamViewId (co.cask.cdap.proto.id.StreamViewId): 16
Location (org.apache.twill.filesystem.Location): 15
StreamConfig (co.cask.cdap.data2.transaction.stream.StreamConfig): 12
NamespaceMeta (co.cask.cdap.proto.NamespaceMeta): 12
StreamAdmin (co.cask.cdap.data2.transaction.stream.StreamAdmin): 11
ViewSpecification (co.cask.cdap.proto.ViewSpecification): 10
MetadataSearchResultRecord (co.cask.cdap.proto.metadata.MetadataSearchResultRecord): 10
Action (co.cask.cdap.proto.security.Action): 10
GET (javax.ws.rs.GET): 10