Example 26 with KeyValueTable

Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.

From class TestFrameworkTestRun, method testTransactionHandlerService.

@Test
public void testTransactionHandlerService() throws Exception {
    ApplicationManager applicationManager = deployApplication(testSpace, AppWithServices.class);
    LOG.info("Deployed.");
    ServiceManager serviceManager = applicationManager.getServiceManager(AppWithServices.TRANSACTIONS_SERVICE_NAME).start();
    serviceManager.waitForStatus(true);
    LOG.info("Service Started");
    final URL baseUrl = serviceManager.getServiceURL(15, TimeUnit.SECONDS);
    Assert.assertNotNull(baseUrl);
    // Make a request to write in a separate thread and wait for it to return.
    ExecutorService executorService = Executors.newSingleThreadExecutor();
    Future<Integer> requestFuture = executorService.submit(new Callable<Integer>() {

        @Override
        public Integer call() throws Exception {
            try {
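                // The trailing 10000 presumably tells the handler how long (in ms) to
                // hold its write transaction open before committing, so the concurrent
                // read below observes the pre-commit state.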
                URL url = new URL(String.format("%s/write/%s/%s/%d", baseUrl, AppWithServices.DATASET_TEST_KEY, AppWithServices.DATASET_TEST_VALUE, 10000));
                HttpRequest request = HttpRequest.get(url).build();
                HttpResponse response = HttpRequests.execute(request);
                return response.getResponseCode();
            } catch (Exception e) {
                LOG.error("Request thread got exception.", e);
                throw Throwables.propagate(e);
            }
        }
    });
    // The dataset should not be written by the time this request is made, since the transaction to write
    // has not been committed yet.
    URL url = new URL(String.format("%s/read/%s", baseUrl, AppWithServices.DATASET_TEST_KEY));
    HttpRequest request = HttpRequest.get(url).build();
    HttpResponse response = HttpRequests.execute(request);
    Assert.assertEquals(204, response.getResponseCode());
    // Wait for the transaction to commit.
    Integer writeStatusCode = requestFuture.get();
    Assert.assertEquals(200, writeStatusCode.intValue());
    // Make the same request again. By now the transaction should've completed.
    request = HttpRequest.get(url).build();
    response = HttpRequests.execute(request);
    Assert.assertEquals(200, response.getResponseCode());
    Assert.assertEquals(AppWithServices.DATASET_TEST_VALUE, new Gson().fromJson(response.getResponseBodyAsString(), String.class));
    executorService.shutdown();
    serviceManager.stop();
    serviceManager.waitForStatus(false);
    DataSetManager<KeyValueTable> dsManager = getDataset(testSpace.dataset(AppWithServices.TRANSACTIONS_DATASET_NAME));
    String value = Bytes.toString(dsManager.get().read(AppWithServices.DESTROY_KEY));
    Assert.assertEquals(AppWithServices.VALUE, value);
}
Also used : HttpRequest(co.cask.common.http.HttpRequest) ApplicationManager(co.cask.cdap.test.ApplicationManager) HttpResponse(co.cask.common.http.HttpResponse) Gson(com.google.gson.Gson) URL(java.net.URL) ConflictException(co.cask.cdap.common.ConflictException) IOException(java.io.IOException) TimeoutException(java.util.concurrent.TimeoutException) ServiceManager(co.cask.cdap.test.ServiceManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) ExecutorService(java.util.concurrent.ExecutorService) Test(org.junit.Test)
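For context, the read semantics this test depends on can be sketched as a CDAP HTTP service handler. This is an illustrative sketch, not the actual AppWithServices handler; the class name ReadHandler and the dataset name "kvTable" are assumptions:

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import co.cask.cdap.api.annotation.UseDataSet;
import co.cask.cdap.api.common.Bytes;
import co.cask.cdap.api.dataset.lib.KeyValueTable;
import co.cask.cdap.api.service.http.AbstractHttpServiceHandler;
import co.cask.cdap.api.service.http.HttpServiceRequest;
import co.cask.cdap.api.service.http.HttpServiceResponder;

public class ReadHandler extends AbstractHttpServiceHandler {

    @UseDataSet("kvTable")
    private KeyValueTable table;

    @GET
    @Path("read/{key}")
    public void read(HttpServiceRequest request, HttpServiceResponder responder,
                     @PathParam("key") String key) {
        // Each handler call runs in its own transaction, so a write still in flight
        // in another transaction is invisible here; that is why the test sees 204
        // while the slow write is pending and 200 after it commits.
        byte[] value = table.read(key);
        if (value == null) {
            responder.sendStatus(204);
        } else {
            responder.sendJson(200, Bytes.toString(value));
        }
    }
}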

Example 27 with KeyValueTable

Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.

From class TestFrameworkTestRun, method verifyMapperJobOutput.

private void verifyMapperJobOutput(Class<?> appClass, DataSetManager<KeyValueTable> outTableManager) throws Exception {
    KeyValueTable outputTable = outTableManager.get();
    Assert.assertEquals("world", Bytes.toString(outputTable.read("hello")));
    // Verify dataset metrics
    String readCountName = "system." + Constants.Metrics.Name.Dataset.READ_COUNT;
    String writeCountName = "system." + Constants.Metrics.Name.Dataset.WRITE_COUNT;
    Collection<MetricTimeSeries> metrics = getMetricsManager().query(new MetricDataQuery(0, System.currentTimeMillis() / 1000, Integer.MAX_VALUE, ImmutableMap.of(readCountName, AggregationFunction.SUM, writeCountName, AggregationFunction.SUM), ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getNamespace(), Constants.Metrics.Tag.APP, appClass.getSimpleName(), Constants.Metrics.Tag.MAPREDUCE, DatasetWithMRApp.MAPREDUCE_PROGRAM), ImmutableList.<String>of()));
    // Transform the collection of metrics into a map from metric name to aggregated sum
    Map<String, Long> aggs = Maps.transformEntries(Maps.uniqueIndex(metrics, new Function<MetricTimeSeries, String>() {

        @Override
        public String apply(MetricTimeSeries input) {
            return input.getMetricName();
        }
    }), new Maps.EntryTransformer<String, MetricTimeSeries, Long>() {

        @Override
        public Long transformEntry(String key, MetricTimeSeries value) {
            Preconditions.checkArgument(value.getTimeValues().size() == 1, "Expected one value for aggregated sum for metrics %s", key);
            return value.getTimeValues().get(0).getValue();
        }
    });
    Assert.assertEquals(Long.valueOf(1), aggs.get(readCountName));
    Assert.assertEquals(Long.valueOf(1), aggs.get(writeCountName));
}
Also used : AggregationFunction(co.cask.cdap.api.dataset.lib.cube.AggregationFunction) Function(com.google.common.base.Function) Maps(com.google.common.collect.Maps) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) MetricTimeSeries(co.cask.cdap.api.metrics.MetricTimeSeries) MetricDataQuery(co.cask.cdap.api.metrics.MetricDataQuery)
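The Guava uniqueIndex/transformEntries pair above predates Java 8; with streams the same metric-name-to-sum map can be built more directly. A sketch under the same assumption the query enforces (each SUM-aggregated series over the full time range carries exactly one time value):

import java.util.stream.Collectors;

Map<String, Long> aggs = metrics.stream()
    // Collectors.toMap throws on duplicate keys, matching uniqueIndex semantics.
    .collect(Collectors.toMap(MetricTimeSeries::getMetricName, series -> {
        Preconditions.checkArgument(series.getTimeValues().size() == 1,
            "Expected one value for aggregated sum for metric %s", series.getMetricName());
        return series.getTimeValues().get(0).getValue();
    }));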

Example 28 with KeyValueTable

Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.

From class TestFrameworkTestRun, method testAppWithPlugin.

@Test
public void testAppWithPlugin() throws Exception {
    ArtifactId artifactId = NamespaceId.DEFAULT.artifact("app-with-plugin", "1.0.0-SNAPSHOT");
    addAppArtifact(artifactId, AppWithPlugin.class);
    ArtifactId pluginArtifactId = NamespaceId.DEFAULT.artifact("test-plugin", "1.0.0-SNAPSHOT");
    addPluginArtifact(pluginArtifactId, artifactId, ToStringPlugin.class);
    ApplicationId appId = NamespaceId.DEFAULT.app("AppWithPlugin");
    AppRequest createRequest = new AppRequest(new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()));
    ApplicationManager appManager = deployApplication(appId, createRequest);
    final WorkerManager workerManager = appManager.getWorkerManager(AppWithPlugin.WORKER);
    workerManager.start();
    workerManager.waitForStatus(false, 5, 1);
    Tasks.waitFor(false, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            return workerManager.getHistory(ProgramRunStatus.COMPLETED).isEmpty();
        }
    }, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);
    final ServiceManager serviceManager = appManager.getServiceManager(AppWithPlugin.SERVICE);
    serviceManager.start();
    serviceManager.waitForStatus(true, 1, 10);
    URL serviceURL = serviceManager.getServiceURL(5, TimeUnit.SECONDS);
    callServiceGet(serviceURL, "dummy");
    serviceManager.stop();
    serviceManager.waitForStatus(false, 1, 10);
    Tasks.waitFor(false, new Callable<Boolean>() {

        @Override
        public Boolean call() throws Exception {
            return serviceManager.getHistory(ProgramRunStatus.KILLED).isEmpty();
        }
    }, 5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);
    WorkflowManager workflowManager = appManager.getWorkflowManager(AppWithPlugin.WORKFLOW);
    workflowManager.start();
    workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
    List<RunRecord> runRecords = workflowManager.getHistory();
    Assert.assertNotEquals(ProgramRunStatus.FAILED, runRecords.get(0).getStatus());
    DataSetManager<KeyValueTable> workflowTableManager = getDataset(AppWithPlugin.WORKFLOW_TABLE);
    String value = Bytes.toString(workflowTableManager.get().read("val"));
    Assert.assertEquals(AppWithPlugin.TEST, value);
    Map<String, String> workflowTags = ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, NamespaceId.DEFAULT.getNamespace(), Constants.Metrics.Tag.APP, "AppWithPlugin", Constants.Metrics.Tag.WORKFLOW, AppWithPlugin.WORKFLOW, Constants.Metrics.Tag.RUN_ID, runRecords.get(0).getPid());
    getMetricsManager().waitForTotalMetricCount(workflowTags, String.format("user.destroy.%s", AppWithPlugin.WORKFLOW), 1, 60, TimeUnit.SECONDS);
    // Test the Spark plugin. First send some data to the stream for the Spark program to process
    StreamManager streamManager = getStreamManager(AppWithPlugin.SPARK_STREAM);
    for (int i = 0; i < 5; i++) {
        streamManager.send("Message " + i);
    }
    SparkManager sparkManager = appManager.getSparkManager(AppWithPlugin.SPARK).start();
    sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);
    // Verify the Spark result.
    DataSetManager<Table> dataSetManager = getDataset(AppWithPlugin.SPARK_TABLE);
    Table table = dataSetManager.get();
    try (Scanner scanner = table.scan(null, null)) {
        for (int i = 0; i < 5; i++) {
            Row row = scanner.next();
            Assert.assertNotNull(row);
            String expected = "Message " + i + " " + AppWithPlugin.TEST;
            Assert.assertEquals(expected, Bytes.toString(row.getRow()));
            Assert.assertEquals(expected, Bytes.toString(row.get(expected)));
        }
        // There shouldn't be any more rows in the table.
        Assert.assertNull(scanner.next());
    }
}
Also used : Scanner(co.cask.cdap.api.dataset.table.Scanner) ApplicationManager(co.cask.cdap.test.ApplicationManager) ArtifactId(co.cask.cdap.proto.id.ArtifactId) WorkflowManager(co.cask.cdap.test.WorkflowManager) URL(java.net.URL) ServiceManager(co.cask.cdap.test.ServiceManager) SparkManager(co.cask.cdap.test.SparkManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) Table(co.cask.cdap.api.dataset.table.Table) ConflictException(co.cask.cdap.common.ConflictException) IOException(java.io.IOException) TimeoutException(java.util.concurrent.TimeoutException) AppRequest(co.cask.cdap.proto.artifact.AppRequest) WorkerManager(co.cask.cdap.test.WorkerManager) RunRecord(co.cask.cdap.proto.RunRecord) ArtifactSummary(co.cask.cdap.api.artifact.ArtifactSummary) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) StreamManager(co.cask.cdap.test.StreamManager) Row(co.cask.cdap.api.dataset.table.Row) ApplicationId(co.cask.cdap.proto.id.ApplicationId) Test(org.junit.Test)
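On Java 8, the anonymous Callable<Boolean> polling blocks in this test collapse to lambdas, since Tasks.waitFor accepts any Callable. For example, the worker-completion check above can be written as:

Tasks.waitFor(false, () -> workerManager.getHistory(ProgramRunStatus.COMPLETED).isEmpty(),
    5, TimeUnit.SECONDS, 10, TimeUnit.MILLISECONDS);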

Example 29 with KeyValueTable

Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.

From class TestFrameworkTestRun, method testClusterName.

@Test
public void testClusterName() throws Exception {
    String clusterName = getConfiguration().get(Constants.CLUSTER_NAME);
    ApplicationManager appManager = deployApplication(ClusterNameTestApp.class);
    final DataSetManager<KeyValueTable> datasetManager = getDataset(ClusterNameTestApp.CLUSTER_NAME_TABLE);
    final KeyValueTable clusterNameTable = datasetManager.get();
    // A callable for reading the cluster name from the ClusterNameTable.
    // It is used by the Tasks.waitFor calls below.
    final AtomicReference<String> key = new AtomicReference<>();
    Callable<String> readClusterName = new Callable<String>() {

        @Nullable
        @Override
        public String call() throws Exception {
            datasetManager.flush();
            byte[] bytes = clusterNameTable.read(key.get());
            return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
        }
    };
    // Service
    ServiceManager serviceManager = appManager.getServiceManager(ClusterNameTestApp.ClusterNameServiceHandler.class.getSimpleName()).start();
    Assert.assertEquals(clusterName, callServiceGet(serviceManager.getServiceURL(10, TimeUnit.SECONDS), "clusterName"));
    serviceManager.stop();
    // Worker
    WorkerManager workerManager = appManager.getWorkerManager(ClusterNameTestApp.ClusterNameWorker.class.getSimpleName()).start();
    key.set("worker.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    // The worker will stop by itself. No need to call stop
    workerManager.waitForRun(ProgramRunStatus.COMPLETED, 10, TimeUnit.SECONDS);
    // Flow
    FlowManager flowManager = appManager.getFlowManager(ClusterNameTestApp.ClusterNameFlow.class.getSimpleName()).start();
    key.set("flow.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    flowManager.stop();
    // MapReduce
    // Set up the input file used by the MapReduce job
    Location location = this.<FileSet>getDataset(ClusterNameTestApp.INPUT_FILE_SET).get().getLocation("input");
    try (PrintStream printer = new PrintStream(location.getOutputStream(), true, "UTF-8")) {
        for (int i = 0; i < 10; i++) {
            printer.println("Hello World " + i);
        }
    }
    // Set up input and output dataset arguments
    Map<String, String> inputArgs = new HashMap<>();
    FileSetArguments.setInputPath(inputArgs, "input");
    Map<String, String> outputArgs = new HashMap<>();
    FileSetArguments.setOutputPath(outputArgs, "output");
    Map<String, String> args = new HashMap<>();
    args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.INPUT_FILE_SET, inputArgs));
    args.putAll(RuntimeArguments.addScope(Scope.DATASET, ClusterNameTestApp.OUTPUT_FILE_SET, outputArgs));
    MapReduceManager mrManager = appManager.getMapReduceManager(ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName()).start(args);
    key.set("mr.client.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set("mapper.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set("reducer.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    mrManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
    // Spark
    SparkManager sparkManager = appManager.getSparkManager(ClusterNameTestApp.ClusterNameSpark.class.getSimpleName()).start();
    key.set("spark.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
    // Workflow
    // Clean up the output path for the MR job in the workflow first
    this.<FileSet>getDataset(ClusterNameTestApp.OUTPUT_FILE_SET).get().getLocation("output").delete(true);
    args = RuntimeArguments.addScope(Scope.MAPREDUCE, ClusterNameTestApp.ClusterNameMapReduce.class.getSimpleName(), args);
    WorkflowManager workflowManager = appManager.getWorkflowManager(ClusterNameTestApp.ClusterNameWorkflow.class.getSimpleName()).start(args);
    String prefix = ClusterNameTestApp.ClusterNameWorkflow.class.getSimpleName() + ".";
    key.set(prefix + "mr.client.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "mapper.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "reducer.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "spark.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    key.set(prefix + "action.cluster.name");
    Tasks.waitFor(clusterName, readClusterName, 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
    workflowManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);
}
Also used : FlowManager(co.cask.cdap.test.FlowManager) PrintStream(java.io.PrintStream) ApplicationManager(co.cask.cdap.test.ApplicationManager) SparkManager(co.cask.cdap.test.SparkManager) FileSet(co.cask.cdap.api.dataset.lib.FileSet) MapReduceManager(co.cask.cdap.test.MapReduceManager) HashMap(java.util.HashMap) WorkflowManager(co.cask.cdap.test.WorkflowManager) AtomicReference(java.util.concurrent.atomic.AtomicReference) Callable(java.util.concurrent.Callable) WorkerManager(co.cask.cdap.test.WorkerManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) ServiceManager(co.cask.cdap.test.ServiceManager) Location(org.apache.twill.filesystem.Location) Test(org.junit.Test)
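The Scope.DATASET calls in this test work by key prefixing: RuntimeArguments.addScope returns a copy of the argument map with each key prefixed so that only the named dataset resolves it. A short sketch; the fileset name "myFileSet" is illustrative, and the exact generated key is an internal detail of FileSetArguments:

Map<String, String> inputArgs = new HashMap<>();
FileSetArguments.setInputPath(inputArgs, "input");
// inputArgs now holds a fileset-specific key, e.g. "input.paths" -> "input"
Map<String, String> scoped = RuntimeArguments.addScope(Scope.DATASET, "myFileSet", inputArgs);
// scoped keys carry the scope and dataset name,
// e.g. "dataset.myFileSet.input.paths" -> "input"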

Example 30 with KeyValueTable

Use of co.cask.cdap.api.dataset.lib.KeyValueTable in project cdap by caskdata.

From class TestFrameworkTestRun, method testByteCodeClassLoader.

@Category(XSlowTests.class)
@Test
public void testByteCodeClassLoader() throws Exception {
    // This test verifies class loading of bytecode-generated classes
    ApplicationManager appManager = deployApplication(testSpace, ClassLoaderTestApp.class);
    FlowManager flowManager = appManager.getFlowManager("BasicFlow").start();
    // Wait until at least 10 records have been generated
    RuntimeMetrics flowMetrics = flowManager.getFlowletMetrics("Sink");
    flowMetrics.waitForProcessed(10, 5000, TimeUnit.MILLISECONDS);
    flowManager.stop();
    ServiceManager serviceManager = appManager.getServiceManager("RecordQuery").start();
    URL serviceURL = serviceManager.getServiceURL(15, TimeUnit.SECONDS);
    Assert.assertNotNull(serviceURL);
    // Query record
    URL url = new URL(serviceURL, "query?type=public");
    HttpRequest request = HttpRequest.get(url).build();
    HttpResponse response = HttpRequests.execute(request);
    Assert.assertEquals(200, response.getResponseCode());
    long count = Long.parseLong(response.getResponseBodyAsString());
    serviceManager.stop();
    // Verify the record count against the dataset
    DataSetManager<KeyValueTable> recordsManager = getDataset(testSpace.dataset("records"));
    KeyValueTable records = recordsManager.get();
    Assert.assertEquals(count, Bytes.toLong(records.read("PUBLIC")));
}
Also used : HttpRequest(co.cask.common.http.HttpRequest) FlowManager(co.cask.cdap.test.FlowManager) ApplicationManager(co.cask.cdap.test.ApplicationManager) RuntimeMetrics(co.cask.cdap.api.metrics.RuntimeMetrics) ServiceManager(co.cask.cdap.test.ServiceManager) KeyValueTable(co.cask.cdap.api.dataset.lib.KeyValueTable) HttpResponse(co.cask.common.http.HttpResponse) URL(java.net.URL) Category(org.junit.experimental.categories.Category) Test(org.junit.Test)
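The "PUBLIC" counter read at the end is decodable with Bytes.toLong because KeyValueTable stores incremented values as 8-byte longs. A minimal sketch of the pattern; the producing flowlet is not shown in this example, so this is an assumption about how the counter is maintained, not the app's actual code:

// Producer side: atomically bump the counter; the value is stored as an 8-byte long.
records.increment(Bytes.toBytes("PUBLIC"), 1L);
// Consumer side: decode it back, as the test's assertion does.
long publicCount = Bytes.toLong(records.read("PUBLIC"));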

Aggregations

KeyValueTable (co.cask.cdap.api.dataset.lib.KeyValueTable): 84 usages
Test (org.junit.Test): 49 usages
ApplicationManager (co.cask.cdap.test.ApplicationManager): 45 usages
SparkManager (co.cask.cdap.test.SparkManager): 25 usages
StreamManager (co.cask.cdap.test.StreamManager): 16 usages
IOException (java.io.IOException): 16 usages
TransactionExecutor (org.apache.tephra.TransactionExecutor): 12 usages
ApplicationWithPrograms (co.cask.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms): 11 usages
HashMap (java.util.HashMap): 11 usages
ArrayList (java.util.ArrayList): 10 usages
FileSet (co.cask.cdap.api.dataset.lib.FileSet): 8 usages
KeyValue (co.cask.cdap.api.dataset.lib.KeyValue): 8 usages
Table (co.cask.cdap.api.dataset.table.Table): 8 usages
NamespaceMeta (co.cask.cdap.proto.NamespaceMeta): 8 usages
ObjectStore (co.cask.cdap.api.dataset.lib.ObjectStore): 7 usages
MapReduceManager (co.cask.cdap.test.MapReduceManager): 7 usages
ServiceManager (co.cask.cdap.test.ServiceManager): 7 usages
WorkflowManager (co.cask.cdap.test.WorkflowManager): 7 usages
Set (java.util.Set): 7 usages
Category (org.junit.experimental.categories.Category): 7 usages