
Example 61 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

The class RouteConfigHttpHandler, method getRouteConfig:

@GET
@Path("/routeconfig")
public void getRouteConfig(HttpRequest request, HttpResponder responder,
                           @PathParam("namespace-id") String namespaceId,
                           @PathParam("app-id") String appId,
                           @PathParam("service-id") String serviceId) throws Exception {
    // Resolve the service program addressed by the path parameters
    ProgramId programId = Ids.namespace(namespaceId).app(appId).service(serviceId);
    // Look up its route configuration and send the routes back as JSON
    RouteConfig routeConfig = routeStore.fetch(programId);
    responder.sendJson(HttpResponseStatus.OK, routeConfig.getRoutes());
}
Also used : RouteConfig(co.cask.cdap.route.store.RouteConfig) ProgramId(co.cask.cdap.proto.id.ProgramId) Path(javax.ws.rs.Path) GET(javax.ws.rs.GET)
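The handler assembles the ProgramId fluently from the three path parameters. As a minimal sketch (the app and service names below are placeholders, not from the source, and it assumes the (namespace, application, type, program) constructor), the same id can also be built directly:

import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.Ids;
import co.cask.cdap.proto.id.ProgramId;

public class ProgramIdSketch {
    public static void main(String[] args) {
        // Fluent style, as used in getRouteConfig above (placeholder names)
        ProgramId viaBuilder = Ids.namespace("default").app("PurchaseApp").service("CatalogLookup");
        // Direct constructor, spelling out the program type explicitly (assumed signature)
        ProgramId viaConstructor = new ProgramId("default", "PurchaseApp", ProgramType.SERVICE, "CatalogLookup");
        // Both values identify the same service program
        System.out.println(viaBuilder.equals(viaConstructor));
    }
}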

Example 62 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

The class DeleteAndCreateSchedulesStage, method toProgramSchedule:

private ProgramSchedule toProgramSchedule(ApplicationId appId, ScheduleCreationSpec scheduleCreationSpec) {
    // Schedules always target a workflow of the given application
    ProgramId programId = appId.workflow(scheduleCreationSpec.getProgramName());
    Trigger trigger = scheduleCreationSpec.getTrigger();
    return new ProgramSchedule(scheduleCreationSpec.getName(), scheduleCreationSpec.getDescription(),
                               programId, scheduleCreationSpec.getProperties(), trigger,
                               scheduleCreationSpec.getConstraints(), scheduleCreationSpec.getTimeoutMillis());
}
Also used : Trigger(co.cask.cdap.internal.schedule.trigger.Trigger) ProgramSchedule(co.cask.cdap.internal.app.runtime.schedule.ProgramSchedule) ProgramId(co.cask.cdap.proto.id.ProgramId)
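Here appId.workflow(...) pins the ProgramId to the workflow type. A minimal hedged sketch with placeholder names, assuming workflow(name) is the typed shorthand for program(ProgramType.WORKFLOW, name):

import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.proto.id.ProgramId;

public class WorkflowIdSketch {
    public static void main(String[] args) {
        ApplicationId appId = NamespaceId.DEFAULT.app("DataPipelineApp"); // placeholder application name
        ProgramId viaShorthand = appId.workflow("DataPipelineWorkflow");
        ProgramId viaProgram = appId.program(ProgramType.WORKFLOW, "DataPipelineWorkflow");
        // Expected to print true if workflow(...) is indeed the typed shorthand
        System.out.println(viaShorthand.equals(viaProgram));
    }
}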

Example 63 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

The class ExistingEntitySystemMetadataWriter, method writeSystemMetadataForPrograms:

private void writeSystemMetadataForPrograms(ApplicationId app, ProgramType programType,
                                            Collection<? extends ProgramSpecification> programSpecs) {
    for (ProgramSpecification programSpec : programSpecs) {
        // Derive the ProgramId from the application plus the program's type and name,
        // then write that program's system metadata
        ProgramId programId = app.program(programType, programSpec.getName());
        SystemMetadataWriter writer = new ProgramSystemMetadataWriter(metadataStore, programId, programSpec, true);
        writer.write();
    }
}
Also used : ProgramSpecification(co.cask.cdap.api.ProgramSpecification) DatasetSystemMetadataWriter(co.cask.cdap.data2.metadata.system.DatasetSystemMetadataWriter) ProgramSystemMetadataWriter(co.cask.cdap.data2.metadata.system.ProgramSystemMetadataWriter) ViewSystemMetadataWriter(co.cask.cdap.data2.metadata.system.ViewSystemMetadataWriter) SystemMetadataWriter(co.cask.cdap.data2.metadata.system.SystemMetadataWriter) AppSystemMetadataWriter(co.cask.cdap.data2.metadata.system.AppSystemMetadataWriter) ArtifactSystemMetadataWriter(co.cask.cdap.data2.metadata.system.ArtifactSystemMetadataWriter) StreamSystemMetadataWriter(co.cask.cdap.data2.metadata.system.StreamSystemMetadataWriter) ProgramId(co.cask.cdap.proto.id.ProgramId) ProgramSystemMetadataWriter(co.cask.cdap.data2.metadata.system.ProgramSystemMetadataWriter)
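Each ProgramSpecification yields one ProgramId through app.program(type, name). A minimal sketch of the same derivation outside the writer, with placeholder application and program names standing in for the specification collections:

import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.proto.id.ProgramId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class ProgramIdsForAppSketch {
    public static void main(String[] args) {
        ApplicationId app = new NamespaceId("default").app("LogAnalyticsApp"); // placeholder names
        // Program names grouped by type, standing in for the ProgramSpecification collections
        Map<ProgramType, List<String>> programsByType = new LinkedHashMap<>();
        programsByType.put(ProgramType.SERVICE, Arrays.asList("LogQueryService"));
        programsByType.put(ProgramType.WORKFLOW, Arrays.asList("LogWorkflow"));
        List<ProgramId> programIds = new ArrayList<>();
        for (Map.Entry<ProgramType, List<String>> entry : programsByType.entrySet()) {
            for (String name : entry.getValue()) {
                // Same derivation as writeSystemMetadataForPrograms: application + type + name
                programIds.add(app.program(entry.getKey(), name));
            }
        }
        System.out.println(programIds);
    }
}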

Example 64 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

The class AppMetadataStoreTest, method testScanRunningInRangeWithBatch:

@Test
public void testScanRunningInRangeWithBatch() throws Exception {
    DatasetId storeTable = NamespaceId.DEFAULT.dataset("testScanRunningInRange");
    datasetFramework.addInstance(Table.class.getName(), storeTable, DatasetProperties.EMPTY);
    Table table = datasetFramework.getDataset(storeTable, ImmutableMap.<String, String>of(), null);
    Assert.assertNotNull(table);
    final AppMetadataStore metadataStoreDataset = new AppMetadataStore(table, cConf, new AtomicBoolean(false));
    TransactionExecutor txnl = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) metadataStoreDataset));
    // Add some run records
    TreeSet<Long> expected = new TreeSet<>();
    for (int i = 0; i < 100; ++i) {
        ApplicationId application = NamespaceId.DEFAULT.app("app" + i);
        final ProgramId program = application.program(ProgramType.values()[i % ProgramType.values().length], "program" + i);
        final RunId runId = RunIds.generate((i + 1) * 10000);
        expected.add(RunIds.getTime(runId, TimeUnit.MILLISECONDS));
        // Start the program and stop it
        final int j = i;
        txnl.execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                metadataStoreDataset.recordProgramStart(program, runId.getId(), RunIds.getTime(runId, TimeUnit.SECONDS), null, null, null);
                metadataStoreDataset.recordProgramStop(program, runId.getId(), RunIds.getTime(runId, TimeUnit.SECONDS), ProgramRunStatus.values()[j % ProgramRunStatus.values().length], null);
            }
        });
    }
    // Run a full scan
    runScan(txnl, metadataStoreDataset, expected, 0, Long.MAX_VALUE);
    // In all assertions below, both the TreeSet subSet and the metadata store scan treat the
    // start time as inclusive and the end time as exclusive.
    // Run the scan with a time limit
    runScan(txnl, metadataStoreDataset, expected.subSet(30 * 10000L, 90 * 10000L),
            TimeUnit.MILLISECONDS.toSeconds(30 * 10000), TimeUnit.MILLISECONDS.toSeconds(90 * 10000));
    runScan(txnl, metadataStoreDataset, expected.subSet(90 * 10000L, 101 * 10000L),
            TimeUnit.MILLISECONDS.toSeconds(90 * 10000), TimeUnit.MILLISECONDS.toSeconds(101 * 10000));
    // After the range
    runScan(txnl, metadataStoreDataset, expected.subSet(101 * 10000L, 200 * 10000L),
            TimeUnit.MILLISECONDS.toSeconds(101 * 10000), TimeUnit.MILLISECONDS.toSeconds(200 * 10000));
    // Identical start and end time
    runScan(txnl, metadataStoreDataset, expected.subSet(31 * 10000L, 31 * 10000L),
            TimeUnit.MILLISECONDS.toSeconds(31 * 10000), TimeUnit.MILLISECONDS.toSeconds(31 * 10000));
    // One unit of difference between start and end time
    runScan(txnl, metadataStoreDataset, expected.subSet(30 * 10000L, 31 * 10000L),
            TimeUnit.MILLISECONDS.toSeconds(30 * 10000), TimeUnit.MILLISECONDS.toSeconds(31 * 10000));
    // Before the range
    runScan(txnl, metadataStoreDataset, expected.subSet(1000L, 10000L),
            TimeUnit.MILLISECONDS.toSeconds(1000), TimeUnit.MILLISECONDS.toSeconds(10000));
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) TransactionExecutor(org.apache.tephra.TransactionExecutor) ProgramId(co.cask.cdap.proto.id.ProgramId) TransactionFailureException(org.apache.tephra.TransactionFailureException) DatasetId(co.cask.cdap.proto.id.DatasetId) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) TransactionAware(org.apache.tephra.TransactionAware) TreeSet(java.util.TreeSet) ApplicationId(co.cask.cdap.proto.id.ApplicationId) RunId(org.apache.twill.api.RunId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) Test(org.junit.Test)
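The time-range scan works because the generated run id encodes its creation timestamp, which the test stores in seconds and bounds with TimeUnit.MILLISECONDS.toSeconds(...). A minimal sketch of that round trip, assuming the RunIds helper lives in co.cask.cdap.common.app (it is not shown in the import list above):

import co.cask.cdap.common.app.RunIds;
import java.util.concurrent.TimeUnit;
import org.apache.twill.api.RunId;

public class RunIdTimeSketch {
    public static void main(String[] args) {
        long startMillis = 30 * 10000L; // same scale as the timestamps used in the test
        RunId runId = RunIds.generate(startMillis);
        // The generation timestamp can be read back in any unit
        System.out.println(RunIds.getTime(runId, TimeUnit.MILLISECONDS)); // expected: 300000
        System.out.println(RunIds.getTime(runId, TimeUnit.SECONDS));      // expected: 300
    }
}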

Example 65 with ProgramId

Use of co.cask.cdap.proto.id.ProgramId in project cdap by caskdata.

The class AppMetadataStoreTest, method testgetRuns:

@Test
public void testgetRuns() throws Exception {
    DatasetId storeTable = NamespaceId.DEFAULT.dataset("testgetRuns");
    datasetFramework.addInstance(Table.class.getName(), storeTable, DatasetProperties.EMPTY);
    Table table = datasetFramework.getDataset(storeTable, ImmutableMap.<String, String>of(), null);
    Assert.assertNotNull(table);
    final AppMetadataStore metadataStoreDataset = new AppMetadataStore(table, cConf, new AtomicBoolean(false));
    TransactionExecutor txnl = txExecutorFactory.createExecutor(Collections.singleton((TransactionAware) metadataStoreDataset));
    // Add some run records
    final Set<String> expected = new TreeSet<>();
    final Set<String> expectedHalf = new TreeSet<>();
    final Set<ProgramRunId> programRunIdSet = new HashSet<>();
    final Set<ProgramRunId> programRunIdSetHalf = new HashSet<>();
    for (int i = 0; i < 100; ++i) {
        ApplicationId application = NamespaceId.DEFAULT.app("app");
        final ProgramId program = application.program(ProgramType.FLOW, "program");
        final RunId runId = RunIds.generate((i + 1) * 10000);
        expected.add(runId.toString());
        final int index = i;
        // Add every other runId
        if ((i % 2) == 0) {
            expectedHalf.add(runId.toString());
        }
        ProgramRunId programRunId = new ProgramRunId(program.getNamespace(), program.getApplication(),
                                                     program.getType(), program.getProgram(), runId.toString());
        programRunIdSet.add(programRunId);
        // Add every other programRunId
        if ((i % 2) == 0) {
            programRunIdSetHalf.add(programRunId);
        }
        txnl.execute(new TransactionExecutor.Subroutine() {

            @Override
            public void apply() throws Exception {
                // Start the program and stop it
                metadataStoreDataset.recordProgramStart(program, runId.getId(), RunIds.getTime(runId, TimeUnit.SECONDS), null, null, null);
                metadataStoreDataset.recordProgramStop(program, runId.getId(), RunIds.getTime(runId, TimeUnit.SECONDS), ProgramRunStatus.values()[index % ProgramRunStatus.values().length], null);
            }
        });
    }
    txnl.execute(new TransactionExecutor.Subroutine() {

        @Override
        public void apply() throws Exception {
            Map<ProgramRunId, RunRecordMeta> runMap = metadataStoreDataset.getRuns(programRunIdSet);
            Set<String> actual = new TreeSet<>();
            for (Map.Entry<ProgramRunId, RunRecordMeta> entry : runMap.entrySet()) {
                actual.add(entry.getValue().getPid());
            }
            Assert.assertEquals(expected, actual);
            Map<ProgramRunId, RunRecordMeta> runMapHalf = metadataStoreDataset.getRuns(programRunIdSetHalf);
            Set<String> actualHalf = new TreeSet<>();
            for (Map.Entry<ProgramRunId, RunRecordMeta> entry : runMapHalf.entrySet()) {
                actualHalf.add(entry.getValue().getPid());
            }
            Assert.assertEquals(expectedHalf, actualHalf);
        }
    });
}
Also used : Table(co.cask.cdap.api.dataset.table.Table) TreeSet(java.util.TreeSet) HashSet(java.util.HashSet) Set(java.util.Set) TransactionExecutor(org.apache.tephra.TransactionExecutor) ProgramId(co.cask.cdap.proto.id.ProgramId) TransactionFailureException(org.apache.tephra.TransactionFailureException) DatasetId(co.cask.cdap.proto.id.DatasetId) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) TransactionAware(org.apache.tephra.TransactionAware) TreeSet(java.util.TreeSet) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) ApplicationId(co.cask.cdap.proto.id.ApplicationId) RunId(org.apache.twill.api.RunId) ProgramRunId(co.cask.cdap.proto.id.ProgramRunId) Map(java.util.Map) ImmutableMap(com.google.common.collect.ImmutableMap) HashSet(java.util.HashSet) Test(org.junit.Test)
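getRuns is keyed by ProgramRunId, which the test assembles from the parts of the ProgramId plus the run id string. A minimal sketch of building such keys for a batch lookup, reusing the same constructor and again assuming RunIds from co.cask.cdap.common.app:

import co.cask.cdap.common.app.RunIds;
import co.cask.cdap.proto.ProgramType;
import co.cask.cdap.proto.id.ApplicationId;
import co.cask.cdap.proto.id.NamespaceId;
import co.cask.cdap.proto.id.ProgramId;
import co.cask.cdap.proto.id.ProgramRunId;
import java.util.HashSet;
import java.util.Set;
import org.apache.twill.api.RunId;

public class ProgramRunIdKeysSketch {
    public static void main(String[] args) {
        ApplicationId application = NamespaceId.DEFAULT.app("app");
        ProgramId program = application.program(ProgramType.FLOW, "program");
        Set<ProgramRunId> keys = new HashSet<>();
        for (int i = 0; i < 4; i++) {
            RunId runId = RunIds.generate((i + 1) * 10000);
            // Same constructor the test uses: namespace, application, type, program name, run id
            keys.add(new ProgramRunId(program.getNamespace(), program.getApplication(),
                                      program.getType(), program.getProgram(), runId.toString()));
        }
        System.out.println(keys.size()); // expected: 4 distinct keys
    }
}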

Aggregations

ProgramId (co.cask.cdap.proto.id.ProgramId): 209
Test (org.junit.Test): 89
ApplicationId (co.cask.cdap.proto.id.ApplicationId): 69
Path (javax.ws.rs.Path): 45
StreamId (co.cask.cdap.proto.id.StreamId): 35
DatasetId (co.cask.cdap.proto.id.DatasetId): 34
RunId (org.apache.twill.api.RunId): 34
ProgramRunId (co.cask.cdap.proto.id.ProgramRunId): 31
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 29
ProgramType (co.cask.cdap.proto.ProgramType): 25
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 24
IOException (java.io.IOException): 24
NotFoundException (co.cask.cdap.common.NotFoundException): 22
HttpResponse (org.apache.http.HttpResponse): 19
ArrayList (java.util.ArrayList): 18
GET (javax.ws.rs.GET): 18
Id (co.cask.cdap.proto.Id): 16
File (java.io.File): 15
POST (javax.ws.rs.POST): 15
ArtifactId (co.cask.cdap.proto.id.ArtifactId): 13