Use of io.cdap.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.
The class ApplicationLifecycleService, method deployApp:
private ApplicationWithPrograms deployApp(NamespaceId namespaceId, @Nullable String appName,
                                          @Nullable String appVersion, @Nullable String configStr,
                                          ProgramTerminator programTerminator, ArtifactDetail artifactDetail,
                                          @Nullable KerberosPrincipalId ownerPrincipal, boolean updateSchedules,
                                          boolean isPreview, Map<String, String> userProps) throws Exception {
  // Deploying an app requires ADMIN privilege on the owner principal (if present) and ADMIN on the app itself.
  // Since the app name is not yet known at this point, enforcement on the app happens later in the deploy
  // pipeline, in LocalArtifactLoaderStage.
  // Enforce on the principal id if impersonation is involved.
  KerberosPrincipalId effectiveOwner =
    SecurityUtil.getEffectiveOwner(ownerAdmin, namespaceId,
                                   ownerPrincipal == null ? null : ownerPrincipal.getPrincipal());
  Principal requestingUser = authenticationContext.getPrincipal();
  // impersonated principal
  if (effectiveOwner != null) {
    accessEnforcer.enforce(effectiveOwner, requestingUser, AccessPermission.SET_OWNER);
  }
  ApplicationClass appClass = Iterables.getFirst(artifactDetail.getMeta().getClasses().getApps(), null);
  if (appClass == null) {
    throw new InvalidArtifactException(String.format("No application class found in artifact '%s' in namespace '%s'.",
                                                     artifactDetail.getDescriptor().getArtifactId(), namespaceId));
  }
  if (!NamespaceId.SYSTEM.equals(namespaceId)) {
    capabilityReader.checkAllEnabled(appClass.getRequirements().getCapabilities());
  }
  // deploy the application with the newly added artifact
  AppDeploymentInfo deploymentInfo =
    new AppDeploymentInfo(Artifacts.toProtoArtifactId(namespaceId, artifactDetail.getDescriptor().getArtifactId()),
                          artifactDetail.getDescriptor().getLocation(), namespaceId, appClass, appName, appVersion,
                          configStr, ownerPrincipal, updateSchedules,
                          isPreview ? new AppDeploymentRuntimeInfo(null, userProps, Collections.emptyMap()) : null);
  Manager<AppDeploymentInfo, ApplicationWithPrograms> manager = managerFactory.create(programTerminator);
  // TODO: (CDAP-3258) Manager needs MUCH better error handling.
  ApplicationWithPrograms applicationWithPrograms;
  try {
    applicationWithPrograms = manager.deploy(deploymentInfo).get();
  } catch (ExecutionException e) {
    // Unwrap the cause: rethrow it directly if it is an Exception, otherwise wrap it unchecked.
    Throwables.propagateIfPossible(e.getCause(), Exception.class);
    throw Throwables.propagate(e.getCause());
  }
  adminEventPublisher.publishAppCreation(applicationWithPrograms.getApplicationId(),
                                         applicationWithPrograms.getSpecification());
  return applicationWithPrograms;
}
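The catch block above shows the standard Guava pattern for unwrapping an ExecutionException thrown by Future.get(). The following is a minimal, self-contained sketch of that pattern; the DeployErrorHandling class and its deploy() method are illustrative stand-ins, not part of CDAP:

import com.google.common.base.Throwables;
import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;

public class DeployErrorHandling {
  // Illustrative stand-in for manager.deploy(...): a future that fails with a checked cause.
  static CompletableFuture<String> deploy() {
    CompletableFuture<String> f = new CompletableFuture<>();
    f.completeExceptionally(new IOException("artifact not readable"));
    return f;
  }

  public static void main(String[] args) throws Exception {
    try {
      deploy().get();
    } catch (ExecutionException e) {
      // Rethrows the cause as-is if it is an Exception (checked or unchecked) ...
      Throwables.propagateIfPossible(e.getCause(), Exception.class);
      // ... otherwise (an Error or a bare Throwable) wraps it and rethrows unchecked.
      throw Throwables.propagate(e.getCause());
    }
  }
}

The net effect is that callers of deployApp see the original failure cause rather than an ExecutionException wrapper.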
Use of io.cdap.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.
The class WorkerProgramRunnerTest, method testWorkerWithMisbehavedDataset:
@Test
public void testWorkerWithMisbehavedDataset() throws Throwable {
  final ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(AppWithMisbehavedDataset.class, TEMP_FOLDER_SUPPLIER);
  final ProgramController controller = startProgram(app, AppWithMisbehavedDataset.TableWriter.class);
  Tasks.waitFor(ProgramController.State.COMPLETED, new Callable<ProgramController.State>() {
    @Override
    public ProgramController.State call() throws Exception {
      return controller.getState();
    }
  }, 30, TimeUnit.SECONDS);
  // validate that the worker was able to execute its second transaction
  final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);
  executor.execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      Table table = datasetCache.getDataset(AppWithMisbehavedDataset.TABLE);
      Row result = table.get(new Get(AppWithMisbehavedDataset.ROW, AppWithMisbehavedDataset.COLUMN));
      Assert.assertEquals(AppWithMisbehavedDataset.VALUE, result.getString(AppWithMisbehavedDataset.COLUMN));
    }
  });
}
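Since Callable is a functional interface, the anonymous class passed to Tasks.waitFor above can be replaced by a method reference on Java 8+. A sketch of the equivalent call, assuming the same Tasks.waitFor overload used in the test:

Tasks.waitFor(ProgramController.State.COMPLETED, controller::getState, 30, TimeUnit.SECONDS);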
Use of io.cdap.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.
The class WorkerProgramRunnerTest, method testWorkerDatasetWithMetrics:
@Test
public void testWorkerDatasetWithMetrics() throws Throwable {
  final ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(AppWithWorker.class, TEMP_FOLDER_SUPPLIER);
  ProgramController controller = startProgram(app, AppWithWorker.TableWriter.class);
  // validate that the worker wrote the "initialize" and "run" rows
  final TransactionExecutor executor = txExecutorFactory.createExecutor(datasetCache);
  // wait at most 5 seconds until the "RUN" row is set (indicates the worker has started running)
  Tasks.waitFor(AppWithWorker.RUN, new Callable<String>() {
    @Override
    public String call() throws Exception {
      return executor.execute(new Callable<String>() {
        @Override
        public String call() throws Exception {
          KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
          return Bytes.toString(kvTable.read(AppWithWorker.RUN));
        }
      });
    }
  }, 5, TimeUnit.SECONDS);
  stopProgram(controller);
  txExecutorFactory.createExecutor(datasetCache.getTransactionAwares()).execute(new TransactionExecutor.Subroutine() {
    @Override
    public void apply() throws Exception {
      KeyValueTable kvTable = datasetCache.getDataset(AppWithWorker.DATASET);
      Assert.assertEquals(AppWithWorker.RUN, Bytes.toString(kvTable.read(AppWithWorker.RUN)));
      Assert.assertEquals(AppWithWorker.INITIALIZE, Bytes.toString(kvTable.read(AppWithWorker.INITIALIZE)));
      Assert.assertEquals(AppWithWorker.STOP, Bytes.toString(kvTable.read(AppWithWorker.STOP)));
    }
  });
  // validate that the table emitted metrics
  Tasks.waitFor(3L, new Callable<Long>() {
    @Override
    public Long call() throws Exception {
      Collection<MetricTimeSeries> metrics = metricStore.query(
        new MetricDataQuery(0, System.currentTimeMillis() / 1000L, Integer.MAX_VALUE,
                            "system." + Constants.Metrics.Name.Dataset.OP_COUNT, AggregationFunction.SUM,
                            ImmutableMap.of(Constants.Metrics.Tag.NAMESPACE, DefaultId.NAMESPACE.getEntityName(),
                                            Constants.Metrics.Tag.APP, AppWithWorker.NAME,
                                            Constants.Metrics.Tag.WORKER, AppWithWorker.WORKER,
                                            Constants.Metrics.Tag.DATASET, AppWithWorker.DATASET),
                            Collections.<String>emptyList()));
      if (metrics.isEmpty()) {
        return 0L;
      }
      Assert.assertEquals(1, metrics.size());
      MetricTimeSeries ts = metrics.iterator().next();
      Assert.assertEquals(1, ts.getTimeValues().size());
      return ts.getTimeValues().get(0).getValue();
    }
  }, 5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS);
}
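In the final waitFor call, the four trailing arguments (5L, TimeUnit.SECONDS, 50L, TimeUnit.MILLISECONDS) are the overall timeout and the poll interval. A minimal, generic stand-in that shows the semantics of such an overload (a sketch under that assumption, not CDAP's actual implementation):

import java.util.concurrent.Callable;
import java.util.concurrent.TimeoutException;

// Poll `check` every `pollMillis` until it returns `expected` or `timeoutMillis` elapses.
static <T> void waitFor(T expected, Callable<T> check, long timeoutMillis, long pollMillis) throws Exception {
  long deadline = System.currentTimeMillis() + timeoutMillis;
  while (!expected.equals(check.call())) {
    if (System.currentTimeMillis() >= deadline) {
      throw new TimeoutException("condition not met within " + timeoutMillis + " ms");
    }
    Thread.sleep(pollMillis);
  }
}

This is why the test tolerates the metric store being briefly empty: the Callable simply returns 0L and is polled again 50 ms later.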
Use of io.cdap.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.
The class LocalApplicationManagerTest, method testGoodPipeline:
/**
 * Good pipeline with good tests.
 */
@Test
public void testGoodPipeline() throws Exception {
  Location deployedJar = AppJarHelper.createDeploymentJar(lf, AllProgramsApp.class);
  ArtifactId artifactId = new ArtifactId("app", new ArtifactVersion("1.0.0-SNAPSHOT"), ArtifactScope.USER);
  ApplicationClass applicationClass = new ApplicationClass(AllProgramsApp.class.getName(), "", null);
  AppDeploymentInfo info =
    new AppDeploymentInfo(Artifacts.toProtoArtifactId(NamespaceId.DEFAULT, artifactId), deployedJar,
                          NamespaceId.DEFAULT, applicationClass, null, null, null);
  ApplicationWithPrograms input = AppFabricTestHelper.getLocalManager().deploy(info).get();
  ApplicationSpecification appSpec = Specifications.from(new AllProgramsApp());
  // Validate that all programs are captured by the deployment pipeline
  Map<ProgramType, Set<String>> programByTypes = new HashMap<>();
  for (ProgramDescriptor desc : input.getPrograms()) {
    ProgramId programId = desc.getProgramId();
    programByTypes.computeIfAbsent(programId.getType(), k -> new HashSet<>()).add(programId.getProgram());
  }
  for (io.cdap.cdap.api.app.ProgramType programType : io.cdap.cdap.api.app.ProgramType.values()) {
    Assert.assertEquals(appSpec.getProgramsByType(programType),
                        programByTypes.getOrDefault(ProgramType.valueOf(programType.name()), Collections.emptySet()));
  }
}
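The grouping loop above can equivalently be written with streams. A sketch, assuming getPrograms() returns a plain Iterable (hence the StreamSupport wrapper); note that groupingBy produces no entry for program types with zero programs, so the getOrDefault in the assertion loop is still needed:

import java.util.stream.Collectors;
import java.util.stream.StreamSupport;

Map<ProgramType, Set<String>> programByTypes =
  StreamSupport.stream(input.getPrograms().spliterator(), false)
    .map(ProgramDescriptor::getProgramId)
    // group program names by their type
    .collect(Collectors.groupingBy(ProgramId::getType,
                                   Collectors.mapping(ProgramId::getProgram, Collectors.toSet())));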
Use of io.cdap.cdap.internal.app.deploy.pipeline.ApplicationWithPrograms in project cdap by caskdata.
The class MapReduceProgramRunnerTest, method testSuccess:
private void testSuccess(boolean frequentFlushing) throws Exception {
  final ApplicationWithPrograms app = deployApp(AppWithMapReduce.class);
  // we need to start a tx context and do a "get" on all datasets so that they are in the datasetCache
  datasetCache.newTransactionContext();
  final TimeseriesTable table = datasetCache.getDataset("timeSeries");
  final KeyValueTable beforeSubmitTable = datasetCache.getDataset("beforeSubmit");
  final KeyValueTable onFinishTable = datasetCache.getDataset("onFinish");
  final Table counters = datasetCache.getDataset("counters");
  final Table countersFromContext = datasetCache.getDataset("countersFromContext");
  // 1) fill test data
  fillTestInputData(txExecutorFactory, table, false);
  // 2) run the job
  final long start = System.currentTimeMillis();
  runProgram(app, AppWithMapReduce.AggregateTimeseriesByTag.class, frequentFlushing, true);
  final long stop = System.currentTimeMillis();
  // 3) verify results
  Transactions.createTransactionExecutor(txExecutorFactory, datasetCache.getTransactionAwares())
    .execute(new TransactionExecutor.Subroutine() {
      @Override
      public void apply() {
        Map<String, Long> expected = Maps.newHashMap();
        // note: not all records add to the sum, since a filter by tag="tag1" and ts={1..3} is used
        expected.put("tag1", 18L);
        expected.put("tag2", 3L);
        expected.put("tag3", 18L);
        Iterator<TimeseriesTable.Entry> agg = table.read(AggregateMetricsByTag.BY_TAGS, start, stop);
        int count = 0;
        while (agg.hasNext()) {
          TimeseriesTable.Entry entry = agg.next();
          String tag = Bytes.toString(entry.getTags()[0]);
          Assert.assertEquals((long) expected.get(tag), Bytes.toLong(entry.getValue()));
          count++;
        }
        Assert.assertEquals(expected.size(), count);
        Assert.assertArrayEquals(Bytes.toBytes("beforeSubmit:done"),
                                 beforeSubmitTable.read(Bytes.toBytes("beforeSubmit")));
        Assert.assertArrayEquals(Bytes.toBytes("onFinish:done"),
                                 onFinishTable.read(Bytes.toBytes("onFinish")));
        Assert.assertTrue(counters.get(new Get("mapper")).getLong("records", 0) > 0);
        Assert.assertTrue(counters.get(new Get("reducer")).getLong("records", 0) > 0);
        Assert.assertTrue(countersFromContext.get(new Get("mapper")).getLong("records", 0) > 0);
        Assert.assertTrue(countersFromContext.get(new Get("reducer")).getLong("records", 0) > 0);
      }
    });
  datasetCache.dismissTransactionContext();
  // todo: verify metrics. Will be possible after the refactoring for CDAP-765
}
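TransactionExecutor.Subroutine is a single-method interface, so on Java 8+ the verification block can be a lambda. A sketch of the shape, abbreviated to two of the assertions; the cast is included defensively in case TransactionExecutor.execute is also overloaded for Callable, which would otherwise make a bare block lambda harder to read:

TransactionExecutor executor =
  Transactions.createTransactionExecutor(txExecutorFactory, datasetCache.getTransactionAwares());
executor.execute((TransactionExecutor.Subroutine) () -> {
  // reads run inside a single transaction, same as the anonymous-class version above
  Assert.assertArrayEquals(Bytes.toBytes("beforeSubmit:done"),
                           beforeSubmitTable.read(Bytes.toBytes("beforeSubmit")));
  Assert.assertArrayEquals(Bytes.toBytes("onFinish:done"),
                           onFinishTable.read(Bytes.toBytes("onFinish")));
});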