Example usage of io.cdap.cdap.internal.app.runtime.BasicArguments in the cdap project (by cdapio), taken from the method testSimpleJoin of the class MapReduceWithMultipleInputsTest:
@Test
public void testSimpleJoin() throws Exception {
  ApplicationWithPrograms app = deployApp(AppWithMapReduceUsingMultipleInputs.class);

  // Populate the PURCHASES dataset; each record has the format: <customerId> <spend>
  FileSet purchases = datasetCache.getDataset(AppWithMapReduceUsingMultipleInputs.PURCHASES);
  Location purchasesFile = purchases.getBaseLocation().append("inputFile");
  purchasesFile.createNew();
  try (PrintWriter out = new PrintWriter(purchasesFile.getOutputStream())) {
    out.println("1 20");
    out.println("1 25");
    out.println("1 30");
    out.println("2 5");
  }

  // Write the remaining purchase records to the second input file set,
  // in the same <customerId> <spend> format.
  FileSet purchases2 = datasetCache.getDataset(AppWithMapReduceUsingMultipleInputs.PURCHASES2);
  Location purchases2File = purchases2.getBaseLocation().append("inputFile");
  purchases2File.createNew();
  try (PrintWriter out = new PrintWriter(purchases2File.getOutputStream())) {
    out.println("2 13");
    out.println("3 60");
  }

  // Populate the CUSTOMERS dataset; each record has the format: <customerId> <customerName>
  FileSet customers = datasetCache.getDataset(AppWithMapReduceUsingMultipleInputs.CUSTOMERS);
  Location customersFile = customers.getBaseLocation().append("inputFile");
  customersFile.createNew();
  try (PrintWriter out = new PrintWriter(customersFile.getOutputStream())) {
    out.println("1 Bob");
    out.println("2 Samuel");
    out.println("3 Joe");
  }

  // Using multiple inputs, this MapReduce joins the datasets above to get aggregate
  // results in the form: <customerId> <customerName> <totalSpend>
  runProgram(app, AppWithMapReduceUsingMultipleInputs.ComputeSum.class, new BasicArguments());

  FileSet results = datasetCache.getDataset(AppWithMapReduceUsingMultipleInputs.OUTPUT_DATASET);
  // Only one part file is expected, due to the small amount of data.
  Location partFile = results.getBaseLocation().append("output").append("part-r-00000");
  List<String> lines = CharStreams.readLines(
      CharStreams.newReaderSupplier(Locations.newInputSupplier(partFile), Charsets.UTF_8));
  Assert.assertEquals(ImmutableList.of("1 Bob 75", "2 Samuel 18", "3 Joe 60"), lines);

  // Assert that the mapper was initialized and destroyed
  // (this doesn't happen when using hadoop's MultipleOutputs).
  Assert.assertEquals("true", System.getProperty("mapper.initialized"));
  Assert.assertEquals("true", System.getProperty("mapper.destroyed"));
}
Example usage of io.cdap.cdap.internal.app.runtime.BasicArguments in the cdap project (by cdapio), taken from the method testConfigIsolation of the class MapReduceConfigTest:
@Test
public void testConfigIsolation() throws Exception {
  // Deploy the app, then verify the MapReduce runs to completion with empty runtime arguments.
  ApplicationWithPrograms deployed = deployApp(AppWithSingleInputOutput.class);
  boolean succeeded = runProgram(deployed, AppWithSingleInputOutput.SimpleMapReduce.class, new BasicArguments());
  Assert.assertTrue(succeeded);
}
Example usage of io.cdap.cdap.internal.app.runtime.BasicArguments in the cdap project (by cdapio), taken from the method testAddingMultipleOutputsWithSameAlias of the class MapReduceWithMultipleOutputsTest:
@Test
public void testAddingMultipleOutputsWithSameAlias() throws Exception {
  final ApplicationWithPrograms deployed = deployApp(AppWithMapReduceUsingMultipleOutputs.class);
  // The run is expected to fail because the program configured two outputs with the same alias.
  boolean succeeded = runProgram(deployed, AppWithMapReduceUsingMultipleOutputs.InvalidMapReduce.class, new BasicArguments());
  Assert.assertFalse(succeeded);
}
Example usage of io.cdap.cdap.internal.app.runtime.BasicArguments in the cdap project (by cdapio), taken from the method executeCustomAction of the class WorkflowDriver:
/**
 * Executes a single custom-action node of the workflow: builds an action-scoped
 * context, runs the action, and records the node's lifecycle state (RUNNING,
 * then COMPLETED or FAILED) plus the workflow token. Any failure is recorded
 * and then rethrown to the caller.
 */
private void executeCustomAction(final WorkflowActionNode node, InstantiatorFactory instantiator, final ClassLoader classLoader, WorkflowToken token) throws Exception {
CustomActionExecutor customActionExecutor;
// Node has CustomActionSpecification, so it must represent the CustomAction added in 3.5.0
// Create instance of the CustomActionExecutor using CustomActionContext
WorkflowProgramInfo info = new WorkflowProgramInfo(workflowSpec.getName(), node.getNodeId(), workflowRunId.getRun(), node.getNodeId(), (BasicWorkflowToken) token, workflowContext.fieldLineageConsolidationEnabled());
// Narrow the user arguments to this node's ACTION scope so the action only
// sees arguments addressed to it.
ProgramOptions actionOptions = new SimpleProgramOptions(programOptions.getProgramId(), programOptions.getArguments(), new BasicArguments(RuntimeArguments.extractScope(ACTION_SCOPE, node.getNodeId(), programOptions.getUserArguments().asMap())));
BasicCustomActionContext context = new BasicCustomActionContext(program, actionOptions, cConf, node.getCustomActionSpecification(), info, metricsCollectionService, datasetFramework, txClient, discoveryServiceClient, pluginInstantiator, secureStore, secureStoreManager, messagingService, metadataReader, metadataPublisher, namespaceQueryAdmin, fieldLineageWriter, remoteClientFactory);
customActionExecutor = new CustomActionExecutor(context, instantiator, classLoader);
// Mark the node as live in the in-memory status map and persist RUNNING state.
status.put(node.getNodeId(), node);
workflowStateWriter.addWorkflowNodeState(workflowRunId, new WorkflowNodeStateDetail(node.getNodeId(), NodeStatus.RUNNING));
Throwable failureCause = null;
try {
customActionExecutor.execute();
} catch (Throwable t) {
// Remember the cause for the finally-block bookkeeping, then propagate.
failureCause = t;
throw t;
} finally {
// Always: remove the node from the live map, persist the (possibly updated)
// token, and record the terminal node state.
status.remove(node.getNodeId());
workflowStateWriter.setWorkflowToken(workflowRunId, token);
// NOTE(review): this local deliberately-or-not shadows the `status` field
// (the map used above); the field is no longer reachable below this line.
NodeStatus status = failureCause == null ? NodeStatus.COMPLETED : NodeStatus.FAILED;
if (failureCause == null) {
// Field lineage is only written for successful runs.
writeFieldLineage(context);
}
nodeStates.put(node.getNodeId(), new WorkflowNodeState(node.getNodeId(), status, null, failureCause));
BasicThrowable defaultThrowable = failureCause == null ? null : new BasicThrowable(failureCause);
workflowStateWriter.addWorkflowNodeState(workflowRunId, new WorkflowNodeStateDetail(node.getNodeId(), status, null, defaultThrowable));
}
}
Example usage of io.cdap.cdap.internal.app.runtime.BasicArguments in the cdap project (by cdapio), taken from the method testScopingRuntimeArguments of the class AbstractProgramRuntimeServiceTest:
@Test
public void testScopingRuntimeArguments() throws Exception {
  // Captures, per program, the arguments the runner actually received, so we can
  // verify that scope prefixes were stripped correctly.
  Map<ProgramId, Arguments> argumentsMap = new ConcurrentHashMap<>();
  ProgramRunnerFactory runnerFactory = createProgramRunnerFactory(argumentsMap);
  final Program program = createDummyProgram();
  final ProgramRuntimeService runtimeService = new AbstractProgramRuntimeService(CConfiguration.create(), runnerFactory, null, new NoOpProgramStateWriter(), null) {
    @Override
    public ProgramLiveInfo getLiveInfo(ProgramId programId) {
      return new ProgramLiveInfo(programId, "runtime") {
      };
    }

    @Override
    protected Program createProgram(CConfiguration cConf, ProgramRunner programRunner, ProgramDescriptor programDescriptor, ArtifactDetail artifactDetail, File tempDir) throws IOException {
      return program;
    }

    @Override
    protected ArtifactDetail getArtifactDetail(ArtifactId artifactId) throws IOException, ArtifactNotFoundException {
      io.cdap.cdap.api.artifact.ArtifactId id = new io.cdap.cdap.api.artifact.ArtifactId("dummy", new ArtifactVersion("1.0"), ArtifactScope.USER);
      return new ArtifactDetail(new ArtifactDescriptor(NamespaceId.DEFAULT.getEntityName(), id, Locations.toLocation(TEMP_FOLDER.newFile())), new ArtifactMeta(ArtifactClasses.builder().build()));
    }
  };
  runtimeService.startAndWait();
  // BUGFIX: the original wrapped the body in two nested try/finally blocks that
  // BOTH called runtimeService.stopAndWait(), stopping the service twice. A
  // single try/finally guarantees exactly one shutdown on every code path.
  try {
    ProgramDescriptor descriptor = new ProgramDescriptor(program.getId(), null, NamespaceId.DEFAULT.artifact("test", "1.0"));
    // Set of argument-scope prefixes to test, from most to least specific,
    // ending with the unscoped (empty-prefix) case.
    String programScope = program.getType().getScope();
    String clusterName = "c1";
    List<String> scopes = Arrays.asList("cluster.*.", "cluster." + clusterName + ".", "cluster." + clusterName + ".app.*.", "app.*.", "app." + program.getApplicationId() + ".", "app." + program.getApplicationId() + "." + programScope + ".*.", "app." + program.getApplicationId() + "." + programScope + "." + program.getName() + ".", programScope + ".*.", programScope + "." + program.getName() + ".", "");
    for (String scope : scopes) {
      ProgramOptions programOptions = new SimpleProgramOptions(program.getId(), new BasicArguments(Collections.singletonMap(Constants.CLUSTER_NAME, clusterName)), new BasicArguments(Collections.singletonMap(scope + "size", Integer.toString(scope.length()))));
      final ProgramController controller = runtimeService.run(descriptor, programOptions, RunIds.generate()).getController();
      Tasks.waitFor(ProgramController.State.COMPLETED, controller::getState, 5, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
      // The runner should have received the "size" argument with the scope
      // prefix stripped, and its value encodes the prefix length.
      Arguments args = argumentsMap.get(program.getId());
      Assert.assertNotNull(args);
      Assert.assertEquals(scope.length(), Integer.parseInt(args.getOption("size")));
    }
  } finally {
    runtimeService.stopAndWait();
  }
}
End of aggregated usage examples for BasicArguments.