Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
From the class OpenCloseDataSetTest, method testDataSetsAreClosed.
@Test(timeout = 120000)
public void testDataSetsAreClosed() throws Exception {
  final String tableName = "foo";
  TrackingTable.resetTracker();
  ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(DummyAppWithTrackingTable.class, TEMP_FOLDER_SUPPLIER);
  List<ProgramController> controllers = Lists.newArrayList();

  // start the programs
  for (ProgramDescriptor programDescriptor : app.getPrograms()) {
    if (programDescriptor.getProgramId().getType().equals(ProgramType.MAPREDUCE)) {
      continue;
    }
    // start the service with one handler thread
    Map<String, String> args = Collections.singletonMap(SystemArguments.SERVICE_THREADS, "1");
    controllers.add(AppFabricTestHelper.submit(app, programDescriptor.getSpecification().getClassName(),
                                               new BasicArguments(args), TEMP_FOLDER_SUPPLIER));
  }

  DiscoveryServiceClient discoveryServiceClient =
    AppFabricTestHelper.getInjector().getInstance(DiscoveryServiceClient.class);
  Discoverable discoverable = new RandomEndpointStrategy(
    () -> discoveryServiceClient.discover(String.format("%s.%s.%s.%s",
                                                        ProgramType.SERVICE.getDiscoverableTypeName(),
                                                        DefaultId.NAMESPACE.getEntityName(),
                                                        "dummy", "DummyService"))).pick(5, TimeUnit.SECONDS);
  Assert.assertNotNull(discoverable);

  // write some data to the tracking table through the service
  for (int i = 0; i < 4; i++) {
    String msg = "x" + i;
    URL url = URIScheme.createURI(discoverable,
                                  "v3/namespaces/default/apps/dummy/services/DummyService/methods/%s",
                                  msg).toURL();
    HttpRequests.execute(HttpRequest.put(url).build(), new DefaultHttpRequestConfig(false));
  }

  // get the number of writes to the foo table
  Assert.assertEquals(4, TrackingTable.getTracker(tableName, "write"));
  // only 2 "open" calls should be tracked:
  // 1. the service has started with one instance (the service is loaded lazily on the first request)
  // 2. DatasetSystemMetadataWriter also instantiates the dataset because it needs to add some system tags
  //    for the dataset
  Assert.assertEquals(2, TrackingTable.getTracker(tableName, "open"));

  // now query data from the service
  URL url = URIScheme.createURI(discoverable,
                                "v3/namespaces/default/apps/dummy/services/DummyService/methods/x1").toURL();
  HttpResponse response = HttpRequests.execute(HttpRequest.get(url).build(), new DefaultHttpRequestConfig(false));
  String responseContent = new Gson().fromJson(response.getResponseBodyAsString(), String.class);
  Assert.assertEquals("x1", responseContent);

  // now the dataset must have one read operation
  Assert.assertEquals(1, TrackingTable.getTracker(tableName, "read"));
  // since the same service instance is used, there shouldn't be any new open
  Assert.assertEquals(2, TrackingTable.getTracker(tableName, "open"));
  // the dataset that was instantiated by the DatasetSystemMetadataWriter should have been closed
  Assert.assertEquals(1, TrackingTable.getTracker(tableName, "close"));

  // stop all programs; each of them should close the dataset foo
  for (ProgramController controller : controllers) {
    controller.stop().get();
  }
  int timesOpened = TrackingTable.getTracker(tableName, "open");
  Assert.assertTrue(timesOpened >= 2);
  Assert.assertEquals(timesOpened, TrackingTable.getTracker(tableName, "close"));

  // now start the m/r job
  ProgramController controller = null;
  for (ProgramDescriptor programDescriptor : app.getPrograms()) {
    if (programDescriptor.getProgramId().getType().equals(ProgramType.MAPREDUCE)) {
      controller = AppFabricTestHelper.submit(app, programDescriptor.getSpecification().getClassName(),
                                              new BasicArguments(), TEMP_FOLDER_SUPPLIER);
    }
  }
  Assert.assertNotNull(controller);
  while (!controller.getState().equals(ProgramController.State.COMPLETED)) {
    TimeUnit.MILLISECONDS.sleep(100);
  }

  // the m/r job is done; the mapper and the m/r client should have opened and closed the dataset foo.
  // We don't know the exact number of times it was opened, but it is at least once, and it must be
  // closed the same number of times.
  Assert.assertTrue(timesOpened < TrackingTable.getTracker(tableName, "open"));
  Assert.assertEquals(TrackingTable.getTracker(tableName, "open"), TrackingTable.getTracker(tableName, "close"));
  Assert.assertTrue(0 < TrackingTable.getTracker("bar", "open"));
  Assert.assertEquals(TrackingTable.getTracker("bar", "open"), TrackingTable.getTracker("bar", "close"));
}
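The test above shows the two common ways BasicArguments is constructed: wrapping a Map of runtime arguments, or empty when a program takes none. Below is a minimal sketch isolating just that pattern, assuming the Arguments#getOption accessor; the class name is hypothetical.

import io.cdap.cdap.internal.app.runtime.BasicArguments;
import io.cdap.cdap.internal.app.runtime.SystemArguments;

import java.util.Collections;
import java.util.Map;

public class BasicArgumentsSketch {
  public static void main(String[] args) {
    // one handler thread for the service, as in the test above
    Map<String, String> raw = Collections.singletonMap(SystemArguments.SERVICE_THREADS, "1");
    BasicArguments serviceArgs = new BasicArguments(raw);
    // the no-arg constructor stands in for "no runtime arguments", as for the m/r job above
    BasicArguments empty = new BasicArguments();
    System.out.println(serviceArgs.getOption(SystemArguments.SERVICE_THREADS)); // prints "1"
    System.out.println(empty.getOption(SystemArguments.SERVICE_THREADS));       // prints null
  }
}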
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
From the class ProvisioningServiceTest, method createTaskInfo.
private TaskFields createTaskInfo(ProvisionerInfo provisionerInfo) {
  ProgramRunId programRunId = NamespaceId.DEFAULT.app("app").workflow("wf").run(RunIds.generate());
  Map<String, String> systemArgs = new HashMap<>();
  Map<String, String> userArgs = new HashMap<>();
  Profile profile = new Profile(ProfileId.NATIVE.getProfile(), "label", "desc", provisionerInfo);
  SystemArguments.addProfileArgs(systemArgs, profile);
  systemArgs.put(Constants.APP_CDAP_VERSION, APP_CDAP_VERSION);
  ProgramOptions programOptions = new SimpleProgramOptions(programRunId.getParent(),
                                                           new BasicArguments(systemArgs),
                                                           new BasicArguments(userArgs));
  ArtifactId artifactId = NamespaceId.DEFAULT.artifact("testArtifact", "1.0").toApiArtifactId();
  ApplicationSpecification appSpec = new DefaultApplicationSpecification(
    "name", "1.0.0", APP_CDAP_VERSION, "desc", null, artifactId,
    Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(),
    Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(),
    Collections.emptyMap());
  ProgramDescriptor programDescriptor = new ProgramDescriptor(programRunId.getParent(), appSpec);
  return new TaskFields(programDescriptor, programOptions, programRunId);
}
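createTaskInfo illustrates the convention that system arguments and user arguments travel as two separate BasicArguments instances inside a ProgramOptions. A hedged sketch of just that split; the class name and argument values are hypothetical.

import io.cdap.cdap.app.runtime.ProgramOptions;
import io.cdap.cdap.internal.app.runtime.BasicArguments;
import io.cdap.cdap.internal.app.runtime.SimpleProgramOptions;
import io.cdap.cdap.proto.id.NamespaceId;
import io.cdap.cdap.proto.id.ProgramId;

import java.util.HashMap;
import java.util.Map;

public class ProgramOptionsSketch {
  public static void main(String[] args) {
    ProgramId programId = NamespaceId.DEFAULT.app("app").workflow("wf");
    Map<String, String> systemArgs = new HashMap<>(); // filled by SystemArguments helpers in the test
    Map<String, String> userArgs = new HashMap<>();   // whatever the caller passes as runtime args
    userArgs.put("inputPath", "/tmp/in");             // hypothetical user argument
    // system arguments go in the second position, user arguments in the third
    ProgramOptions options = new SimpleProgramOptions(programId,
                                                      new BasicArguments(systemArgs),
                                                      new BasicArguments(userArgs));
    System.out.println(options.getUserArguments().getOption("inputPath")); // prints "/tmp/in"
  }
}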
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
From the class WorkflowTest, method testOneActionWorkflow.
@Test(timeout = 120 * 1000L)
public void testOneActionWorkflow() throws Exception {
  final ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(OneActionWorkflowApp.class, TEMP_FOLDER_SUPPLIER);
  final Injector injector = AppFabricTestHelper.getInjector();
  final ProgramDescriptor programDescriptor = Iterators.filter(
    app.getPrograms().iterator(), input -> input.getProgramId().getType() == ProgramType.WORKFLOW).next();
  final SettableFuture<String> completion = SettableFuture.create();
  final ProgramController controller = AppFabricTestHelper.submit(
    app, programDescriptor.getSpecification().getClassName(), new BasicArguments(), TEMP_FOLDER_SUPPLIER);
  controller.addListener(new AbstractListener() {
    @Override
    public void init(ProgramController.State currentState, @Nullable Throwable cause) {
      LOG.info("Initializing");
      ArtifactId artifactId =
        controller.getProgramRunId().getNamespaceId().artifact("test", "1.0").toApiArtifactId();
      setStartAndRunning(injector.getInstance(Store.class), controller.getProgramRunId().getParent(),
                         controller.getProgramRunId().getRun(), artifactId);
    }

    @Override
    public void completed() {
      LOG.info("Completed");
      completion.set("Completed");
    }

    @Override
    public void error(Throwable cause) {
      LOG.info("Error", cause);
      completion.setException(cause);
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  String run = completion.get();
  Assert.assertEquals("Completed", run);
}
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
From the class WorkflowTest, method testWorkflow.
@Test(timeout = 120 * 1000L)
public void testWorkflow() throws Exception {
  final ApplicationWithPrograms app =
    AppFabricTestHelper.deployApplicationWithManager(WorkflowApp.class, TEMP_FOLDER_SUPPLIER);
  final Injector injector = AppFabricTestHelper.getInjector();
  final ProgramDescriptor programDescriptor = Iterators.filter(
    app.getPrograms().iterator(), input -> input.getProgramId().getType() == ProgramType.WORKFLOW).next();
  String inputPath = createInput();
  String outputPath = new File(tmpFolder.newFolder(), "output").getAbsolutePath();
  BasicArguments userArgs = new BasicArguments(ImmutableMap.of("inputPath", inputPath, "outputPath", outputPath));
  final SettableFuture<String> completion = SettableFuture.create();
  final ProgramController controller = AppFabricTestHelper.submit(
    app, programDescriptor.getSpecification().getClassName(), userArgs, TEMP_FOLDER_SUPPLIER);
  controller.addListener(new AbstractListener() {
    @Override
    public void init(ProgramController.State currentState, @Nullable Throwable cause) {
      LOG.info("Starting");
      ArtifactId artifactId =
        controller.getProgramRunId().getNamespaceId().artifact("test", "1.0").toApiArtifactId();
      setStartAndRunning(injector.getInstance(Store.class), controller.getProgramRunId().getParent(),
                         controller.getProgramRunId().getRun(), artifactId);
    }

    @Override
    public void completed() {
      LOG.info("Completed");
      completion.set("Completed");
    }

    @Override
    public void error(Throwable cause) {
      LOG.info("Error", cause);
      completion.setException(cause);
    }
  }, Threads.SAME_THREAD_EXECUTOR);
  completion.get();
}
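Here BasicArguments carries user-supplied runtime arguments built from an ImmutableMap. A small sketch of the same pattern, assuming the Arguments#asMap view; the paths are hypothetical, since the test derives them from temporary folders.

import com.google.common.collect.ImmutableMap;
import io.cdap.cdap.internal.app.runtime.BasicArguments;

public class UserArgsSketch {
  public static void main(String[] args) {
    BasicArguments userArgs = new BasicArguments(ImmutableMap.of(
      "inputPath", "/tmp/wordcount/input",
      "outputPath", "/tmp/wordcount/output"));
    // asMap exposes the key/value view, e.g. for logging or for scope extraction
    userArgs.asMap().forEach((k, v) -> System.out.println(k + " = " + v));
  }
}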
Use of io.cdap.cdap.internal.app.runtime.BasicArguments in project cdap by cdapio.
From the class MapReduceTaskContextProvider, method createCacheLoader.
/**
 * Creates a {@link CacheLoader} for the task context cache.
 */
private CacheLoader<ContextCacheKey, BasicMapReduceTaskContext> createCacheLoader(final Injector injector) {
  DiscoveryServiceClient discoveryServiceClient = injector.getInstance(DiscoveryServiceClient.class);
  DatasetFramework datasetFramework = injector.getInstance(DatasetFramework.class);
  SecureStore secureStore = injector.getInstance(SecureStore.class);
  SecureStoreManager secureStoreManager = injector.getInstance(SecureStoreManager.class);
  MessagingService messagingService = injector.getInstance(MessagingService.class);
  // Multiple instances of BasicMapReduceTaskContext can share the same program.
  AtomicReference<Program> programRef = new AtomicReference<>();
  MetadataReader metadataReader = injector.getInstance(MetadataReader.class);
  MetadataPublisher metadataPublisher = injector.getInstance(MetadataPublisher.class);
  FieldLineageWriter fieldLineageWriter = injector.getInstance(FieldLineageWriter.class);
  RemoteClientFactory remoteClientFactory = injector.getInstance(RemoteClientFactory.class);
  return new CacheLoader<ContextCacheKey, BasicMapReduceTaskContext>() {
    @Override
    public BasicMapReduceTaskContext load(ContextCacheKey key) throws Exception {
      TaskAttemptID taskAttemptId = key.getTaskAttemptID();
      // taskAttemptId could be null if used from a org.apache.hadoop.mapreduce.Partitioner or
      // from a org.apache.hadoop.io.RawComparator, in which case we can get the JobId from the conf.
      // Note that the JobId isn't in the conf for the OutputCommitter#setupJob method, in which case
      // we use the taskAttemptId.
      Path txFile = MainOutputCommitter.getTxFile(key.getConfiguration(),
                                                  taskAttemptId != null ? taskAttemptId.getJobID() : null);
      FileSystem fs = txFile.getFileSystem(key.getConfiguration());
      Transaction transaction = null;
      if (fs.exists(txFile)) {
        try (FSDataInputStream txFileInputStream = fs.open(txFile)) {
          transaction = new TransactionCodec().decode(ByteStreams.toByteArray(txFileInputStream));
        }
      }
      MapReduceContextConfig contextConfig = new MapReduceContextConfig(key.getConfiguration());
      MapReduceClassLoader classLoader = MapReduceClassLoader.getFromConfiguration(key.getConfiguration());
      Program program = programRef.get();
      if (program == null) {
        // Creation of the program is relatively cheap, so just create it and do a compare-and-set.
        programRef.compareAndSet(null, createProgram(contextConfig, classLoader.getProgramClassLoader()));
        program = programRef.get();
      }
      WorkflowProgramInfo workflowInfo = contextConfig.getWorkflowProgramInfo();
      DatasetFramework programDatasetFramework = workflowInfo == null
        ? datasetFramework
        : NameMappedDatasetFramework.createFromWorkflowProgramInfo(datasetFramework, workflowInfo,
                                                                   program.getApplicationSpecification());
      // Setup dataset framework context, if required
      if (programDatasetFramework instanceof ProgramContextAware) {
        ProgramRunId programRunId = program.getId().run(ProgramRunners.getRunId(contextConfig.getProgramOptions()));
        ((ProgramContextAware) programDatasetFramework).setContext(new BasicProgramContext(programRunId));
      }
      MapReduceSpecification spec = program.getApplicationSpecification().getMapReduce().get(program.getName());
      MetricsCollectionService metricsCollectionService = null;
      MapReduceMetrics.TaskType taskType = null;
      String taskId = null;
      ProgramOptions options = contextConfig.getProgramOptions();
      // taskAttemptId is null when called from a Partitioner or a RawComparator (see above)
      if (taskAttemptId != null) {
        taskId = taskAttemptId.getTaskID().toString();
        if (MapReduceMetrics.TaskType.hasType(taskAttemptId.getTaskType())) {
          taskType = MapReduceMetrics.TaskType.from(taskAttemptId.getTaskType());
          // only a mapper or a reducer task needs the metrics collection service
          metricsCollectionService = injector.getInstance(MetricsCollectionService.class);
          // narrow the user arguments to this task type's scope (e.g. keys prefixed with "task.mapper.")
          options = new SimpleProgramOptions(
            options.getProgramId(), options.getArguments(),
            new BasicArguments(RuntimeArguments.extractScope(
              "task", taskType.toString().toLowerCase(),
              contextConfig.getProgramOptions().getUserArguments().asMap())),
            options.isDebug());
        }
      }
      CConfiguration cConf = injector.getInstance(CConfiguration.class);
      TransactionSystemClient txClient = injector.getInstance(TransactionSystemClient.class);
      NamespaceQueryAdmin namespaceQueryAdmin = injector.getInstance(NamespaceQueryAdmin.class);
      // accessEnforcer, authenticationContext, and mapReduceClassLoader are fields of the
      // enclosing MapReduceTaskContextProvider, not shown in this excerpt
      return new BasicMapReduceTaskContext(
        program, options, cConf, taskType, taskId, spec, workflowInfo, discoveryServiceClient,
        metricsCollectionService, txClient, transaction, programDatasetFramework,
        classLoader.getPluginInstantiator(), contextConfig.getLocalizedResources(), secureStore,
        secureStoreManager, accessEnforcer, authenticationContext, messagingService, mapReduceClassLoader,
        metadataReader, metadataPublisher, namespaceQueryAdmin, fieldLineageWriter, remoteClientFactory);
    }
  };
}