Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
The class AuthorizationTest, method testWorkerStreamAuth.
@Test
@Category(SlowTests.class)
public void testWorkerStreamAuth() throws Exception {
  createAuthNamespace();
  Authorizer authorizer = getAuthorizer();
  setUpPrivilegeToDeployStreamAuthApp();
  // Grant Alice READ/WRITE on the stream and EXECUTE on the worker, and register
  // the grants for cleanup after the test.
  StreamId stream = AUTH_NAMESPACE.stream(StreamAuthApp.STREAM);
  Map<EntityId, Set<Action>> privileges = ImmutableMap.<EntityId, Set<Action>>builder()
    .put(stream, EnumSet.of(Action.READ, Action.WRITE))
    .put(AUTH_NAMESPACE.app(StreamAuthApp.APP).worker(StreamAuthApp.WORKER), EnumSet.of(Action.EXECUTE))
    .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, privileges);
  ApplicationManager app = deployApplication(AUTH_NAMESPACE, StreamAuthApp.class);
  WorkerManager worker = app.getWorkerManager(StreamAuthApp.WORKER);
  // First run: Alice holds WRITE, so the worker completes and the stream has 5 events.
  worker.start();
  worker.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
  StreamManager events = getStreamManager(stream);
  Assert.assertEquals(5, events.getEvents(0, Long.MAX_VALUE, Integer.MAX_VALUE).size());
  // Now revoke write permission for Alice on that stream
  authorizer.revoke(Authorizable.fromEntityId(stream), ALICE, EnumSet.of(Action.WRITE));
  // Second run: without WRITE the worker must fail and the event count stays at 5.
  worker.start();
  worker.waitForRuns(ProgramRunStatus.FAILED, 1, 60, TimeUnit.SECONDS);
  Assert.assertEquals(5, events.getEvents(0, Long.MAX_VALUE, Integer.MAX_VALUE).size());
  app.delete();
}
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
The class ServiceLifeCycleTestRun, method testLifecycleWithThreadTerminates.
@Test
public void testLifecycleWithThreadTerminates() throws Exception {
  // Shorten the handler cleanup period so the idle-handler termination happens quickly in test.
  System.setProperty(AbstractServiceHttpServer.HANDLER_CLEANUP_PERIOD_MILLIS, "100");
  try {
    ApplicationManager appManager = deployWithArtifact(ServiceLifecycleApp.class, artifactJar);
    // Single handler thread with a 1-second keep-alive so an idle handler is destroyed fast.
    serviceManager = appManager.getServiceManager("test")
      .start(ImmutableMap.of(SystemArguments.SERVICE_THREADS, "1",
                             SystemArguments.SERVICE_THREAD_KEEPALIVE_SECS, "1"));
    // Make a call to the service, expect an init state
    Multimap<Integer, String> states = getStates(serviceManager);
    Assert.assertEquals(1, states.size());
    int handlerHashCode = states.keySet().iterator().next();
    Assert.assertEquals(ImmutableList.of("INIT"), ImmutableList.copyOf(states.get(handlerHashCode)));
    // Sleep for 3 seconds so the thread goes IDLE, gets terminated and cleaned up
    TimeUnit.SECONDS.sleep(3);
    states = getStates(serviceManager);
    // Size of states keys should be two, since the old instance must get destroyed and there is
    // a new one created to handle the getStates request.
    Assert.assertEquals(2, states.keySet().size());
    // For the state changes for the old handler, it should have INIT, DESTROY
    Assert.assertEquals(ImmutableList.of("INIT", "DESTROY"), ImmutableList.copyOf(states.get(handlerHashCode)));
    // For the state changes for the new handler, it should be INIT
    for (int key : states.keys()) {
      if (key != handlerHashCode) {
        Assert.assertEquals(ImmutableList.of("INIT"), ImmutableList.copyOf(states.get(key)));
      }
    }
  } finally {
    // Guard against an NPE masking the original failure: if deployment or start threw,
    // serviceManager may never have been assigned.
    if (serviceManager != null) {
      serviceManager.stop();
      serviceManager.waitForStatus(false);
    }
    // Restore the http server cleanup period to its default.
    System.clearProperty(AbstractServiceHttpServer.HANDLER_CLEANUP_PERIOD_MILLIS);
  }
}
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
The class SparkTest, method testClassicSpark.
@Test
public void testClassicSpark() throws Exception {
  ApplicationManager applicationManager = deploy(TestSparkApp.class);
  // Run both the Java and Scala flavors of the classic Spark program to completion.
  for (Class<?> program : Arrays.asList(TestSparkApp.ClassicSpark.class, TestSparkApp.ScalaClassicSpark.class)) {
    SparkManager manager = applicationManager.getSparkManager(program.getSimpleName()).start();
    manager.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  }
  // Each program writes a single result keyed by its program class name.
  KeyValueTable results = this.<KeyValueTable>getDataset("ResultTable").get();
  Assert.assertEquals(1L, Bytes.toLong(results.read(ClassicSparkProgram.class.getName())));
  Assert.assertEquals(1L, Bytes.toLong(results.read(ScalaClassicSparkProgram.class.getName())));
}
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
The class SparkTest, method testSparkProgramStatusSchedule.
@Test
public void testSparkProgramStatusSchedule() throws Exception {
  ApplicationManager appManager = deploy(TestSparkApp.class);
  // Enable the schedule that triggers TriggeredWorkflow on Spark program status.
  ScheduleId schedule = new ScheduleId(NamespaceId.DEFAULT.getNamespace(),
                                       TestSparkApp.class.getSimpleName(), "schedule");
  appManager.enableSchedule(schedule);
  String sparkName = TestSparkApp.ScalaClassicSpark.class.getSimpleName();
  // Start the upstream program
  appManager.getSparkManager(sparkName).start();
  // Wait for the downstream to complete
  WorkflowManager triggered = appManager.getWorkflowManager(TestSparkApp.TriggeredWorkflow.class.getSimpleName());
  triggered.waitForRun(ProgramRunStatus.COMPLETED, 5, TimeUnit.MINUTES);
  // Run again with the kryo serializer
  appManager.getSparkManager(sparkName)
    .start(Collections.singletonMap("spark.serializer", "org.apache.spark.serializer.KryoSerializer"));
  // Wait for the downstream to complete again
  triggered.waitForRuns(ProgramRunStatus.COMPLETED, 2, 5, TimeUnit.MINUTES);
}
Use of co.cask.cdap.test.ApplicationManager in project cdap by caskdata.
The class SparkTest, method testDatasetSQL.
@Test
public void testDatasetSQL() throws Exception {
  ApplicationManager appManager = deploy(TestSparkApp.class);
  // Seed the table with three people before running the Spark SQL job.
  DataSetManager<ObjectMappedTable<Person>> people = getDataset("PersonTable");
  ObjectMappedTable<Person> table = people.get();
  table.write("1", new Person("Bob", 10));
  table.write("2", new Person("Bill", 20));
  table.write("3", new Person("Berry", 30));
  people.flush();
  SparkManager spark = appManager.getSparkManager(DatasetSQLSpark.class.getSimpleName()).start();
  spark.waitForRun(ProgramRunStatus.COMPLETED, 2, TimeUnit.MINUTES);
  // The program executes "SELECT * FROM Person WHERE age > 10", hence expected two new entries for Bill and Berry.
  people.flush();
  Person bill = table.read("new:2");
  Assert.assertEquals("Bill", bill.name());
  Assert.assertEquals(20, bill.age());
  Person berry = table.read("new:3");
  Assert.assertEquals("Berry", berry.name());
  Assert.assertEquals(30, berry.age());
  // Shouldn't have new Bob
  Assert.assertNull(table.read("new:1"));
}
Aggregations