Example usage of io.cdap.cdap.proto.id.EntityId in the CDAP project (caskdata/cdap): class RemotePermissionsTestBase, method testVisibility.
@Test
public void testVisibility() throws Exception {
  // Entities used throughout the test: two apps with one program each, plus three datasets.
  ApplicationId firstApp = NS.app("app1");
  ProgramId firstProgram = firstApp.program(ProgramType.SERVICE, "service1");
  ApplicationId secondApp = NS.app("app2");
  ProgramId secondProgram = secondApp.program(ProgramType.MAPREDUCE, "service2");
  DatasetId plainDataset = NS.dataset("ds");
  DatasetId firstDataset = NS.dataset("ds1");
  DatasetId secondDataset = NS.dataset("ds2");
  // ALICE gets EXECUTE on the shared PROGRAM and GET/UPDATE on the plain dataset.
  permissionManager.grant(Authorizable.fromEntityId(PROGRAM), ALICE,
                          Collections.singleton(ApplicationPermission.EXECUTE));
  permissionManager.grant(Authorizable.fromEntityId(plainDataset), ALICE,
                          EnumSet.of(StandardPermission.GET, StandardPermission.UPDATE));
  // BOB only gets privileges on the entities whose names end with "2".
  permissionManager.grant(Authorizable.fromEntityId(secondProgram), BOB,
                          Collections.singleton(StandardPermission.UPDATE));
  permissionManager.grant(Authorizable.fromEntityId(secondDataset), BOB,
                          EnumSet.of(StandardPermission.GET, StandardPermission.UPDATE));
  Set<? extends EntityId> allEntities =
      ImmutableSet.of(NS, APP, PROGRAM, plainDataset, firstApp, firstProgram, firstDataset,
                      secondApp, secondProgram, secondDataset);
  // Visibility covers the granted entities plus their ancestors.
  Assert.assertEquals(ImmutableSet.of(NS, APP, PROGRAM, plainDataset),
                      accessEnforcer.isVisible(allEntities, ALICE));
  Assert.assertEquals(ImmutableSet.of(NS, secondApp, secondProgram, secondDataset),
                      accessEnforcer.isVisible(allEntities, BOB));
  // CAROL was granted nothing, so nothing is visible to her.
  Assert.assertEquals(ImmutableSet.of(), accessEnforcer.isVisible(allEntities, CAROL));
  // An empty query set yields an empty result.
  Assert.assertEquals(ImmutableSet.of(),
                      accessEnforcer.isVisible(ImmutableSet.<EntityId>of(), ALICE));
  Assert.assertEquals(ImmutableSet.of(plainDataset, APP),
                      accessEnforcer.isVisible(ImmutableSet.of(plainDataset, APP), ALICE));
  // Revoke everything so later tests start from a clean slate.
  for (EntityId entityId : allEntities) {
    permissionManager.revoke(Authorizable.fromEntityId(entityId));
  }
}
Example usage of io.cdap.cdap.proto.id.EntityId in the CDAP project (caskdata/cdap): class ProfileServiceTest, method testAddDeleteAssignments.
@Test
public void testAddDeleteAssignments() throws Exception {
  // Create a profile that mirrors the NATIVE profile's settings in the default namespace.
  ProfileId myProfile = NamespaceId.DEFAULT.profile("MyProfile");
  Profile seedProfile = new Profile("MyProfile", Profile.NATIVE.getLabel(),
                                    Profile.NATIVE.getDescription(), Profile.NATIVE.getScope(),
                                    Profile.NATIVE.getProvisioner());
  profileService.saveProfile(myProfile, seedProfile);
  // A single namespace assignment should be reflected back by the service.
  Set<EntityId> assigned = new HashSet<>();
  assigned.add(NamespaceId.DEFAULT);
  profileService.addProfileAssignment(myProfile, NamespaceId.DEFAULT);
  Assert.assertEquals(assigned, profileService.getProfileAssignments(myProfile));
  // Assign instance, application and program entities as well and verify.
  InstanceId instance = new InstanceId("");
  ApplicationId app = NamespaceId.DEFAULT.app("myApp");
  ProgramId workflow = app.workflow("myProgram");
  assigned.add(instance);
  assigned.add(app);
  assigned.add(workflow);
  profileService.addProfileAssignment(myProfile, instance);
  profileService.addProfileAssignment(myProfile, app);
  profileService.addProfileAssignment(myProfile, workflow);
  Assert.assertEquals(assigned, profileService.getProfileAssignments(myProfile));
  // Re-adding an existing assignment must be a no-op.
  profileService.addProfileAssignment(myProfile, app);
  Assert.assertEquals(assigned, profileService.getProfileAssignments(myProfile));
  // Removing one assignment leaves the others intact.
  assigned.remove(app);
  profileService.removeProfileAssignment(myProfile, app);
  Assert.assertEquals(assigned, profileService.getProfileAssignments(myProfile));
  // Remove every remaining assignment.
  for (EntityId entityId : assigned) {
    profileService.removeProfileAssignment(myProfile, entityId);
  }
  assigned.clear();
  Assert.assertEquals(assigned, profileService.getProfileAssignments(myProfile));
  // Removing an assignment that is already gone must also be a no-op.
  profileService.removeProfileAssignment(myProfile, app);
  Assert.assertEquals(assigned, profileService.getProfileAssignments(myProfile));
  // Clean up the profile itself (must be disabled before deletion).
  profileService.disableProfile(myProfile);
  profileService.deleteProfile(myProfile);
}
Example usage of io.cdap.cdap.proto.id.EntityId in the CDAP project (caskdata/cdap): class MetadataSubscriberServiceTest, method testSubscriber.
@Test
public void testSubscriber() throws InterruptedException, ExecutionException, TimeoutException {
// Verifies that lineage, field lineage and usage records emitted over messaging are
// eventually observable through their respective readers.
LineageStoreReader lineageReader = getInjector().getInstance(LineageStoreReader.class);
ProgramRunId run1 = service1.run(RunIds.generate());
// Try to read lineage, which should be empty since we haven't start the MetadataSubscriberService yet.
Set<NamespacedEntityId> entities = lineageReader.getEntitiesForRun(run1);
Assert.assertTrue(entities.isEmpty());
// Write out some lineage information
LineageWriter lineageWriter = getInjector().getInstance(MessagingLineageWriter.class);
lineageWriter.addAccess(run1, dataset1, AccessType.READ);
lineageWriter.addAccess(run1, dataset2, AccessType.WRITE);
// Write the field level lineage
FieldLineageWriter fieldLineageWriter = getInjector().getInstance(MessagingLineageWriter.class);
ProgramRunId spark1Run1 = spark1.run(RunIds.generate(100));
// Pipeline: read(offset, body) -> parse(body -> name, address) -> write(offset, name, address)
ReadOperation read = new ReadOperation("read", "some read", EndPoint.of("ns", "endpoint1"), "offset", "body");
TransformOperation parse = new TransformOperation("parse", "parse body", Collections.singletonList(InputField.of("read", "body")), "name", "address");
WriteOperation write = new WriteOperation("write", "write data", EndPoint.of("ns", "endpoint2"), Arrays.asList(InputField.of("read", "offset"), InputField.of("parse", "name"), InputField.of("parse", "address")));
List<Operation> operations = new ArrayList<>();
operations.add(read);
operations.add(write);
operations.add(parse);
FieldLineageInfo info1 = new FieldLineageInfo(operations);
fieldLineageWriter.write(spark1Run1, info1);
// A second run (later timestamp) writes the same field lineage info.
ProgramRunId spark1Run2 = spark1.run(RunIds.generate(200));
fieldLineageWriter.write(spark1Run2, info1);
// A third run uses a different pipeline: an extra "normalize" step feeds the final write.
List<Operation> operations2 = new ArrayList<>();
operations2.add(read);
operations2.add(parse);
TransformOperation normalize = new TransformOperation("normalize", "normalize address", Collections.singletonList(InputField.of("parse", "address")), "address");
operations2.add(normalize);
WriteOperation anotherWrite = new WriteOperation("anotherwrite", "write data", EndPoint.of("ns", "endpoint2"), Arrays.asList(InputField.of("read", "offset"), InputField.of("parse", "name"), InputField.of("normalize", "address")));
operations2.add(anotherWrite);
FieldLineageInfo info2 = new FieldLineageInfo(operations2);
ProgramRunId spark1Run3 = spark1.run(RunIds.generate(300));
fieldLineageWriter.write(spark1Run3, info2);
// Emit some usages
UsageWriter usageWriter = getInjector().getInstance(MessagingUsageWriter.class);
usageWriter.register(spark1, dataset1);
usageWriter.registerAll(Collections.singleton(spark1), dataset3);
// Verifies lineage has been written
Set<NamespacedEntityId> expectedLineage = new HashSet<>(Arrays.asList(run1.getParent(), dataset1, dataset2));
Tasks.waitFor(true, () -> expectedLineage.equals(lineageReader.getEntitiesForRun(run1)), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// There shouldn't be any lineage for the "spark1" program, as only usage has been emitted.
Assert.assertTrue(lineageReader.getRelations(spark1, 0L, Long.MAX_VALUE, x -> true).isEmpty());
// Build the expected incoming-operations answer for the "endpoint2"/"offset" field.
FieldLineageReader fieldLineageReader = getInjector().getInstance(FieldLineageReader.class);
Set<Operation> expectedOperations = new HashSet<>();
expectedOperations.add(read);
expectedOperations.add(anotherWrite);
List<ProgramRunOperations> expected = new ArrayList<>();
// Descending order of program execution
expected.add(new ProgramRunOperations(Collections.singleton(spark1Run3), expectedOperations));
// Runs 1 and 2 shared the same FieldLineageInfo, so they are grouped together.
expectedOperations = new HashSet<>();
expectedOperations.add(read);
expectedOperations.add(write);
expected.add(new ProgramRunOperations(new HashSet<>(Arrays.asList(spark1Run1, spark1Run2)), expectedOperations));
EndPointField endPointField = new EndPointField(EndPoint.of("ns", "endpoint2"), "offset");
Tasks.waitFor(expected, () -> fieldLineageReader.getIncomingOperations(endPointField, 1L, Long.MAX_VALUE - 1), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
// Verifies usage has been written
Set<EntityId> expectedUsage = new HashSet<>(Arrays.asList(dataset1, dataset3));
UsageRegistry usageRegistry = getInjector().getInstance(UsageRegistry.class);
Tasks.waitFor(true, () -> expectedUsage.equals(usageRegistry.getDatasets(spark1)), 10, TimeUnit.SECONDS, 100, TimeUnit.MILLISECONDS);
}
Example usage of io.cdap.cdap.proto.id.EntityId in the CDAP project (caskdata/cdap): class ProgramLifecycleServiceAuthorizationTest, method testProgramList.
@Test
public void testProgramList() throws Exception {
  SecurityRequestContext.setUserId(ALICE.getName());
  ApplicationId applicationId = NamespaceId.DEFAULT.app(AllProgramsApp.NAME);
  // Privileges ALICE needs before the app deployment can succeed.
  Map<EntityId, Set<? extends Permission>> neededPrivileges =
      ImmutableMap.<EntityId, Set<? extends Permission>>builder()
        .put(applicationId, EnumSet.allOf(StandardPermission.class))
        .put(NamespaceId.DEFAULT, EnumSet.of(StandardPermission.GET))
        .put(NamespaceId.DEFAULT.artifact(AllProgramsApp.class.getSimpleName(), "1.0-SNAPSHOT"),
             EnumSet.allOf(StandardPermission.class))
        .put(NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME),
             EnumSet.allOf(StandardPermission.class))
        .put(NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME2),
             EnumSet.allOf(StandardPermission.class))
        .put(NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME3),
             EnumSet.allOf(StandardPermission.class))
        .put(NamespaceId.DEFAULT.dataset(AllProgramsApp.DS_WITH_SCHEMA_NAME),
             EnumSet.allOf(StandardPermission.class))
        .build();
  setUpPrivilegesAndExpectFailedDeploy(neededPrivileges);
  // now we should be able to deploy
  AppFabricTestHelper.deployApplication(Id.Namespace.DEFAULT, AllProgramsApp.class, null, cConf);
  // Nothing is auto-granted on deploy, so every program listing starts out empty.
  for (ProgramType type : ProgramType.values()) {
    if (!ProgramType.CUSTOM_ACTION.equals(type)) {
      Assert.assertTrue(programLifecycleService.list(NamespaceId.DEFAULT, type).isEmpty());
    }
  }
  // Grant EXECUTE on one program of each type so ALICE can see them in the listing.
  accessController.grant(
      Authorizable.fromEntityId(applicationId.program(ProgramType.SERVICE, AllProgramsApp.NoOpService.NAME)),
      ALICE, Collections.singleton(ApplicationPermission.EXECUTE));
  accessController.grant(
      Authorizable.fromEntityId(applicationId.program(ProgramType.WORKER, AllProgramsApp.NoOpWorker.NAME)),
      ALICE, Collections.singleton(ApplicationPermission.EXECUTE));
  accessController.grant(
      Authorizable.fromEntityId(applicationId.program(ProgramType.SPARK, AllProgramsApp.NoOpSpark.NAME)),
      ALICE, Collections.singleton(ApplicationPermission.EXECUTE));
  accessController.grant(
      Authorizable.fromEntityId(applicationId.program(ProgramType.MAPREDUCE, AllProgramsApp.NoOpMR.NAME)),
      ALICE, Collections.singleton(ApplicationPermission.EXECUTE));
  accessController.grant(
      Authorizable.fromEntityId(applicationId.program(ProgramType.MAPREDUCE, AllProgramsApp.NoOpMR2.NAME)),
      ALICE, Collections.singleton(ApplicationPermission.EXECUTE));
  accessController.grant(
      Authorizable.fromEntityId(applicationId.program(ProgramType.WORKFLOW, AllProgramsApp.NoOpWorkflow.NAME)),
      ALICE, Collections.singleton(ApplicationPermission.EXECUTE));
  for (ProgramType type : ProgramType.values()) {
    // CUSTOM_ACTION cannot be listed, so it is skipped.
    if (!ProgramType.CUSTOM_ACTION.equals(type)) {
      // ALICE now sees the granted programs; BOB still sees nothing.
      Assert.assertFalse(programLifecycleService.list(NamespaceId.DEFAULT, type).isEmpty());
      SecurityRequestContext.setUserId("bob");
      Assert.assertTrue(programLifecycleService.list(NamespaceId.DEFAULT, type).isEmpty());
      SecurityRequestContext.setUserId("alice");
    }
  }
}
Example usage of io.cdap.cdap.proto.id.EntityId in the CDAP project (caskdata/cdap): class MetadataHttpHandlerTestRun, method assertDataEntitySearch.
/**
 * Verifies metadata search results over the datasets created by {@code AllProgramsApp}
 * in the default namespace: schema-field search, property search, and type search.
 */
private void assertDataEntitySearch() throws Exception {
  DatasetId dataset1 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME);
  DatasetId dataset2 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME2);
  DatasetId dataset3 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME3);
  DatasetId dataset4 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME4);
  DatasetId dataset5 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME5);
  DatasetId dataset6 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME6);
  DatasetId dataset7 = NamespaceId.DEFAULT.dataset(AllProgramsApp.DATASET_NAME7);
  DatasetId schemaDataset = NamespaceId.DEFAULT.dataset(AllProgramsApp.DS_WITH_SCHEMA_NAME);
  // Schema search for fields with the given fieldname:fieldtype pairs.
  assertSearch(searchMetadata(NamespaceId.DEFAULT, "body:STRING+field1:STRING"), schemaDataset);
  // A user property whose key is "schema" must also be matched by a schema search.
  Map<String, String> datasetProperties = ImmutableMap.of("schema", "schemaValue");
  addProperties(dataset1, datasetProperties);
  assertSearch(searchMetadata(NamespaceId.DEFAULT, "schema:*"), dataset1, schemaDataset);
  // Group the datasets by how they should appear in type-based searches.
  EntityId[] kvTables = { dataset1, dataset2, dataset3, myds };
  EntityId[] explorableDatasets =
      concat(kvTables, new EntityId[] { dataset4, dataset5, schemaDataset });
  EntityId[] allDatasets = concat(explorableDatasets, new EntityId[] { dataset6, dataset7 });
  assertSearch(searchMetadata(NamespaceId.DEFAULT, "explore"), explorableDatasets);
  assertSearch(searchMetadata(NamespaceId.DEFAULT, KeyValueTable.class.getName()), kvTables);
  assertSearch(searchMetadata(NamespaceId.DEFAULT, "type:*"), allDatasets);
  // Searching by a dataset's own name returns just that dataset.
  assertSearch(searchMetadata(NamespaceId.DEFAULT, AllProgramsApp.DATASET_NAME), dataset1);
  assertSearch(searchMetadata(NamespaceId.DEFAULT, AllProgramsApp.DS_WITH_SCHEMA_NAME),
               schemaDataset);
}
Aggregations