Use of co.cask.cdap.proto.id.EntityId in project cdap by caskdata.
The class AuthorizationTest, method testCrossNSDatasetAccessWithAuthMapReduce.
private void testCrossNSDatasetAccessWithAuthMapReduce(MapReduceManager mrManager) throws Exception {
NamespaceMeta inputDatasetNS = new NamespaceMeta.Builder().setName("inputNS").build();
NamespaceId inputDatasetNSId = inputDatasetNS.getNamespaceId();
NamespaceMeta outputDatasetNS = new NamespaceMeta.Builder().setName("outputNS").build();
NamespaceId outputDatasetNSId = outputDatasetNS.getNamespaceId();
DatasetId table1Id = inputDatasetNSId.dataset("table1");
DatasetId table2Id = outputDatasetNSId.dataset("table2");
Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
  .put(inputDatasetNSId, EnumSet.of(Action.ADMIN))
  .put(outputDatasetNSId, EnumSet.of(Action.ADMIN))
  .put(table1Id, EnumSet.of(Action.ADMIN, Action.WRITE))
  .put(table2Id, EnumSet.of(Action.ADMIN, Action.READ))
  .put(inputDatasetNSId.datasetType("keyValueTable"), EnumSet.of(Action.ADMIN))
  .put(outputDatasetNSId.datasetType("keyValueTable"), EnumSet.of(Action.ADMIN))
  .build();
setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
getNamespaceAdmin().create(inputDatasetNS);
getNamespaceAdmin().create(outputDatasetNS);
addDatasetInstance(table1Id, "keyValueTable").create();
addDatasetInstance(table2Id, "keyValueTable").create();
addDummyData(inputDatasetNSId, "table1");
Map<String, String> argsForMR = ImmutableMap.of(
  DatasetCrossNSAccessWithMAPApp.INPUT_DATASET_NS, inputDatasetNSId.getNamespace(),
  DatasetCrossNSAccessWithMAPApp.INPUT_DATASET_NAME, "table1",
  DatasetCrossNSAccessWithMAPApp.OUTPUT_DATASET_NS, outputDatasetNSId.getNamespace(),
  DatasetCrossNSAccessWithMAPApp.OUTPUT_DATASET_NAME, "table2");
// Switch to BOB and run the MapReduce job. The job will fail at runtime since BOB does not have
// permissions on the input and output datasets, which live in other namespaces.
SecurityRequestContext.setUserId(BOB.getName());
assertProgramFailure(argsForMR, mrManager);
// Switch back to Alice
SecurityRequestContext.setUserId(ALICE.getName());
// Verify that nothing was written to the output dataset
assertDatasetIsEmpty(outputDatasetNS.getNamespaceId(), "table2");
// give privilege to BOB on the input dataset
grantAndAssertSuccess(inputDatasetNS.getNamespaceId().dataset("table1"), BOB, EnumSet.of(Action.READ));
// Switch back to BOB and try running again. This will still fail since BOB does not have access
// to the output dataset
SecurityRequestContext.setUserId(BOB.getName());
assertProgramFailure(argsForMR, mrManager);
// Switch back to Alice
SecurityRequestContext.setUserId(ALICE.getName());
// Verify that nothing was written to the output dataset
assertDatasetIsEmpty(outputDatasetNS.getNamespaceId(), "table2");
// give privilege to BOB on the output dataset
grantAndAssertSuccess(outputDatasetNS.getNamespaceId().dataset("table2"), BOB, EnumSet.of(Action.WRITE));
// Switch back to BOB and run the MapReduce job again. This time it should succeed
SecurityRequestContext.setUserId(BOB.getName());
mrManager.start(argsForMR);
mrManager.waitForRun(ProgramRunStatus.COMPLETED, 60, TimeUnit.SECONDS);
// Verify results as alice
SecurityRequestContext.setUserId(ALICE.getName());
verifyDummyData(outputDatasetNS.getNamespaceId(), "table2");
getNamespaceAdmin().delete(inputDatasetNS.getNamespaceId());
getNamespaceAdmin().delete(outputDatasetNS.getNamespaceId());
}
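For reference, a minimal sketch of what a helper like setUpPrivilegeAndRegisterForDeletion could look like, assuming the test class holds an Authorizer (the CDAP security SPI exposes grant(EntityId, Principal, Set<Action>)) and a set of entities consulted during teardown; the names authorizer and cleanUpEntities are assumptions, not confirmed from the source:

// Sketch only; `authorizer` and `cleanUpEntities` are assumed test fields.
private void setUpPrivilegeAndRegisterForDeletion(Principal principal,
                                                  Map<EntityId, Set<Action>> neededPrivileges) throws Exception {
  for (Map.Entry<EntityId, Set<Action>> privilege : neededPrivileges.entrySet()) {
    // grant each entity's actions to the principal up front
    authorizer.grant(privilege.getKey(), principal, privilege.getValue());
    // remember the entity so its privileges can be revoked during test cleanup
    cleanUpEntities.add(privilege.getKey());
  }
}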
Use of co.cask.cdap.proto.id.EntityId in project cdap by caskdata.
The class AuthorizationTest, method deployDummyAppWithImpersonation.
private void deployDummyAppWithImpersonation(NamespaceMeta nsMeta, @Nullable String appOwner) throws Exception {
NamespaceId namespaceId = nsMeta.getNamespaceId();
ApplicationId dummyAppId = namespaceId.app(DummyApp.class.getSimpleName());
ArtifactId artifactId = namespaceId.artifact(DummyApp.class.getSimpleName(), "1.0-SNAPSHOT");
DatasetId datasetId = namespaceId.dataset("whom");
DatasetTypeId datasetTypeId = namespaceId.datasetType(KeyValueTable.class.getName());
StreamId streamId = namespaceId.stream("who");
String owner = appOwner != null ? appOwner : nsMeta.getConfig().getPrincipal();
KerberosPrincipalId principalId = new KerberosPrincipalId(owner);
Principal principal = new Principal(owner, Principal.PrincipalType.USER);
DatasetId dummyDatasetId = namespaceId.dataset("customDataset");
DatasetTypeId dummyTypeId = namespaceId.datasetType(DummyApp.CustomDummyDataset.class.getName());
DatasetModuleId dummyModuleId = namespaceId.datasetModule(DummyApp.CustomDummyDataset.class.getName());
// These are the privileges needed to deploy the app when no impersonation is involved;
// see testApps() for more details.
Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
  .put(dummyAppId, EnumSet.of(Action.ADMIN))
  .put(artifactId, EnumSet.of(Action.ADMIN))
  .put(datasetId, EnumSet.of(Action.ADMIN))
  .put(streamId, EnumSet.of(Action.ADMIN))
  .put(datasetTypeId, EnumSet.of(Action.ADMIN))
  .put(principalId, EnumSet.of(Action.ADMIN))
  .put(dummyDatasetId, EnumSet.of(Action.ADMIN))
  .put(dummyTypeId, EnumSet.of(Action.ADMIN))
  .put(dummyModuleId, EnumSet.of(Action.ADMIN))
  .build();
setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
// add the artifact
addAppArtifact(artifactId, DummyApp.class);
AppRequest<? extends Config> appRequest = new AppRequest<>(
  new ArtifactSummary(artifactId.getArtifact(), artifactId.getVersion()), null, appOwner);
try {
deployApplication(dummyAppId, appRequest);
Assert.fail();
} catch (Exception e) {
// expected
}
// Revoke the dataset and stream privileges from ALICE; she does not need them to deploy the app.
// The owner principal is the one that needs these privileges to deploy.
revokeAndAssertSuccess(datasetId);
revokeAndAssertSuccess(datasetTypeId);
revokeAndAssertSuccess(streamId);
revokeAndAssertSuccess(dummyDatasetId);
revokeAndAssertSuccess(dummyTypeId);
revokeAndAssertSuccess(dummyModuleId);
// grant ADMIN privileges to owner
grantAndAssertSuccess(datasetId, principal, EnumSet.of(Action.ADMIN));
grantAndAssertSuccess(datasetTypeId, principal, EnumSet.of(Action.ADMIN));
grantAndAssertSuccess(streamId, principal, EnumSet.of(Action.ADMIN));
grantAndAssertSuccess(dummyDatasetId, principal, EnumSet.of(Action.ADMIN));
grantAndAssertSuccess(dummyTypeId, principal, EnumSet.of(Action.ADMIN));
grantAndAssertSuccess(dummyModuleId, principal, EnumSet.of(Action.ADMIN));
// This time the deployment should succeed
deployApplication(dummyAppId, appRequest);
// Clean up the privileges on the owner principal
revokeAndAssertSuccess(principalId);
}
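The grant/revoke assertion helpers used above can be pictured roughly as follows. This is a hedged sketch assuming an Authorizer field and the Privilege(EntityId, Action) value class from co.cask.cdap.proto.security; it is not the verbatim implementation:

// Sketch only; assumes an `authorizer` test field.
private void grantAndAssertSuccess(EntityId entityId, Principal principal, Set<Action> actions) throws Exception {
  authorizer.grant(entityId, principal, actions);
  // confirm the grant is visible when listing the principal's privileges
  Set<Privilege> privileges = authorizer.listPrivileges(principal);
  for (Action action : actions) {
    Assert.assertTrue(privileges.contains(new Privilege(entityId, action)));
  }
}

private void revokeAndAssertSuccess(EntityId entityId) throws Exception {
  // revoke all privileges on the entity; no principal should retain access afterwards
  authorizer.revoke(entityId);
}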
Use of co.cask.cdap.proto.id.EntityId in project cdap by caskdata.
The class AuthorizationTest, method testCrossNSDatasetAccessFromFlowlet.
private void testCrossNSDatasetAccessFromFlowlet(final FlowManager flowManager) throws Exception {
NamespaceMeta outputDatasetNS = new NamespaceMeta.Builder().setName("outputNS").build();
NamespaceId outputDatasetNSId = outputDatasetNS.getNamespaceId();
DatasetId datasetId = outputDatasetNSId.dataset("store");
Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
  .put(outputDatasetNSId, EnumSet.of(Action.ADMIN))
  .put(datasetId, EnumSet.of(Action.ADMIN, Action.READ))
  .put(outputDatasetNSId.datasetType("keyValueTable"), EnumSet.of(Action.ADMIN))
  .build();
setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
getNamespaceAdmin().create(outputDatasetNS);
addDatasetInstance(datasetId, "keyValueTable");
// switch to BOB
SecurityRequestContext.setUserId(BOB.getName());
Map<String, String> args = ImmutableMap.of(
  CrossNsDatasetAccessApp.OUTPUT_DATASET_NS, outputDatasetNSId.getNamespace(),
  CrossNsDatasetAccessApp.OUTPUT_DATASET_NAME, "store");
// Trying to run the flow as BOB will fail at runtime since the flow writes to a dataset in
// another namespace that is not accessible to BOB.
flowManager.start(args);
// wait for flow to be running
flowManager.waitForRun(ProgramRunStatus.RUNNING, 12, TimeUnit.SECONDS);
// The start call above succeeds, but the flow fails at runtime since it cannot access the dataset
// in the other namespace. Because the failure means no metrics are emitted, we cannot check whether
// the flow attempted any processing, so stop the flow and verify that the output dataset is empty
flowManager.stop();
flowManager.waitForStatus(false);
SecurityRequestContext.setUserId(ALICE.getName());
assertDatasetIsEmpty(outputDatasetNS.getNamespaceId(), "store");
// Give BOB permission to write to the dataset in another namespace
grantAndAssertSuccess(datasetId, BOB, EnumSet.of(Action.WRITE));
// switch back to BOB to run flow again
SecurityRequestContext.setUserId(BOB.getName());
// Running the flow should now succeed and write data to the other namespace
flowManager.start(args);
flowManager.waitForRun(ProgramRunStatus.RUNNING, 12, TimeUnit.SECONDS);
flowManager.getFlowletMetrics("saver").waitForProcessed(10, 30, TimeUnit.SECONDS);
flowManager.stop();
waitForStoppedPrograms(flowManager);
// Switch back to ALICE and verify the data. It is fine to skip verifying the run record here,
// because if the flow had failed to write we would not see any data
SecurityRequestContext.setUserId(ALICE.getName());
DataSetManager<KeyValueTable> dataSetManager = getDataset(outputDatasetNS.getNamespaceId().dataset("store"));
KeyValueTable results = dataSetManager.get();
for (int i = 0; i < 10; i++) {
byte[] key = String.valueOf(i).getBytes(Charsets.UTF_8);
Assert.assertArrayEquals(key, results.read(key));
}
getNamespaceAdmin().delete(outputDatasetNS.getNamespaceId());
}
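addDummyData and verifyDummyData are small helpers of this test class. A plausible sketch, assuming they exchange a single known key/value pair through a KeyValueTable; the exact data the real helpers write is an assumption:

// Sketch only; the marker key/value pair is assumed, not confirmed from the source.
private void addDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable table = tableManager.get();
  table.write("hello", "world");  // marker entry for later verification
  tableManager.flush();           // make the write visible to subsequent readers
}

private void verifyDummyData(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable table = tableManager.get();
  Assert.assertEquals("world", Bytes.toString(table.read("hello")));
}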
Use of co.cask.cdap.proto.id.EntityId in project cdap by caskdata.
The class AuthorizationTest, method testCrossNSDatasetAccessWithAuthSpark.
private void testCrossNSDatasetAccessWithAuthSpark(SparkManager sparkManager) throws Exception {
NamespaceMeta inputDatasetNSMeta = new NamespaceMeta.Builder().setName("inputDatasetNS").build();
NamespaceMeta outputDatasetNSMeta = new NamespaceMeta.Builder().setName("outputDatasetNS").build();
NamespaceId inputDatasetNSMetaId = inputDatasetNSMeta.getNamespaceId();
DatasetId inputTableId = inputDatasetNSMetaId.dataset("input");
NamespaceId outputDatasetNSMetaId = outputDatasetNSMeta.getNamespaceId();
DatasetId outputTableId = outputDatasetNSMetaId.dataset("output");
Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
  .put(inputDatasetNSMetaId, EnumSet.of(Action.ADMIN))
  .put(outputDatasetNSMetaId, EnumSet.of(Action.ADMIN))
  .put(inputTableId, EnumSet.of(Action.ADMIN, Action.WRITE))
  .put(inputDatasetNSMetaId.datasetType("keyValueTable"), EnumSet.of(Action.ADMIN))
  .put(outputTableId, EnumSet.of(Action.ADMIN, Action.READ))
  .put(outputDatasetNSMetaId.datasetType("keyValueTable"), EnumSet.of(Action.ADMIN))
  .build();
setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
getNamespaceAdmin().create(inputDatasetNSMeta);
getNamespaceAdmin().create(outputDatasetNSMeta);
addDatasetInstance(inputTableId, "keyValueTable").create();
addDatasetInstance(outputTableId, "keyValueTable").create();
// Write sample data to the input dataset
addDummyData(inputDatasetNSMeta.getNamespaceId(), "input");
// Switch to BOB and run the Spark program. This will fail because BOB does not have access to
// either the input or the output dataset
SecurityRequestContext.setUserId(BOB.getName());
Map<String, String> args = ImmutableMap.of(
  TestSparkCrossNSDatasetApp.INPUT_DATASET_NAMESPACE, inputDatasetNSMetaId.getNamespace(),
  TestSparkCrossNSDatasetApp.INPUT_DATASET_NAME, "input",
  TestSparkCrossNSDatasetApp.OUTPUT_DATASET_NAMESPACE, outputDatasetNSMetaId.getNamespace(),
  TestSparkCrossNSDatasetApp.OUTPUT_DATASET_NAME, "output");
assertProgramFailure(args, sparkManager);
SecurityRequestContext.setUserId(ALICE.getName());
// Verify that nothing was written to the output dataset
assertDatasetIsEmpty(outputDatasetNSMeta.getNamespaceId(), "output");
// give privilege to BOB on the input dataset
grantAndAssertSuccess(inputDatasetNSMeta.getNamespaceId().dataset("input"), BOB, EnumSet.of(Action.READ));
// Switch back to BOB and try running again. This will still fail since BOB does not have access
// to the output dataset
SecurityRequestContext.setUserId(BOB.getName());
assertProgramFailure(args, sparkManager);
// Switch back to Alice
SecurityRequestContext.setUserId(ALICE.getName());
// Verify that nothing was written to the output dataset
assertDatasetIsEmpty(outputDatasetNSMeta.getNamespaceId(), "output");
// give privilege to BOB on the output dataset
grantAndAssertSuccess(outputDatasetNSMeta.getNamespaceId().dataset("output"), BOB, EnumSet.of(Action.WRITE));
// Switch back to BOB and run the Spark program again. This time it should succeed
SecurityRequestContext.setUserId(BOB.getName());
sparkManager.start(args);
sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 120, TimeUnit.SECONDS);
waitForStoppedPrograms(sparkManager);
// Verify the results as alice
SecurityRequestContext.setUserId(ALICE.getName());
verifyDummyData(outputDatasetNSMeta.getNamespaceId(), "output");
getNamespaceAdmin().delete(inputDatasetNSMeta.getNamespaceId());
getNamespaceAdmin().delete(outputDatasetNSMeta.getNamespaceId());
}
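assertProgramFailure is used throughout these tests. A minimal sketch, assuming it simply starts the program and waits for a FAILED run record via the cdap-test ProgramManager API (the timeout value here is an assumption):

// Sketch only; the 120-second timeout is assumed.
private void assertProgramFailure(Map<String, String> args, ProgramManager<?> programManager) throws Exception {
  programManager.start(args);
  // the run should transition to FAILED once the program hits the authorization error
  programManager.waitForRun(ProgramRunStatus.FAILED, 120, TimeUnit.SECONDS);
}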
Use of co.cask.cdap.proto.id.EntityId in project cdap by caskdata.
The class AuthorizationTest, method testCrossNSSystemDatasetAccessWithAuthSpark.
private void testCrossNSSystemDatasetAccessWithAuthSpark(SparkManager sparkManager) throws Exception {
addDatasetInstance(NamespaceId.SYSTEM.dataset("table1"), "keyValueTable").create();
addDatasetInstance(NamespaceId.SYSTEM.dataset("table2"), "keyValueTable").create();
NamespaceMeta otherNS = new NamespaceMeta.Builder().setName("otherNS").build();
NamespaceId otherNSId = otherNS.getNamespaceId();
DatasetId otherTableId = otherNSId.dataset("otherTable");
Map<EntityId, Set<Action>> neededPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
  .put(otherNSId, EnumSet.of(Action.ADMIN))
  .put(otherTableId, EnumSet.of(Action.ADMIN))
  .put(otherNSId.datasetType("keyValueTable"), EnumSet.of(Action.ADMIN))
  .build();
setUpPrivilegeAndRegisterForDeletion(ALICE, neededPrivileges);
getNamespaceAdmin().create(otherNS);
addDatasetInstance(otherTableId, "keyValueTable").create();
addDummyData(NamespaceId.SYSTEM, "table1");
// give privilege to BOB on all the datasets
grantAndAssertSuccess(NamespaceId.SYSTEM.dataset("table1"), BOB, EnumSet.of(Action.READ));
grantAndAssertSuccess(NamespaceId.SYSTEM.dataset("table2"), BOB, EnumSet.of(Action.WRITE));
grantAndAssertSuccess(otherNS.getNamespaceId().dataset("otherTable"), BOB, ALL_ACTIONS);
// Switch to BOB and run the Spark program. This will fail because BOB is trying to read from a
// dataset in the system namespace
SecurityRequestContext.setUserId(BOB.getName());
Map<String, String> args = ImmutableMap.of(
  TestSparkCrossNSDatasetApp.INPUT_DATASET_NAMESPACE, NamespaceId.SYSTEM.getNamespace(),
  TestSparkCrossNSDatasetApp.INPUT_DATASET_NAME, "table1",
  TestSparkCrossNSDatasetApp.OUTPUT_DATASET_NAMESPACE, otherNSId.getNamespace(),
  TestSparkCrossNSDatasetApp.OUTPUT_DATASET_NAME, "otherTable");
assertProgramFailure(args, sparkManager);
assertDatasetIsEmpty(otherNS.getNamespaceId(), "otherTable");
// Try running the Spark job with a valid input namespace but writing to the system namespace;
// this should fail too
args = ImmutableMap.of(
  TestSparkCrossNSDatasetApp.INPUT_DATASET_NAMESPACE, otherNSId.getNamespace(),
  TestSparkCrossNSDatasetApp.INPUT_DATASET_NAME, "otherTable",
  TestSparkCrossNSDatasetApp.OUTPUT_DATASET_NAMESPACE, NamespaceId.SYSTEM.getNamespace(),
  TestSparkCrossNSDatasetApp.OUTPUT_DATASET_NAME, "table2");
addDummyData(otherNS.getNamespaceId(), "otherTable");
assertProgramFailure(args, sparkManager);
assertDatasetIsEmpty(NamespaceId.SYSTEM, "table2");
// Switch back to ALICE
SecurityRequestContext.setUserId(ALICE.getName());
// cleanup
deleteDatasetInstance(NamespaceId.SYSTEM.dataset("table1"));
deleteDatasetInstance(NamespaceId.SYSTEM.dataset("table2"));
getNamespaceAdmin().delete(otherNS.getNamespaceId());
}
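Finally, assertDatasetIsEmpty can be sketched as a full-range scan over the KeyValueTable that expects no entries. This is an assumption about its implementation, not the verbatim helper:

// Sketch only; assumes the dataset is a KeyValueTable.
private void assertDatasetIsEmpty(NamespaceId namespaceId, String datasetName) throws Exception {
  DataSetManager<KeyValueTable> tableManager = getDataset(namespaceId.dataset(datasetName));
  KeyValueTable table = tableManager.get();
  // scan the entire key range; an empty dataset yields no entries
  try (CloseableIterator<KeyValue<byte[], byte[]>> scanner = table.scan(null, null)) {
    Assert.assertFalse(scanner.hasNext());
  }
}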