Example usage of co.cask.cdap.proto.id.EntityId in the cdap project (by caskdata): the deserialize method of the AuditMessageTypeAdapter class.
/**
 * Deserializes an {@link AuditMessage} from its JSON form.
 * <p>
 * Reads the {@code time}, {@code entityId}, {@code user} and {@code type} fields, then
 * picks the concrete {@link AuditPayload} subtype based on the audit type: metadata
 * changes and access events carry typed payloads, any other type carries no payload.
 *
 * @param json the JSON element to deserialize; must be a JSON object
 * @param typeOfT the target type (unused; always {@link AuditMessage})
 * @param context Gson context used to deserialize nested objects
 * @return the reconstructed audit message
 * @throws JsonParseException if the JSON is not in the expected form
 */
@Override
public AuditMessage deserialize(JsonElement json, Type typeOfT, JsonDeserializationContext context)
  throws JsonParseException {
  JsonObject messageObj = json.getAsJsonObject();

  long timeMillis = messageObj.get("time").getAsLong();
  EntityId entityId = context.deserialize(messageObj.getAsJsonObject("entityId"), EntityId.class);
  String user = messageObj.get("user").getAsString();
  AuditType auditType = context.deserialize(messageObj.getAsJsonPrimitive("type"), AuditType.class);

  // The payload's concrete class depends on the audit type; types without a
  // dedicated payload class fall back to the shared empty payload.
  JsonObject payloadObj = messageObj.getAsJsonObject("payload");
  AuditPayload payload;
  if (auditType == AuditType.METADATA_CHANGE) {
    payload = context.deserialize(payloadObj, MetadataPayload.class);
  } else if (auditType == AuditType.ACCESS) {
    payload = context.deserialize(payloadObj, AccessPayload.class);
  } else {
    payload = AuditPayload.EMPTY_PAYLOAD;
  }
  return new AuditMessage(timeMillis, entityId, user, auditType, payload);
}
Example usage of co.cask.cdap.proto.id.EntityId in the cdap project (by caskdata): the toRelation method of the LineageDataset class.
/**
 * Reconstructs a lineage {@link Relation} from a single table row.
 * <p>
 * The row key encodes, in order: a marker char + first entity, an inverted-time long
 * (skipped — not needed for the relation), a marker char + second entity, the run id,
 * and the access type. The two entities are keyed by their markers so the dataset (or
 * stream), and program can be picked out regardless of which came first in the key.
 *
 * @param row the raw table row whose key encodes the relation
 * @return the decoded relation; built from the stream if present, else from the dataset
 */
private Relation toRelation(Row row) {
  Map<Character, EntityId> rowInfo = new HashMap<>(4);

  MDSKey.Splitter splitter = new MDSKey(row.getRow()).split();

  char marker = (char) splitter.getInt();
  LOG.trace("Got marker {}", marker);
  EntityId id1 = toEntityId(splitter, marker);
  LOG.trace("Got id1 {}", id1);
  rowInfo.put(marker, id1);

  // inverted time - not required for relation
  splitter.skipLong();

  marker = (char) splitter.getInt();
  LOG.trace("Got marker {}", marker);
  EntityId id2 = toEntityId(splitter, marker);
  // Fixed copy-paste bug: this trace previously logged id1 instead of id2.
  LOG.trace("Got id2 {}", id2);
  rowInfo.put(marker, id2);

  RunId runId = RunIds.fromString(splitter.getString());
  LOG.trace("Got runId {}", runId);

  AccessType accessType = AccessType.fromType((char) splitter.getInt());
  LOG.trace("Got access type {}", accessType);

  DatasetId datasetInstance = (DatasetId) rowInfo.get(DATASET_MARKER);
  LOG.trace("Got datasetInstance {}", datasetInstance);
  StreamId stream = (StreamId) rowInfo.get(STREAM_MARKER);
  LOG.trace("Got stream {}", stream);
  ProgramId program = (ProgramId) rowInfo.get(PROGRAM_MARKER);
  LOG.trace("Got program {}", program);

  NamespacedEntityId component = toComponent(splitter, program);
  LOG.trace("Got component {}", component);

  if (stream == null) {
    return new Relation(datasetInstance, program, accessType, runId,
                        component == null ? ImmutableSet.<NamespacedEntityId>of()
                                          : ImmutableSet.of((NamespacedEntityId) component));
  }
  return new Relation(stream, program, accessType, runId,
                      component == null ? ImmutableSet.<NamespacedEntityId>of()
                                        : ImmutableSet.of((NamespacedEntityId) component));
}
Example usage of co.cask.cdap.proto.id.EntityId in the cdap project (by caskdata): the testSparkStreamAuth method of the AuthorizationTest class.
/**
 * Verifies that a Spark program's access to a stream is gated by the READ privilege:
 * the run succeeds with READ, fails after READ is revoked (leaving the dataset
 * untouched), and succeeds again once READ is re-granted.
 */
@Test
@Category(SlowTests.class)
public void testSparkStreamAuth() throws Exception {
  createAuthNamespace();
  Authorizer authorizer = getAuthorizer();
  setUpPrivilegeToDeployStreamAuthApp();

  StreamId streamId = AUTH_NAMESPACE.stream(StreamAuthApp.STREAM);
  // Alice needs READ/WRITE on the stream and dataset, plus EXECUTE on the Spark program.
  Map<EntityId, Set<Action>> extraPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
    .put(streamId, EnumSet.of(Action.READ, Action.WRITE))
    .put(AUTH_NAMESPACE.app(StreamAuthApp.APP).spark(StreamAuthApp.SPARK), EnumSet.of(Action.EXECUTE))
    .put(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE), EnumSet.of(Action.READ, Action.WRITE))
    .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, extraPrivileges);

  ApplicationManager appManager = deployApplication(AUTH_NAMESPACE, StreamAuthApp.class);
  StreamManager streamManager = getStreamManager(streamId);
  streamManager.send("Hello");

  // With all privileges in place the Spark run completes and copies the event.
  final SparkManager sparkManager = appManager.getSparkManager(StreamAuthApp.SPARK);
  sparkManager.start();
  sparkManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  DataSetManager<KeyValueTable> kvManager = getDataset(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE));
  try (KeyValueTable kvTable = kvManager.get()) {
    Assert.assertArrayEquals(Bytes.toBytes("Hello"), kvTable.read("Hello"));
  }

  streamManager.send("World");
  // Revoke READ permission on STREAM for Alice
  authorizer.revoke(Authorizable.fromEntityId(streamId), ALICE, EnumSet.of(Action.READ));
  sparkManager.start();
  sparkManager.waitForRun(ProgramRunStatus.FAILED, 1, TimeUnit.MINUTES);
  kvManager = getDataset(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE));
  try (KeyValueTable kvTable = kvManager.get()) {
    Assert.assertNull(kvTable.read("World"));
  }

  // Grant ALICE READ permission on STREAM and now Spark job should run successfully
  authorizer.grant(Authorizable.fromEntityId(streamId), ALICE, ImmutableSet.of(Action.READ));
  sparkManager.start();
  sparkManager.waitForRuns(ProgramRunStatus.COMPLETED, 2, 1, TimeUnit.MINUTES);
  kvManager = getDataset(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE));
  try (KeyValueTable kvTable = kvManager.get()) {
    Assert.assertArrayEquals(Bytes.toBytes("World"), kvTable.read("World"));
  }

  appManager.delete();
}
Example usage of co.cask.cdap.proto.id.EntityId in the cdap project (by caskdata): the testCrossNSMapReduce method of the AuthorizationTest class.
/**
 * Verifies cross-namespace dataset access from a MapReduce program: Alice deploys the
 * app, Bob is granted EXECUTE, and the delegated helpers check system- and user-dataset
 * access across namespaces under authorization.
 */
@Test
public void testCrossNSMapReduce() throws Exception {
  createAuthNamespace();

  ApplicationId appId = AUTH_NAMESPACE.app(DatasetCrossNSAccessWithMAPApp.class.getSimpleName());
  // Deployment requires ADMIN on both the application and its artifact.
  Map<EntityId, Set<Action>> requiredPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
    .put(appId, EnumSet.of(Action.ADMIN))
    .put(AUTH_NAMESPACE.artifact(DatasetCrossNSAccessWithMAPApp.class.getSimpleName(), "1.0-SNAPSHOT"),
         EnumSet.of(Action.ADMIN))
    .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, requiredPrivileges);

  ProgramId programId = appId.program(ProgramType.MAPREDUCE, DatasetCrossNSAccessWithMAPApp.MAPREDUCE_PROGRAM);
  // bob will be executing the program
  grantAndAssertSuccess(programId, BOB, EnumSet.of(Action.EXECUTE));
  cleanUpEntities.add(programId);

  ApplicationManager deployedApp = deployApplication(AUTH_NAMESPACE, DatasetCrossNSAccessWithMAPApp.class);
  MapReduceManager mapReduceManager =
    deployedApp.getMapReduceManager(DatasetCrossNSAccessWithMAPApp.MAPREDUCE_PROGRAM);
  testCrossNSSystemDatasetAccessWithAuthMapReduce(mapReduceManager);
  testCrossNSDatasetAccessWithAuthMapReduce(mapReduceManager);
}
Example usage of co.cask.cdap.proto.id.EntityId in the cdap project (by caskdata): the testMRStreamAuth method of the AuthorizationTest class.
/**
 * Verifies that a MapReduce program's access to a stream is gated by the READ privilege,
 * across a user switch: Alice runs successfully with full privileges; Bob (EXECUTE but
 * no stream READ) fails without touching the dataset; after Bob is granted READ, his
 * run completes and the dataset reflects the new event.
 */
@Test
@Category(SlowTests.class)
public void testMRStreamAuth() throws Exception {
  createAuthNamespace();
  Authorizer authorizer = getAuthorizer();
  setUpPrivilegeToDeployStreamAuthApp();
  ApplicationManager appManager = deployApplication(AUTH_NAMESPACE, StreamAuthApp.class);

  StreamId streamId = AUTH_NAMESPACE.stream(StreamAuthApp.STREAM);
  DatasetId datasetId = AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE);
  // Alice needs READ/WRITE on the stream and dataset, plus EXECUTE on the MR program.
  Map<EntityId, Set<Action>> additionalPrivileges = ImmutableMap.<EntityId, Set<Action>>builder()
    .put(streamId, EnumSet.of(Action.READ, Action.WRITE))
    .put(AUTH_NAMESPACE.app(StreamAuthApp.APP).mr(StreamAuthApp.MAPREDUCE), EnumSet.of(Action.EXECUTE))
    .put(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE), EnumSet.of(Action.READ, Action.WRITE))
    .build();
  setUpPrivilegeAndRegisterForDeletion(ALICE, additionalPrivileges);

  StreamManager streamManager = getStreamManager(streamId);
  streamManager.send("Hello");

  final MapReduceManager mrManager = appManager.getMapReduceManager(StreamAuthApp.MAPREDUCE);
  mrManager.start();
  // Since Alice had the required permissions, she should be able to execute the MR job successfully
  mrManager.waitForRun(ProgramRunStatus.COMPLETED, 1, TimeUnit.MINUTES);
  DataSetManager<KeyValueTable> kvManager = getDataset(datasetId);
  try (KeyValueTable kvTable = kvManager.get()) {
    byte[] value = kvTable.read("Hello");
    Assert.assertArrayEquals(Bytes.toBytes("Hello"), value);
  }

  ProgramId mrId = AUTH_NAMESPACE.app(StreamAuthApp.APP).mr(StreamAuthApp.MAPREDUCE);
  authorizer.grant(Authorizable.fromEntityId(mrId.getNamespaceId()), BOB, ImmutableSet.of(Action.ADMIN));
  authorizer.grant(Authorizable.fromEntityId(mrId), BOB, EnumSet.of(Action.EXECUTE));
  authorizer.grant(Authorizable.fromEntityId(AUTH_NAMESPACE.stream(StreamAuthApp.STREAM)), BOB,
                   EnumSet.of(Action.ADMIN));
  authorizer.grant(Authorizable.fromEntityId(AUTH_NAMESPACE.dataset(StreamAuthApp.KVTABLE)), BOB,
                   EnumSet.of(Action.READ, Action.WRITE));
  streamManager.send("World");

  // Switch user to Bob. Note that he doesn't have READ access on the stream.
  SecurityRequestContext.setUserId(BOB.getName());
  mrManager.start();
  mrManager.waitForRun(ProgramRunStatus.FAILED, 1, TimeUnit.MINUTES);
  kvManager = getDataset(datasetId);
  try (KeyValueTable kvTable = kvManager.get()) {
    byte[] value = kvTable.read("World");
    Assert.assertNull(value);
  }

  // Now grant Bob, READ access on the stream. MR job should execute successfully now.
  authorizer.grant(Authorizable.fromEntityId(AUTH_NAMESPACE.stream(StreamAuthApp.STREAM)), BOB,
                   ImmutableSet.of(Action.READ));
  mrManager.start();
  mrManager.waitForRuns(ProgramRunStatus.COMPLETED, 2, 1, TimeUnit.MINUTES);
  kvManager = getDataset(datasetId);
  try (KeyValueTable kvTable = kvManager.get()) {
    byte[] value = kvTable.read("World");
    // Consistency: use assertArrayEquals on the raw bytes like every other value
    // check in these tests (was assertEquals on Bytes.toString(value)).
    Assert.assertArrayEquals(Bytes.toBytes("World"), value);
  }

  SecurityRequestContext.setUserId(ALICE.getName());
  appManager.delete();
}
Aggregations