Use of datawave.security.authorization.DatawavePrincipal in project datawave by NationalSecurityAgency.
From the class DatawaveUsersRolesLoginModuleTest, method testReverseDnSuccessfulLogin.
@Test
public void testReverseDnSuccessfulLogin() throws Exception {
    String name = SUBJECT_DN_WITH_CN_LAST + "<" + ISSUER_DN_WITH_CN_LAST + ">";
    callbackHandler.setSecurityInfo(new SimplePrincipal(name), new DatawaveCredential(SUBJECT_DN_WITH_CN_LAST, ISSUER_DN_WITH_CN_LAST, null, null).toString());
    boolean success = loginModule.login();
    assertTrue("Login didn't succeed for alias in users/roles.properties", success);
    DatawavePrincipal principal = (DatawavePrincipal) field(DatawaveUsersRolesLoginModule.class, "identity").get(loginModule);
    assertEquals(NORMALIZED_SUBJECT_DN_WITH_ISSUER_DN, principal.getName());
}
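The test feeds in a "reverse" DN (CN last) and asserts that the principal's name comes back normalized. A minimal standalone sketch of that style of normalization, assuming it amounts to reordering the RDNs so the CN leads, lower-casing, and joining subject and issuer as subjectDN<issuerDN> when the input has the CN last (the class, helper names, and exact rules here are ours for illustration, not the project's API):

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class DnNormalizationSketch {
    // Hypothetical helper: reorder the RDNs so the CN leads, then lower-case.
    static String normalize(String dn) {
        List<String> rdns = Arrays.asList(dn.split("\\s*,\\s*"));
        Collections.reverse(rdns);
        return String.join(", ", rdns).toLowerCase();
    }

    // Combined form the assertion checks: normalized subject DN, then issuer DN in angle brackets.
    static String combine(String subjectDn, String issuerDn) {
        return normalize(subjectDn) + "<" + normalize(issuerDn) + ">";
    }
}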
Use of datawave.security.authorization.DatawavePrincipal in project datawave by NationalSecurityAgency.
From the class MapReduceBean, method canRunJob.
protected void canRunJob(Principal principal, MultivaluedMap<String, String> queryParameters, List<String> requiredRoles, List<String> requiredAuths) throws UnauthorizedQueryException {
    if (!(principal instanceof DatawavePrincipal)) {
        throw new UnauthorizedQueryException(DatawaveErrorCode.JOB_EXECUTION_UNAUTHORIZED, "Principal must be DatawavePrincipal");
    }
    DatawavePrincipal datawavePrincipal = (DatawavePrincipal) principal;
    if (requiredRoles != null && !requiredRoles.isEmpty()) {
        Set<String> usersRoles = new HashSet<>(datawavePrincipal.getPrimaryUser().getRoles());
        if (!usersRoles.containsAll(requiredRoles)) {
            throw new UnauthorizedQueryException(DatawaveErrorCode.JOB_EXECUTION_UNAUTHORIZED, MessageFormat.format("Requires the following roles: {0}", requiredRoles));
        }
    }
    if (null != queryParameters) {
        if (requiredAuths != null && !requiredAuths.isEmpty()) {
            String authsString = queryParameters.getFirst("auths");
            List<String> authorizations = AuthorizationsUtil.splitAuths(authsString);
            if (!authorizations.containsAll(requiredAuths)) {
                throw new UnauthorizedQueryException(DatawaveErrorCode.JOB_EXECUTION_UNAUTHORIZED, MessageFormat.format("Requires the following auths: {0}", requiredAuths));
            }
        }
    }
}
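A brief usage sketch, not project code, of how a caller might exercise canRunJob with the role and auth lists from a job configuration; the principal variable, role name, and auth values here are hypothetical:

// Hypothetical caller: requiredRoles/requiredAuths would come from a MapReduceJobConfiguration
MultivaluedMap<String, String> queryParameters = new MultivaluedMapImpl<>();
queryParameters.putSingle("auths", "PUBLIC,PRIVATE");
try {
    canRunJob(callerPrincipal, queryParameters, Collections.singletonList("AuthorizedUser"), Collections.singletonList("PUBLIC"));
} catch (UnauthorizedQueryException e) {
    // map to a 401 response, as submit() does below
}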
Use of datawave.security.authorization.DatawavePrincipal in project datawave by NationalSecurityAgency.
From the class MapReduceBean, method submit.
/**
 * Execute a MapReduce job with the given name and runtime parameters
 *
 * @param jobName
 *            Name of the map reduce job configuration
 * @param parameters
 *            A semicolon-separated list of name:value pairs. These are the required and optional parameters listed in the MapReduceConfiguration objects
 *            returned in the call to list()
 * @return {@code datawave.webservice.result.GenericResponse<String>} job id
 * @RequestHeader X-ProxiedEntitiesChain use when proxying a request for a user, specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain; specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation; does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 204 if no data was found
 * @HTTP 400 if jobName is invalid
 * @HTTP 401 if the user does not have the correct roles
 * @HTTP 500 error starting the job
 */
@POST
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/submit")
@GZIP
public GenericResponse<String> submit(@FormParam("jobName") String jobName, @FormParam("parameters") String parameters) {
    GenericResponse<String> response = new GenericResponse<>();
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid;
    Set<Collection<String>> cbAuths = new HashSet<>();
    DatawavePrincipal datawavePrincipal = null;
    if (p instanceof DatawavePrincipal) {
        datawavePrincipal = (DatawavePrincipal) p;
        sid = datawavePrincipal.getShortName();
        cbAuths.addAll(datawavePrincipal.getAuthorizations());
    } else {
        QueryException qe = new QueryException(DatawaveErrorCode.UNEXPECTED_PRINCIPAL_ERROR, MessageFormat.format("Class: {0}", p.getClass().getName()));
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    // Get the MapReduceJobConfiguration from the configuration
    MapReduceJobConfiguration job;
    try {
        job = this.mapReduceConfiguration.getConfiguration(jobName);
    } catch (IllegalArgumentException e) {
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.JOB_CONFIGURATION_ERROR, e);
        response.addException(qe);
        throw new BadRequestException(qe, response);
    }
    // Ensure that the user has the required roles and has passed the required auths
    if (null != job.getRequiredRoles() || null != job.getRequiredAuths()) {
        try {
            canRunJob(datawavePrincipal, new MultivaluedMapImpl<>(), job.getRequiredRoles(), job.getRequiredAuths());
        } catch (UnauthorizedQueryException qe) {
            // user does not have all of the required roles or did not pass the required auths
            response.addException(qe);
            throw new UnauthorizedException(qe, response);
        }
    }
    // Parse the parameters
    Map<String, String> runtimeParameters = new HashMap<>();
    if (null != parameters) {
        String[] params = parameters.split(PARAMETER_SEPARATOR);
        for (String param : params) {
            String[] parts = param.split(PARAMETER_NAME_VALUE_SEPARATOR);
            if (parts.length == 2) {
                runtimeParameters.put(parts[0], parts[1]);
            }
        }
    }
    // Check to see if the job configuration class implements specific interfaces.
    if (job instanceof NeedCallerDetails) {
        ((NeedCallerDetails) job).setUserSid(sid);
        ((NeedCallerDetails) job).setPrincipal(p);
    }
    if (job instanceof NeedAccumuloConnectionFactory) {
        ((NeedAccumuloConnectionFactory) job).setAccumuloConnectionFactory(this.connectionFactory);
    }
    if (job instanceof NeedAccumuloDetails) {
        ((NeedAccumuloDetails) job).setUsername(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getUsername());
        ((NeedAccumuloDetails) job).setPassword(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getPassword());
        ((NeedAccumuloDetails) job).setInstanceName(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getInstance());
        ((NeedAccumuloDetails) job).setZookeepers(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getZookeepers());
    }
    if (job instanceof NeedQueryLogicFactory) {
        ((NeedQueryLogicFactory) job).setQueryLogicFactory(this.queryLogicFactory);
    }
    if (job instanceof NeedQueryPersister) {
        ((NeedQueryPersister) job).setPersister(this.queryPersister);
    }
    if (job instanceof NeedQueryCache) {
        ((NeedQueryCache) job).setQueryCache(cache);
    }
    if (job instanceof NeedSecurityDomain) {
        ((NeedSecurityDomain) job).setSecurityDomain(this.jsseSecurityDomain);
    }
    // If this job is being restarted, then the jobId will be the same. The restart method
    // puts the id into the runtime parameters
    String id = runtimeParameters.get(JOB_ID);
    if (null == id) {
        id = UUID.randomUUID().toString();
    }
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    StringBuilder name = new StringBuilder().append(jobName).append("_sid_").append(sid).append("_id_").append(id);
    Job j;
    try {
        j = createJob(conf, name);
        job.initializeConfiguration(id, j, runtimeParameters, serverPrincipal);
    } catch (WebApplicationException waEx) {
        throw waEx;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.LOGIC_CONFIGURATION_ERROR, e);
        log.error(qe.getMessage(), e);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
    // Enforce that certain InputFormat classes are being used here.
    if (this.mapReduceConfiguration.isRestrictInputFormats()) {
        // Make sure that the job input format is in the list
        Class<? extends InputFormat<?, ?>> ifClass;
        try {
            ifClass = j.getInputFormatClass();
        } catch (ClassNotFoundException e1) {
            QueryException qe = new QueryException(DatawaveErrorCode.INPUT_FORMAT_CLASS_ERROR, e1);
            log.error(qe);
            response.addException(qe);
            throw new DatawaveWebApplicationException(qe, response);
        }
        if (!this.mapReduceConfiguration.getValidInputFormats().contains(ifClass)) {
            IllegalArgumentException e = new IllegalArgumentException("Invalid input format class specified. Must use one of " + this.mapReduceConfiguration.getValidInputFormats());
            QueryException qe = new QueryException(DatawaveErrorCode.INVALID_FORMAT, e);
            log.error(qe);
            response.addException(qe.getBottomQueryException());
            throw new DatawaveWebApplicationException(qe, response);
        }
    }
    try {
        j.submit();
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_START_ERROR, e);
        log.error(qe.getMessage(), qe);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
    JobID mapReduceJobId = j.getJobID();
    log.info("JOB ID: " + mapReduceJobId);
    // Create an entry in the state table
    boolean restarted = (runtimeParameters.get(JOB_ID) != null);
    try {
        if (!restarted) {
            mapReduceState.create(id, job.getHdfsUri(), job.getJobTracker(), job.getJobDir(), mapReduceJobId.toString(), job.getResultsDir(), parameters, jobName);
        } else {
            mapReduceState.addJob(id, mapReduceJobId.toString());
        }
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_STATE_PERSISTENCE_ERROR, e);
        log.error(qe);
        response.addException(qe.getBottomQueryException());
        try {
            j.killJob();
        } catch (IOException ioe) {
            QueryException qe2 = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_KILL_ERROR, ioe);
            response.addException(qe2);
        }
        throw new DatawaveWebApplicationException(qe, response);
    }
    response.setResult(id);
    return response;
}
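From the client side, parameters is the flat string parsed above. A hedged sketch of assembling the request with the standard JAX-RS 2 client (javax.ws.rs.client), assuming ';' for PARAMETER_SEPARATOR and ':' for PARAMETER_NAME_VALUE_SEPARATOR (consistent with the javadoc's semicolon-separated name:value pairs); the deployment URL, job name, and parameter names are hypothetical:

Client client = ClientBuilder.newClient();
Form form = new Form()
                .param("jobName", "BulkResultsJob") // hypothetical configuration name
                .param("parameters", "queryId:abc123;outputFormat:JSON"); // name:value pairs joined with ';'
String xml = client.target("https://host:8443/DataWave/MapReduce/submit") // hypothetical URL
                .request("application/xml")
                .post(Entity.form(form), String.class);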
Use of datawave.security.authorization.DatawavePrincipal in project datawave by NationalSecurityAgency.
From the class MapReduceStatePersisterBean, method addJob.
/**
 * Adds a new job to the history for this BulkResults id
 *
 * @param id
 *            bulk results id
 * @param mapReduceJobId
 *            map reduce job id
 * @throws QueryException
 *             if the job history entry cannot be persisted
 */
public void addJob(String id, String mapReduceJobId) throws QueryException {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal dp = (DatawavePrincipal) p;
        sid = dp.getShortName();
    }
    Connector c = null;
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
        tableCheck(c);
        // Not using a MultiTableBatchWriter here because it's not implemented yet
        // in Mock Accumulo.
        BatchWriterConfig bwCfg = new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1);
        try (BatchWriter tableWriter = c.createBatchWriter(TABLE_NAME, bwCfg);
                        BatchWriter indexWriter = c.createBatchWriter(INDEX_TABLE_NAME, bwCfg)) {
            Mutation m = new Mutation(id);
            m.put(sid, STATE + NULL + mapReduceJobId, new Value(MapReduceState.STARTED.toString().getBytes()));
            tableWriter.addMutation(m);
            Mutation i = new Mutation(mapReduceJobId);
            i.put(sid, id, NULL_VALUE);
            indexWriter.addMutation(i);
        }
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.BULK_RESULTS_ENTRY_ERROR, e);
        log.error(qe);
        throw qe;
    } finally {
        try {
            connectionFactory.returnConnection(c);
        } catch (Exception e) {
            log.error("Error returning connection", e);
        }
    }
}
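A companion sketch of reading the history back out of the state table, assuming the same constants (TABLE_NAME, STATE, NULL) and that NULL is the \0 separator used in the qualifier above; this is an illustration, not the project's query code:

Scanner scanner = c.createScanner(TABLE_NAME, new Authorizations());
scanner.setRange(Range.exact(id));
scanner.fetchColumnFamily(new Text(sid));
for (Map.Entry<Key, Value> entry : scanner) {
    String cq = entry.getKey().getColumnQualifier().toString();
    if (cq.startsWith(STATE + NULL)) {
        // qualifier is STATE + NULL + mapReduceJobId; value is the MapReduceState name
        String jobId = cq.substring((STATE + NULL).length());
        log.info("job " + jobId + " is in state " + entry.getValue());
    }
}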
Use of datawave.security.authorization.DatawavePrincipal in project datawave by NationalSecurityAgency.
From the class MapReduceStatePersisterBean, method create.
/**
 * Creates the initial state entry for a new map reduce job
 *
 * @param id
 *            map reduce id
 * @param hdfsUri
 *            HDFS URI for the cluster on which the job runs
 * @param jobTracker
 *            job tracker address for the job
 * @param workingDirectory
 *            map reduce job working directory
 * @param mapReduceJobId
 *            map reduce job id
 * @param resultsDirectory
 *            either HDFS directory name or some other location (i.e. table name)
 * @param runtimeParameters
 *            parameters
 * @param jobName
 *            name of the map reduce job configuration
 * @throws QueryException
 *             if the state entry cannot be persisted
 */
public void create(String id, String hdfsUri, String jobTracker, String workingDirectory, String mapReduceJobId, String resultsDirectory, String runtimeParameters, String jobName) throws QueryException {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal cp = (DatawavePrincipal) p;
        sid = cp.getShortName();
    }
    Connector c = null;
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
        tableCheck(c);
        // Not using a MultiTableBatchWriter here because it's not implemented yet
        // in Mock Accumulo.
        BatchWriterConfig bwCfg = new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1);
        try (BatchWriter tableWriter = c.createBatchWriter(TABLE_NAME, bwCfg);
                        BatchWriter indexWriter = c.createBatchWriter(INDEX_TABLE_NAME, bwCfg)) {
            Mutation m = new Mutation(id);
            m.put(sid, WORKING_DIRECTORY, workingDirectory);
            m.put(sid, HDFS, hdfsUri);
            m.put(sid, JT, jobTracker);
            m.put(sid, NAME, jobName);
            m.put(sid, RESULTS_LOCATION, resultsDirectory);
            m.put(sid, PARAMS, runtimeParameters);
            m.put(sid, STATE + NULL + mapReduceJobId, new Value(MapReduceState.STARTED.toString().getBytes()));
            tableWriter.addMutation(m);
            Mutation i = new Mutation(mapReduceJobId);
            i.put(sid, id, NULL_VALUE);
            indexWriter.addMutation(i);
        }
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.BULK_RESULTS_ENTRY_ERROR, e);
        log.error(qe);
        throw qe;
    } finally {
        try {
            connectionFactory.returnConnection(c);
        } catch (Exception e) {
            log.error("Error returning connection", e);
        }
    }
}
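Taken together, create() and addJob() leave one state-table row per id, with the caller's sid as the column family, plus an index row keyed by the Hadoop job id for reverse lookup. An illustrative layout (our shorthand, not project documentation; NULL stands for the \0 separator):

state table (TABLE_NAME):        row=id              cf=sid  cq=WORKING_DIRECTORY        value=workingDirectory
                                 row=id              cf=sid  cq=HDFS / JT / NAME / ...   value=corresponding setting
                                 row=id              cf=sid  cq=STATE NULL jobId         value=STARTED
index table (INDEX_TABLE_NAME):  row=mapReduceJobId  cf=sid  cq=id                       value=(empty)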