Usage of datawave.security.authorization.DatawavePrincipal in the project datawave by NationalSecurityAgency: class MapReduceStatePersisterBean, method remove.
/**
 * Removes Bulk Results information and related directory in HDFS for the given job id.
 * <p>
 * Deletes the state columns for the calling user from the main table and the
 * corresponding job-id index entries from the index table in a single pass.
 *
 * @param id
 *            bulk results id
 * @throws NotFoundQueryException
 *             if no record, or more than one record, matches {@code id}
 * @throws QueryException
 *             if the Accumulo mutations cannot be written
 */
public void remove(String id) throws QueryException {
    // Find out who/what called this method; prefer the short name when the
    // caller is a DatawavePrincipal so column qualifiers match what was written.
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal dp = (DatawavePrincipal) p;
        sid = dp.getShortName();
    }
    MapReduceInfoResponseList results = findById(id);
    if (null == results)
        throw new NotFoundQueryException(DatawaveErrorCode.NO_QUERY_OBJECT_MATCH);
    // An empty result list would otherwise fall through to get(0) and throw a
    // raw IndexOutOfBoundsException; report it the same way as a null result.
    if (results.getResults().isEmpty())
        throw new NotFoundQueryException(DatawaveErrorCode.NO_QUERY_OBJECT_MATCH);
    if (results.getResults().size() > 1)
        throw new NotFoundQueryException(DatawaveErrorCode.TOO_MANY_QUERY_OBJECT_MATCHES);
    else {
        MapReduceInfoResponse r = results.getResults().get(0);
        // Build deletes for every column written for this job, plus one index
        // mutation per MapReduce job execution.
        List<Mutation> indexEntries = new ArrayList<>();
        Mutation m = new Mutation(r.getId());
        m.putDelete(sid, WORKING_DIRECTORY);
        m.putDelete(sid, HDFS);
        m.putDelete(sid, JT);
        m.putDelete(sid, NAME);
        m.putDelete(sid, RESULTS_LOCATION);
        m.putDelete(sid, PARAMS);
        for (JobExecution job : r.getJobExecutions()) {
            m.putDelete(sid, STATE + NULL + job.getMapReduceJobId());
            Mutation i = new Mutation(job.getMapReduceJobId());
            i.putDelete(sid, r.getId());
            indexEntries.add(i);
        }
        Connector c = null;
        try {
            Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
            c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
            tableCheck(c);
            // using BatchWriter instead of MultiTableBatchWriter because Mock CB does not support
            // MultiTableBatchWriter
            BatchWriterConfig bwCfg = new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1);
            try (BatchWriter tableWriter = c.createBatchWriter(TABLE_NAME, bwCfg);
                 BatchWriter indexWriter = c.createBatchWriter(INDEX_TABLE_NAME, bwCfg)) {
                tableWriter.addMutation(m);
                for (Mutation i : indexEntries) {
                    indexWriter.addMutation(i);
                }
            }
        } catch (RuntimeException re) {
            throw re;
        } catch (Exception e) {
            QueryException qe = new QueryException(DatawaveErrorCode.JOB_STATE_UPDATE_ERROR, e, MessageFormat.format("job_id: {0}", id));
            log.error(qe);
            throw new QueryException(qe);
        } finally {
            try {
                connectionFactory.returnConnection(c);
            } catch (Exception e) {
                // Log message previously said "Error creating query", which did
                // not describe this failure; we are returning a connection here.
                log.error("Error returning connection to factory", e);
            }
        }
    }
}
Usage of datawave.security.authorization.DatawavePrincipal in the project datawave by NationalSecurityAgency: class MapReduceBeanTest, method setup.
@Before
public void setup() throws Exception {
    // Configure the JVM-wide properties the bean reads: mark this process as a
    // non-person entity and seed the full metadata auth set.
    System.setProperty(NpeUtils.NPE_OU_PROPERTY, "iamnotaperson");
    System.setProperty("dw.metadatahelper.all.auths", "A,B,C,D");

    // Build a caller principal that carries the AuthorizedUser role.
    SubjectIssuerDNPair dnPair = SubjectIssuerDNPair.of(userDN, "CN=ca, OU=acme");
    DatawaveUser authorizedUser = new DatawaveUser(dnPair, UserType.USER, Arrays.asList(auths), Collections.singleton("AuthorizedUser"), null, 0L);
    principal = new DatawavePrincipal(Collections.singletonList(authorizedUser));

    // Load the job definitions from the test Spring context and inject the
    // resulting configuration into the bean under test.
    applicationContext = new ClassPathXmlApplicationContext("classpath:*datawave/mapreduce/MapReduceJobs.xml");
    Whitebox.setInternalState(bean, MapReduceConfiguration.class, applicationContext.getBean(MapReduceConfiguration.class));
}
Usage of datawave.security.authorization.DatawavePrincipal in the project datawave by NationalSecurityAgency: class MapReduceBeanTest, method testInvalidUserAuthorization.
/**
 * A caller whose roles do not include AuthorizedUser must be rejected with an
 * {@link UnauthorizedException} when submitting a job.
 */
@Test(expected = UnauthorizedException.class)
public void testInvalidUserAuthorization() throws Exception {
    // Create principal that does not have AuthorizedUser role
    SubjectIssuerDNPair dnPair = SubjectIssuerDNPair.of(userDN, "CN=ca, OU=acme");
    DatawaveUser adminOnlyUser = new DatawaveUser(dnPair, UserType.USER, Arrays.asList(auths), Collections.singleton("Administrator"), null, 0L);
    DatawavePrincipal adminOnlyPrincipal = new DatawavePrincipal(Collections.singletonList(adminOnlyUser));
    expect(ctx.getCallerPrincipal()).andReturn(adminOnlyPrincipal);

    replayAll();
    bean.submit("TestJob", "queryId:1243");
    verifyAll();
}
Usage of datawave.security.authorization.DatawavePrincipal in the project datawave by NationalSecurityAgency: class MapReduceBeanTest, method testNoResults.
/**
 * When job configuration initialization reports no ranges, submit must surface
 * a {@link NoResultsException} to the caller.
 */
@Test(expected = NoResultsException.class)
public void testNoResults() throws Exception {
    Job jobMock = createMock(Job.class);
    bean.setJob(jobMock);

    // Register a configuration whose initialization always fails with NO_RANGES.
    MapReduceJobConfiguration failingConfig = new MapReduceJobConfiguration() {
        @Override
        public final void initializeConfiguration(String jobId, Job job, Map<String, String> runtimeParameters, DatawavePrincipal serverPrincipal) throws Exception {
            throw new NoResultsException(new QueryException(DatawaveErrorCode.NO_RANGES));
        }
    };
    MapReduceConfiguration mapReduceConfig = applicationContext.getBean(MapReduceConfiguration.class);
    mapReduceConfig.getJobConfiguration().clear();
    mapReduceConfig.getJobConfiguration().put("TestJob", failingConfig);

    // BulkResultsJob uses AccumuloInputFormat, MapReduceJobs.xml in
    // src/test/resources specifies something else
    expect(ctx.getCallerPrincipal()).andReturn(principal);

    replayAll();
    bean.submit("TestJob", "queryId:1243;format:XML");
    verifyAll();
}
Usage of datawave.security.authorization.DatawavePrincipal in the project datawave by NationalSecurityAgency: class MapReduceStatePersisterTest, method testDontFindSomeoneElsesJob.
/**
 * Looking up a job id as a different caller must return an empty result list:
 * state entries are scoped per user and are not visible across principals.
 */
@Test
public void testDontFindSomeoneElsesJob() throws Exception {
    // Seed the tables with entries owned by the default test principal.
    testPersistentCreate();
    PowerMock.resetAll();

    // Switch the caller to a different identity before querying.
    DatawaveUser otherUser = new DatawaveUser(SubjectIssuerDNPair.of("CN=Gal Some Other sogal, OU=acme", "CN=ca, OU=acme"), UserType.USER, Arrays.asList(auths), null, null, 0L);
    principal = new DatawavePrincipal(Collections.singletonList(otherUser));
    EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal);

    HashMap<String, String> trackingMap = new HashMap<>();
    expect(connectionFactory.getTrackingMap(EasyMock.anyObject())).andReturn(trackingMap);
    expect(connectionFactory.getConnection(EasyMock.eq(AccumuloConnectionFactory.Priority.ADMIN), EasyMock.eq(trackingMap))).andReturn(connection);
    connectionFactory.returnConnection(connection);

    replayAll();
    MapReduceInfoResponseList result = bean.findById(id);
    verifyAll();
    assertEquals(0, result.getResults().size());
}
Aggregations