Usage of datawave.security.authorization.DatawavePrincipal in the project "datawave" by NationalSecurityAgency: class ModelBeanTest, method setup().
/**
 * Initializes the {@code ModelBean} under test: installs mocked collaborators via
 * Whitebox, creates an in-memory Accumulo instance and connector, builds the test
 * {@link DatawavePrincipal}, and unmarshals the two test models from classpath XML.
 *
 * @throws Exception
 *             if mock wiring, Accumulo setup, or JAXB unmarshalling fails
 */
@Before
public void setup() throws Exception {
    System.setProperty(NpeUtils.NPE_OU_PROPERTY, "iamnotaperson");
    System.setProperty("dw.metadatahelper.all.auths", "A,B,C,D");
    bean = new ModelBean();
    connectionFactory = createStrictMock(AccumuloConnectionFactory.class);
    ctx = createMock(EJBContext.class);
    cache = createMock(AccumuloTableCache.class);
    Whitebox.setInternalState(bean, EJBContext.class, ctx);
    Whitebox.setInternalState(bean, AccumuloConnectionFactory.class, connectionFactory);
    Whitebox.setInternalState(bean, AccumuloTableCache.class, cache);
    instance = new InMemoryInstance("test");
    connector = instance.getConnector("root", new PasswordToken(""));
    DatawaveUser user = new DatawaveUser(SubjectIssuerDNPair.of(userDN, issuerDN), UserType.USER, Arrays.asList(auths), null, null, 0L);
    principal = new DatawavePrincipal(Collections.singletonList(user));
    URL m1Url = ModelBeanTest.class.getResource("/ModelBeanTest_m1.xml");
    URL m2Url = ModelBeanTest.class.getResource("/ModelBeanTest_m2.xml");
    // Renamed from "ctx": the original local shadowed the EJBContext mock
    // field assigned above, which invited confusion between the two.
    JAXBContext jaxbContext = JAXBContext.newInstance(datawave.webservice.model.Model.class);
    Unmarshaller u = jaxbContext.createUnmarshaller();
    MODEL_ONE = (datawave.webservice.model.Model) u.unmarshal(m1Url);
    MODEL_TWO = (datawave.webservice.model.Model) u.unmarshal(m2Url);
    Logger.getLogger(ModelBean.class).setLevel(Level.OFF);
    // Mock only currentTimeMillis() so other System static methods keep working.
    PowerMock.mockStatic(System.class, System.class.getMethod("currentTimeMillis"));
}
Usage of datawave.security.authorization.DatawavePrincipal in the project "datawave" by NationalSecurityAgency: class BulkResultsJobConfiguration, method setupJob().
/**
 * Common MapReduce setup methods: configures the input format, scan ranges,
 * iterators, Accumulo credentials, and the Spring/CDI JVM options passed to
 * the child map tasks.
 *
 * @param job
 * the job to configure
 * @param jobDir
 * the directory in HDFS where aux job files are stored
 * @param queryConfig
 * the query configuration for this job's query input format
 * @param logic
 * the query logic for this job's query input format
 * @param base64EncodedQuery
 * the query, encoded using Base64
 * @param queryImplClass
 * the class of query in {@code base64EncodedQuery}
 * @param runtimeQueryAuthorizations
 * the authorizations to use for input format query scanners
 * @param serverPrincipal
 * the {@link Principal} of the server running DATAWAVE
 * @throws IOException
 * if setting the ranges or working directory on the job fails
 * @throws AccumuloSecurityException
 * if the Accumulo input-format credentials are rejected
 */
private void setupJob(Job job, Path jobDir, GenericQueryConfiguration queryConfig, QueryLogic<?> logic, String base64EncodedQuery, Class<? extends Query> queryImplClass, Set<Authorizations> runtimeQueryAuthorizations, DatawavePrincipal serverPrincipal) throws IOException, AccumuloSecurityException {
job.setInputFormatClass(BulkInputFormat.class);
QueryData queryData = null;
Collection<Range> ranges = new ArrayList<>();
// The query must already be planned before ranges can be extracted from it.
if (!queryConfig.canRunQuery()) {
throw new UnsupportedOperationException("Unable to run query");
}
// Collect scan ranges from every planned query. After the loop, queryData
// refers to the LAST entry; its iterator settings are applied to the job below.
Iterator<QueryData> iter = queryConfig.getQueries();
while (iter.hasNext()) {
queryData = iter.next();
ranges.addAll(queryData.getRanges());
}
// No ranges means there is nothing to scan — surface this as "no results".
if (ranges.isEmpty()) {
throw new NoResultsException(new QueryException("No scan ranges produced for query."));
}
BulkInputFormat.setWorkingDirectory(job.getConfiguration(), jobDir.toString());
// Copy the information from the GenericQueryConfiguration to the job.
BulkInputFormat.setRanges(job, ranges);
for (IteratorSetting cfg : queryData.getSettings()) {
BulkInputFormat.addIterator(job.getConfiguration(), cfg);
}
BulkInputFormat.setZooKeeperInstance(job.getConfiguration(), this.instanceName, this.zookeepers);
// The first authorization set is handed to the scanner directly; each
// remaining set is enforced with a stacked visibility filter iterator below.
Iterator<Authorizations> authsIter = (runtimeQueryAuthorizations == null || runtimeQueryAuthorizations.isEmpty()) ? null : runtimeQueryAuthorizations.iterator();
Authorizations auths = (authsIter == null) ? null : authsIter.next();
BulkInputFormat.setInputInfo(job, this.user, this.password.getBytes(), logic.getTableName(), auths);
// One ConfigurableVisibilityFilter per additional authorization set, at
// increasing iterator priorities starting from 10.
for (int priority = 10; authsIter != null && authsIter.hasNext(); ++priority) {
IteratorSetting cfg = new IteratorSetting(priority, ConfigurableVisibilityFilter.class);
cfg.setName("visibilityFilter" + priority);
cfg.addOption(ConfigurableVisibilityFilter.AUTHORIZATIONS_OPT, authsIter.next().toString());
BulkInputFormat.addIterator(job.getConfiguration(), cfg);
}
// Make the encoded query, its implementation class, and its logic name
// available to the mappers through the job configuration.
job.getConfiguration().set(BulkResultsFileOutputMapper.QUERY_LOGIC_SETTINGS, base64EncodedQuery);
job.getConfiguration().set(BulkResultsFileOutputMapper.QUERY_IMPL_CLASS, queryImplClass.getName());
job.getConfiguration().set(BulkResultsFileOutputMapper.QUERY_LOGIC_NAME, logic.getLogicName());
job.getConfiguration().set(BulkResultsFileOutputMapper.APPLICATION_CONTEXT_PATH, "classpath*:datawave/configuration/spring/CDIBeanPostProcessor.xml," + "classpath*:datawave/query/*QueryLogicFactory.xml," + "classpath*:/MarkingFunctionsContext.xml," + "classpath*:/MetadataHelperContext.xml," + "classpath*:/CacheContext.xml");
job.getConfiguration().set(BulkResultsFileOutputMapper.SPRING_CONFIG_LOCATIONS, job.getConfiguration().get(BulkResultsFileOutputMapper.APPLICATION_CONTEXT_PATH));
// Tell the Mapper/Reducer to use a specific set of application context files when doing Spring-CDI integration.
String cdiOpts = "'-Dcdi.spring.configs=" + job.getConfiguration().get(BulkResultsFileOutputMapper.APPLICATION_CONTEXT_PATH) + "'";
// Pass our server DN along to the child VM so it can be made available for injection.
cdiOpts += " '-Dserver.principal=" + encodePrincipal(serverPrincipal) + "'";
cdiOpts += " '-Dcaller.principal=" + encodePrincipal((DatawavePrincipal) principal) + "'";
// Append the CDI options to any map-task JVM options already configured.
String javaOpts = job.getConfiguration().get("mapreduce.map.java.opts");
javaOpts = (javaOpts == null) ? cdiOpts : (javaOpts + " " + cdiOpts);
job.getConfiguration().set("mapreduce.map.java.opts", javaOpts);
job.setMapOutputKeyClass(Key.class);
job.setMapOutputValueClass(Value.class);
job.setWorkingDirectory(jobDir);
}
Usage of datawave.security.authorization.DatawavePrincipal in the project "datawave" by NationalSecurityAgency: class AccumuloConnectionFactoryBean, method getCurrentUserDN().
/**
 * Returns the subject DN of the calling user, or {@code null} when there is no
 * caller principal or it is not a {@link DatawavePrincipal}.
 *
 * @return the caller's subject DN, or {@code null} if unavailable
 */
public String getCurrentUserDN() {
    String currentUserDN = null;
    Principal p = context.getCallerPrincipal();
    // instanceof is false for null, so a separate null check is redundant.
    if (p instanceof DatawavePrincipal) {
        currentUserDN = ((DatawavePrincipal) p).getUserDN().subjectDN();
    }
    return currentUserDN;
}
Usage of datawave.security.authorization.DatawavePrincipal in the project "datawave" by NationalSecurityAgency: class AccumuloConnectionFactoryBean, method getCurrentProxyServers().
/**
 * Returns the list of proxy servers from the calling principal, or {@code null}
 * when there is no caller principal or it is not a {@link DatawavePrincipal}.
 *
 * @return the caller's proxy servers, or {@code null} if unavailable
 */
public Collection<String> getCurrentProxyServers() {
    List<String> currentProxyServers = null;
    Principal p = context.getCallerPrincipal();
    // instanceof is false for null, so a separate null check is redundant.
    if (p instanceof DatawavePrincipal) {
        currentProxyServers = ((DatawavePrincipal) p).getProxyServers();
    }
    return currentProxyServers;
}
Usage of datawave.security.authorization.DatawavePrincipal in the project "datawave" by NationalSecurityAgency: class AbstractFunctionalQuery, method querySetUp().
/**
 * Per-test initialization: configures the query logic and the count logic under
 * test, obtains the test authorizations via {@code testInit()}, and builds the
 * {@link DatawavePrincipal} and test harness used by each functional query test.
 *
 * @throws IOException
 *             if query-logic configuration fails
 */
@Before
public void querySetUp() throws IOException {
log.debug("--------- querySetUp ---------");
this.logic = createQueryLogic();
QueryTestTableHelper.configureLogicToScanTables(this.logic);
this.logic.setFullTableScanEnabled(false);
this.logic.setIncludeDataTypeAsField(true);
this.logic.setDateIndexHelperFactory(new DateIndexHelperFactory());
this.logic.setMarkingFunctions(new Default());
this.logic.setMetadataHelperFactory(new MetadataHelperFactory());
this.logic.setQueryPlanner(new DefaultQueryPlanner());
this.logic.setResponseObjectFactory(new DefaultResponseObjectFactory());
// Timing details are collected and logged to help diagnose slow test queries.
this.logic.setCollectTimingDetails(true);
this.logic.setLogTimingDetails(true);
this.logic.setMinimumSelectivity(0.03D);
this.logic.setMaxIndexScanTimeMillis(5000);
// count logic
countLogic.setIncludeDataTypeAsField(true);
countLogic.setFullTableScanEnabled(false);
countLogic.setDateIndexHelperFactory(new DateIndexHelperFactory());
countLogic.setMarkingFunctions(new Default());
countLogic.setMetadataHelperFactory(new MetadataHelperFactory());
countLogic.setQueryPlanner(new DefaultQueryPlanner());
countLogic.setResponseObjectFactory(new DefaultResponseObjectFactory());
QueryTestTableHelper.configureLogicToScanTables(countLogic);
// init must set auths
testInit();
Assert.assertNotNull(this.auths);
authSet.clear();
authSet.add(this.auths);
// Build a single-user principal whose Accumulo auths mirror this.auths.
SubjectIssuerDNPair dn = SubjectIssuerDNPair.of("userDn", "issuerDn");
DatawaveUser user = new DatawaveUser(dn, DatawaveUser.UserType.USER, Sets.newHashSet(this.auths.toString().split(",")), null, null, -1L);
this.principal = new DatawavePrincipal(Collections.singleton(user));
this.testHarness = new QueryLogicTestHarness(this);
}
End of aggregated usage examples.