use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class JexlStringBuildingVisitor method buildQuery.
/**
 * Build a String that is the equivalent JEXL query.
 *
 * @param script
 *            an ASTJexlScript
 * @param sortDedupeChildren
 *            whether or not to sort the child nodes and dedupe them. Note: only siblings (children with the same parent node) will be deduped.
 *            Flatten beforehand for maximum deduping.
 * @return the rebuilt query string
 */
public static String buildQuery(JexlNode script, boolean sortDedupeChildren) {
    JexlStringBuildingVisitor visitor = new JexlStringBuildingVisitor(sortDedupeChildren);
    String s = null;
    try {
        StringBuilder sb = (StringBuilder) script.jjtAccept(visitor, new StringBuilder());
        s = sb.toString();
        try {
            // Verify that the rebuilt string is still parseable JEXL
            JexlASTHelper.parseJexlQuery(s);
        } catch (ParseException e) {
            log.error("Could not parse JEXL AST after performing transformations to run the query", e);
            for (String line : PrintingVisitor.formattedQueryStringList(script)) {
                log.error(line);
            }
            log.error("");
            QueryException qe = new QueryException(DatawaveErrorCode.QUERY_EXECUTION_ERROR, e);
            throw new DatawaveFatalQueryException(qe);
        }
    } catch (StackOverflowError e) {
        // Propagate stack overflows (e.g. from excessively deep query trees) to the caller
        throw e;
    }
    return s;
}
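For reference, a minimal usage sketch of buildQuery. The normalize helper and the query string are hypothetical, and the parser package (commons-jexl2, with JexlASTHelper.parseJexlQuery returning an ASTJexlScript and throwing the same ParseException caught above) is assumed from the surrounding code:

import org.apache.commons.jexl2.parser.ASTJexlScript;
import org.apache.commons.jexl2.parser.ParseException;

// Hypothetical helper: parse a query string into a JEXL AST, then rebuild the
// equivalent string, sorting and deduping sibling nodes along the way.
static String normalize(String query) throws ParseException {
    ASTJexlScript script = JexlASTHelper.parseJexlQuery(query);
    return JexlStringBuildingVisitor.buildQuery(script, true);
}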
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class ExpandCompositeTerms method expandTerms.
/**
 * Expand all nodes which have multiple dataTypes for the field.
 *
 * @param config
 *            Configuration parameters relevant to our query
 * @param script
 *            The JEXL node representing the query
 * @return An expanded version of the passed-in script containing composite nodes
 */
@SuppressWarnings("unchecked")
public static <T extends JexlNode> T expandTerms(ShardQueryConfiguration config, T script) {
    ExpandCompositeTerms visitor = new ExpandCompositeTerms(config);
    // need to flatten the tree so all AND nodes end up at the same level
    script = TreeFlatteningRebuildingVisitor.flatten(script);
    if (null == visitor.config.getCompositeToFieldMap()) {
        QueryException qe = new QueryException(DatawaveErrorCode.DATATYPESFORINDEXFIELDS_MULTIMAP_MISSING);
        throw new DatawaveFatalQueryException(qe);
    }
    return (T) script.jjtAccept(visitor, new ExpandData());
}
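A brief usage sketch; config and script are assumed to come from existing query setup and are not defined here:

// Throws DatawaveFatalQueryException (wrapping DatawaveErrorCode.DATATYPESFORINDEXFIELDS_MULTIMAP_MISSING)
// if the composite-to-field multimap has not been populated on the config.
ASTJexlScript withComposites = ExpandCompositeTerms.expandTerms(config, script);

Note that the flattening pass happens inside expandTerms, so callers do not need to flatten the tree beforehand.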
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class ExpandMultiNormalizedTerms method expandTerms.
/**
 * Expand all nodes which have multiple dataTypes for the field.
 *
 * @param config
 *            Configuration parameters relevant to our query
 * @param helper
 *            The metadata helper used to look up datatypes for the query fields
 * @param script
 *            The JEXL node representing the query
 * @return An expanded version of the passed-in script
 */
@SuppressWarnings("unchecked")
public static <T extends JexlNode> T expandTerms(ShardQueryConfiguration config, MetadataHelper helper, T script) {
    ExpandMultiNormalizedTerms visitor = new ExpandMultiNormalizedTerms(config, helper);
    if (null == visitor.config.getQueryFieldsDatatypes()) {
        QueryException qe = new QueryException(DatawaveErrorCode.DATATYPESFORINDEXFIELDS_MULTIMAP_MISSING);
        throw new DatawaveFatalQueryException(qe);
    }
    script = TreeFlatteningRebuildingVisitor.flatten(script);
    return (T) script.jjtAccept(visitor, null);
}
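A matching sketch for the multi-normalized expansion; config, metadataHelper, and script are assumed to exist from query setup:

// metadataHelper supplies the per-field datatypes used to normalize each term
ASTJexlScript normalized = ExpandMultiNormalizedTerms.expandTerms(config, metadataHelper, script);

Both expanders fail fast with DATATYPESFORINDEXFIELDS_MULTIMAP_MISSING when their required multimap is absent; the only structural difference is that this visitor flattens the tree after the null check rather than before.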
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class FetchDataTypesVisitor method visit.
@Override
public Object visit(ASTFunctionNode node, Object data) {
    JexlArgumentDescriptor desc = JexlFunctionArgumentDescriptorFactory.F.getArgumentDescriptor(node);
    Multimap<String, Type<?>> mm = (Multimap<String, Type<?>>) data;
    for (String field : desc.fields(this.helper, this.datatypeFilter)) {
        final String fieldName = JexlASTHelper.deconstructIdentifier(field);
        try {
            Set<Type<?>> dataTypesForField = Collections.emptySet();
            if (useCache) {
                Tuple2<String, Set<String>> cacheKey = new Tuple2<>(fieldName, datatypeFilter);
                Set<Type<?>> types = typeCache.getIfPresent(cacheKey);
                if (null == types) {
                    dataTypesForField = this.helper.getDatatypesForField(fieldName, datatypeFilter);
                    typeCache.put(cacheKey, dataTypesForField);
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug("using cached types for " + fieldName + " " + datatypeFilter);
                    }
                    dataTypesForField = types;
                }
            } else {
                dataTypesForField = this.helper.getDatatypesForField(fieldName, datatypeFilter);
            }
            mm.putAll(field, dataTypesForField);
        } catch (TableNotFoundException e) {
            QueryException qe = new QueryException(DatawaveErrorCode.METADATA_TABLE_FETCH_ERROR, e);
            log.error(qe);
            throw new DatawaveFatalQueryException(qe);
        } catch (InstantiationException | IllegalAccessException e) {
            QueryException qe = new QueryException(DatawaveErrorCode.METADATA_TABLE_RECORD_FETCH_ERROR, e);
            log.error(qe);
            throw new DatawaveFatalQueryException(qe);
        }
    }
    return data;
}
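A sketch of driving this visitor over a query tree. The constructor arguments are an assumption for illustration (the real class may expose a different entry point); HashMultimap is Guava's implementation and matches the Multimap cast inside visit:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

// Assumed construction: the visitor needs the MetadataHelper and datatype filter it uses in visit()
FetchDataTypesVisitor visitor = new FetchDataTypesVisitor(metadataHelper, datatypeFilter);
Multimap<String, Type<?>> fieldTypes = HashMultimap.create();
// The multimap passed as 'data' is populated with the datatypes found for each function field
script.jjtAccept(visitor, fieldTypes);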
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class QueryExecutorBeanTest method testCloseActuallyCloses.
@SuppressWarnings("unchecked")
@Test(timeout = 5000)
public void testCloseActuallyCloses() throws Exception {
QueryImpl q = createNewQuery();
final MultivaluedMap<String, String> queryParameters = createNewQueryParameterMap();
queryParameters.putSingle(QueryParameters.QUERY_LOGIC_NAME, "EventQueryLogic");
final Thread createQuery = new Thread(() -> {
try {
bean.createQuery("EventQueryLogic", queryParameters);
} catch (Exception e) {
// ok if we fail the call
log.debug("createQuery terminated with " + e);
}
});
final Throwable[] createQueryException = { null };
createQuery.setUncaughtExceptionHandler((t, e) -> createQueryException[0] = e);
@SuppressWarnings("rawtypes") QueryLogic logic = createMock(BaseQueryLogic.class);
DatawaveUser user = new DatawaveUser(SubjectIssuerDNPair.of(userDN, "<CN=MY_CA, OU=MY_SUBDIVISION, OU=MY_DIVISION, O=ORG, C=US>"), UserType.USER, Arrays.asList(auths), null, null, 0L);
DatawavePrincipal principal = new DatawavePrincipal(Collections.singletonList(user));
principal.getShortName();
String[] dns = principal.getDNs();
Arrays.sort(dns);
List<String> dnList = Arrays.asList(dns);
InMemoryInstance instance = new InMemoryInstance();
Connector c = instance.getConnector("root", new PasswordToken(""));
MultivaluedMap<String, String> optionalParameters = createNewQueryParameters(q, queryParameters);
PowerMock.resetAll();
EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal).anyTimes();
EasyMock.expect(logic.getAuditType(null)).andReturn(AuditType.NONE);
EasyMock.expect(persister.create(principal.getUserDN().subjectDN(), dnList, Whitebox.getInternalState(bean, SecurityMarking.class), queryLogicName, Whitebox.getInternalState(bean, QueryParameters.class), optionalParameters)).andReturn(q);
EasyMock.expect(persister.findById(EasyMock.anyString())).andReturn(null).anyTimes();
EasyMock.expect(connectionFactory.getTrackingMap(anyObject())).andReturn(Maps.newHashMap()).anyTimes();
BaseQueryMetric metric = new QueryMetricFactoryImpl().createMetric();
metric.populate(q);
EasyMock.expectLastCall();
metric.setQueryType(RunningQuery.class.getSimpleName());
metric.setLifecycle(Lifecycle.DEFINED);
System.out.println(metric);
Set<Prediction> predictions = new HashSet<>();
predictions.add(new Prediction("source", 1));
EasyMock.expect(predictor.predict(metric)).andReturn(predictions);
connectionRequestBean.requestBegin(q.getId().toString());
EasyMock.expectLastCall();
EasyMock.expect(connectionFactory.getConnection(eq("connPool1"), anyObject(), anyObject())).andReturn(c).anyTimes();
connectionRequestBean.requestEnd(q.getId().toString());
EasyMock.expectLastCall();
connectionFactory.returnConnection(c);
EasyMock.expectLastCall();
EasyMock.expect(queryLogicFactory.getQueryLogic(queryLogicName, principal)).andReturn(logic);
EasyMock.expect(logic.getRequiredQueryParameters()).andReturn(Collections.emptySet());
EasyMock.expect(logic.getConnectionPriority()).andReturn(AccumuloConnectionFactory.Priority.NORMAL).atLeastOnce();
EasyMock.expect(logic.containsDNWithAccess(dnList)).andReturn(true);
EasyMock.expect(logic.getMaxPageSize()).andReturn(0);
EasyMock.expect(logic.getAuditType(q)).andReturn(AuditType.NONE);
EasyMock.expect(logic.getConnPoolName()).andReturn("connPool1");
EasyMock.expect(logic.getResultLimit(eq(q.getDnList()))).andReturn(-1L).anyTimes();
EasyMock.expect(logic.getMaxResults()).andReturn(-1L).anyTimes();
EasyMock.expect(connectionRequestBean.cancelConnectionRequest(q.getId().toString(), principal)).andReturn(false).anyTimes();
connectionFactory.returnConnection(EasyMock.isA(Connector.class));
final AtomicBoolean initializeLooping = new AtomicBoolean(false);
// During initialize, mark that we get here, and then sleep
final IAnswer<GenericQueryConfiguration> initializeAnswer = () -> {
initializeLooping.set(true);
try {
while (true) {
Thread.sleep(1000);
log.debug("Initialize: woke up");
}
} catch (InterruptedException e) {
throw new QueryException("EXPECTED EXCEPTION: initialize interrupted");
}
};
EasyMock.expect(logic.initialize(anyObject(Connector.class), anyObject(Query.class), anyObject(Set.class))).andAnswer(initializeAnswer);
EasyMock.expect(logic.getCollectQueryMetrics()).andReturn(Boolean.FALSE);
// On close, interrupt the thread to simulate the ScannerFactory cleaning up
final IAnswer<Object> closeAnswer = () -> {
if (null != createQuery) {
log.debug("createQuery thread is not null. interrupting");
createQuery.interrupt();
} else {
log.debug("createQuery thread is null. not interrupting");
}
return null;
};
logic.close();
EasyMock.expectLastCall().andAnswer(closeAnswer).anyTimes();
// Make the QueryLogic mock not threadsafe, otherwise it will be blocked infinitely
// trying to get the lock on the infinite loop
EasyMock.makeThreadSafe(logic, false);
metrics.updateMetric(EasyMock.isA(QueryMetric.class));
PowerMock.replayAll();
try {
createQuery.start();
// Wait for the create call to get to initialize
while (!initializeLooping.get()) {
if (!createQuery.isAlive() && !initializeLooping.get()) {
Assert.fail("createQuery thread died before reaching initialize: " + createQueryException[0]);
}
Thread.sleep(50);
}
// initialize has not completed yet so it will not appear in the cache
Object cachedRunningQuery = cache.get(q.getId().toString());
Assert.assertNull(cachedRunningQuery);
Pair<QueryLogic<?>, Connector> pair = qlCache.poll(q.getId().toString());
Assert.assertNotNull(pair);
Assert.assertEquals(logic, pair.getFirst());
Assert.assertEquals(c, pair.getSecond());
// Have to add these back because poll was destructive
qlCache.add(q.getId().toString(), principal.getShortName(), pair.getFirst(), pair.getSecond());
// Call close
bean.close(q.getId().toString());
// Make sure that it's gone from the qlCache
pair = qlCache.poll(q.getId().toString());
Assert.assertNull("Still found an entry in the qlCache: " + pair, pair);
// Should have already joined by now, but just to be sure
createQuery.join();
} finally {
if (null != createQuery && createQuery.isAlive()) {
createQuery.interrupt();
}
}
}