use of datawave.webservice.query.exception.BadRequestQueryException in project datawave by NationalSecurityAgency.
the class ContentFunctionsDescriptor method getArgumentDescriptor.
@Override
public ContentJexlArgumentDescriptor getArgumentDescriptor(ASTFunctionNode node) {
    FunctionJexlNodeVisitor fvis = new FunctionJexlNodeVisitor();
    fvis.visit(node, null);
    Class<?> functionClass = (Class<?>) ArithmeticJexlEngines.functions().get(fvis.namespace());
    if (!CONTENT_FUNCTION_NAMESPACE.equals(node.jjtGetChild(0).image)) {
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.JEXLNODEDESCRIPTOR_NAMESPACE_UNEXPECTED,
                        MessageFormat.format("Class: {0}, Namespace: {1}", this.getClass().getSimpleName(), node.jjtGetChild(0).image));
        throw new IllegalArgumentException(qe);
    }
    if (!functionClass.equals(ContentFunctions.class)) {
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.JEXLNODEDESCRIPTOR_NODE_FOR_FUNCTION,
                        MessageFormat.format("Class: {0}, Function: {1}", this.getClass().getSimpleName(), functionClass));
        throw new IllegalArgumentException(qe);
    }
    return new ContentJexlArgumentDescriptor(node, fvis.namespace(), fvis.name(), fvis.args());
}
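For orientation, a hedged example of the namespace distinction this guard enforces; the function and field names below are illustrative, not taken from the snippet above:

// Illustrative only: a query fragment in the content namespace versus one that is not.
// A function node parsed from the first string would pass the CONTENT_FUNCTION_NAMESPACE
// check above; a node from the second would hit the IllegalArgumentException path.
String contentFunction = "content:phrase(termOffsetMap, 'quick', 'brown')";
String otherNamespace = "other:phrase(termOffsetMap, 'quick', 'brown')";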
use of datawave.webservice.query.exception.BadRequestQueryException in project datawave-query-metric-service by NationalSecurityAgency.
the class QueryMetricTest method testSetError.
@Test
public void testSetError() {
    BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.FIELDS_NOT_IN_DATA_DICTIONARY, "test");
    Exception e = new Exception(qe);
    queryMetric.setError(e);
    assertEquals("The query contained fields which do not exist in the data dictionary for any specified datatype. test", queryMetric.getErrorMessage());
    assertEquals("400-16", queryMetric.getErrorCode());
    queryMetric.setErrorCode("");
    Throwable t = new Throwable("non-datawave error");
    queryMetric.setError(t);
    assertEquals("non-datawave error", queryMetric.getErrorMessage());
    assertEquals("", queryMetric.getErrorCode());
}
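The test covers both branches of setError: an exception whose cause chain contains a DataWave QueryException (error code and message are taken from it) and a plain Throwable (message only, error code left alone). A minimal sketch of that kind of cause-chain inspection, assuming QueryException exposes getErrorCode(); this is not the actual QueryMetric implementation:

// Sketch only: locate the first QueryException in the cause chain and report its
// error code and message; a plain throwable yields its own message and no code.
static String[] extractErrorInfo(Throwable t) {
    for (Throwable cause = t; cause != null; cause = cause.getCause()) {
        if (cause instanceof QueryException) {
            QueryException qe = (QueryException) cause;
            return new String[] {qe.getErrorCode(), qe.getMessage()};
        }
    }
    return new String[] {"", t.getMessage()};
}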
use of datawave.webservice.query.exception.BadRequestQueryException in project datawave by NationalSecurityAgency.
the class EdgeTableRangeBuildingVisitor method visit.
/*
 * This is treated as the root node of the tree; it can only have one child.
 *
 * The job of this node is to take the results of its child and create the visitation context to be returned.
 */
public Object visit(ASTJexlScript node, Object data) {
    int numChildren = node.jjtGetNumChildren();
    if (numChildren != 1) {
        log.error("JexlScript node had an unexpected number of children: " + numChildren);
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.NODE_PROCESSING_ERROR);
        throw new RuntimeException(qe);
    }
    @SuppressWarnings("unchecked")
    List<? extends EdgeContext> context = (List<? extends EdgeContext>) node.jjtGetChild(0).jjtAccept(this, null);
    if (context.get(0) instanceof IdentityContext) {
        // This can only happen if there is no AND node in the query.
        // Build a singleton list of QueryContexts, then create the VisitationContext.
        QueryContext qContext = new QueryContext();
        qContext.packageIdentities((List<IdentityContext>) context);
        return computeVisitaionContext(Collections.singletonList(qContext));
    } else if (context.get(0) instanceof QueryContext) {
        return computeVisitaionContext((List<QueryContext>) context);
    } else {
        log.error("JexlScript node received unexpected return type: " + context);
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.NODE_PROCESSING_ERROR);
        throw new RuntimeException(qe);
    }
}
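For context, a hedged illustration of the two query shapes the instanceof dispatch above distinguishes; the field names and values are hypothetical, not the edge data model's actual field set:

// Hypothetical edge-query strings only; field names are illustrative.
// A single equality (no AND node) bubbles IdentityContext results up to the root,
// which wraps them in one QueryContext; a conjunction already yields QueryContext results.
String identityOnlyQuery = "SOURCE == 'mars'";
String conjunctionQuery = "SOURCE == 'mars' && TYPE == 'adjacency'";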
use of datawave.webservice.query.exception.BadRequestQueryException in project datawave by NationalSecurityAgency.
the class MapReduceBean method submit.
/**
 * Execute a MapReduce job with the given name and runtime parameters.
 *
 * @param jobName
 *            Name of the MapReduce job configuration
 * @param parameters
 *            A semicolon-separated list of name:value pairs. These are the required and optional parameters listed in the MapReduceConfiguration objects
 *            returned in the call to list()
 * @return {@code datawave.webservice.result.GenericResponse<String>} job id
 * @RequestHeader X-ProxiedEntitiesChain use when proxying a request for a user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain; specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation; does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 204 if no data was found
 * @HTTP 400 if jobName is invalid
 * @HTTP 401 if user does not have correct roles
 * @HTTP 500 error starting the job
 */
@POST
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/submit")
@GZIP
public GenericResponse<String> submit(@FormParam("jobName") String jobName, @FormParam("parameters") String parameters) {
    GenericResponse<String> response = new GenericResponse<>();
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid;
    Set<Collection<String>> cbAuths = new HashSet<>();
    DatawavePrincipal datawavePrincipal = null;
    if (p instanceof DatawavePrincipal) {
        datawavePrincipal = (DatawavePrincipal) p;
        sid = datawavePrincipal.getShortName();
        cbAuths.addAll(datawavePrincipal.getAuthorizations());
    } else {
        QueryException qe = new QueryException(DatawaveErrorCode.UNEXPECTED_PRINCIPAL_ERROR, MessageFormat.format("Class: {0}", p.getClass().getName()));
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    // Get the MapReduceJobConfiguration from the configuration
    MapReduceJobConfiguration job;
    try {
        job = this.mapReduceConfiguration.getConfiguration(jobName);
    } catch (IllegalArgumentException e) {
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.JOB_CONFIGURATION_ERROR, e);
        response.addException(qe);
        throw new BadRequestException(qe, response);
    }
    // Ensure that the user has the required roles and has passed the required auths
    if (null != job.getRequiredRoles() || null != job.getRequiredAuths()) {
        try {
            canRunJob(datawavePrincipal, new MultivaluedMapImpl<>(), job.getRequiredRoles(), job.getRequiredAuths());
        } catch (UnauthorizedQueryException qe) {
            // user does not have all of the required roles or did not pass the required auths
            response.addException(qe);
            throw new UnauthorizedException(qe, response);
        }
    }
    // Parse the parameters
    Map<String, String> runtimeParameters = new HashMap<>();
    if (null != parameters) {
        String[] param = parameters.split(PARAMETER_SEPARATOR);
        for (String yyy : param) {
            String[] parts = yyy.split(PARAMETER_NAME_VALUE_SEPARATOR);
            if (parts.length == 2) {
                runtimeParameters.put(parts[0], parts[1]);
            }
        }
    }
    // Check to see if the job configuration class implements specific interfaces.
    if (job instanceof NeedCallerDetails) {
        ((NeedCallerDetails) job).setUserSid(sid);
        ((NeedCallerDetails) job).setPrincipal(p);
    }
    if (job instanceof NeedAccumuloConnectionFactory) {
        ((NeedAccumuloConnectionFactory) job).setAccumuloConnectionFactory(this.connectionFactory);
    }
    if (job instanceof NeedAccumuloDetails) {
        ((NeedAccumuloDetails) job).setUsername(
                        this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getUsername());
        ((NeedAccumuloDetails) job).setPassword(
                        this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getPassword());
        ((NeedAccumuloDetails) job).setInstanceName(
                        this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getInstance());
        ((NeedAccumuloDetails) job).setZookeepers(
                        this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getZookeepers());
    }
    if (job instanceof NeedQueryLogicFactory) {
        ((NeedQueryLogicFactory) job).setQueryLogicFactory(this.queryLogicFactory);
    }
    if (job instanceof NeedQueryPersister) {
        ((NeedQueryPersister) job).setPersister(this.queryPersister);
    }
    if (job instanceof NeedQueryCache) {
        ((NeedQueryCache) job).setQueryCache(cache);
    }
    if (job instanceof NeedSecurityDomain) {
        ((NeedSecurityDomain) job).setSecurityDomain(this.jsseSecurityDomain);
    }
    // If this job is being restarted, then the jobId will be the same. The restart method
    // puts the id into the runtime parameters.
    String id = runtimeParameters.get(JOB_ID);
    if (null == id)
        id = UUID.randomUUID().toString();
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    StringBuilder name = new StringBuilder().append(jobName).append("_sid_").append(sid).append("_id_").append(id);
    Job j;
    try {
        j = createJob(conf, name);
        job.initializeConfiguration(id, j, runtimeParameters, serverPrincipal);
    } catch (WebApplicationException waEx) {
        throw waEx;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.LOGIC_CONFIGURATION_ERROR, e);
        log.error(qe.getMessage(), e);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
    // Enforce that certain InputFormat classes are being used here.
    if (this.mapReduceConfiguration.isRestrictInputFormats()) {
        // Make sure that the job input format is in the list
        Class<? extends InputFormat<?, ?>> ifClass;
        try {
            ifClass = j.getInputFormatClass();
        } catch (ClassNotFoundException e1) {
            QueryException qe = new QueryException(DatawaveErrorCode.INPUT_FORMAT_CLASS_ERROR, e1);
            log.error(qe);
            response.addException(qe);
            throw new DatawaveWebApplicationException(qe, response);
        }
        if (!this.mapReduceConfiguration.getValidInputFormats().contains(ifClass)) {
            IllegalArgumentException e = new IllegalArgumentException(
                            "Invalid input format class specified. Must use one of " + this.mapReduceConfiguration.getValidInputFormats());
            QueryException qe = new QueryException(DatawaveErrorCode.INVALID_FORMAT, e);
            log.error(qe);
            response.addException(qe.getBottomQueryException());
            throw new DatawaveWebApplicationException(qe, response);
        }
    }
    try {
        j.submit();
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_START_ERROR, e);
        log.error(qe.getMessage(), qe);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
    JobID mapReduceJobId = j.getJobID();
    log.info("JOB ID: " + mapReduceJobId);
    // Create an entry in the state table
    boolean restarted = (runtimeParameters.get(JOB_ID) != null);
    try {
        if (!restarted)
            mapReduceState.create(id, job.getHdfsUri(), job.getJobTracker(), job.getJobDir(), mapReduceJobId.toString(), job.getResultsDir(), parameters, jobName);
        else
            mapReduceState.addJob(id, mapReduceJobId.toString());
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_STATE_PERSISTENCE_ERROR, e);
        log.error(qe);
        response.addException(qe.getBottomQueryException());
        try {
            j.killJob();
        } catch (IOException ioe) {
            QueryException qe2 = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_KILL_ERROR, ioe);
            response.addException(qe2);
        }
        throw new DatawaveWebApplicationException(qe, response);
    }
    response.setResult(id);
    return response;
}
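The parameters form field follows the convention in the Javadoc above: a semicolon-separated list of name:value pairs, split exactly as in the parsing loop. A self-contained sketch of that convention (the literal ";" and ":" separators are assumed from the Javadoc rather than read from the DataWave constants, and the parameter names are hypothetical):

// Sketch only: separators assumed from the Javadoc, not the DataWave constants.
import java.util.HashMap;
import java.util.Map;

public class RuntimeParameterParsing {
    public static Map<String, String> parse(String parameters) {
        Map<String, String> runtimeParameters = new HashMap<>();
        if (parameters != null) {
            for (String pair : parameters.split(";")) {
                String[] parts = pair.split(":");
                if (parts.length == 2) {
                    runtimeParameters.put(parts[0], parts[1]);
                }
            }
        }
        return runtimeParameters;
    }

    public static void main(String[] args) {
        // Hypothetical parameter names; the real ones come from the job's MapReduceConfiguration.
        System.out.println(parse("queryId:1234;format:JSON"));
        // prints both parsed entries, e.g. {format=JSON, queryId=1234}
    }
}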
use of datawave.webservice.query.exception.BadRequestQueryException in project datawave by NationalSecurityAgency.
the class QueryExecutorBean method duplicateQuery.
/**
 * Duplicates a query and allows modification of optional properties.
 *
 * @param id
 *            - the ID of the query to copy (required)
 * @param newQueryName
 *            - name of the new query (required)
 * @param newQueryLogicName
 *            - defaults to old logic, name of class that this query should be run with (optional)
 * @param newQuery
 *            - defaults to old query, string used in lookup (optional, auditing required if changed)
 * @param newColumnVisibility
 *            - defaults to old column visibility, for query AND justification (optional, auditing required if changed)
 * @param newBeginDate
 *            - defaults to old begin date, begin range for the query (optional, auditing required if changed)
 * @param newEndDate
 *            - defaults to old end date, end range for the query (optional, auditing required if changed)
 * @param newQueryAuthorizations
 *            - defaults to old authorizations, used in the query (optional, auditing required if changed)
 * @param newExpirationDate
 *            - defaults to old expiration, meaningless if transient (optional)
 * @param newPagesize
 *            - defaults to old pagesize, number of results to return on each call to next() (optional)
 * @param newPageTimeout
 *            - specify timeout (in minutes) for each call to next(); defaults to -1, indicating disabled (optional)
 * @param newMaxResultsOverride
 *            - specify max results (optional)
 * @param newPersistenceMode
 *            - defaults to PERSISTENT, indicates whether or not the query is persistent (optional)
 * @param newParameters
 *            - defaults to old; optional parameters to the query, a semicolon-separated list of name=value pairs (optional, auditing required if changed)
 * @param trace
 *            - optional (defaults to {@code false}) indication of whether or not the query should be traced using the distributed tracing mechanism
 * @see datawave.webservice.query.runner.QueryExecutorBean#duplicateQuery(String, String, String, String, String, Date, Date, String, Date, Integer,
 *      Integer, Long, QueryPersistence, String, boolean)
 *
 * @return {@code datawave.webservice.result.GenericResponse<String>}
 * @RequestHeader X-ProxiedEntitiesChain use when proxying a request for a user, by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain; specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation; does not account for network or result serialization
 *
 * @HTTP 200 success
 * @HTTP 400 if invalid params or missing queryName param
 * @HTTP 404 if query not found
 * @HTTP 500 internal server error
 */
@POST
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@Path("/{id}/duplicate")
@GZIP
@Interceptors({ ResponseInterceptor.class, RequiredInterceptor.class })
@Override
@Timed(name = "dw.query.duplicateQuery", absolute = true)
public GenericResponse<String> duplicateQuery(@PathParam("id") String id, @Required("queryName") @FormParam("queryName") String newQueryName,
                @FormParam("logicName") String newQueryLogicName, @FormParam("query") String newQuery,
                @FormParam("columnVisibility") String newColumnVisibility,
                @FormParam("begin") @DateFormat(defaultTime = "000000", defaultMillisec = "000") Date newBeginDate,
                @FormParam("end") @DateFormat(defaultTime = "235959", defaultMillisec = "999") Date newEndDate,
                @FormParam("auths") String newQueryAuthorizations,
                @FormParam("expiration") @DateFormat(defaultTime = "235959", defaultMillisec = "999") Date newExpirationDate,
                @FormParam("pagesize") Integer newPagesize, @FormParam("pageTimeout") Integer newPageTimeout,
                @FormParam("maxResultsOverride") Long newMaxResultsOverride, @FormParam("persistence") QueryPersistence newPersistenceMode,
                @FormParam("params") String newParameters, @FormParam("trace") @DefaultValue("false") boolean trace) {
    GenericResponse<String> response = new GenericResponse<>();
    try {
        if (null == newQueryName || newQueryName.length() < 1) {
            throw new BadRequestQueryException(DatawaveErrorCode.QUERY_NAME_REQUIRED);
        }
        RunningQuery templateQuery = getQueryById(id);
        Query q = templateQuery.getSettings().duplicate(newQueryName);
        // default value
        QueryPersistence persistence = QueryPersistence.PERSISTENT;
        // TODO: figure out a way to set this to the same as the existing query
        if (null != newPersistenceMode) {
            persistence = newPersistenceMode;
        }
        // maybe set variables instead of stuffing in query
        if (newQueryLogicName != null) {
            q.setQueryLogicName(queryLogicFactory.getQueryLogic(newQueryLogicName, ctx.getCallerPrincipal()).getLogicName());
        }
        if (newQuery != null) {
            q.setQuery(newQuery);
        }
        if (newBeginDate != null) {
            q.setBeginDate(newBeginDate);
        }
        if (newEndDate != null) {
            q.setEndDate(newEndDate);
        }
        if (newQueryAuthorizations != null) {
            q.setQueryAuthorizations(newQueryAuthorizations);
        }
        if (newExpirationDate != null) {
            q.setExpirationDate(newExpirationDate);
        }
        if (newPagesize != null) {
            q.setPagesize(newPagesize);
        }
        if (newMaxResultsOverride != null) {
            q.setMaxResultsOverride(newMaxResultsOverride);
        }
        if (newPageTimeout != null) {
            q.setPageTimeout(newPageTimeout);
        }
        Set<Parameter> params = new HashSet<>();
        if (newParameters != null) {
            String[] param = newParameters.split(QueryImpl.PARAMETER_SEPARATOR);
            for (String yyy : param) {
                String[] parts = yyy.split(QueryImpl.PARAMETER_NAME_VALUE_SEPARATOR);
                if (parts.length == 2) {
                    params.add(new Parameter(parts[0], parts[1]));
                }
            }
        }
        MultivaluedMap<String, String> newSettings = new MultivaluedMapImpl<>();
        newSettings.putAll(q.toMap());
        newSettings.putSingle(QueryParameters.QUERY_PERSISTENCE, persistence.name());
        return createQuery(q.getQueryLogicName(), newSettings);
    } catch (DatawaveWebApplicationException e) {
        throw e;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.QUERY_DUPLICATION_ERROR, e);
        log.error(qe, e);
        response.addException(qe.getBottomQueryException());
        if (e.getClass() == IllegalArgumentException.class) {
            throw new BadRequestException(qe, response);
        }
        int statusCode = qe.getBottomQueryException().getStatusCode();
        throw new DatawaveWebApplicationException(qe, response, statusCode);
    }
}
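Note that every override above is applied only when the corresponding form parameter was supplied, so duplicating with just a new queryName yields an exact copy of the template query under the new name. A minimal sketch of that merge rule in plain Java (not the DataWave Query/QueryImpl types):

// Sketch only (plain Java, not the DataWave Query/QueryImpl classes): the duplicate
// keeps every template value unless the caller supplied an override.
class QuerySettingsSketch {
    String query;
    String auths;
    Integer pagesize;

    QuerySettingsSketch copyWithOverrides(String newQuery, String newAuths, Integer newPagesize) {
        QuerySettingsSketch copy = new QuerySettingsSketch();
        copy.query = (newQuery != null) ? newQuery : this.query;             // optional override
        copy.auths = (newAuths != null) ? newAuths : this.auths;             // falls back to the template value
        copy.pagesize = (newPagesize != null) ? newPagesize : this.pagesize;
        return copy;
    }
}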
Aggregations