use of datawave.webservice.result.GenericResponse in project datawave by NationalSecurityAgency.
the class LookupUUIDUtil method validateUUIDTerm.
/**
 * Validate the specified token as a UUID lookup term, either as a LUCENE-formatted field/value or a UIDQuery field/value. Tokens missing the appropriate
 * delimiter are ignored and result in a null UUIDType.
 *
 * @param possibleUUIDTerm A token to evaluate as a possible UUID field/value term
 *
 * @param logicName The existing assigned query logic name, if any
 *
 * @return A valid UUIDType, or null if the specified token is obviously not a UUID field/value term
 */
private UUIDType validateUUIDTerm(final String possibleUUIDTerm, final String logicName) {
    // Declare the return value
    final UUIDType matchingUuidType;
    // Check for the expected type/value delimiter (i.e., UUIDType:UUID)
    if (possibleUUIDTerm.contains(UUID_TERM_DELIMITER)) {
        final String[] splitPair = possibleUUIDTerm.split(UUID_TERM_DELIMITER);
        final String uuidType = splitPair[0].trim().toUpperCase();
        final String uuid;
        if (splitPair.length > 1) {
            uuid = splitPair[1].trim();
        } else {
            uuid = null;
        }
        // Get the matching UUID type
        matchingUuidType = this.uuidTypes.get(uuidType.toUpperCase());
        // Validate the UUID type
        if (null == matchingUuidType) {
            final String message = "Invalid type '" + uuidType + "' for UUID " + uuid + " not supported with the LuceneToJexlUUIDQueryParser";
            final GenericResponse<String> errorResponse = new GenericResponse<>();
            errorResponse.addMessage(message);
            throw new DatawaveWebApplicationException(new IllegalArgumentException(message), errorResponse);
        }
        // Validate the UUID value
        else if ((null == uuid) || uuid.isEmpty()) {
            final String message = "Undefined UUID using type '" + uuidType + "' not supported with the LuceneToJexlUUIDQueryParser";
            final GenericResponse<String> errorResponse = new GenericResponse<>();
            errorResponse.addMessage(message);
            throw new DatawaveWebApplicationException(new IllegalArgumentException(message), errorResponse);
        }
        // Reject a conflicting logic name
        else if ((null != logicName) && !logicName.equals(matchingUuidType.getDefinedView())) {
            final String message = "Multiple UUID types '" + logicName + "' and '" + matchingUuidType.getDefinedView() + "' not supported within the same lookup request";
            final GenericResponse<String> errorResponse = new GenericResponse<>();
            errorResponse.addMessage(message);
            throw new DatawaveWebApplicationException(new IllegalArgumentException(message), errorResponse);
        }
    } else {
        matchingUuidType = null;
    }
    return matchingUuidType;
}
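For illustration, a minimal standalone sketch of the type/value split performed above. The ":" delimiter, the PAGE_ID type, and the sample value are assumptions for the example only, not values taken from the datawave configuration:
/**
 * Hypothetical illustration of the type/value split in validateUUIDTerm; the
 * ":" delimiter, the PAGE_ID type, and the sample value are assumptions.
 */
public class UuidTermSplitExample {
    public static void main(String[] args) {
        String possibleUUIDTerm = "PAGE_ID:20100901_1/term/abc-123";
        String[] splitPair = possibleUUIDTerm.split(":");
        String uuidType = splitPair[0].trim().toUpperCase();               // "PAGE_ID"
        String uuid = (splitPair.length > 1) ? splitPair[1].trim() : null; // "20100901_1/term/abc-123"
        System.out.println(uuidType + " -> " + uuid);
        // A token without the delimiter (for example a stray "OR") never reaches
        // this split; validateUUIDTerm simply returns a null UUIDType for it.
    }
}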
use of datawave.webservice.result.GenericResponse in project datawave by NationalSecurityAgency.
the class LookupUUIDUtil method validateLookupCriteria.
private AbstractUUIDLookupCriteria validateLookupCriteria(final AbstractUUIDLookupCriteria criteria, boolean validateUUIDTerms) {
    // Initialize the validated logic name, which is only necessary for UUID lookup and
    // OK to be a null value when paging through content results.
    String logicName = null;
    // Conditionally validate UUID type/value pairs
    if (validateUUIDTerms) {
        // Get the unvalidated LUCENE query for UUID lookup
        final String unvalidatedQuery = criteria.getRawQueryString();
        // Initialize the counter for validating against the maximum number of allowed UUIDs
        int uuidPairCount = 0;
        int eventTypeCountForContentLookup = 0;
        // Reformat the query into a tokenizable series of UUID type/value pairs
        String tokenizablePairs;
        if (null != unvalidatedQuery) {
            tokenizablePairs = unvalidatedQuery;
            // Replace grouping characters with whitespace
            tokenizablePairs = tokenizablePairs.replaceAll(REGEX_GROUPING_CHARS, SPACE);
            // Remove most, but not all, non-word characters
            tokenizablePairs = tokenizablePairs.replaceAll(REGEX_NONWORD_CHARS, EMPTY_STRING);
            // Remove OR operators
            tokenizablePairs = tokenizablePairs.replaceAll(REGEX_OR_OPERATOR, SPACE);
        } else {
            tokenizablePairs = EMPTY_STRING;
        }
        // Validate each UUID type and value
        final String[] uuidTypeValuePairs = tokenizablePairs.split(REGEX_WHITESPACE_CHARS);
        for (final String potentialUUIDTerm : uuidTypeValuePairs) {
            // Validate the "potential" UUID term. It's potential because it could be an OR operator
            // or some other query syntax that would be validated with more scrutiny once the query
            // executor is invoked.
            final UUIDType uuidType = this.validateUUIDTerm(potentialUUIDTerm.trim(), logicName);
            if (null != uuidType) {
                // Assign the query logic name if undefined
                if (null == logicName) {
                    logicName = uuidType.getDefinedView();
                }
                // Increment the UUID type/value count
                uuidPairCount++;
                // Increment the counter for specialized "event" UUID types in the case of content lookups
                if (criteria.isContentLookup() && EVENT_TYPE_NAME.equals(uuidType.getFieldName())) {
                    eventTypeCountForContentLookup++;
                }
            }
        }
        // Validate that at least one UUID was specified in the query string
        if (null == logicName) {
            final String message = "Undefined UUID types not supported with the LuceneToJexlUUIDQueryParser";
            final GenericResponse<String> errorResponse = new GenericResponse<>();
            errorResponse.addMessage(message);
            throw new DatawaveWebApplicationException(new IllegalArgumentException(message), errorResponse);
        }
        // Validate that the number of specified UUIDs did not exceed the upper limit, if any
        if ((this.maxAllowedBatchLookupUUIDs > 0) && (uuidPairCount > this.maxAllowedBatchLookupUUIDs)) {
            final String message = "The " + uuidPairCount + " specified UUIDs exceed the maximum number of " + this.maxAllowedBatchLookupUUIDs
                            + " allowed for a given lookup request";
            final GenericResponse<String> errorResponse = new GenericResponse<>();
            errorResponse.addMessage(message);
            throw new DatawaveWebApplicationException(new IllegalArgumentException(message), errorResponse);
        }
        // Set the flag if we know we're dealing with an all-event UID lookup that has not exceeded the max page size
        if ((eventTypeCountForContentLookup > 0) && (uuidPairCount == eventTypeCountForContentLookup)
                        && (uuidPairCount <= Integer.parseInt(criteria.getQueryParameters().getFirst(QueryParameters.QUERY_PAGESIZE)))) {
            criteria.setAllEventLookup(true);
        }
    }
    // Set the query logic
    criteria.getQueryParameters().put(QueryParameters.QUERY_LOGIC_NAME, Collections.singletonList(logicName));
    List<String> paramList = criteria.getQueryParameters().remove(QueryParameters.QUERY_PARAMS);
    String params = null;
    if (paramList != null && !paramList.isEmpty()) {
        params = paramList.get(0);
    }
    // Add Lucene syntax to the parameters, except during a call for next content
    if (!(criteria instanceof NextContentCriteria)) {
        params = params + PARAM_LUCENE_QUERY_SYNTAX;
    }
    // Conditionally add content.lookup syntax to the parameters to indicate content lookup during "next" calls
    if (criteria.isContentLookup() && !criteria.isAllEventLookup()) {
        params = params + ';' + PARAM_CONTENT_LOOKUP + ':' + true;
    }
    criteria.getQueryParameters().putSingle(QueryParameters.QUERY_PARAMS, params);
    // All is well, so return the validated criteria
    return criteria;
}
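As a rough illustration of the query normalization above, here is a standalone sketch. Every regex literal in it is an assumed stand-in for the project's constants (REGEX_GROUPING_CHARS, REGEX_NONWORD_CHARS, REGEX_OR_OPERATOR, REGEX_WHITESPACE_CHARS), chosen only to make the example self-contained:
/**
 * Hypothetical walk-through of the tokenization in validateLookupCriteria;
 * the regex literals below are assumptions, not the datawave constants.
 */
public class UuidQueryTokenizeExample {
    public static void main(String[] args) {
        String unvalidatedQuery = "(EVENT_ID:\"abc-111\" OR EVENT_ID:\"def-222\")";
        String tokenizablePairs = unvalidatedQuery
                        .replaceAll("[()]", " ")       // grouping characters -> whitespace
                        .replaceAll("[\"]", "")        // drop quotes, keep the field/value delimiter
                        .replaceAll("\\bOR\\b", " ");  // remove OR operators
        String[] uuidTypeValuePairs = tokenizablePairs.split("\\s+");
        for (String token : uuidTypeValuePairs) {
            System.out.println("[" + token + "]"); // e.g. "EVENT_ID:abc-111", "EVENT_ID:def-222"
        }
        // Blank or delimiter-less tokens simply come back from validateUUIDTerm as null.
    }
}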
use of datawave.webservice.result.GenericResponse in project datawave by NationalSecurityAgency.
the class MapReduceBean method restart.
/**
 * Kill any running job associated with the BulkResults id and start a new job.
 *
 * @param jobId the BulkResults id of the job to restart
 * @return {@code datawave.webservice.result.GenericResponse<String>}
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500 error restarting the job
 */
@PUT
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/{jobId}/restart")
@GZIP
public GenericResponse<String> restart(@PathParam("jobId") String jobId) {
    GenericResponse<String> response = new GenericResponse<>();
    // Find all potential running jobs
    MapReduceInfoResponseList list = mapReduceState.findById(jobId);
    // Should contain zero or one job
    if (list.getResults().isEmpty()) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.NO_MAPREDUCE_OBJECT_MATCH);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } else if (list.getResults().size() > 1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.TOO_MANY_MAPREDUCE_OBJECT_MATCHES);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } else {
        MapReduceInfoResponse thisJob = list.getResults().get(0);
        // Call cancel for this job. This will kill any running jobs and remove the results directory
        cancel(jobId);
        // Now re-submit this job after adding the JOB_ID to the runtime parameters to signal that this job has been restarted
        String jobName = thisJob.getJobName();
        // Now call submit
        return submit(jobName, thisJob.getRuntimeParameters() + PARAMETER_SEPARATOR + JOB_ID + PARAMETER_NAME_VALUE_SEPARATOR + jobId);
    }
}
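For reference, a minimal JAX-RS client sketch for exercising this endpoint. The base URL, job id, and header values are placeholders, not values from a real datawave deployment:
import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Response;

/**
 * Hypothetical caller of the restart endpoint; the base URL, job id, and
 * header values are placeholders only.
 */
public class RestartJobExample {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        Response response = client.target("https://localhost:8443/DataWave/MapReduce") // assumed base path
                        .path("someJobId/restart")
                        .request("application/json")
                        .header("X-ProxiedEntitiesChain", "<subjectDn>")   // only needed when proxying
                        .header("X-ProxiedIssuersChain", "<issuerDn>")
                        .put(Entity.text(""));                             // PUT with an empty body
        System.out.println(response.getStatus());                          // 200, 404, or 500 per the javadoc
        System.out.println(response.readEntity(String.class));             // serialized GenericResponse<String>
        client.close();
    }
}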
use of datawave.webservice.result.GenericResponse in project datawave by NationalSecurityAgency.
the class MapReduceBean method ooziesubmit.
/**
 * Execute an Oozie workflow with the given workflow name and runtime parameters.
 *
 * @param queryParameters the form parameters, including the workflow name and any runtime parameters
 * @return a {@code GenericResponse<String>} whose result is the id assigned to the submitted workflow
 */
@POST
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/ooziesubmit")
@GZIP
public GenericResponse<String> ooziesubmit(MultivaluedMap<String, String> queryParameters) {
    GenericResponse<String> response = new GenericResponse<>();
    String workFlow = queryParameters.getFirst(OozieJobConstants.WORKFLOW_PARAM);
    if (StringUtils.isBlank(workFlow)) {
        throw new BadRequestException(new IllegalArgumentException(OozieJobConstants.WORKFLOW_PARAM + " parameter missing"), response);
    }
    String parameters = queryParameters.getFirst(OozieJobConstants.PARAMETERS);
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = null;
    String userDn = p.getName();
    DatawavePrincipal datawavePrincipal = null;
    if (p instanceof DatawavePrincipal) {
        datawavePrincipal = (DatawavePrincipal) p;
        sid = datawavePrincipal.getShortName();
    } else {
        QueryException qe = new QueryException(DatawaveErrorCode.UNEXPECTED_PRINCIPAL_ERROR, MessageFormat.format("Class: {0}", p.getClass().getName()));
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    OozieJobConfiguration job;
    try {
        MapReduceJobConfiguration mrConfig = this.mapReduceConfiguration.getConfiguration(workFlow);
        if (mrConfig instanceof OozieJobConfiguration) {
            job = (OozieJobConfiguration) mrConfig;
        } else {
            throw new IllegalArgumentException(workFlow + " is not an Oozie job configuration");
        }
    } catch (IllegalArgumentException e) {
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.JOB_CONFIGURATION_ERROR, e);
        response.addException(qe);
        throw new BadRequestException(qe, response);
    }
    if (job instanceof NeedCallerDetails) {
        ((NeedCallerDetails) job).setUserSid(sid);
        ((NeedCallerDetails) job).setPrincipal(p);
    }
    // Ensure that the user has the required roles and has passed the required auths
    if (null != job.getRequiredRoles() || null != job.getRequiredAuths()) {
        try {
            canRunJob(datawavePrincipal, queryParameters, job.getRequiredRoles(), job.getRequiredAuths());
        } catch (UnauthorizedQueryException qe) {
            // user does not have all of the required roles or did not pass the required auths
            response.addException(qe);
            throw new UnauthorizedException(qe, response);
        }
    }
    String id = sid + "_" + UUID.randomUUID();
    OozieClient oozieClient = null;
    Properties oozieConf = null;
    try {
        oozieClient = new OozieClient((String) job.getJobConfigurationProperties().get(OozieJobConstants.OOZIE_CLIENT_PROP));
        oozieConf = oozieClient.createConfiguration();
        job.initializeOozieConfiguration(id, oozieConf, queryParameters);
        job.validateWorkflowParameter(oozieConf, mapReduceConfiguration);
    } catch (QueryException qe) {
        log.error(qe.getMessage(), qe);
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        response.addException(new QueryException(e.getMessage(), e));
        throw new DatawaveWebApplicationException(e, response);
    } finally {
        // Audit the query here
        Auditor.AuditType auditType = job.getAuditType();
        log.trace("Audit type is: " + auditType.name());
        if (!auditType.equals(Auditor.AuditType.NONE)) {
            try {
                marking.validate(queryParameters);
                PrivateAuditConstants.stripPrivateParameters(queryParameters);
                queryParameters.putSingle(PrivateAuditConstants.USER_DN, userDn);
                queryParameters.putSingle(PrivateAuditConstants.COLUMN_VISIBILITY, marking.toColumnVisibilityString());
                queryParameters.putSingle(PrivateAuditConstants.AUDIT_TYPE, auditType.name());
                List<String> selectors = job.getSelectors(queryParameters, oozieConf);
                if (selectors != null && !selectors.isEmpty()) {
                    queryParameters.put(PrivateAuditConstants.SELECTORS, selectors);
                }
                // If the user didn't set an audit id, use the query id
                if (!queryParameters.containsKey(AuditParameters.AUDIT_ID)) {
                    queryParameters.putSingle(AuditParameters.AUDIT_ID, id);
                }
                auditor.audit(queryParameters);
            } catch (IllegalArgumentException e) {
                log.error("Error validating audit parameters", e);
                BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.MISSING_REQUIRED_PARAMETER, e);
                response.addException(qe);
                throw new BadRequestException(qe, response);
            } catch (Exception e) {
                log.error("Error auditing query", e);
                response.addMessage("Error auditing query - " + e.getMessage());
                throw new BadRequestException(e, response);
            }
        }
    }
    // Submit the Oozie workflow
    try {
        String jobID = null;
        try {
            jobID = oozieClient.run(oozieConf);
        } catch (Exception e) {
            throw new QueryException(DatawaveErrorCode.OOZIE_JOB_START_ERROR, e);
        }
        try {
            String jobResultsDir = oozieConf.getProperty(OozieJobConstants.OUT_DIR_PROP) + "/" + id;
            response.setResult(id);
            Path baseDir = new Path(this.mapReduceConfiguration.getMapReduceBaseDirectory());
            // Create a directory path for this job
            Path jobDir = new Path(baseDir, id);
            mapReduceState.create(id, job.getHdfsUri(), job.getJobTracker(), jobDir.toString(), jobID, jobResultsDir, parameters, workFlow);
        } catch (Exception e) {
            QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_STATE_PERSISTENCE_ERROR, e);
            response.addException(qe.getBottomQueryException());
            try {
                oozieClient.kill(jobID);
                // if we successfully kill the job, throw the original exception
                throw qe;
            } catch (Exception e2) {
                // throw the exception from killing the job
                throw new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_KILL_ERROR, e2);
            }
        }
    } catch (QueryException qe) {
        log.error(qe.getMessage(), qe);
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        QueryException qe = new QueryException(DatawaveErrorCode.UNKNOWN_SERVER_ERROR, e.getMessage());
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    return response;
}
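For context, a small sketch of the minimum form parameters this method reads. The literal names "workFlow" and "parameters" are assumed stand-ins for OozieJobConstants.WORKFLOW_PARAM and OozieJobConstants.PARAMETERS, not confirmed values:
import javax.ws.rs.core.MultivaluedHashMap;
import javax.ws.rs.core.MultivaluedMap;

/**
 * Hypothetical parameter map for ooziesubmit; the literal parameter names are
 * assumed stand-ins for the OozieJobConstants values.
 */
public class OozieSubmitParamsExample {
    public static MultivaluedMap<String, String> exampleParameters() {
        MultivaluedMap<String, String> queryParameters = new MultivaluedHashMap<>();
        queryParameters.putSingle("workFlow", "MyOozieWorkflow");       // missing or blank -> BadRequestException
        queryParameters.putSingle("parameters", "outDir:/tmp/results"); // optional runtime parameters
        return queryParameters;
    }
}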
use of datawave.webservice.result.GenericResponse in project datawave by NationalSecurityAgency.
the class HealthBean method waitForQueryCompletion.
/**
* This method initiates a shutdown. This will set internal state such that the {@link #health()} method will return a {@link Status#SERVICE_UNAVAILABLE}
* error to prevent new queries from coming in. After that, it will wait up to {@code timeoutMinutes} for queries to complete before initiating a shutdown
* of the web server.
*
* <strong>NOTE:</strong> This method is restricted to users calling from the local host, or via the JMX interface.
*
* TODO: Is there a way to "cancel" any requests that are waiting for an Accumulo connection? It would be nice if such requests could be rejected somehow
* and the load balancer could automatically redirect to a different web server.
*
* @param timeoutMinutes
* the number of minutes to wait for queries to complete before continuing with the shutdown operation anyway
* @return a message indicating whether or not shutdown was attempted
*/
@GET
@Path("/shutdown")
@JmxManaged
public Response waitForQueryCompletion(@QueryParam("timeoutMinutes") @DefaultValue("75") int timeoutMinutes, @Context HttpServletRequest request) {
    GenericResponse<String> response = new GenericResponse<>();
    // We're only allowed to call shutdown from the loopback interface.
    if (!"127.0.0.1".equals(request.getRemoteAddr())) {
        LOG.error("Shutdown requested from {}. Denying access since the request was not from localhost.", request.getRemoteAddr());
        response.setResult("Shutdown calls must be made on the local host.");
        return Response.status(Status.FORBIDDEN).entity(response).build();
    }
    LOG.warn("Shutdown requested from {}. Waiting up to {} minutes for queries to complete.", request.getRemoteAddr(), timeoutMinutes);
    // Wait for queries to complete
    long timeoutMillis = TimeUnit.MINUTES.toMillis(timeoutMinutes);
    shutdownInProgress = true;
    status = "drain";
    long startTime = System.currentTimeMillis();
    int connectionUsage;
    while ((connectionUsage = accumuloConnectionFactoryBean.getConnectionUsagePercent()) > 0) {
        long delta = System.currentTimeMillis() - startTime;
        if (delta > timeoutMillis) {
            LOG.warn("Timeout of {} minutes exceeded while waiting for queries to complete. Shutting down anyway.", timeoutMinutes);
            break;
        }
        LOG.info("Connection usage is {}%. Waiting for queries to complete.", connectionUsage);
        try {
            Thread.sleep(queryCompletionWaitIntervalMillis);
        } catch (InterruptedException e) {
            LOG.warn("Interrupted while waiting for queries to complete.");
        }
    }
    if (connectionUsage <= 0) {
        response.setResult("All queries completed. Shutting down.");
    } else {
        response.setResult("Gave up waiting for queries to complete. Shutting down with pool usage percentage of " + connectionUsage + ".");
    }
    // Initiate a server shutdown using the management JMX bean
    try {
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        ObjectName objectName = new ObjectName("jboss.as:management-root=server");
        mBeanServer.invoke(objectName, "shutdown", new Object[] { false, 0, 0 },
                        new String[] { boolean.class.getName(), int.class.getName(), int.class.getName() });
    } catch (MalformedObjectNameException | ReflectionException | InstanceNotFoundException | MBeanException e) {
        LOG.warn("Error shutting down: {}", e);
    }
    return Response.ok().entity(response).build();
}