use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class JexlStringBuildingVisitor method visit.
@Override
public Object visit(ASTMethodNode node, Object data) {
    StringBuilder sb = (StringBuilder) data;
    StringBuilder methodStringBuilder = new StringBuilder();
    StringBuilder argumentStringBuilder = new StringBuilder();
    int kidCount = node.jjtGetNumChildren();
    for (int i = 0; i < kidCount; i++) {
        if (i == 0) {
            JexlNode methodNode = node.jjtGetChild(i);
            methodStringBuilder.append(".");
            if (!allowedMethods.contains(methodNode.image)) {
                QueryException qe = new QueryException(DatawaveErrorCode.METHOD_COMPOSITION_ERROR, MessageFormat.format("{0}", methodNode.image));
                throw new DatawaveFatalQueryException(qe);
            }
            methodStringBuilder.append(methodNode.image);
            // parens are open; don't forget to close them
            methodStringBuilder.append("(");
        } else {
            // add any method arguments
            JexlNode argumentNode = node.jjtGetChild(i);
            if (argumentNode instanceof ASTReference) {
                // a method may have an argument that is another method; in that case, descend the visit tree for it
                if (JexlASTHelper.HasMethodVisitor.hasMethod(argumentNode)) {
                    this.visit((ASTReference) argumentNode, argumentStringBuilder);
                } else {
                    for (int j = 0; j < argumentNode.jjtGetNumChildren(); j++) {
                        JexlNode argKid = argumentNode.jjtGetChild(j);
                        if (argKid instanceof ASTFunctionNode) {
                            this.visit((ASTFunctionNode) argKid, argumentStringBuilder);
                        } else {
                            if (argumentStringBuilder.length() > 0) {
                                argumentStringBuilder.append(",");
                            }
                            if (argKid instanceof ASTStringLiteral) {
                                argumentStringBuilder.append("'");
                            }
                            argumentStringBuilder.append(argKid.image);
                            if (argKid instanceof ASTStringLiteral) {
                                argumentStringBuilder.append("'");
                            }
                        }
                    }
                }
            } else if (argumentNode instanceof ASTNumberLiteral) {
                if (argumentStringBuilder.length() > 0) {
                    argumentStringBuilder.append(",");
                }
                argumentStringBuilder.append(argumentNode.image);
            }
        }
    }
    methodStringBuilder.append(argumentStringBuilder);
    // close the parens opened for the method
    methodStringBuilder.append(")");
    sb.append(methodStringBuilder);
    return sb;
}
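The visitor above rebuilds the query-string form of a JEXL method call such as FOO.min() or FOO.getValuesForGroups('NAME'), rejecting any method that is not in allowedMethods. The standalone sketch below mirrors that allowlist-then-append pattern; the class, the names in its allowlist, and the quoting of every argument are illustrative, not DataWave API.

import java.util.Set;

/**
 * Illustrative sketch only: mirrors the allowlist-then-append pattern of the
 * visit method above. The real visitor distinguishes string and numeric
 * arguments; this sketch single-quotes every argument for brevity.
 */
public class AllowlistMethodBuilder {

    // Hypothetical allowlist; the real set lives in JexlStringBuildingVisitor.
    private static final Set<String> ALLOWED = Set.of("min", "max", "size", "getValuesForGroups");

    /** Builds ".method('arg1','arg2')", or throws if the method is not allowlisted. */
    public static String buildMethodCall(String method, String... args) {
        if (!ALLOWED.contains(method)) {
            throw new IllegalArgumentException("Method not allowed: " + method);
        }
        StringBuilder sb = new StringBuilder(".").append(method).append('(');
        for (int i = 0; i < args.length; i++) {
            if (i > 0) {
                sb.append(',');
            }
            sb.append('\'').append(args[i]).append('\'');
        }
        return sb.append(')').toString();
    }

    public static void main(String[] args) {
        // Prints: .getValuesForGroups('NAME')
        System.out.println(buildMethodCall("getValuesForGroups", "NAME"));
    }
}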
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class MapReduceStatusUpdateBean method updateState.
/**
 * This method is meant to be a callback from the Hadoop infrastructure and is not protected. When a BulkResults job is submitted, the
 * "job.end.notification.url" property is set to the public URL endpoint for this servlet, and the Hadoop infrastructure calls back to it. If the
 * callback fails for some reason, then Hadoop will retry for the configured number of attempts (job.end.retry.attempts) at the configured interval
 * (job.end.retry.interval).
 *
 * @param jobId
 *            the MapReduce job id
 * @param jobStatus
 *            the new job state, one of the MapReduceState values
 *
 * @HTTP 200 success
 * @HTTP 500 failure
 *
 * @return datawave.webservice.result.VoidResponse
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 */
@GET
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@GZIP
@Path("/updateState")
@PermitAll
@Interceptors({ ResponseInterceptor.class, RequiredInterceptor.class })
public VoidResponse updateState(@Required("jobId") @QueryParam("jobId") String jobId, @Required("jobStatus") @QueryParam("jobStatus") String jobStatus) {
    log.info("Received MapReduce status update for job: " + jobId + ", new status: " + jobStatus);
    VoidResponse response = new VoidResponse();
    try {
        mapReduceState.updateState(jobId, MapReduceState.valueOf(jobStatus));
        return response;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPRED_UPDATE_STATUS_ERROR, e);
        log.error(qe);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
}
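On the submitting side, wiring up this callback only requires setting the notification property on the job configuration before submission; Hadoop substitutes the $jobId and $jobStatus placeholders when it makes the call. A minimal sketch follows; only the property names and placeholders come from Hadoop, while the host and context path are assumptions.

import org.apache.hadoop.conf.Configuration;

// Sketch of the submit-side wiring for the callback above.
public class JobEndNotificationConfig {

    public static Configuration withNotification(Configuration conf) {
        // Hadoop replaces $jobId and $jobStatus when it invokes the URL; the base URL is an assumption.
        conf.set("job.end.notification.url",
                        "https://webservice.example.com/DataWave/MapReduce/updateState?jobId=$jobId&jobStatus=$jobStatus");
        conf.setInt("job.end.retry.attempts", 5); // retries on callback failure
        conf.setInt("job.end.retry.interval", 30000); // retry interval in milliseconds
        return conf;
    }
}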
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class MapReduceBean method getResultFiles.
/**
 * Returns a tar file in which each tar entry is a result file.
 *
 * @param jobId
 *            the MapReduce job id
 * @return tar file
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500 error retrieving the result files
 */
@GET
@Produces("*/*")
@javax.ws.rs.Path("/{jobId}/getAllFiles")
@GZIP
public StreamingOutput getResultFiles(@Required("jobId") @PathParam("jobId") final String jobId, @Context HttpServletResponse httpResponse) {
    MapReduceInfoResponseList response = list(jobId);
    MapReduceInfoResponse result = response.getResults().get(0);
    String hdfs = result.getHdfs();
    String resultsDir = result.getResultsDirectory();
    final FileSystem fs = getFS(hdfs, response);
    final Path jobDirectory = new Path(resultsDir);
    final int jobDirectoryPathLength = jobDirectory.toUri().getPath().length();
    try {
        if (!fs.exists(jobDirectory) || !fs.getFileStatus(jobDirectory).isDirectory()) {
            NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.JOB_DIRECTORY_NOT_FOUND, MessageFormat.format("{0} at path {1}", jobId, jobDirectory));
            response.addException(qe);
            throw new NotFoundException(qe, response);
        }
    } catch (IOException e1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.RESULT_DIRECTORY_ACCESS_ERROR, e1, MessageFormat.format("{0}", resultsDir));
        log.error(qe);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    }
    // Get the children
    List<FileStatus> resultFiles = new ArrayList<>();
    try {
        // recurse through the directory to find all files
        Queue<FileStatus> fileQueue = new LinkedList<>();
        fileQueue.add(fs.getFileStatus(jobDirectory));
        while (!fileQueue.isEmpty()) {
            FileStatus currentFileStatus = fileQueue.remove();
            if (currentFileStatus.isFile()) {
                resultFiles.add(currentFileStatus);
            } else {
                FileStatus[] dirList = fs.listStatus(currentFileStatus.getPath());
                Collections.addAll(fileQueue, dirList);
            }
        }
    } catch (IOException e) {
        QueryException qe = new QueryException(DatawaveErrorCode.DFS_DIRECTORY_LISTING_ERROR, e, MessageFormat.format("directory: {0}", resultsDir));
        log.error(qe);
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    String filename = jobId + "-files.tar";
    httpResponse.addHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");
    // Make final references for use in anonymous class
    final List<FileStatus> paths = resultFiles;
    return output -> {
        TarArchiveOutputStream tos = new TarArchiveOutputStream(output);
        tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        try {
            for (FileStatus fileStatus : paths) {
                if (fileStatus.isDirectory())
                    continue;
                // The archive entry will be started when the first (and possibly only) chunk is
                // written out. It is done this way because we need to know the size of the file
                // for the archive entry, and don't want to scan twice to get that info (once
                // here and again in streamFile).
                String fileName = fileStatus.getPath().toUri().getPath().substring(jobDirectoryPathLength + 1);
                TarArchiveEntry entry = new TarArchiveEntry(jobId + "/" + fileName, false);
                entry.setSize(fileStatus.getLen());
                tos.putArchiveEntry(entry);
                FSDataInputStream fis = fs.open(fileStatus.getPath());
                byte[] buf = new byte[BUFFER_SIZE];
                int read;
                try {
                    read = fis.read(buf);
                    while (read != -1) {
                        tos.write(buf, 0, read);
                        read = fis.read(buf);
                    }
                } catch (Exception e) {
                    log.error("Error writing result file to output", e);
                    throw new WebApplicationException(e);
                } finally {
                    try {
                        if (null != fis)
                            fis.close();
                    } catch (IOException e) {
                        log.error("Error closing FSDataInputStream for file: " + fileStatus.getPath().getName(), e);
                    }
                }
                tos.closeArchiveEntry();
            }
            tos.finish();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            try {
                if (null != tos)
                    tos.close();
            } catch (IOException ioe) {
                log.error("Error closing TarArchiveOutputStream", ioe);
            }
            try {
                if (null != fs)
                    fs.close();
            } catch (IOException ioe) {
                log.error("Error closing HDFS client", ioe);
            }
        }
    };
}
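Because the response is a plain tar stream, a client can unpack it with the same commons-compress classes used above. A minimal sketch, assuming the base URL shown and ignoring the client-certificate authentication the real service requires:

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;

public class ResultTarClient {

    /** Downloads {jobId}-files.tar from getAllFiles and unpacks it under destDir. */
    public static void fetchResults(String baseUrl, String jobId, Path destDir) throws IOException {
        URL url = new URL(baseUrl + "/MapReduce/" + jobId + "/getAllFiles");
        try (InputStream in = url.openStream(); TarArchiveInputStream tar = new TarArchiveInputStream(in)) {
            TarArchiveEntry entry;
            while ((entry = tar.getNextTarEntry()) != null) {
                if (entry.isDirectory()) {
                    continue;
                }
                // Entry names are "jobId/relative/path", per the server code above.
                Path out = destDir.resolve(entry.getName()).normalize();
                Files.createDirectories(out.getParent());
                Files.copy(tar, out); // reads exactly this entry; the stream stops at the entry boundary
            }
        }
    }
}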
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class MapReduceBean method submit.
/**
 * Execute a MapReduce job with the given name and runtime parameters.
 *
 * @param jobName
 *            Name of the MapReduce job configuration
 * @param parameters
 *            A semicolon-separated list of name:value pairs. These are the required and optional parameters listed in the MapReduceConfiguration objects
 *            returned in the call to list()
 * @return {@code datawave.webservice.result.GenericResponse<String>} job id
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 204 if no data was found
 * @HTTP 400 if jobName is invalid
 * @HTTP 401 if user does not have correct roles
 * @HTTP 500 error starting the job
 */
@POST
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/submit")
@GZIP
public GenericResponse<String> submit(@FormParam("jobName") String jobName, @FormParam("parameters") String parameters) {
    GenericResponse<String> response = new GenericResponse<>();
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid;
    Set<Collection<String>> cbAuths = new HashSet<>();
    DatawavePrincipal datawavePrincipal = null;
    if (p instanceof DatawavePrincipal) {
        datawavePrincipal = (DatawavePrincipal) p;
        sid = datawavePrincipal.getShortName();
        cbAuths.addAll(datawavePrincipal.getAuthorizations());
    } else {
        QueryException qe = new QueryException(DatawaveErrorCode.UNEXPECTED_PRINCIPAL_ERROR, MessageFormat.format("Class: {0}", p.getClass().getName()));
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    // Get the MapReduceJobConfiguration from the configuration
    MapReduceJobConfiguration job;
    try {
        job = this.mapReduceConfiguration.getConfiguration(jobName);
    } catch (IllegalArgumentException e) {
        BadRequestQueryException qe = new BadRequestQueryException(DatawaveErrorCode.JOB_CONFIGURATION_ERROR, e);
        response.addException(qe);
        throw new BadRequestException(qe, response);
    }
    // Ensure that the user has the required roles and has passed the required auths
    if (null != job.getRequiredRoles() || null != job.getRequiredAuths()) {
        try {
            canRunJob(datawavePrincipal, new MultivaluedMapImpl<>(), job.getRequiredRoles(), job.getRequiredAuths());
        } catch (UnauthorizedQueryException qe) {
            // user does not have all of the required roles or did not pass the required auths
            response.addException(qe);
            throw new UnauthorizedException(qe, response);
        }
    }
    // Parse the parameters
    Map<String, String> runtimeParameters = new HashMap<>();
    if (null != parameters) {
        String[] param = parameters.split(PARAMETER_SEPARATOR);
        for (String pair : param) {
            String[] parts = pair.split(PARAMETER_NAME_VALUE_SEPARATOR);
            if (parts.length == 2) {
                runtimeParameters.put(parts[0], parts[1]);
            }
        }
    }
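    // Example: assuming PARAMETER_SEPARATOR is ";" and PARAMETER_NAME_VALUE_SEPARATOR is ":",
    // a parameters string of "queryId:1077;format:XML" parses to {queryId=1077, format=XML};
    // pairs that do not split into exactly two parts are silently skipped.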
    // Check to see if the job configuration class implements specific interfaces.
    if (job instanceof NeedCallerDetails) {
        ((NeedCallerDetails) job).setUserSid(sid);
        ((NeedCallerDetails) job).setPrincipal(p);
    }
    if (job instanceof NeedAccumuloConnectionFactory) {
        ((NeedAccumuloConnectionFactory) job).setAccumuloConnectionFactory(this.connectionFactory);
    }
    if (job instanceof NeedAccumuloDetails) {
        ((NeedAccumuloDetails) job).setUsername(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getUsername());
        ((NeedAccumuloDetails) job).setPassword(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getPassword());
        ((NeedAccumuloDetails) job).setInstanceName(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getInstance());
        ((NeedAccumuloDetails) job).setZookeepers(this.connectionPoolsConfiguration.getPools().get(this.connectionPoolsConfiguration.getDefaultPool()).getZookeepers());
    }
    if (job instanceof NeedQueryLogicFactory) {
        ((NeedQueryLogicFactory) job).setQueryLogicFactory(this.queryLogicFactory);
    }
    if (job instanceof NeedQueryPersister) {
        ((NeedQueryPersister) job).setPersister(this.queryPersister);
    }
    if (job instanceof NeedQueryCache) {
        ((NeedQueryCache) job).setQueryCache(cache);
    }
    if (job instanceof NeedSecurityDomain) {
        ((NeedSecurityDomain) job).setSecurityDomain(this.jsseSecurityDomain);
    }
    // If this job is being restarted, then the jobId will be the same. The restart method
    // puts the id into the runtime parameters
    String id = runtimeParameters.get(JOB_ID);
    if (null == id)
        id = UUID.randomUUID().toString();
    org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration();
    StringBuilder name = new StringBuilder().append(jobName).append("_sid_").append(sid).append("_id_").append(id);
    Job j;
    try {
        j = createJob(conf, name);
        job.initializeConfiguration(id, j, runtimeParameters, serverPrincipal);
    } catch (WebApplicationException waEx) {
        throw waEx;
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.LOGIC_CONFIGURATION_ERROR, e);
        log.error(qe.getMessage(), e);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
    // Enforce that certain InputFormat classes are being used here.
    if (this.mapReduceConfiguration.isRestrictInputFormats()) {
        // Make sure that the job input format is in the list
        Class<? extends InputFormat<?, ?>> ifClass;
        try {
            ifClass = j.getInputFormatClass();
        } catch (ClassNotFoundException e1) {
            QueryException qe = new QueryException(DatawaveErrorCode.INPUT_FORMAT_CLASS_ERROR, e1);
            log.error(qe);
            response.addException(qe);
            throw new DatawaveWebApplicationException(qe, response);
        }
        if (!this.mapReduceConfiguration.getValidInputFormats().contains(ifClass)) {
            IllegalArgumentException e = new IllegalArgumentException("Invalid input format class specified. Must use one of " + this.mapReduceConfiguration.getValidInputFormats());
            QueryException qe = new QueryException(DatawaveErrorCode.INVALID_FORMAT, e);
            log.error(qe);
            response.addException(qe.getBottomQueryException());
            throw new DatawaveWebApplicationException(qe, response);
        }
    }
    try {
        j.submit();
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_START_ERROR, e);
        log.error(qe.getMessage(), qe);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    }
    JobID mapReduceJobId = j.getJobID();
    log.info("JOB ID: " + mapReduceJobId);
    // Create an entry in the state table
    boolean restarted = (runtimeParameters.get(JOB_ID) != null);
    try {
        if (!restarted)
            mapReduceState.create(id, job.getHdfsUri(), job.getJobTracker(), job.getJobDir(), mapReduceJobId.toString(), job.getResultsDir(), parameters, jobName);
        else
            mapReduceState.addJob(id, mapReduceJobId.toString());
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_STATE_PERSISTENCE_ERROR, e);
        log.error(qe);
        response.addException(qe.getBottomQueryException());
        try {
            j.killJob();
        } catch (IOException ioe) {
            QueryException qe2 = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_KILL_ERROR, ioe);
            response.addException(qe2);
        }
        throw new DatawaveWebApplicationException(qe, response);
    }
    response.setResult(id);
    return response;
}
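Calling submit is an ordinary form POST. A hedged sketch using java.net.http follows; the base URL, job name, and parameter names are illustrative, and the real service also expects client-certificate authentication.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class SubmitJobClient {

    /** Submits a configured job; returns the raw GenericResponse body carrying the job id. */
    public static String submit(String baseUrl) throws Exception {
        // "BulkResultsJob" and the parameter names are illustrative; they must match
        // a MapReduceJobConfiguration returned by list().
        String form = "jobName=" + URLEncoder.encode("BulkResultsJob", StandardCharsets.UTF_8) + "&parameters="
                        + URLEncoder.encode("queryId:1077;format:XML", StandardCharsets.UTF_8);
        HttpRequest request = HttpRequest.newBuilder(URI.create(baseUrl + "/MapReduce/submit"))
                        .header("Content-Type", "application/x-www-form-urlencoded").header("Accept", "application/json")
                        .POST(HttpRequest.BodyPublishers.ofString(form)).build();
        HttpResponse<String> response = HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.ofString());
        return response.body();
    }
}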
use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.
the class MapReduceBean method remove.
/**
 * Removes the MapReduce entry and associated data.
 *
 * @param jobId
 *            the MapReduce job id
 * @return datawave.webservice.result.VoidResponse
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500 error removing the job
 */
@DELETE
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/{jobId}/remove")
@GZIP
public VoidResponse remove(@PathParam("jobId") String jobId) {
    VoidResponse response = new VoidResponse();
    // Call cancel, which will kill any running jobs and remove the results directory in HDFS.
    cancel(jobId);
    // Remove the working directory from HDFS
    MapReduceInfoResponseList list = list(jobId);
    MapReduceInfoResponse result = list.getResults().get(0);
    String hdfs = result.getHdfs();
    String wdir = result.getWorkingDirectory();
    Path p = new Path(wdir);
    try {
        FileSystem fs = getFS(hdfs, response);
        if (fs.exists(p) && !fs.delete(p, true)) {
            NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.WORKING_DIRECTORY_DELETE_ERROR, MessageFormat.format("{0}", wdir));
            log.error(qe);
            response.addException(qe);
            throw new NotFoundException(qe, response);
        }
    } catch (IOException e) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.WORKING_DIRECTORY_DELETE_ERROR, e, MessageFormat.format("{0}", wdir));
        log.error(qe);
        // attach the exception to the response, matching the handling above
        response.addException(qe);
        throw new NotFoundException(qe, response);
    }
    // Remove any persisted state information
    try {
        mapReduceState.remove(jobId);
    } catch (QueryException e) {
        log.error("Error removing job state information", e);
        response.addException(e.getBottomQueryException());
        throw new DatawaveWebApplicationException(e, response);
    }
    return response;
}
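The matching cleanup call is a DELETE against the job's remove path; a sketch under the same assumptions as the submit example above:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class RemoveJobClient {

    /** Issues the DELETE; a 404 status means the jobId was unknown. */
    public static int remove(String baseUrl, String jobId) throws Exception {
        HttpRequest request = HttpRequest.newBuilder(URI.create(baseUrl + "/MapReduce/" + jobId + "/remove")).DELETE().build();
        return HttpClient.newHttpClient().send(request, HttpResponse.BodyHandlers.discarding()).statusCode();
    }
}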