Search in sources :

Example 6 with MapReduceInfoResponseList

use of datawave.webservice.results.mr.MapReduceInfoResponseList in project datawave by NationalSecurityAgency.

In the class MapReduceStatePersisterTest, the method testFind:

// Verifies find() on the persister bean under test: after persisting three
// entries (each under a fresh UUID), find() should return all three results
// for the calling principal.
@Test
public void testFind() throws Exception {
    // create some entries; reset mocks between each create so the recordings
    // below start from a clean slate
    testPersistentCreate();
    PowerMock.resetAll();
    id = UUID.randomUUID().toString();
    testPersistentCreate();
    PowerMock.resetAll();
    id = UUID.randomUUID().toString();
    testPersistentCreate();
    PowerMock.resetAll();
    // Record expectations for the find() call: resolve the caller principal,
    // check out an ADMIN-priority Accumulo connection, and return it when done.
    EasyMock.expect(ctx.getCallerPrincipal()).andReturn(principal);
    HashMap<String, String> trackingMap = new HashMap<>();
    expect(connectionFactory.getTrackingMap(EasyMock.anyObject())).andReturn(trackingMap);
    expect(connectionFactory.getConnection(EasyMock.eq(AccumuloConnectionFactory.Priority.ADMIN), EasyMock.eq(trackingMap))).andReturn(connection);
    connectionFactory.returnConnection(connection);
    replayAll();
    MapReduceInfoResponseList result = bean.find();
    verifyAll();
    // one result per persisted entry
    assertEquals(3, result.getResults().size());
}
Also used : HashMap(java.util.HashMap) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) Test(org.junit.Test)

Example 7 with MapReduceInfoResponseList

use of datawave.webservice.results.mr.MapReduceInfoResponseList in project datawave by NationalSecurityAgency.

In the class MapReduceBean, the method list:

/**
 * Returns status of a job with the given jobId
 *
 * @param jobId
 *            id of the MapReduce job to look up
 * @return datawave.webservice.results.mr.MapReduceInfoResponseList
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found, or matches more than one job
 * @HTTP 500
 */
@GET
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/{jobId}/list")
@GZIP
public MapReduceInfoResponseList list(@PathParam("jobId") String jobId) {
    MapReduceInfoResponseList response = mapReduceState.findById(jobId);
    // findById may return null; normalize to an empty response so the checks below apply
    if (null == response) {
        response = new MapReduceInfoResponseList();
    }
    if (null == response.getResults() || response.getResults().isEmpty()) {
        // This endpoint looks up MapReduce job objects, so report the MapReduce-specific
        // error code (consistent with restart()), not the query-object one.
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.NO_MAPREDUCE_OBJECT_MATCH);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    }
    if (response.getResults().size() > 1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.TOO_MANY_MAPREDUCE_OBJECT_MATCHES);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    }
    return response;
}
Also used : NotFoundException(datawave.webservice.common.exception.NotFoundException) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) GZIP(org.jboss.resteasy.annotations.GZIP)

Example 8 with MapReduceInfoResponseList

use of datawave.webservice.results.mr.MapReduceInfoResponseList in project datawave by NationalSecurityAgency.

In the class MapReduceBean, the method restart:

/**
 * Kill any running job associated with the BulkResults id and start a new job.
 *
 * @param jobId
 *            id of the MapReduce job to restart
 * @return {@code datawave.webservice.result.GenericResponse<String>}
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500 error restarting the job
 */
@PUT
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/{jobId}/restart")
@GZIP
public GenericResponse<String> restart(@PathParam("jobId") String jobId) {
    GenericResponse<String> response = new GenericResponse<>();
    // Find all potential running jobs
    MapReduceInfoResponseList list = mapReduceState.findById(jobId);
    // Should contain zero or one job. Guard against null: findById can return
    // null (the list() endpoint makes the same check) and dereferencing it here
    // would turn a missing job into a 500 NPE instead of a 404.
    if (null == list || list.getResults().isEmpty()) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.NO_MAPREDUCE_OBJECT_MATCH);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } else if (list.getResults().size() > 1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.TOO_MANY_MAPREDUCE_OBJECT_MATCHES);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } else {
        MapReduceInfoResponse thisJob = list.getResults().get(0);
        // Call cancel for this job. This will kill any running jobs and remove the results directory
        cancel(jobId);
        // Now re-submit this job after adding the JOB_ID to the runtime parameters to signal that this job has been restarted
        String jobName = thisJob.getJobName();
        // Now call submit
        return submit(jobName, thisJob.getRuntimeParameters() + PARAMETER_SEPARATOR + JOB_ID + PARAMETER_NAME_VALUE_SEPARATOR + jobId);
    }
}
Also used : GenericResponse(datawave.webservice.result.GenericResponse) MapReduceInfoResponse(datawave.webservice.results.mr.MapReduceInfoResponse) NotFoundException(datawave.webservice.common.exception.NotFoundException) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) Produces(javax.ws.rs.Produces) GZIP(org.jboss.resteasy.annotations.GZIP) PUT(javax.ws.rs.PUT)

Example 9 with MapReduceInfoResponseList

use of datawave.webservice.results.mr.MapReduceInfoResponseList in project datawave by NationalSecurityAgency.

In the class MapReduceBean, the method getResultFile:

/**
 * Returns the contents of a result file. The list of resulting output files from the MapReduce job is listed in the response object of the status
 * operation.
 *
 * @param jobId
 *            id of the MapReduce job whose result file is requested
 * @param fileName
 *            name of the result file within the job's results directory
 * @return file contents
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500
 */
@GET
@Produces("*/*")
@javax.ws.rs.Path("/{jobId}/getFile/{fileName}")
@GZIP
public StreamingOutput getResultFile(@PathParam("jobId") String jobId, @PathParam("fileName") String fileName) {
    MapReduceInfoResponseList response = list(jobId);
    MapReduceInfoResponse result = response.getResults().get(0);
    String hdfs = result.getHdfs();
    String resultsDir = result.getResultsDirectory();
    final FileSystem fs = getFS(hdfs, response);
    final Path resultFile = new Path(resultsDir, fileName);
    FSDataInputStream fis = null;
    try {
        if (!fs.exists(resultFile) || !fs.isFile(resultFile)) {
            NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.FILE_NOT_FOUND, MessageFormat.format("{0} at path {1}", fileName, resultsDir));
            response.addException(qe);
            throw new NotFoundException(qe, response);
        }
        fis = fs.open(resultFile);
    } catch (IOException e1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.RESULT_FILE_ACCESS_ERROR, e1, MessageFormat.format("{0}", resultFile.toString()));
        log.error(qe);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } finally {
        // On any failure path the StreamingOutput below is never created, so the
        // FileSystem would otherwise leak; it is normally closed in the writer's
        // finally block. fis == null marks the failure paths.
        if (null == fis) {
            try {
                fs.close();
            } catch (IOException e) {
                log.error("Error closing HDFS client", e);
            }
        }
    }
    // Make a final reference to the fis for referencing inside the inner class
    final FSDataInputStream fiz = fis;
    return new StreamingOutput() {

        private Logger log = Logger.getLogger(this.getClass());

        @Override
        public void write(java.io.OutputStream output) throws IOException, WebApplicationException {
            byte[] buf = new byte[BUFFER_SIZE];
            int read;
            try {
                // Stream the file to the client in BUFFER_SIZE chunks
                read = fiz.read(buf);
                while (read != -1) {
                    output.write(buf, 0, read);
                    read = fiz.read(buf);
                }
            } catch (Exception e) {
                log.error("Error writing result file to output", e);
                throw new WebApplicationException(e);
            } finally {
                try {
                    if (null != fiz)
                        fiz.close();
                } catch (IOException e) {
                    log.error("Error closing FSDataInputStream for file: " + resultFile, e);
                }
                try {
                    if (null != fs)
                        fs.close();
                } catch (IOException e) {
                    log.error("Error closing HDFS client", e);
                }
            }
        }
    };
}
Also used : Path(org.apache.hadoop.fs.Path) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) WebApplicationException(javax.ws.rs.WebApplicationException) TarArchiveOutputStream(org.apache.commons.compress.archivers.tar.TarArchiveOutputStream) NotFoundException(datawave.webservice.common.exception.NotFoundException) StreamingOutput(javax.ws.rs.core.StreamingOutput) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) IOException(java.io.IOException) Logger(org.apache.log4j.Logger) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) WebApplicationException(javax.ws.rs.WebApplicationException) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) IOException(java.io.IOException) QueryException(datawave.webservice.query.exception.QueryException) BadRequestException(datawave.webservice.common.exception.BadRequestException) NotFoundException(datawave.webservice.common.exception.NotFoundException) UnauthorizedQueryException(datawave.webservice.query.exception.UnauthorizedQueryException) UnauthorizedException(datawave.webservice.common.exception.UnauthorizedException) BadRequestQueryException(datawave.webservice.query.exception.BadRequestQueryException) MapReduceInfoResponse(datawave.webservice.results.mr.MapReduceInfoResponse) FileSystem(org.apache.hadoop.fs.FileSystem) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) GZIP(org.jboss.resteasy.annotations.GZIP)

Example 10 with MapReduceInfoResponseList

use of datawave.webservice.results.mr.MapReduceInfoResponseList in project datawave by NationalSecurityAgency.

In the class MapReduceBean, the method getResultFiles:

/**
 * Returns a tar file where each tar entry is a result file.
 *
 * @param jobId
 *            id of the MapReduce job whose result files are requested
 * @param httpResponse
 *            servlet response used to set the Content-Disposition header
 * @return tar file
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500
 */
@GET
@Produces("*/*")
@javax.ws.rs.Path("/{jobId}/getAllFiles")
@GZIP
public StreamingOutput getResultFiles(@Required("jobId") @PathParam("jobId") final String jobId, @Context HttpServletResponse httpResponse) {
    // list() throws 404 when the job id matches zero or multiple jobs, so exactly one result exists here
    MapReduceInfoResponseList response = list(jobId);
    MapReduceInfoResponse result = response.getResults().get(0);
    String hdfs = result.getHdfs();
    String resultsDir = result.getResultsDirectory();
    final FileSystem fs = getFS(hdfs, response);
    final Path jobDirectory = new Path(resultsDir);
    // used below to strip the job-directory prefix from each file path when naming tar entries
    final int jobDirectoryPathLength = jobDirectory.toUri().getPath().length();
    // NOTE(review): on the early throw paths below, fs is never closed (the lambda's
    // finally block that normally closes it never runs) — potential FileSystem leak; confirm.
    try {
        if (!fs.exists(jobDirectory) || !fs.getFileStatus(jobDirectory).isDirectory()) {
            NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.JOB_DIRECTORY_NOT_FOUND, MessageFormat.format("{0} at path {1}", jobId, jobDirectory));
            response.addException(qe);
            throw new NotFoundException(qe, response);
        }
    } catch (IOException e1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.RESULT_DIRECTORY_ACCESS_ERROR, e1, MessageFormat.format("{0}", resultsDir));
        log.error(qe);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    }
    // Get the children
    List<FileStatus> resultFiles = new ArrayList<>();
    try {
        // recurse through the directory to find all files
        Queue<FileStatus> fileQueue = new LinkedList<>();
        fileQueue.add(fs.getFileStatus(jobDirectory));
        while (!fileQueue.isEmpty()) {
            FileStatus currentFileStatus = fileQueue.remove();
            if (currentFileStatus.isFile()) {
                resultFiles.add(currentFileStatus);
            } else {
                // directory: enqueue its children for the breadth-first walk
                FileStatus[] dirList = fs.listStatus(currentFileStatus.getPath());
                Collections.addAll(fileQueue, dirList);
            }
        }
    } catch (IOException e) {
        QueryException qe = new QueryException(DatawaveErrorCode.DFS_DIRECTORY_LISTING_ERROR, e, MessageFormat.format("directory: {0}", resultsDir));
        log.error(qe);
        response.addException(qe);
        throw new DatawaveWebApplicationException(qe, response);
    }
    String filename = jobId + "-files.tar";
    httpResponse.addHeader("Content-Disposition", "attachment; filename=\"" + filename + "\"");
    // Make final references for use in anonymous class
    final List<FileStatus> paths = resultFiles;
    // Lazily stream a tar archive of all result files; each file becomes one tar entry
    return output -> {
        TarArchiveOutputStream tos = new TarArchiveOutputStream(output);
        // POSIX long-file mode so entry names longer than the classic tar limit are allowed
        tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        try {
            for (FileStatus fileStatus : paths) {
                if (fileStatus.isDirectory())
                    continue;
                // The archive entry will be started when the first (and possibly only) chunk is
                // written out. It is done this way because we need to know the size of the file
                // for the archive entry, and don't want to scan twice to get that info (once
                // here and again in streamFile).
                String fileName = fileStatus.getPath().toUri().getPath().substring(jobDirectoryPathLength + 1);
                TarArchiveEntry entry = new TarArchiveEntry(jobId + "/" + fileName, false);
                entry.setSize(fileStatus.getLen());
                tos.putArchiveEntry(entry);
                FSDataInputStream fis = fs.open(fileStatus.getPath());
                byte[] buf = new byte[BUFFER_SIZE];
                int read;
                try {
                    read = fis.read(buf);
                    while (read != -1) {
                        tos.write(buf, 0, read);
                        read = fis.read(buf);
                    }
                } catch (Exception e) {
                    log.error("Error writing result file to output", e);
                    throw new WebApplicationException(e);
                } finally {
                    try {
                        if (null != fis)
                            fis.close();
                    } catch (IOException e) {
                        log.error("Error closing FSDataInputStream for file: " + fileStatus.getPath().getName(), e);
                    }
                }
                tos.closeArchiveEntry();
            }
            tos.finish();
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        } finally {
            try {
                if (null != tos)
                    tos.close();
            } catch (IOException ioe) {
                log.error("Error closing TarArchiveOutputStream", ioe);
            }
            try {
                if (null != fs)
                    fs.close();
            } catch (IOException ioe) {
                log.error("Error closing HDFS client", ioe);
            }
        }
    };
}
Also used : Path(org.apache.hadoop.fs.Path) StringUtils(org.apache.commons.lang.StringUtils) RolesAllowed(javax.annotation.security.RolesAllowed) Produces(javax.ws.rs.Produces) FileSystem(org.apache.hadoop.fs.FileSystem) FileStatus(org.apache.hadoop.fs.FileStatus) TarArchiveOutputStream(org.apache.commons.compress.archivers.tar.TarArchiveOutputStream) TransactionAttributeType(javax.ejb.TransactionAttributeType) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) AccumuloConnectionFactory(datawave.webservice.common.connection.AccumuloConnectionFactory) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) MapReduceState(datawave.webservice.mr.state.MapReduceStatePersisterBean.MapReduceState) Resource(javax.annotation.Resource) ConnectionPoolsConfiguration(datawave.webservice.common.connection.config.ConnectionPoolsConfiguration) Set(java.util.Set) OozieJobConfiguration(datawave.webservice.mr.configuration.OozieJobConfiguration) MapReduceStatePersisterBean(datawave.webservice.mr.state.MapReduceStatePersisterBean) WebApplicationException(javax.ws.rs.WebApplicationException) Exclude(org.apache.deltaspike.core.api.exclude.Exclude) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) GET(javax.ws.rs.GET) MapReduceJobConfiguration(datawave.webservice.mr.configuration.MapReduceJobConfiguration) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) ArrayList(java.util.ArrayList) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry) JSSESecurityDomain(org.jboss.security.JSSESecurityDomain) LocalBean(javax.ejb.LocalBean) EJBContext(javax.ejb.EJBContext) Auditor(datawave.webservice.common.audit.Auditor) Properties(java.util.Properties) NeedQueryLogicFactory(datawave.webservice.mr.configuration.NeedQueryLogicFactory) InputFormat(org.apache.hadoop.mapreduce.InputFormat) 
IOException(java.io.IOException) DatawaveErrorCode(datawave.webservice.query.exception.DatawaveErrorCode) QueryException(datawave.webservice.query.exception.QueryException) NeedAccumuloDetails(datawave.webservice.mr.configuration.NeedAccumuloDetails) TransactionManagementType(javax.ejb.TransactionManagementType) Required(datawave.annotation.Required) OozieClient(org.apache.oozie.client.OozieClient) BadRequestException(datawave.webservice.common.exception.BadRequestException) PrivateAuditConstants(datawave.webservice.common.audit.PrivateAuditConstants) Logger(org.apache.log4j.Logger) MultivaluedMapImpl(org.jboss.resteasy.specimpl.MultivaluedMapImpl) QueryParam(javax.ws.rs.QueryParam) DefaultValue(javax.ws.rs.DefaultValue) Path(org.apache.hadoop.fs.Path) NotFoundException(datawave.webservice.common.exception.NotFoundException) DELETE(javax.ws.rs.DELETE) Stateless(javax.ejb.Stateless) RunningJob(org.apache.hadoop.mapred.RunningJob) Context(javax.ws.rs.core.Context) Collection(java.util.Collection) GenericResponse(datawave.webservice.result.GenericResponse) UnauthorizedQueryException(datawave.webservice.query.exception.UnauthorizedQueryException) StreamingOutput(javax.ws.rs.core.StreamingOutput) NeedSecurityDomain(datawave.webservice.mr.configuration.NeedSecurityDomain) UUID(java.util.UUID) MapReduceConfiguration(datawave.webservice.mr.configuration.MapReduceConfiguration) MapReduceInfoResponse(datawave.webservice.results.mr.MapReduceInfoResponse) UnauthorizedException(datawave.webservice.common.exception.UnauthorizedException) List(java.util.List) Principal(java.security.Principal) Job(org.apache.hadoop.mapreduce.Job) QueryLogicFactory(datawave.webservice.query.logic.QueryLogicFactory) Entry(java.util.Map.Entry) Queue(java.util.Queue) DatawaveEmbeddedProjectStageHolder(datawave.configuration.DatawaveEmbeddedProjectStageHolder) MapReduceJobDescription(datawave.webservice.results.mr.MapReduceJobDescription) PathParam(javax.ws.rs.PathParam) 
QueryCache(datawave.webservice.query.cache.QueryCache) HashMap(java.util.HashMap) DeclareRoles(javax.annotation.security.DeclareRoles) MessageFormat(java.text.MessageFormat) Inject(javax.inject.Inject) HashSet(java.util.HashSet) AuditParameters(datawave.webservice.common.audit.AuditParameters) TransactionAttribute(javax.ejb.TransactionAttribute) BaseResponse(datawave.webservice.result.BaseResponse) NeedQueryPersister(datawave.webservice.mr.configuration.NeedQueryPersister) VoidResponse(datawave.webservice.result.VoidResponse) JobID(org.apache.hadoop.mapreduce.JobID) LinkedList(java.util.LinkedList) AuditBean(datawave.webservice.common.audit.AuditBean) BadRequestQueryException(datawave.webservice.query.exception.BadRequestQueryException) FormParam(javax.ws.rs.FormParam) Persister(datawave.webservice.query.factory.Persister) TransactionManagement(javax.ejb.TransactionManagement) POST(javax.ws.rs.POST) ServerPrincipal(datawave.security.system.ServerPrincipal) HttpServletResponse(javax.servlet.http.HttpServletResponse) AuthorizationsUtil(datawave.security.util.AuthorizationsUtil) NeedCallerDetails(datawave.webservice.mr.configuration.NeedCallerDetails) SpringBean(datawave.configuration.spring.SpringBean) JobExecution(datawave.webservice.results.mr.JobExecution) JobConf(org.apache.hadoop.mapred.JobConf) MultivaluedMap(javax.ws.rs.core.MultivaluedMap) DatawavePrincipal(datawave.security.authorization.DatawavePrincipal) NeedQueryCache(datawave.webservice.mr.configuration.NeedQueryCache) SecurityMarking(datawave.marking.SecurityMarking) NeedAccumuloConnectionFactory(datawave.webservice.mr.configuration.NeedAccumuloConnectionFactory) OozieJobConstants(datawave.webservice.mr.configuration.OozieJobConstants) PUT(javax.ws.rs.PUT) Collections(java.util.Collections) GZIP(org.jboss.resteasy.annotations.GZIP) JobClient(org.apache.hadoop.mapred.JobClient) FileStatus(org.apache.hadoop.fs.FileStatus) 
DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) WebApplicationException(javax.ws.rs.WebApplicationException) ArrayList(java.util.ArrayList) NotFoundException(datawave.webservice.common.exception.NotFoundException) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) IOException(java.io.IOException) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) LinkedList(java.util.LinkedList) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) WebApplicationException(javax.ws.rs.WebApplicationException) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) IOException(java.io.IOException) QueryException(datawave.webservice.query.exception.QueryException) BadRequestException(datawave.webservice.common.exception.BadRequestException) NotFoundException(datawave.webservice.common.exception.NotFoundException) UnauthorizedQueryException(datawave.webservice.query.exception.UnauthorizedQueryException) UnauthorizedException(datawave.webservice.common.exception.UnauthorizedException) BadRequestQueryException(datawave.webservice.query.exception.BadRequestQueryException) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) QueryException(datawave.webservice.query.exception.QueryException) UnauthorizedQueryException(datawave.webservice.query.exception.UnauthorizedQueryException) BadRequestQueryException(datawave.webservice.query.exception.BadRequestQueryException) MapReduceInfoResponse(datawave.webservice.results.mr.MapReduceInfoResponse) FileSystem(org.apache.hadoop.fs.FileSystem) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream) 
TarArchiveOutputStream(org.apache.commons.compress.archivers.tar.TarArchiveOutputStream) Produces(javax.ws.rs.Produces) GET(javax.ws.rs.GET) GZIP(org.jboss.resteasy.annotations.GZIP)

Aggregations

MapReduceInfoResponseList (datawave.webservice.results.mr.MapReduceInfoResponseList)14 NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException)9 MapReduceInfoResponse (datawave.webservice.results.mr.MapReduceInfoResponse)9 QueryException (datawave.webservice.query.exception.QueryException)7 IOException (java.io.IOException)7 HashMap (java.util.HashMap)7 DatawavePrincipal (datawave.security.authorization.DatawavePrincipal)6 NotFoundException (datawave.webservice.common.exception.NotFoundException)6 Produces (javax.ws.rs.Produces)6 GZIP (org.jboss.resteasy.annotations.GZIP)6 Test (org.junit.Test)5 DatawaveWebApplicationException (datawave.webservice.common.exception.DatawaveWebApplicationException)4 BadRequestQueryException (datawave.webservice.query.exception.BadRequestQueryException)4 UnauthorizedQueryException (datawave.webservice.query.exception.UnauthorizedQueryException)4 GenericResponse (datawave.webservice.result.GenericResponse)3 JobExecution (datawave.webservice.results.mr.JobExecution)3 Principal (java.security.Principal)3 ArrayList (java.util.ArrayList)3 PUT (javax.ws.rs.PUT)3 AccumuloException (org.apache.accumulo.core.client.AccumuloException)3