Example 76 with QueryException

Use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

In class MapReduceStatePersisterBean, method addJob.

/**
 * Adds a new job to the history for this BulkResults id
 *
 * @param id
 *            bulk results id
 * @param mapReduceJobId
 *            map reduce job id
 * @throws QueryException
 *             if the job history cannot be persisted
 */
public void addJob(String id, String mapReduceJobId) throws QueryException {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal dp = (DatawavePrincipal) p;
        sid = dp.getShortName();
    }
    Connector c = null;
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
        tableCheck(c);
        // Not using a MultiTableBatchWriter here because it's not yet
        // implemented in Mock Accumulo.
        BatchWriterConfig bwCfg = new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1);
        try (BatchWriter tableWriter = c.createBatchWriter(TABLE_NAME, bwCfg);
            BatchWriter indexWriter = c.createBatchWriter(INDEX_TABLE_NAME, bwCfg)) {
            Mutation m = new Mutation(id);
            m.put(sid, STATE + NULL + mapReduceJobId, new Value(MapReduceState.STARTED.toString().getBytes()));
            tableWriter.addMutation(m);
            Mutation i = new Mutation(mapReduceJobId);
            i.put(sid, id, NULL_VALUE);
            indexWriter.addMutation(i);
        }
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.BULK_RESULTS_ENTRY_ERROR, e);
        log.error(qe);
        throw qe;
    } finally {
        try {
            connectionFactory.returnConnection(c);
        } catch (Exception e) {
            log.error("Error closing writers", e);
        }
    }
}
Also used : Connector(org.apache.accumulo.core.client.Connector) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) QueryException(datawave.webservice.query.exception.QueryException) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Principal(java.security.Principal) DatawavePrincipal(datawave.security.authorization.DatawavePrincipal) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) TableExistsException(org.apache.accumulo.core.client.TableExistsException) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) IOException(java.io.IOException) AccumuloException(org.apache.accumulo.core.client.AccumuloException)
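
Since every failure inside addJob is wrapped in a single QueryException carrying DatawaveErrorCode.BULK_RESULTS_ENTRY_ERROR, callers need only one catch clause. A minimal caller sketch follows; the class and method names in it are hypothetical, not part of the DATAWAVE codebase.

// Hypothetical caller sketch: BulkResultsTracker and recordStart are invented names.
import datawave.webservice.query.exception.QueryException;

public class BulkResultsTracker {

    private final MapReduceStatePersisterBean persister; // assumed injected elsewhere

    public BulkResultsTracker(MapReduceStatePersisterBean persister) {
        this.persister = persister;
    }

    /** Records that a map reduce job has started for the given bulk results id. */
    public boolean recordStart(String bulkResultsId, String mapReduceJobId) {
        try {
            persister.addJob(bulkResultsId, mapReduceJobId);
            return true;
        } catch (QueryException e) {
            // addJob has already logged the wrapped error; the caller only decides how to react.
            return false;
        }
    }
}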

Example 77 with QueryException

Use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

In class MapReduceStatePersisterBean, method create.

/**
 * @param id
 *            map reduce id
 * @param hdfsUri
 *            HDFS URI for the map reduce job's file system
 * @param jobTracker
 *            address of the job tracker that will run the job
 * @param workingDirectory
 *            map reduce job working directory
 * @param mapReduceJobId
 *            map reduce job id
 * @param resultsDirectory
 *            either HDFS directory name or some other location (i.e. table name)
 * @param runtimeParameters
 *            parameters
 * @param jobName
 *            name of the map reduce job
 * @throws QueryException
 *             if the job information cannot be persisted
 */
public void create(String id, String hdfsUri, String jobTracker, String workingDirectory, String mapReduceJobId, String resultsDirectory, String runtimeParameters, String jobName) throws QueryException {
    // Find out who/what called this method
    Principal p = ctx.getCallerPrincipal();
    String sid = p.getName();
    if (p instanceof DatawavePrincipal) {
        DatawavePrincipal cp = (DatawavePrincipal) p;
        sid = cp.getShortName();
    }
    Connector c = null;
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        c = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.ADMIN, trackingMap);
        tableCheck(c);
        // Not using a MultiTableBatchWriter here because it's not yet
        // implemented in Mock Accumulo.
        BatchWriterConfig bwCfg = new BatchWriterConfig().setMaxLatency(10, TimeUnit.SECONDS).setMaxMemory(10240L).setMaxWriteThreads(1);
        try (BatchWriter tableWriter = c.createBatchWriter(TABLE_NAME, bwCfg);
            BatchWriter indexWriter = c.createBatchWriter(INDEX_TABLE_NAME, bwCfg)) {
            Mutation m = new Mutation(id);
            m.put(sid, WORKING_DIRECTORY, workingDirectory);
            m.put(sid, HDFS, hdfsUri);
            m.put(sid, JT, jobTracker);
            m.put(sid, NAME, jobName);
            m.put(sid, RESULTS_LOCATION, resultsDirectory);
            m.put(sid, PARAMS, runtimeParameters);
            m.put(sid, STATE + NULL + mapReduceJobId, new Value(MapReduceState.STARTED.toString().getBytes()));
            tableWriter.addMutation(m);
            Mutation i = new Mutation(mapReduceJobId);
            i.put(sid, id, NULL_VALUE);
            indexWriter.addMutation(i);
        }
    } catch (Exception e) {
        QueryException qe = new QueryException(DatawaveErrorCode.BULK_RESULTS_ENTRY_ERROR, e);
        log.error(qe);
        throw qe;
    } finally {
        try {
            connectionFactory.returnConnection(c);
        } catch (Exception e) {
            log.error("Error closing writers", e);
        }
    }
}
Also used : Connector(org.apache.accumulo.core.client.Connector) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) QueryException(datawave.webservice.query.exception.QueryException) Value(org.apache.accumulo.core.data.Value) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) Principal(java.security.Principal) DatawavePrincipal(datawave.security.authorization.DatawavePrincipal) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) TableExistsException(org.apache.accumulo.core.client.TableExistsException) AccumuloSecurityException(org.apache.accumulo.core.client.AccumuloSecurityException) IOException(java.io.IOException) AccumuloException(org.apache.accumulo.core.client.AccumuloException)
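
The create() method writes one wide row per id in the state table plus a single reverse-lookup entry in the index table. To make that layout concrete, here is a hedged read-back sketch using a plain Accumulo Scanner; the stateTableName parameter stands in for the bean's TABLE_NAME constant, and the layout comments mirror the puts above.

// Sketch only: visualizes the layout create() persists.
//   state table: row = id, colFam = sid, colQual in {WORKING_DIRECTORY, HDFS, JT,
//                NAME, RESULTS_LOCATION, PARAMS, STATE + NULL + mapReduceJobId}
//   index table: row = mapReduceJobId, colFam = sid, colQual = id
import java.util.Map.Entry;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Scanner;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.accumulo.core.data.Key;
import org.apache.accumulo.core.data.Range;
import org.apache.accumulo.core.data.Value;
import org.apache.accumulo.core.security.Authorizations;

public class MapReduceStateDumper {

    /** Prints every column stored for one map reduce state id. */
    public static void dump(Connector c, String stateTableName, String id) throws TableNotFoundException {
        Scanner scanner = c.createScanner(stateTableName, Authorizations.EMPTY);
        scanner.setRange(new Range(id)); // single-row range over the state entry
        for (Entry<Key,Value> e : scanner) {
            System.out.println(e.getKey().getColumnQualifier() + " = " + e.getValue());
        }
    }
}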

Example 78 with QueryException

Use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

In class ModelBean, method insertMapping.

/**
 * <strong>Administrator credentials required.</strong> Insert a new field mapping into an existing model
 *
 * @param model
 *            list of new field mappings to insert
 * @param modelTableName
 *            name of the table that contains the model
 * @return datawave.webservice.result.VoidResponse
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user
 *
 * @HTTP 200 success
 * @HTTP 500 internal server error
 */
@POST
@Consumes({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml" })
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@Path("/insert")
@GZIP
@RolesAllowed({ "Administrator", "JBossAdministrator" })
@Interceptors(ResponseInterceptor.class)
public VoidResponse insertMapping(datawave.webservice.model.Model model, @QueryParam("modelTableName") String modelTableName) {
    if (modelTableName == null) {
        modelTableName = defaultModelTableName;
    }
    VoidResponse response = new VoidResponse();
    Connector connector = null;
    BatchWriter writer = null;
    String tableName = this.checkModelTableName(modelTableName);
    try {
        Map<String, String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
        connector = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.LOW, trackingMap);
        writer = connector.createBatchWriter(tableName, new BatchWriterConfig().setMaxLatency(BATCH_WRITER_MAX_LATENCY, TimeUnit.MILLISECONDS).setMaxMemory(BATCH_WRITER_MAX_MEMORY).setMaxWriteThreads(BATCH_WRITER_MAX_THREADS));
        for (FieldMapping mapping : model.getFields()) {
            Mutation m = ModelKeyParser.createMutation(mapping, model.getName());
            writer.addMutation(m);
        }
    } catch (Exception e) {
        log.error("Could not insert mapping.", e);
        QueryException qe = new QueryException(DatawaveErrorCode.INSERT_MAPPING_ERROR, e);
        response.addException(qe.getBottomQueryException());
        throw new DatawaveWebApplicationException(qe, response);
    } finally {
        if (null != writer) {
            try {
                writer.close();
            } catch (MutationsRejectedException e1) {
                QueryException qe = new QueryException(DatawaveErrorCode.WRITER_CLOSE_ERROR, e1);
                log.error(qe);
                response.addException(qe);
                throw new DatawaveWebApplicationException(qe, response);
            }
        }
        if (null != connector) {
            try {
                connectionFactory.returnConnection(connector);
            } catch (Exception e) {
                log.error("Error returning connection to factory", e);
            }
        }
    }
    cache.reloadCache(tableName);
    return response;
}
Also used : Connector(org.apache.accumulo.core.client.Connector) QueryException(datawave.webservice.query.exception.QueryException) VoidResponse(datawave.webservice.result.VoidResponse) FieldMapping(datawave.webservice.model.FieldMapping) BatchWriterConfig(org.apache.accumulo.core.client.BatchWriterConfig) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) BatchWriter(org.apache.accumulo.core.client.BatchWriter) Mutation(org.apache.accumulo.core.data.Mutation) PreConditionFailedException(datawave.webservice.common.exception.PreConditionFailedException) NotFoundException(datawave.webservice.common.exception.NotFoundException) MutationsRejectedException(org.apache.accumulo.core.client.MutationsRejectedException) Path(javax.ws.rs.Path) RolesAllowed(javax.annotation.security.RolesAllowed) Interceptors(javax.interceptor.Interceptors) POST(javax.ws.rs.POST) Consumes(javax.ws.rs.Consumes) Produces(javax.ws.rs.Produces) GZIP(org.jboss.resteasy.annotations.GZIP)
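
Unlike the two persister methods above, insertMapping closes its BatchWriter manually in the finally block. BatchWriter is AutoCloseable in Accumulo 1.6+, so the same write path could be condensed with try-with-resources; a hedged refactor sketch follows. Imports for DATAWAVE-internal types (AccumuloConnectionFactory, ModelKeyParser) are omitted, and the batch-writer limits are illustrative placeholders for the bean's BATCH_WRITER_* constants.

// Refactor sketch, not the project's actual code: try-with-resources replaces
// the manual close, so a MutationsRejectedException raised by close() surfaces
// on the write path instead of in a separate finally block.
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.accumulo.core.client.BatchWriter;
import org.apache.accumulo.core.client.BatchWriterConfig;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.data.Mutation;

public class ModelMappingWriter {

    private final AccumuloConnectionFactory connectionFactory; // assumed injected, as in the bean

    public ModelMappingWriter(AccumuloConnectionFactory connectionFactory) {
        this.connectionFactory = connectionFactory;
    }

    public void insertMappings(String tableName, datawave.webservice.model.Model model) throws Exception {
        Connector connector = null;
        try {
            Map<String,String> trackingMap = connectionFactory.getTrackingMap(Thread.currentThread().getStackTrace());
            connector = connectionFactory.getConnection(AccumuloConnectionFactory.Priority.LOW, trackingMap);
            BatchWriterConfig cfg = new BatchWriterConfig().setMaxLatency(500, TimeUnit.MILLISECONDS).setMaxMemory(10240L).setMaxWriteThreads(1); // illustrative limits
            try (BatchWriter writer = connector.createBatchWriter(tableName, cfg)) {
                for (datawave.webservice.model.FieldMapping mapping : model.getFields()) {
                    Mutation m = ModelKeyParser.createMutation(mapping, model.getName());
                    writer.addMutation(m);
                }
            }
        } finally {
            if (connector != null) {
                connectionFactory.returnConnection(connector);
            }
        }
    }
}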

Example 79 with QueryException

Use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

In class MapReduceBean, method cancel.

/**
 * Cancels any MapReduce jobs with the specified jobId and clears out the results directory
 *
 * @param jobId
 *            the map reduce id whose running jobs should be canceled
 * @return {@code datawave.webservice.result.GenericResponse<Boolean>}
 * @RequestHeader X-ProxiedEntitiesChain use when proxying request for user by specifying a chain of DNs of the identities to proxy
 * @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
 * @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
 * @HTTP 200 success
 * @HTTP 404 if jobId is invalid or cannot be found
 * @HTTP 500 error killing the job
 */
@PUT
@Produces({ "application/xml", "text/xml", "application/json", "text/yaml", "text/x-yaml", "application/x-yaml", "application/x-protobuf", "application/x-protostuff" })
@javax.ws.rs.Path("/{jobId}/cancel")
@GZIP
public GenericResponse<Boolean> cancel(@PathParam("jobId") String jobId) {
    GenericResponse<Boolean> response = new GenericResponse<>();
    // Find all potential running jobs
    MapReduceInfoResponseList list = mapReduceState.findById(jobId);
    List<String> jobIdsToKill = new ArrayList<>();
    // Should contain zero or one bulk result job
    if (list.getResults().isEmpty()) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.NO_MAPREDUCE_OBJECT_MATCH);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } else if (list.getResults().size() > 1) {
        NotFoundQueryException qe = new NotFoundQueryException(DatawaveErrorCode.TOO_MANY_MAPREDUCE_OBJECT_MATCHES);
        response.addException(qe);
        throw new NotFoundException(qe, response);
    } else {
        MapReduceInfoResponse thisJob = list.getResults().get(0);
        // Get all the executions for this job
        String prevId = null;
        String prevState = null;
        // Walk the executions; a STARTED execution that was superseded by a newer job id goes on the kill list
        for (JobExecution ex : thisJob.getJobExecutions()) {
            if (prevId != null) {
                if (prevState.equals(MapReduceState.STARTED.toString()) && !ex.getMapReduceJobId().equals(prevId))
                    jobIdsToKill.add(prevId);
            }
            prevId = ex.getMapReduceJobId();
            prevState = ex.getState();
        }
        // Get the last one
        if (MapReduceState.STARTED.toString().equals(prevState))
            jobIdsToKill.add(prevId);
        FileSystem hdfs = null;
        try {
            hdfs = getFS(thisJob.getHdfs(), response);
            Path resultsDir = new Path(thisJob.getResultsDirectory());
            hdfs.getConf().set("mapreduce.jobtracker.address", thisJob.getJobTracker());
            // Create a Job object
            try (JobClient job = new JobClient(new JobConf(hdfs.getConf()))) {
                for (String killId : jobIdsToKill) {
                    try {
                        JobID jid = JobID.forName(killId);
                        RunningJob rj = job.getJob(new org.apache.hadoop.mapred.JobID(jid.getJtIdentifier(), jid.getId()));
                        // job.getJob(jid);
                        if (null != rj)
                            rj.killJob();
                        else
                            mapReduceState.updateState(killId, MapReduceState.KILLED);
                    } catch (IOException | QueryException e) {
                        QueryException qe = new QueryException(DatawaveErrorCode.MAPREDUCE_JOB_KILL_ERROR, e, MessageFormat.format("job_id: {0}", killId));
                        log.error(qe);
                        response.addException(qe.getBottomQueryException());
                        throw new DatawaveWebApplicationException(qe, response);
                    }
                }
            }
            // Delete the contents of the results directory
            if (hdfs.exists(resultsDir) && !hdfs.delete(resultsDir, true)) {
                QueryException qe = new QueryException(DatawaveErrorCode.MAPRED_RESULTS_DELETE_ERROR, MessageFormat.format("directory: {0}", resultsDir.toString()));
                log.error(qe);
                response.addException(qe);
                throw new DatawaveWebApplicationException(qe, response);
            }
            response.setResult(true);
            return response;
        } catch (IOException e) {
            QueryException qe = new QueryException(DatawaveErrorCode.JOBTRACKER_CONNECTION_ERROR, e, MessageFormat.format("JobTracker: {0}", thisJob.getJobTracker()));
            log.error(qe);
            response.addException(qe);
            throw new DatawaveWebApplicationException(qe, response);
        } finally {
            if (null != hdfs) {
                try {
                    hdfs.close();
                } catch (IOException e) {
                    log.error("Error closing HDFS client", e);
                }
            }
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) GenericResponse(datawave.webservice.result.GenericResponse) ArrayList(java.util.ArrayList) NotFoundException(datawave.webservice.common.exception.NotFoundException) MapReduceInfoResponseList(datawave.webservice.results.mr.MapReduceInfoResponseList) IOException(java.io.IOException) JobClient(org.apache.hadoop.mapred.JobClient) NotFoundQueryException(datawave.webservice.query.exception.NotFoundQueryException) JobExecution(datawave.webservice.results.mr.JobExecution) QueryException(datawave.webservice.query.exception.QueryException) UnauthorizedQueryException(datawave.webservice.query.exception.UnauthorizedQueryException) BadRequestQueryException(datawave.webservice.query.exception.BadRequestQueryException) MapReduceInfoResponse(datawave.webservice.results.mr.MapReduceInfoResponse) FileSystem(org.apache.hadoop.fs.FileSystem) RunningJob(org.apache.hadoop.mapred.RunningJob) DatawaveWebApplicationException(datawave.webservice.common.exception.DatawaveWebApplicationException) JobConf(org.apache.hadoop.mapred.JobConf) JobID(org.apache.hadoop.mapreduce.JobID) Produces(javax.ws.rs.Produces) GZIP(org.jboss.resteasy.annotations.GZIP) PUT(javax.ws.rs.PUT)
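
The kill-list scan inside cancel() is easy to misread: an execution's job id goes on the list only when it was in STARTED state and a later execution reports a different id, and the very last id is added if it is still STARTED. The following standalone sketch isolates that logic with simplified types (a Java 16 record stands in for JobExecution) so it can be run and checked directly.

// Standalone sketch of the kill-list scan in cancel(); Execution is a
// simplified stand-in for datawave.webservice.results.mr.JobExecution.
import java.util.ArrayList;
import java.util.List;

public class KillListScan {

    record Execution(String jobId, String state) {}

    static List<String> jobIdsToKill(List<Execution> executions) {
        List<String> toKill = new ArrayList<>();
        String prevId = null;
        String prevState = null;
        for (Execution ex : executions) {
            // A STARTED execution superseded by a newer job id must be killed.
            if (prevId != null && "STARTED".equals(prevState) && !ex.jobId().equals(prevId)) {
                toKill.add(prevId);
            }
            prevId = ex.jobId();
            prevState = ex.state();
        }
        // The final execution is killed if it never left STARTED.
        if ("STARTED".equals(prevState)) {
            toKill.add(prevId);
        }
        return toKill;
    }

    public static void main(String[] args) {
        List<Execution> history = List.of(new Execution("job_1", "STARTED"), new Execution("job_2", "STARTED"));
        System.out.println(jobIdsToKill(history)); // prints [job_1, job_2]
    }
}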

Example 80 with QueryException

Use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

In class BulkResultsJobConfiguration, method setupJob.

/**
 * Common MapReduce setup methods
 *
 * @param job
 *            the job to configure
 * @param jobDir
 *            the directory in HDFS where aux job files are stored
 * @param queryConfig
 *            the query configuration for this job's query input format
 * @param logic
 *            the query logic for this job's query input format
 * @param base64EncodedQuery
 *            the query, encoded using Base64
 * @param queryImplClass
 *            the class of query in {@code base64EncodedQuery}
 * @param runtimeQueryAuthorizations
 *            the authorizations to use for input format query scanners
 * @param serverPrincipal
 *            the {@link Principal} of the server running DATAWAVE
 * @throws IOException
 *             if an error occurs configuring the job
 * @throws AccumuloSecurityException
 *             if the Accumulo user credentials are invalid
 */
private void setupJob(Job job, Path jobDir, GenericQueryConfiguration queryConfig, QueryLogic<?> logic, String base64EncodedQuery, Class<? extends Query> queryImplClass, Set<Authorizations> runtimeQueryAuthorizations, DatawavePrincipal serverPrincipal) throws IOException, AccumuloSecurityException {
    job.setInputFormatClass(BulkInputFormat.class);
    QueryData queryData = null;
    Collection<Range> ranges = new ArrayList<>();
    if (!queryConfig.canRunQuery()) {
        throw new UnsupportedOperationException("Unable to run query");
    }
    Iterator<QueryData> iter = queryConfig.getQueries();
    while (iter.hasNext()) {
        queryData = iter.next();
        ranges.addAll(queryData.getRanges());
    }
    if (ranges.isEmpty()) {
        throw new NoResultsException(new QueryException("No scan ranges produced for query."));
    }
    BulkInputFormat.setWorkingDirectory(job.getConfiguration(), jobDir.toString());
    // Copy the information from the GenericQueryConfiguration to the job.
    BulkInputFormat.setRanges(job, ranges);
    for (IteratorSetting cfg : queryData.getSettings()) {
        BulkInputFormat.addIterator(job.getConfiguration(), cfg);
    }
    BulkInputFormat.setZooKeeperInstance(job.getConfiguration(), this.instanceName, this.zookeepers);
    Iterator<Authorizations> authsIter = (runtimeQueryAuthorizations == null || runtimeQueryAuthorizations.isEmpty()) ? null : runtimeQueryAuthorizations.iterator();
    Authorizations auths = (authsIter == null) ? null : authsIter.next();
    BulkInputFormat.setInputInfo(job, this.user, this.password.getBytes(), logic.getTableName(), auths);
    for (int priority = 10; authsIter != null && authsIter.hasNext(); ++priority) {
        IteratorSetting cfg = new IteratorSetting(priority, ConfigurableVisibilityFilter.class);
        cfg.setName("visibilityFilter" + priority);
        cfg.addOption(ConfigurableVisibilityFilter.AUTHORIZATIONS_OPT, authsIter.next().toString());
        BulkInputFormat.addIterator(job.getConfiguration(), cfg);
    }
    job.getConfiguration().set(BulkResultsFileOutputMapper.QUERY_LOGIC_SETTINGS, base64EncodedQuery);
    job.getConfiguration().set(BulkResultsFileOutputMapper.QUERY_IMPL_CLASS, queryImplClass.getName());
    job.getConfiguration().set(BulkResultsFileOutputMapper.QUERY_LOGIC_NAME, logic.getLogicName());
    job.getConfiguration().set(BulkResultsFileOutputMapper.APPLICATION_CONTEXT_PATH, "classpath*:datawave/configuration/spring/CDIBeanPostProcessor.xml," + "classpath*:datawave/query/*QueryLogicFactory.xml," + "classpath*:/MarkingFunctionsContext.xml," + "classpath*:/MetadataHelperContext.xml," + "classpath*:/CacheContext.xml");
    job.getConfiguration().set(BulkResultsFileOutputMapper.SPRING_CONFIG_LOCATIONS, job.getConfiguration().get(BulkResultsFileOutputMapper.APPLICATION_CONTEXT_PATH));
    // Tell the Mapper/Reducer to use a specific set of application context files when doing Spring-CDI integration.
    String cdiOpts = "'-Dcdi.spring.configs=" + job.getConfiguration().get(BulkResultsFileOutputMapper.APPLICATION_CONTEXT_PATH) + "'";
    // Pass our server DN along to the child VM so it can be made available for injection.
    cdiOpts += " '-Dserver.principal=" + encodePrincipal(serverPrincipal) + "'";
    cdiOpts += " '-Dcaller.principal=" + encodePrincipal((DatawavePrincipal) principal) + "'";
    String javaOpts = job.getConfiguration().get("mapreduce.map.java.opts");
    javaOpts = (javaOpts == null) ? cdiOpts : (javaOpts + " " + cdiOpts);
    job.getConfiguration().set("mapreduce.map.java.opts", javaOpts);
    job.setMapOutputKeyClass(Key.class);
    job.setMapOutputValueClass(Value.class);
    job.setWorkingDirectory(jobDir);
}
Also used : NoResultsException(datawave.webservice.common.exception.NoResultsException) Authorizations(org.apache.accumulo.core.security.Authorizations) QueryData(datawave.webservice.query.configuration.QueryData) ArrayList(java.util.ArrayList) Range(org.apache.accumulo.core.data.Range) DatawavePrincipal(datawave.security.authorization.DatawavePrincipal) QueryException(datawave.webservice.query.exception.QueryException) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting)
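
The trailing loop in setupJob is the subtle part: the first authorization set is handed to BulkInputFormat.setInputInfo, and every remaining set is layered on as a visibility filter at priorities 10, 11, and so on, which effectively requires each key to pass every set's visibility check. Below is a standalone sketch of just that stacking; the filter's package name and the "authorizations" option key are assumptions (the real code uses the ConfigurableVisibilityFilter.AUTHORIZATIONS_OPT constant), and the sample authorization strings are invented.

// Standalone sketch of the per-authorization iterator stacking in setupJob.
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.Set;
import org.apache.accumulo.core.client.IteratorSetting;
import org.apache.accumulo.core.security.Authorizations;

public class VisibilityFilterStacking {

    public static void main(String[] args) {
        Set<Authorizations> runtimeQueryAuthorizations = new LinkedHashSet<>();
        runtimeQueryAuthorizations.add(new Authorizations("A", "B"));
        runtimeQueryAuthorizations.add(new Authorizations("A"));

        Iterator<Authorizations> authsIter = runtimeQueryAuthorizations.iterator();
        Authorizations scannerAuths = authsIter.next(); // handed to setInputInfo in the real code
        System.out.println("scanner auths: " + scannerAuths);

        // Every additional set becomes one stacked filter; priorities count up from 10.
        for (int priority = 10; authsIter.hasNext(); ++priority) {
            IteratorSetting cfg = new IteratorSetting(priority, "visibilityFilter" + priority,
                            "datawave.iterators.filter.ConfigurableVisibilityFilter"); // package assumed
            cfg.addOption("authorizations", authsIter.next().toString()); // option key assumed
            System.out.println("stacked filter: " + cfg);
        }
    }
}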

Aggregations

QueryException (datawave.webservice.query.exception.QueryException): 131
DatawaveWebApplicationException (datawave.webservice.common.exception.DatawaveWebApplicationException): 63
IOException (java.io.IOException): 62
NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException): 57
BadRequestQueryException (datawave.webservice.query.exception.BadRequestQueryException): 51
NoResultsQueryException (datawave.webservice.query.exception.NoResultsQueryException): 47
PreConditionFailedQueryException (datawave.webservice.query.exception.PreConditionFailedQueryException): 45
Produces (javax.ws.rs.Produces): 44
NoResultsException (datawave.webservice.common.exception.NoResultsException): 40
UnauthorizedQueryException (datawave.webservice.query.exception.UnauthorizedQueryException): 39
DatawaveFatalQueryException (datawave.query.exceptions.DatawaveFatalQueryException): 36
DatawavePrincipal (datawave.security.authorization.DatawavePrincipal): 36
Interceptors (javax.interceptor.Interceptors): 36
UnauthorizedException (datawave.webservice.common.exception.UnauthorizedException): 34
GZIP (org.jboss.resteasy.annotations.GZIP): 34
Principal (java.security.Principal): 32
WebApplicationException (javax.ws.rs.WebApplicationException): 31
BadRequestException (datawave.webservice.common.exception.BadRequestException): 29
Path (javax.ws.rs.Path): 28
Timed (com.codahale.metrics.annotation.Timed): 26