Search in sources :

Example 81 with QueryException

use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

The prepareClasspath method of the MapReduceJobConfiguration class.

/**
 * Common code to set up the distributed cache and classpath for the job.
 *
 * @param jobId
 *            identifier of the job, passed through when staging individual files
 * @param job
 *            the Hadoop job whose configuration and distributed cache are being prepared
 * @param jobDir
 *            working directory for this job in the distributed file system
 * @throws Exception
 *             if a required directory cannot be created or read, or a jar cannot be staged
 */
protected void prepareClasspath(String jobId, Job job, Path jobDir) throws Exception {
    FileSystem fs = getFileSystem(job.getConfiguration());
    ensureDirectory(fs, baseDir);
    // Create the directory for this job
    ensureDirectory(fs, jobDir);
    // Create classpath directory for this job
    Path classpath = new Path(jobDir, "classpath");
    ensureDirectory(fs, classpath);
    // Add all of the jars to the classpath dir and to the DistributedCache
    for (String jarFile : this.classpathJarFiles) {
        // A '!' separates the jar location from an optional regex selecting entries within it.
        int idx = jarFile.indexOf('!');
        Pattern pattern = null;
        if (idx > 0) {
            pattern = Pattern.compile(jarFile.substring(idx + 1));
            jarFile = jarFile.substring(0, idx);
        }
        File file = new File(jarFile);
        if (pattern == null) {
            Path cachedJarPath = new Path(classpath, file.getName());
            addSingleFile(jarFile, cachedJarPath, jobId, job, fs);
        } else {
            // Jars within the deployed EAR in Wildfly use the VFS protocol, and need to be handled specially.
            if (jarFile.startsWith("vfs:")) {
                Set<String> files = new LinkedHashSet<>();
                // Wrap the URL stream in a JarInputStream rather than casting it: URL.openStream()
                // returns a plain InputStream, so the former cast would fail with ClassCastException.
                try (JarInputStream jarInputStream = new JarInputStream(new URL(jarFile).openStream())) {
                    for (JarEntry jarEntry = jarInputStream.getNextJarEntry(); jarEntry != null; jarEntry = jarInputStream.getNextJarEntry()) {
                        if (pattern.matcher(jarEntry.getName()).matches()) {
                            String name = jarEntry.getName();
                            // Strip the trailing slash from directory entries.
                            if (name.endsWith("/")) {
                                name = name.substring(0, name.length() - 1);
                            }
                            files.add(jarFile + "/" + name);
                        }
                    }
                }
                for (String nestedFile : files) {
                    Path cachedJarPath = new Path(classpath, new File(nestedFile).getName());
                    addSingleFile(nestedFile, cachedJarPath, jobId, job, fs);
                }
            } else if (jarFile.startsWith("archive:")) {
                jarFile = jarFile.substring("archive:".length());
                addArchivedDirectory(jarFile, pattern, classpath, jobId, job, fs);
            } else {
                addArchiveFile(file, pattern, classpath, jobId, job, fs);
            }
        }
    }
    // NOTE(review): assumes the jboss.home.dir system property is set; if it is not, the
    // directory checks below fail with DFS_DIRECTORY_READ_ERROR — confirm this is intended.
    String homeDir = System.getProperty("jboss.home.dir");
    // Add all of the jars in the server lib directory
    stageJarDirectory(new File(homeDir, "bin/client"), classpath, jobId, job, fs, true);
    // Add all of the jars in the server mapreduce helper lib directory
    stageJarDirectory(new File(homeDir, "tools/mapreduce/lib"), classpath, jobId, job, fs, false);
    exportSystemProperties(jobId, job, fs, classpath);
}

/**
 * Creates {@code dir} (including any missing parents) if it does not already exist.
 *
 * @param fs
 *            file system to create the directory on
 * @param dir
 *            directory to ensure
 * @throws QueryException
 *             with DFS_DIRECTORY_CREATE_ERROR if the directory cannot be created
 */
private void ensureDirectory(FileSystem fs, Path dir) throws Exception {
    if (!fs.exists(dir) && !fs.mkdirs(dir)) {
        throw new QueryException(DatawaveErrorCode.DFS_DIRECTORY_CREATE_ERROR, MessageFormat.format("Directory: {0}", dir.toString()));
    }
}

/**
 * Stages every *.jar file in {@code libDir} into the job classpath directory.
 *
 * @param libDir
 *            local directory to read jars from; must be a readable directory
 * @param classpath
 *            destination classpath directory in the distributed file system
 * @param filterJbossClient
 *            when true, guava classes are stripped from jboss-client.jar before staging
 *            to avoid shipping a conflicting guava version to the tasks
 * @throws QueryException
 *             with DFS_DIRECTORY_READ_ERROR if {@code libDir} is not a readable directory
 */
private void stageJarDirectory(File libDir, Path classpath, String jobId, Job job, FileSystem fs, boolean filterJbossClient) throws Exception {
    if (!(libDir.isDirectory() && libDir.canRead())) {
        throw new QueryException(DatawaveErrorCode.DFS_DIRECTORY_READ_ERROR, MessageFormat.format("directory: {0}", libDir));
    }
    FilenameFilter jarFilter = (dir, name) -> name.toLowerCase().endsWith(".jar");
    File[] jarFiles = libDir.listFiles(jarFilter);
    if (jarFiles != null) {
        for (File jar : jarFiles) {
            if (filterJbossClient && jar.getName().equals("jboss-client.jar")) {
                // remove guava classes from jboss-client.jar
                List<Pattern> patterns = new ArrayList<>();
                patterns.add(Pattern.compile("^com/google.*"));
                patterns.add(Pattern.compile("^META-INF/maven/com.google.guava.*"));
                File filteredJar = filterJar(jar, patterns);
                addSingleFile(filteredJar, new Path(classpath, jar.getName()), jobId, job, fs);
                // Best-effort cleanup of the temporary filtered jar; failure is non-fatal.
                filteredJar.delete();
            } else {
                addSingleFile(jar, new Path(classpath, jar.getName()), jobId, job, fs);
            }
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) LinkedHashSet(java.util.LinkedHashSet) ZipOutputStream(java.util.zip.ZipOutputStream) FilenameFilter(java.io.FilenameFilter) MapReduceJobDescription(datawave.webservice.results.mr.MapReduceJobDescription) StringUtils(org.apache.commons.lang.StringUtils) ZipInputStream(java.util.zip.ZipInputStream) FileSystem(org.apache.hadoop.fs.FileSystem) URL(java.net.URL) LoggerFactory(org.slf4j.LoggerFactory) MessageFormat(java.text.MessageFormat) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) FSDataOutputStream(org.apache.hadoop.fs.FSDataOutputStream) JarEntry(java.util.jar.JarEntry) Files(com.google.common.io.Files) Configuration(org.apache.hadoop.conf.Configuration) Map(java.util.Map) JarInputStream(java.util.jar.JarInputStream) Path(org.apache.hadoop.fs.Path) JarOutputStream(java.util.jar.JarOutputStream) ZipEntry(java.util.zip.ZipEntry) LinkedHashSet(java.util.LinkedHashSet) Logger(org.slf4j.Logger) Properties(java.util.Properties) Iterator(java.util.Iterator) FileOutputStream(java.io.FileOutputStream) Set(java.util.Set) IOException(java.io.IOException) FileInputStream(java.io.FileInputStream) DatawaveErrorCode(datawave.webservice.query.exception.DatawaveErrorCode) File(java.io.File) DatawavePrincipal(datawave.security.authorization.DatawavePrincipal) List(java.util.List) QueryException(datawave.webservice.query.exception.QueryException) Job(org.apache.hadoop.mapreduce.Job) ByteStreams(com.google.common.io.ByteStreams) Entry(java.util.Map.Entry) Pattern(java.util.regex.Pattern) InputStream(java.io.InputStream) Pattern(java.util.regex.Pattern) JarInputStream(java.util.jar.JarInputStream) ZipInputStream(java.util.zip.ZipInputStream) JarInputStream(java.util.jar.JarInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) ArrayList(java.util.ArrayList) JarEntry(java.util.jar.JarEntry) URL(java.net.URL) FilenameFilter(java.io.FilenameFilter) 
QueryException(datawave.webservice.query.exception.QueryException) FileSystem(org.apache.hadoop.fs.FileSystem) File(java.io.File)

Example 82 with QueryException

use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

The setupQuery method of the FieldIndexCountQueryLogic class.

/**
 * Create the batch scanner and set the iterator options / stack.
 *
 * @param genericConfig
 *            configuration object; must be a {@code ShardQueryConfiguration}
 * @throws Exception
 *             if the configuration is of the wrong type or scanner setup fails
 */
@Override
public void setupQuery(GenericQueryConfiguration genericConfig) throws Exception {
    if (logger.isTraceEnabled()) {
        logger.trace("setupQuery");
    }
    if (!ShardQueryConfiguration.class.isAssignableFrom(genericConfig.getClass())) {
        throw new QueryException("Did not receive a ShardQueryConfiguration instance!!");
    }
    ShardQueryConfiguration shardConfig = (ShardQueryConfiguration) genericConfig;
    // Ensure we have all of the information needed to run a query
    if (!shardConfig.canRunQuery()) {
        logger.warn("The given query '" + shardConfig.getQueryString() + "' could not be run, most likely due to not matching any records in the global index.");
        // Stub out an iterator to correctly present "no results"
        this.iterator = new Iterator<Map.Entry<Key, Value>>() {

            @Override
            public boolean hasNext() {
                return false;
            }

            @Override
            public Map.Entry<Key, Value> next() {
                return null;
            }

            @Override
            public void remove() {
            }
        };
        this.scanner = null;
        return;
    }
    try {
        if (logger.isTraceEnabled()) {
            logger.trace("configuring batch scanner and iterators.");
        }
        BatchScanner batchScanner = getScannerFactory().newScanner(shardConfig.getShardTableName(), shardConfig.getAuthorizations(),
                        shardConfig.getNumQueryThreads(), shardConfig.getQuery());
        batchScanner.setRanges(this.ranges);
        // Iterator stack: a single FieldIndexCountingIterator at base priority + 21
        IteratorSetting countingIterSetting = new IteratorSetting(shardConfig.getBaseIteratorPriority() + 21, "countingIter",
                        FieldIndexCountingIterator.class);
        countingIterSetting.addOption(FieldIndexCountingIterator.DATA_TYPES, shardConfig.getDatatypeFilterAsString());
        countingIterSetting.addOption(FieldIndexCountingIterator.FIELD_NAMES, join(this.fieldNames, FieldIndexCountingIterator.SEP));
        if (this.fieldValues != null && !this.fieldValues.isEmpty()) {
            countingIterSetting.addOption(FieldIndexCountingIterator.FIELD_VALUES, join(this.fieldValues, FieldIndexCountingIterator.SEP));
        }
        // Restrict the iterator to the configured query date range.
        SimpleDateFormat dateFormat = new SimpleDateFormat(FieldIndexCountingIterator.DATE_FORMAT_STRING);
        countingIterSetting.addOption(FieldIndexCountingIterator.START_TIME, dateFormat.format(shardConfig.getBeginDate()));
        countingIterSetting.addOption(FieldIndexCountingIterator.STOP_TIME, dateFormat.format(shardConfig.getEndDate()));
        countingIterSetting.addOption(FieldIndexCountingIterator.UNIQ_BY_DATA_TYPE, Boolean.toString(this.uniqueByDataType));
        batchScanner.addScanIterator(countingIterSetting);
        this.iterator = batchScanner.iterator();
        this.scanner = batchScanner;
    } catch (TableNotFoundException e) {
        logger.error("The table '" + shardConfig.getShardTableName() + "' does not exist", e);
    }
}
Also used : TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) QueryException(datawave.webservice.query.exception.QueryException) Entry(java.util.Map.Entry) FieldIndexCountingIterator(datawave.query.iterators.FieldIndexCountingIterator) IteratorSetting(org.apache.accumulo.core.client.IteratorSetting) BatchScanner(org.apache.accumulo.core.client.BatchScanner) ShardQueryConfiguration(datawave.query.config.ShardQueryConfiguration) SimpleDateFormat(java.text.SimpleDateFormat)

Example 83 with QueryException

use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

The setupQuery method of the TermFrequencyQueryTable class.

/**
 * Creates the scanner over the term frequency table and prepares the result iterator.
 *
 * @param configuration
 *            configuration object; must be a {@code TermFrequencyQueryConfiguration}
 * @throws QueryException
 *             if the configuration is not a TermFrequencyQueryConfiguration
 * @throws RuntimeException
 *             if the configured table does not exist
 */
@Override
public void setupQuery(GenericQueryConfiguration configuration) throws Exception {
    // Use an instanceof check (accepting subclasses) instead of comparing class names for
    // exact equality, consistent with the isAssignableFrom checks used by sibling query logics.
    if (!(configuration instanceof TermFrequencyQueryConfiguration)) {
        throw new QueryException("Did not receive a TermFrequencyQueryConfiguration instance");
    }
    TermFrequencyQueryConfiguration tfConfig = (TermFrequencyQueryConfiguration) configuration;
    try {
        // Local renamed from 'scanner' so it no longer shadows the this.scanner field.
        Scanner tfScanner = QueryScannerHelper.createScanner(tfConfig.getConnector(), tfConfig.getTableName(), tfConfig.getAuthorizations(),
                        tfConfig.getQuery());
        tfScanner.setRange(tfConfig.getRange());
        this.iterator = tfScanner.iterator();
        this.scanner = tfScanner;
    } catch (TableNotFoundException e) {
        throw new RuntimeException("Table not found: " + this.getTableName(), e);
    }
}
Also used : TermFrequencyQueryConfiguration(datawave.query.config.TermFrequencyQueryConfiguration) Scanner(org.apache.accumulo.core.client.Scanner) TableNotFoundException(org.apache.accumulo.core.client.TableNotFoundException) QueryException(datawave.webservice.query.exception.QueryException)

Example 84 with QueryException

use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

The buildUntypedNewNode method of the JexlNodeFactory class.

/**
 * Build a JexlNode with a Number literal.
 *
 * @param newNode
 *            node to attach the literal to
 * @param fieldName
 *            identifier for the field
 * @param fieldValue
 *            numeric value to encode as the literal
 * @return the constructed node
 * @throws DatawaveFatalQueryException
 *             if the number type is neither a natural nor a real number
 */
protected static JexlNode buildUntypedNewNode(JexlNode newNode, ASTIdentifier fieldName, Number fieldValue) {
    ASTNumberLiteral numberLiteral = new ASTNumberLiteral(ParserTreeConstants.JJTNUMBERLITERAL);
    String valueText = fieldValue.toString();
    numberLiteral.image = valueText;
    Class<?> valueClass = fieldValue.getClass();
    if (NATURAL_NUMBERS.contains(valueClass)) {
        numberLiteral.setNatural(valueText);
    } else if (REAL_NUMBERS.contains(valueClass)) {
        numberLiteral.setReal(valueText);
    } else {
        // Unknown Number subtype: cannot decide natural vs. real, so abort the query.
        throw new DatawaveFatalQueryException(
                        new QueryException(DatawaveErrorCode.ASTNUMBERLITERAL_TYPE_ASCERTAIN_ERROR, MessageFormat.format("{0}", numberLiteral)));
    }
    return buildUntypedNewNode(newNode, fieldName, numberLiteral);
}
Also used : DatawaveFatalQueryException(datawave.query.exceptions.DatawaveFatalQueryException) QueryException(datawave.webservice.query.exception.QueryException) DatawaveFatalQueryException(datawave.query.exceptions.DatawaveFatalQueryException) ASTNumberLiteral(org.apache.commons.jexl2.parser.ASTNumberLiteral)

Example 85 with QueryException

use of datawave.webservice.query.exception.QueryException in project datawave by NationalSecurityAgency.

The next method of the SortedKeyValueIteratorToIterator class.

/**
 * Returns the next key/value entry, loading it on demand.
 *
 * @return the next entry
 * @throws NoSuchElementException
 *             if no further element is available; its cause is a QueryException
 *             carrying FETCH_NEXT_ELEMENT_ERROR
 */
@Override
public Map.Entry<Key, Value> next() {
    load();
    if (next == null) {
        // Surface the failure as the standard Iterator exception, keeping the
        // QueryException attached as the cause for diagnostics.
        NoSuchElementException noElement = new NoSuchElementException();
        noElement.initCause(new QueryException(DatawaveErrorCode.FETCH_NEXT_ELEMENT_ERROR));
        throw noElement;
    }
    loaded = false;
    return next;
}
Also used : QueryException(datawave.webservice.query.exception.QueryException) NoSuchElementException(java.util.NoSuchElementException)

Aggregations

QueryException (datawave.webservice.query.exception.QueryException)131 DatawaveWebApplicationException (datawave.webservice.common.exception.DatawaveWebApplicationException)63 IOException (java.io.IOException)62 NotFoundQueryException (datawave.webservice.query.exception.NotFoundQueryException)57 BadRequestQueryException (datawave.webservice.query.exception.BadRequestQueryException)51 NoResultsQueryException (datawave.webservice.query.exception.NoResultsQueryException)47 PreConditionFailedQueryException (datawave.webservice.query.exception.PreConditionFailedQueryException)45 Produces (javax.ws.rs.Produces)44 NoResultsException (datawave.webservice.common.exception.NoResultsException)40 UnauthorizedQueryException (datawave.webservice.query.exception.UnauthorizedQueryException)39 DatawaveFatalQueryException (datawave.query.exceptions.DatawaveFatalQueryException)36 DatawavePrincipal (datawave.security.authorization.DatawavePrincipal)36 Interceptors (javax.interceptor.Interceptors)36 UnauthorizedException (datawave.webservice.common.exception.UnauthorizedException)34 GZIP (org.jboss.resteasy.annotations.GZIP)34 Principal (java.security.Principal)32 WebApplicationException (javax.ws.rs.WebApplicationException)31 BadRequestException (datawave.webservice.common.exception.BadRequestException)29 Path (javax.ws.rs.Path)28 Timed (com.codahale.metrics.annotation.Timed)26