use of datawave.query.language.parser.jexl.LuceneToJexlQueryParser in project datawave by NationalSecurityAgency.
the class LuceneQueryTest method testInit.
// ============================================
// implemented abstract methods
protected void testInit() {
    this.auths = CitiesDataType.getTestAuths();
    this.documentKey = CityField.EVENT_ID.name();

    // Give the EvaluationOnly function a back-reference to the parser, then wire the parser into the query logic.
    LuceneToJexlQueryParser parser = new LuceneToJexlQueryParser();
    for (JexlQueryFunction queryFunction : parser.getAllowedFunctions()) {
        if (queryFunction instanceof EvaluationOnly) {
            ((EvaluationOnly) queryFunction).setParser(parser);
            break;
        }
    }
    this.logic.setParser(parser);
}
use of datawave.query.language.parser.jexl.LuceneToJexlQueryParser in project datawave by NationalSecurityAgency.
the class TextFunctionQueryTest method testInit.
// ============================================
// implemented abstract methods
protected void testInit() {
    this.auths = CitiesDataType.getTestAuths();
    this.documentKey = CityField.EVENT_ID.name();
    this.logic.setParser(new LuceneToJexlQueryParser());
}
use of datawave.query.language.parser.jexl.LuceneToJexlQueryParser in project datawave by NationalSecurityAgency.
the class TokensQueryTest method testLuceneToken.
@Test
public void testLuceneToken() throws Exception {
    String token = "yuma";
    // LUCENE phrase query with slop; a single-token phrase is expected to reduce to a simple JEXL equality.
    String query = GroupField.TOKENS.name() + ":\"" + token + "\"~4";
    String expect = GroupField.TOKENS.name() + EQ_OP + "'" + token + "'";
    this.logic.setParser(new LuceneToJexlQueryParser());
    runTest(query, expect);
}
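The tests above wire the parser into the test query logic; the conversion itself can also be exercised directly, as the GeoWave test later in this section does. Below is a minimal standalone sketch of that flow using only the calls shown in this section; the QueryNode import path and the sample query string are assumptions for illustration.

import datawave.query.language.parser.jexl.LuceneToJexlQueryParser;
import datawave.query.language.tree.QueryNode; // import path assumed

public class LuceneToJexlSketch {
    public static void main(String[] args) throws Exception {
        // Build the same parser the tests hand to their query logic.
        LuceneToJexlQueryParser parser = new LuceneToJexlQueryParser();

        // Parse a LUCENE-style phrase query with slop, mirroring testLuceneToken (illustrative input).
        QueryNode node = parser.parse("TOKENS:\"yuma\"~4");

        // The converted JEXL form is carried on the resulting node.
        System.out.println(node.getOriginalQuery());
    }
}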
use of datawave.query.language.parser.jexl.LuceneToJexlQueryParser in project datawave by NationalSecurityAgency.
the class ShardIndexQueryTable method initialize.
@Override
public GenericQueryConfiguration initialize(Connector connection, Query settings, Set<Authorizations> auths) throws Exception {
    ShardIndexQueryConfiguration config = new ShardIndexQueryConfiguration(this, settings);
    this.scannerFactory = new ScannerFactory(connection);
    MetadataHelper metadataHelper = initializeMetadataHelper(connection, config.getMetadataTableName(), auths);

    if (StringUtils.isEmpty(settings.getQuery())) {
        throw new IllegalArgumentException("Query cannot be null");
    }

    if (log.isDebugEnabled()) {
        log.debug("Query parameters set to " + settings.getParameters());
    }

    // Apply any model name/table overrides supplied in the query parameters.
    String tModelName = getTrimmedOrNull(settings, QueryParameters.PARAMETER_MODEL_NAME);
    if (tModelName != null) {
        modelName = tModelName;
    }

    String tModelTableName = getTrimmedOrNull(settings, QueryParameters.PARAMETER_MODEL_TABLE_NAME);
    if (tModelTableName != null) {
        modelTableName = tModelTableName;
    }

    queryModel = metadataHelper.getQueryModel(modelTableName, modelName, null);

    String datatypeFilterString = getTrimmedOrNull(settings, QueryParameters.DATATYPE_FILTER_SET);
    if (datatypeFilterString != null) {
        config.setDatatypeFilter(new HashSet<>(Arrays.asList(datatypeFilterString.split(PARAM_VALUE_SEP_STR))));
        if (log.isDebugEnabled()) {
            log.debug("Data type filter set to " + config.getDatatypeFilterAsString());
        }
    }

    config.setConnector(connection);
    config.setAuthorizations(auths);

    if (indexTableName != null) {
        config.setIndexTableName(indexTableName);
    }

    if (reverseIndexTableName != null) {
        config.setReverseIndexTableName(reverseIndexTableName);
    }

    // Default to an unbounded date range when no begin/end date is supplied.
    if (settings.getBeginDate() != null) {
        config.setBeginDate(settings.getBeginDate());
    } else {
        config.setBeginDate(new Date(0));
        if (log.isDebugEnabled()) {
            log.debug("No begin date supplied in settings.");
        }
    }

    if (settings.getEndDate() != null) {
        config.setEndDate(settings.getEndDate());
    } else {
        config.setEndDate(new Date(Long.MAX_VALUE));
        if (log.isDebugEnabled()) {
            log.debug("No end date supplied in settings.");
        }
    }

    // start with a trimmed version of the query, converted to JEXL
    LuceneToJexlQueryParser parser = new LuceneToJexlQueryParser();
    parser.setAllowLeadingWildCard(this.isAllowLeadingWildcard());
    QueryNode node = parser.parse(settings.getQuery().trim());
    // TODO: Validate that this is a simple list of terms type of query
    config.setQueryString(node.getOriginalQuery());

    if (log.isDebugEnabled()) {
        log.debug("Original Query = " + settings.getQuery().trim());
        log.debug("JEXL Query = " + node.getOriginalQuery());
    }

    // Parse & flatten the query.
    ASTJexlScript origScript = JexlASTHelper.parseAndFlattenJexlQuery(config.getQueryString());

    ASTJexlScript script;
    try {
        // Expand unfielded terms against the index; an empty expansion means there is nothing to look up.
        script = UnfieldedIndexExpansionVisitor.expandUnfielded(config, this.scannerFactory, metadataHelper, origScript);
    } catch (EmptyUnfieldedTermExpansionException e) {
        Multimap<String, String> emptyMap = Multimaps.unmodifiableMultimap(HashMultimap.create());
        config.setNormalizedTerms(emptyMap);
        config.setNormalizedPatterns(emptyMap);
        return config;
    }

    Set<String> dataTypes = config.getDatatypeFilter();
    Set<String> allFields = metadataHelper.getAllFields(dataTypes);

    // Apply the query model to map user-facing field names onto the indexed field names.
    script = QueryModelVisitor.applyModel(script, queryModel, allFields);

    if (log.isTraceEnabled()) {
        log.trace("fetching dataTypes from FetchDataTypesVisitor");
    }
    Multimap<String, Type<?>> fieldToDataTypeMap = FetchDataTypesVisitor.fetchDataTypes(metadataHelper, config.getDatatypeFilter(), script);
    config.setDataTypes(fieldToDataTypeMap);
    config.setQueryFieldsDatatypes(fieldToDataTypeMap);

    final Set<String> indexedFields = metadataHelper.getIndexedFields(dataTypes);
    config.setIndexedFields(indexedFields);

    final Set<String> reverseIndexedFields = metadataHelper.getReverseIndexedFields(dataTypes);
    config.setReverseIndexedFields(reverseIndexedFields);

    final Multimap<String, Type<?>> normalizedFields = metadataHelper.getFieldsToDatatypes(dataTypes);
    config.setNormalizedFieldsDatatypes(normalizedFields);

    if (log.isTraceEnabled()) {
        log.trace("Normalizers:");
        for (String field : fieldToDataTypeMap.keySet()) {
            log.trace(field + ": " + fieldToDataTypeMap.get(field));
        }
    }

    // Normalize the literal and pattern terms, then compute index ranges for each.
    script = ExpandMultiNormalizedTerms.expandTerms(config, metadataHelper, script);

    Multimap<String, String> literals = LiteralNodeVisitor.getLiterals(script);
    Multimap<String, String> patterns = PatternNodeVisitor.getPatterns(script);
    Map<Entry<String, String>, Range> rangesForTerms = Maps.newHashMap();
    Map<Entry<String, String>, Entry<Range, Boolean>> rangesForPatterns = Maps.newHashMap();

    config.setNormalizedTerms(literals);
    config.setNormalizedPatterns(patterns);

    if (log.isDebugEnabled()) {
        log.debug("Normalized Literals = " + literals);
        log.debug("Normalized Patterns = " + patterns);
    }

    for (Entry<String, String> entry : literals.entries()) {
        rangesForTerms.put(entry, ShardIndexQueryTableStaticMethods.getLiteralRange(entry));
    }
    for (Entry<String, String> entry : patterns.entries()) {
        ShardIndexQueryTableStaticMethods.RefactoredRangeDescription r = ShardIndexQueryTableStaticMethods.getRegexRange(entry, isFullTableScanEnabled(), metadataHelper, config);
        rangesForPatterns.put(entry, Maps.immutableEntry(r.range, r.isForReverseIndex));
    }

    config.setRangesForTerms(rangesForTerms);
    config.setRangesForPatterns(rangesForPatterns);

    return config;
}
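To see the LUCENE handling in initialize() in isolation, here is a minimal sketch of just the conversion and flattening steps above. It uses only calls that appear in initialize(); the import paths, the wildcard flag value, and the sample query string are assumptions for illustration.

import datawave.query.jexl.JexlASTHelper; // import paths assumed
import datawave.query.language.parser.jexl.LuceneToJexlQueryParser;
import datawave.query.language.tree.QueryNode;

public class ShardIndexParseSketch {
    public static void main(String[] args) throws Exception {
        // Convert the raw LUCENE query to its JEXL equivalent.
        LuceneToJexlQueryParser parser = new LuceneToJexlQueryParser();
        parser.setAllowLeadingWildCard(false); // illustrative leading-wildcard policy
        QueryNode node = parser.parse("CITY:rome OR CITY:paris"); // illustrative query
        String jexlQuery = node.getOriginalQuery();
        System.out.println("JEXL Query = " + jexlQuery);

        // Parse and flatten the JEXL string into a script (an ASTJexlScript, as in initialize()),
        // ready for the expansion and model visitors shown above.
        var script = JexlASTHelper.parseAndFlattenJexlQuery(jexlQuery);
        System.out.println(script);
    }
}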
use of datawave.query.language.parser.jexl.LuceneToJexlQueryParser in project datawave by NationalSecurityAgency.
the class GeoWaveFunctionsTest method testLuceneToJexlConversion.
@Test
public void testLuceneToJexlConversion() throws Exception {
    LuceneToJexlQueryParser parser = new LuceneToJexlQueryParser();
    QueryNode node = null;
    // Each LUCENE geo function should convert to the corresponding geowave JEXL function.
    node = parser.parse("#CONTAINS(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:contains(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
    node = parser.parse("#COVERS(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:covers(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
    node = parser.parse("#COVERED_BY(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:covered_by(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
    node = parser.parse("#CROSSES(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:crosses(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
    node = parser.parse("#INTERSECTS(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:intersects(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
    node = parser.parse("#OVERLAPS(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:overlaps(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
    node = parser.parse("#WITHIN(FIELD, 'POINT(10 20)')");
    Assert.assertEquals("geowave:within(FIELD, 'POINT(10 20)')", node.getOriginalQuery());
}