Use of datawave.webservice.query.logic.QueryLogicTransformer in project datawave by NationalSecurityAgency.
The class DashboardQueryLogic, method getTransformIterator.
@Override
@SuppressWarnings("unchecked")
public TransformIterator getTransformIterator(Query settings) {
    TransformIterator origIter = super.getTransformIterator(settings);
    QueryLogicTransformer transformer = super.getTransformer(settings);
    DashboardSummary summary = new DashboardSummary(settings.getEndDate());
    // Drain the delegate iterator, folding each transformed event into the single summary.
    while (origIter.hasNext()) {
        EventBase event = (EventBase) transformer.transform(origIter.next());
        DashboardFields.addEvent(summary, event);
    }
    // Hand the aggregated summary back as a one-element result stream.
    return new TransformIterator(Arrays.asList(summary).iterator(), this);
}
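This method eagerly consumes the delegate iterator, folds every transformed event into one DashboardSummary, and then wraps that summary in a single-element TransformIterator so callers still see an ordinary result stream. Below is a minimal sketch of the same fold-then-wrap pattern, assuming only the commons-collections4 TransformIterator; the Event and Summary types are placeholders, not DataWave classes.

import java.util.Collections;
import java.util.Iterator;

import org.apache.commons.collections4.Transformer;
import org.apache.commons.collections4.iterators.TransformIterator;

public class SummaryIteratorSketch {

    // Placeholder types standing in for EventBase / DashboardSummary.
    static class Event { String field; String value; }

    static class Summary {
        final StringBuilder text = new StringBuilder();
        void add(Event e) { text.append(e.field).append('=').append(e.value).append('\n'); }
    }

    // Drain the source iterator into a single summary, then expose the summary
    // as a one-element iterator, mirroring getTransformIterator above.
    static Iterator<Object> summarize(Iterator<Event> source) {
        Summary summary = new Summary();
        while (source.hasNext()) {
            summary.add(source.next());
        }
        Transformer<Summary, Object> identity = s -> s;
        return new TransformIterator<>(Collections.singletonList(summary).iterator(), identity);
    }
}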
Use of datawave.webservice.query.logic.QueryLogicTransformer in project datawave by NationalSecurityAgency.
The class ShardQueryLogic, method loadQueryParameters.
protected void loadQueryParameters(ShardQueryConfiguration config, Query settings) throws QueryException {
TraceStopwatch stopwatch = config.getTimers().newStartedStopwatch("ShardQueryLogic - Parse query parameters");
boolean rawDataOnly = false;
String rawDataOnlyStr = settings.findParameter(QueryParameters.RAW_DATA_ONLY).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(rawDataOnlyStr)) {
rawDataOnly = Boolean.valueOf(rawDataOnlyStr);
// note that if any of these other options are set, then it overrides the settings here
if (rawDataOnly) {
// set the grouping context to true to ensure we get the full field names
this.setIncludeGroupingContext(true);
config.setIncludeGroupingContext(true);
// set the hierarchy fields to false as they are generated fields
this.setIncludeHierarchyFields(false);
config.setIncludeHierarchyFields(false);
// set the datatype field to false as it is a generated field
this.setIncludeDataTypeAsField(false);
config.setIncludeDataTypeAsField(false);
// do not include the record id
this.setIncludeRecordId(false);
config.setIncludeRecordId(false);
// set the hit list to false as it is a generated field
this.setHitList(false);
config.setHitList(false);
// set the raw types to true to avoid any type transformations of the values
config.setRawTypes(true);
// do not filter masked values
this.setFilterMaskedValues(false);
config.setFilterMaskedValues(false);
// do not reduce the response
this.setReducedResponse(false);
config.setReducedResponse(false);
// clear the content field names to prevent content field transformations (see DocumentTransformer)
this.setContentFieldNames(Collections.EMPTY_LIST);
// clear the model name to avoid field name translations
this.setModelName(null);
config.setModelName(null);
}
}
// Get the datatype set if specified
String typeList = settings.findParameter(QueryParameters.DATATYPE_FILTER_SET).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(typeList)) {
HashSet<String> typeFilter = new HashSet<>();
typeFilter.addAll(Arrays.asList(StringUtils.split(typeList, Constants.PARAM_VALUE_SEP)));
if (log.isDebugEnabled()) {
log.debug("Type Filter: " + typeFilter);
}
config.setDatatypeFilter(typeFilter);
}
// Get the list of fields to project up the stack. May be null.
String projectFields = settings.findParameter(QueryParameters.RETURN_FIELDS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(projectFields)) {
List<String> projectFieldsList = Arrays.asList(StringUtils.split(projectFields, Constants.PARAM_VALUE_SEP));
// Only set the projection fields if we were actually given some
if (!projectFieldsList.isEmpty()) {
config.setProjectFields(new HashSet<>(projectFieldsList));
if (log.isDebugEnabled()) {
final int maxLen = 100;
// Trim down the projection if it's stupid long
projectFields = maxLen < projectFields.length() ? projectFields.substring(0, maxLen) + "[TRUNCATED]" : projectFields;
log.debug("Projection fields: " + projectFields);
}
}
}
// if the TRANFORM_CONTENT_TO_UID parameter is false, then clear the list of content field names to prevent the DocumentTransformer from
// transforming them.
String transformContentStr = settings.findParameter(QueryParameters.TRANFORM_CONTENT_TO_UID).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(transformContentStr)) {
if (!Boolean.valueOf(transformContentStr)) {
setContentFieldNames(Collections.EMPTY_LIST);
}
}
// Get the list of blacklisted fields. May be null.
String tBlacklistedFields = settings.findParameter(QueryParameters.BLACKLISTED_FIELDS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(tBlacklistedFields)) {
List<String> blacklistedFieldsList = Arrays.asList(StringUtils.split(tBlacklistedFields, Constants.PARAM_VALUE_SEP));
// Only set the blacklisted fields if we were actually given some
if (!blacklistedFieldsList.isEmpty()) {
if (!config.getProjectFields().isEmpty()) {
throw new QueryException("Whitelist and blacklist projection options are mutually exclusive");
}
config.setBlacklistedFields(new HashSet<>(blacklistedFieldsList));
if (log.isDebugEnabled()) {
log.debug("Blacklisted fields: " + tBlacklistedFields);
}
}
}
// Get the LIMIT_FIELDS parameter if given
String limitFields = settings.findParameter(QueryParameters.LIMIT_FIELDS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(limitFields)) {
List<String> limitFieldsList = Arrays.asList(StringUtils.split(limitFields, Constants.PARAM_VALUE_SEP));
// Only set the limit fields if we were actually given some
if (!limitFieldsList.isEmpty()) {
config.setLimitFields(new HashSet<>(limitFieldsList));
}
}
String limitFieldsPreQueryEvaluation = settings.findParameter(QueryOptions.LIMIT_FIELDS_PRE_QUERY_EVALUATION).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(limitFieldsPreQueryEvaluation)) {
Boolean limitFieldsPreQueryEvaluationValue = Boolean.parseBoolean(limitFieldsPreQueryEvaluation);
this.setLimitFieldsPreQueryEvaluation(limitFieldsPreQueryEvaluationValue);
config.setLimitFieldsPreQueryEvaluation(limitFieldsPreQueryEvaluationValue);
}
String limitFieldsField = settings.findParameter(QueryOptions.LIMIT_FIELDS_FIELD).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(limitFieldsField)) {
this.setLimitFieldsField(limitFieldsField);
config.setLimitFieldsField(limitFieldsField);
}
// Get the GROUP_FIELDS parameter if given
String groupFields = settings.findParameter(QueryParameters.GROUP_FIELDS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(groupFields)) {
List<String> groupFieldsList = Arrays.asList(StringUtils.split(groupFields, Constants.PARAM_VALUE_SEP));
// Only set the group fields if we were actually given some
if (!groupFieldsList.isEmpty()) {
this.setGroupFields(new HashSet<>(groupFieldsList));
config.setGroupFields(new HashSet<>(groupFieldsList));
config.setProjectFields(new HashSet<>(groupFieldsList));
}
}
String groupFieldsBatchSizeString = settings.findParameter(QueryParameters.GROUP_FIELDS_BATCH_SIZE).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(groupFieldsBatchSizeString)) {
int groupFieldsBatchSize = Integer.parseInt(groupFieldsBatchSizeString);
this.setGroupFieldsBatchSize(groupFieldsBatchSize);
config.setGroupFieldsBatchSize(groupFieldsBatchSize);
}
// Get the UNIQUE_FIELDS parameter if given
String uniqueFieldsParam = settings.findParameter(QueryParameters.UNIQUE_FIELDS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(uniqueFieldsParam)) {
UniqueFields uniqueFields = UniqueFields.from(uniqueFieldsParam);
// Only set the unique fields if we were actually given some
if (!uniqueFields.isEmpty()) {
this.setUniqueFields(uniqueFields);
config.setUniqueFields(uniqueFields);
}
}
// Get the HIT_LIST parameter if given
String hitListString = settings.findParameter(QueryParameters.HIT_LIST).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(hitListString)) {
Boolean hitListBool = Boolean.parseBoolean(hitListString);
config.setHitList(hitListBool);
}
// Get the BYPASS_ACCUMULO parameter if given
String bypassAccumuloString = settings.findParameter(BYPASS_ACCUMULO).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(bypassAccumuloString)) {
Boolean bypassAccumuloBool = Boolean.parseBoolean(bypassAccumuloString);
config.setBypassAccumulo(bypassAccumuloBool);
}
// Get the DATE_INDEX_TIME_TRAVEL parameter if given
String dateIndexTimeTravelString = settings.findParameter(QueryOptions.DATE_INDEX_TIME_TRAVEL).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(dateIndexTimeTravelString)) {
Boolean dateIndexTimeTravel = Boolean.parseBoolean(dateIndexTimeTravelString);
config.setDateIndexTimeTravel(dateIndexTimeTravel);
}
// get the RAW_TYPES parameter if given
String rawTypesString = settings.findParameter(QueryParameters.RAW_TYPES).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(rawTypesString)) {
Boolean rawTypesBool = Boolean.parseBoolean(rawTypesString);
config.setRawTypes(rawTypesBool);
}
// Get the FILTER_MASKED_VALUES spring setting
String filterMaskedValuesStr = settings.findParameter(QueryParameters.FILTER_MASKED_VALUES).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(filterMaskedValuesStr)) {
Boolean filterMaskedValuesBool = Boolean.parseBoolean(filterMaskedValuesStr);
this.setFilterMaskedValues(filterMaskedValuesBool);
config.setFilterMaskedValues(filterMaskedValuesBool);
}
// Get the INCLUDE_DATATYPE_AS_FIELD spring setting
String includeDatatypeAsFieldStr = settings.findParameter(QueryParameters.INCLUDE_DATATYPE_AS_FIELD).getParameterValue().trim();
if (((org.apache.commons.lang.StringUtils.isNotBlank(includeDatatypeAsFieldStr) && Boolean.valueOf(includeDatatypeAsFieldStr))) || (this.getIncludeDataTypeAsField() && !rawDataOnly)) {
config.setIncludeDataTypeAsField(true);
}
// Get the INCLUDE_RECORD_ID spring setting
String includeRecordIdStr = settings.findParameter(QueryParameters.INCLUDE_RECORD_ID).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(includeRecordIdStr)) {
boolean includeRecordIdBool = Boolean.parseBoolean(includeRecordIdStr) && !rawDataOnly;
this.setIncludeRecordId(includeRecordIdBool);
config.setIncludeRecordId(includeRecordIdBool);
}
// Get the INCLUDE_HIERARCHY_FIELDS spring setting
String includeHierarchyFieldsStr = settings.findParameter(QueryParameters.INCLUDE_HIERARCHY_FIELDS).getParameterValue().trim();
if (((org.apache.commons.lang.StringUtils.isNotBlank(includeHierarchyFieldsStr) && Boolean.valueOf(includeHierarchyFieldsStr))) || (this.getIncludeHierarchyFields() && !rawDataOnly)) {
config.setIncludeHierarchyFields(true);
final Map<String, String> options = this.getHierarchyFieldOptions();
config.setHierarchyFieldOptions(options);
}
// Get the query profile to allow us to select the tune profile of the query
String queryProfile = settings.findParameter(QueryParameters.QUERY_PROFILE).getParameterValue().trim();
if ((org.apache.commons.lang.StringUtils.isNotBlank(queryProfile))) {
selectedProfile = configuredProfiles.get(queryProfile);
if (null == selectedProfile) {
throw new QueryException(QueryParameters.QUERY_PROFILE + " has been specified but " + queryProfile + " is not a selectable profile");
}
}
// Get the include.grouping.context = true/false spring setting
String includeGroupingContextStr = settings.findParameter(QueryParameters.INCLUDE_GROUPING_CONTEXT).getParameterValue().trim();
if (((org.apache.commons.lang.StringUtils.isNotBlank(includeGroupingContextStr) && Boolean.valueOf(includeGroupingContextStr))) || (this.getIncludeGroupingContext() && !rawDataOnly)) {
config.setIncludeGroupingContext(true);
}
// Check if the default modelName and modelTableNames have been overridden by custom parameters.
String parameterModelName = settings.findParameter(QueryParameters.PARAMETER_MODEL_NAME).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(parameterModelName)) {
this.setModelName(parameterModelName);
}
config.setModelName(this.getModelName());
String parameterModelTableName = settings.findParameter(QueryParameters.PARAMETER_MODEL_TABLE_NAME).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(parameterModelTableName)) {
this.setModelTableName(parameterModelTableName);
}
if (null != config.getModelName() && null == config.getModelTableName()) {
throw new IllegalArgumentException(QueryParameters.PARAMETER_MODEL_NAME + " has been specified but " + QueryParameters.PARAMETER_MODEL_TABLE_NAME + " is missing. Both are required to use a model");
}
configureDocumentAggregation(settings);
config.setLimitTermExpansionToModel(this.isExpansionLimitedToModelContents());
String reducedResponseStr = settings.findParameter(QueryOptions.REDUCED_RESPONSE).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(reducedResponseStr)) {
Boolean reducedResponseValue = Boolean.parseBoolean(reducedResponseStr);
this.setReducedResponse(reducedResponseValue);
config.setReducedResponse(reducedResponseValue);
}
final String postProcessingClasses = settings.findParameter(QueryOptions.POSTPROCESSING_CLASSES).getParameterValue().trim();
final String postProcessingOptions = settings.findParameter(QueryOptions.POSTPROCESSING_OPTIONS).getParameterValue().trim();
// build the post-processing filter classes and options
if (org.apache.commons.lang.StringUtils.isNotBlank(postProcessingClasses)) {
List<String> filterClasses = config.getFilterClassNames();
if (null == filterClasses) {
filterClasses = new ArrayList<>();
}
for (String fClassName : StringUtils.splitIterable(postProcessingClasses, ',', true)) {
filterClasses.add(fClassName);
}
config.setFilterClassNames(filterClasses);
final Map<String, String> options = this.getFilterOptions();
if (null != options) {
config.putFilterOptions(options);
}
if (org.apache.commons.lang.StringUtils.isNotBlank(postProcessingOptions)) {
for (String filterOptionStr : StringUtils.splitIterable(postProcessingOptions, ',', true)) {
if (org.apache.commons.lang.StringUtils.isNotBlank(filterOptionStr)) {
final String filterValueString = settings.findParameter(filterOptionStr).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(filterValueString)) {
config.putFilterOptions(filterOptionStr, filterValueString);
}
}
}
}
}
String tCompressServerSideResults = settings.findParameter(QueryOptions.COMPRESS_SERVER_SIDE_RESULTS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(tCompressServerSideResults)) {
boolean compress = Boolean.parseBoolean(tCompressServerSideResults);
config.setCompressServerSideResults(compress);
}
// Configure index-only filter functions to be enabled if not already set to such a state
config.setIndexOnlyFilterFunctionsEnabled(this.isIndexOnlyFilterFunctionsEnabled());
// Set the ReturnType for Documents coming out of the iterator stack
config.setReturnType(DocumentSerialization.getReturnType(settings));
QueryLogicTransformer transformer = getTransformer(settings);
if (transformer instanceof WritesQueryMetrics) {
String logTimingDetailsStr = settings.findParameter(QueryOptions.LOG_TIMING_DETAILS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(logTimingDetailsStr)) {
setLogTimingDetails(Boolean.valueOf(logTimingDetailsStr));
}
if (getLogTimingDetails()) {
// we have to collect the timing details on the iterator stack in order to log them
setCollectTimingDetails(true);
} else {
String collectTimingDetailsStr = settings.findParameter(QueryOptions.COLLECT_TIMING_DETAILS).getParameterValue().trim();
if (org.apache.commons.lang.StringUtils.isNotBlank(collectTimingDetailsStr)) {
setCollectTimingDetails(Boolean.valueOf(collectTimingDetailsStr));
}
}
} else {
// if the transformer can not process the timing metrics, then turn them off
setLogTimingDetails(false);
setCollectTimingDetails(false);
}
stopwatch.stop();
if (null != selectedProfile) {
selectedProfile.configure(this);
selectedProfile.configure(config);
selectedProfile.configure(planner);
}
}
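Almost every block in loadQueryParameters follows the same recipe: look up an optional parameter on the Query settings, ignore it when blank, otherwise parse it and push the value into both the query-logic instance and the ShardQueryConfiguration. The following is a condensed, hypothetical sketch of that recipe; the Settings class and the parameter name are stand-ins, not DataWave APIs.

import java.util.Map;
import java.util.Optional;
import java.util.function.Consumer;

public class ParameterLoadingSketch {

    // Stand-in for Query settings: a flat map of optional string parameters.
    static class Settings {
        private final Map<String, String> params;
        Settings(Map<String, String> params) { this.params = params; }
        Optional<String> find(String name) {
            String value = params.get(name);
            return (value == null || value.trim().isEmpty()) ? Optional.empty() : Optional.of(value.trim());
        }
    }

    private boolean logicHitList;   // mirrors the field on the query logic
    private boolean configHitList;  // mirrors the field on the configuration

    // Parse a boolean parameter only when present, then apply it to both targets,
    // just as loadQueryParameters sets each value on the logic and on config.
    static void loadBoolean(Settings settings, String name, Consumer<Boolean> toLogic, Consumer<Boolean> toConfig) {
        settings.find(name).map(Boolean::parseBoolean).ifPresent(value -> {
            toLogic.accept(value);
            toConfig.accept(value);
        });
    }

    void load(Settings settings) {
        loadBoolean(settings, "hit.list", v -> this.logicHitList = v, v -> this.configHitList = v);
    }
}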
Use of datawave.webservice.query.logic.QueryLogicTransformer in project datawave by NationalSecurityAgency.
The class QueryExecutorBean, method execute.
/**
* @param logicName the name of the query logic to run
* @param queryParameters the parameters used to create and run the query
*
* @return {@code datawave.webservice.result.GenericResponse<String>}
* @RequestHeader X-ProxiedEntitiesChain use when proxying request for user, by specifying a chain of DNs of the identities to proxy
* @RequestHeader X-ProxiedIssuersChain required when using X-ProxiedEntitiesChain, specify one issuer DN per subject DN listed in X-ProxiedEntitiesChain
* @ResponseHeader query-session-id this header and value will be in the Set-Cookie header, subsequent calls for this session will need to supply the
* query-session-id header in the request in a Cookie header or as a query parameter
* @ResponseHeader X-OperationTimeInMS time spent on the server performing the operation, does not account for network or result serialization
* @ResponseHeader X-Partial-Results true if the page contains less than the requested number of results
*
* @HTTP 200 success
* @HTTP 204 success and no results
* @HTTP 400 invalid or missing parameter
* @HTTP 500 internal server error
*/
@POST
@Produces("*/*")
@Path("/{logicName}/execute")
@GZIP
@Interceptors({ ResponseInterceptor.class, RequiredInterceptor.class })
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
@Override
@Timed(name = "dw.query.executeQuery", absolute = true)
public StreamingOutput execute(@PathParam("logicName") String logicName, MultivaluedMap<String, String> queryParameters, @Context HttpHeaders httpHeaders) {
/**
* This method captures the metrics on the query instead of doing it in the QueryMetricsEnrichmentInterceptor. The ExecuteStreamingOutputResponse class
* is returned from this method and executed in the JAX-RS layer. It updates the metrics which are then updated on each call to the _next method.
*/
Collection<String> proxyServers = null;
Principal p = ctx.getCallerPrincipal();
DatawavePrincipal dp;
if (p instanceof DatawavePrincipal) {
dp = (DatawavePrincipal) p;
proxyServers = dp.getProxyServers();
}
final MediaType PB_MEDIA_TYPE = new MediaType("application", "x-protobuf");
final MediaType YAML_MEDIA_TYPE = new MediaType("application", "x-yaml");
final VoidResponse response = new VoidResponse();
// HttpHeaders.getAcceptableMediaTypes returns a priority sorted list of acceptable response types.
// Find the first one in the list that we support.
MediaType responseType = null;
for (MediaType type : httpHeaders.getAcceptableMediaTypes()) {
if (type.equals(MediaType.APPLICATION_XML_TYPE) || type.equals(MediaType.APPLICATION_JSON_TYPE) || type.equals(PB_MEDIA_TYPE) || type.equals(YAML_MEDIA_TYPE)) {
responseType = type;
break;
}
}
if (null == responseType) {
QueryException qe = new QueryException(DatawaveErrorCode.UNSUPPORTED_MEDIA_TYPE);
response.setHasResults(false);
response.addException(qe);
throw new DatawaveWebApplicationException(qe, response, MediaType.APPLICATION_XML_TYPE);
}
// reference query necessary to avoid NPEs in getting the Transformer and BaseResponse
Query q = new QueryImpl();
Date now = new Date();
q.setBeginDate(now);
q.setEndDate(now);
q.setExpirationDate(now);
q.setQuery("test");
q.setQueryAuthorizations("ALL");
ResultsPage emptyList = new ResultsPage();
// Find the response class
Class<?> responseClass;
try {
QueryLogic<?> l = queryLogicFactory.getQueryLogic(logicName, p);
QueryLogicTransformer t = l.getTransformer(q);
BaseResponse refResponse = t.createResponse(emptyList);
responseClass = refResponse.getClass();
} catch (Exception e) {
QueryException qe = new QueryException(DatawaveErrorCode.QUERY_TRANSFORM_ERROR, e);
log.error(qe, e);
response.setHasResults(false);
response.addException(qe.getBottomQueryException());
int statusCode = qe.getBottomQueryException().getStatusCode();
throw new DatawaveWebApplicationException(qe, response, statusCode, MediaType.APPLICATION_XML_TYPE);
}
SerializationType s;
if (responseType.equals(MediaType.APPLICATION_XML_TYPE)) {
s = SerializationType.XML;
} else if (responseType.equals(MediaType.APPLICATION_JSON_TYPE)) {
s = SerializationType.JSON;
} else if (responseType.equals(PB_MEDIA_TYPE)) {
if (!(Message.class.isAssignableFrom(responseClass))) {
QueryException qe = new QueryException(DatawaveErrorCode.BAD_RESPONSE_CLASS, MessageFormat.format("Response class: {0}", responseClass));
response.setHasResults(false);
response.addException(qe);
throw new DatawaveWebApplicationException(qe, response, MediaType.APPLICATION_XML_TYPE);
}
s = SerializationType.PB;
} else if (responseType.equals(YAML_MEDIA_TYPE)) {
if (!(Message.class.isAssignableFrom(responseClass))) {
QueryException qe = new QueryException(DatawaveErrorCode.BAD_RESPONSE_CLASS, MessageFormat.format("Response class: {0}", responseClass));
response.setHasResults(false);
response.addException(qe);
throw new DatawaveWebApplicationException(qe, response, MediaType.APPLICATION_XML_TYPE);
}
s = SerializationType.YAML;
} else {
QueryException qe = new QueryException(DatawaveErrorCode.INVALID_FORMAT, MessageFormat.format("format: {0}", responseType.toString()));
response.setHasResults(false);
response.addException(qe);
throw new DatawaveWebApplicationException(qe, response, MediaType.APPLICATION_XML_TYPE);
}
long start = System.nanoTime();
GenericResponse<String> createResponse = null;
try {
createResponse = this.createQuery(logicName, queryParameters, httpHeaders);
} catch (Throwable t) {
if (t instanceof DatawaveWebApplicationException) {
QueryException qe = (QueryException) ((DatawaveWebApplicationException) t).getCause();
response.setHasResults(false);
response.addException(qe.getBottomQueryException());
int statusCode = qe.getBottomQueryException().getStatusCode();
throw new DatawaveWebApplicationException(qe, response, statusCode, MediaType.APPLICATION_XML_TYPE);
} else {
throw t;
}
}
long createCallTime = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start);
final String queryId = createResponse.getResult();
// We created the query and put into cache, get the RunningQuery object
final RunningQuery rq = queryCache.get(queryId);
rq.getMetric().setCreateCallTime(createCallTime);
final Collection<String> proxies = proxyServers;
final SerializationType serializationType = s;
final Class<?> queryResponseClass = responseClass;
return new ExecuteStreamingOutputResponse(queryId, queryResponseClass, response, rq, serializationType, proxies);
}
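The first substantive step in execute is content negotiation: walk the client's acceptable media types in priority order and take the first one the service can serialize, otherwise fail with an unsupported-media-type error. Here is a small standalone sketch of that selection step against the JAX-RS MediaType API; the SerializationType enum is illustrative and omits the protobuf and YAML cases.

import java.util.List;

import javax.ws.rs.core.MediaType;

public class ResponseTypeNegotiationSketch {

    enum SerializationType { XML, JSON }

    // Return the serialization for the highest-priority acceptable media type,
    // or null when nothing in the Accept header is supported (the caller then
    // raises an UNSUPPORTED_MEDIA_TYPE error, as execute() does).
    static SerializationType negotiate(List<MediaType> acceptableInPriorityOrder) {
        for (MediaType type : acceptableInPriorityOrder) {
            if (type.equals(MediaType.APPLICATION_XML_TYPE)) {
                return SerializationType.XML;
            }
            if (type.equals(MediaType.APPLICATION_JSON_TYPE)) {
                return SerializationType.JSON;
            }
        }
        return null;
    }
}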
Use of datawave.webservice.query.logic.QueryLogicTransformer in project datawave by NationalSecurityAgency.
The class CompositeQueryLogic, method getTransformer.
/**
 * Method used to check that the configuration is correct and to get the response class used by QueryExecutorBean.listQueryLogic().
 */
@Override
public synchronized QueryLogicTransformer getTransformer(Query settings) {
    ResultsPage emptyList = new ResultsPage();
    Class<? extends BaseResponse> responseClass = null;
    List<QueryLogicTransformer> delegates = new ArrayList<>();
    for (BaseQueryLogic<?> logic : queryLogics) {
        QueryLogicTransformer t = logic.getTransformer(settings);
        delegates.add(t);
        BaseResponse refResponse = t.createResponse(emptyList);
        if (null == responseClass) {
            responseClass = refResponse.getClass();
        } else {
            if (!responseClass.equals(refResponse.getClass())) {
                throw new RuntimeException("All query logics must use transformers that return the same object type");
            }
        }
    }
    if (null == this.transformer) {
        this.transformer = new CompositeQueryLogicTransformer(delegates);
    }
    return this.transformer;
}
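Before building the composite, getTransformer checks that every delegate logic yields the same response class, since a single page of composite results has to serialize uniformly. A stripped-down sketch of that homogeneity check, using placeholder types rather than the DataWave response classes:

import java.util.List;

public class HomogeneityCheckSketch {

    // Return the single concrete class shared by all prototypes, or fail fast,
    // mirroring the response-class check in CompositeQueryLogic.getTransformer.
    static Class<?> commonClass(List<?> prototypes) {
        Class<?> expected = null;
        for (Object prototype : prototypes) {
            if (expected == null) {
                expected = prototype.getClass();
            } else if (!expected.equals(prototype.getClass())) {
                throw new IllegalStateException("All delegates must produce the same response type");
            }
        }
        return expected;
    }
}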
Use of datawave.webservice.query.logic.QueryLogicTransformer in project datawave by NationalSecurityAgency.
The class CompositeQueryLogicTransformer, method createResponse.
@Override
public BaseQueryResponse createResponse(ResultsPage resultList) {
    BaseQueryResponse result = null;
    // Ask every delegate for a response; the last delegate that succeeds wins.
    for (QueryLogicTransformer t : delegates) {
        try {
            log.trace("createResponse ResultsPage");
            result = t.createResponse(resultList);
        } catch (Exception e) {
            log.warn("Error calling createResponse on delegate, continuing...", e);
        }
    }
    return result;
}
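A generic, self-contained illustration of this try-each-delegate pattern, where failures are swallowed and the last successful result is returned; the types below are placeholders, not the DataWave classes.

import java.util.List;
import java.util.function.Function;

public class LastSuccessfulDelegateSketch {

    // Apply each delegate in order, ignoring failures; return the result of the
    // last delegate that succeeded, or null when every delegate failed.
    static <I, O> O lastSuccessful(List<Function<I, O>> delegates, I input) {
        O result = null;
        for (Function<I, O> delegate : delegates) {
            try {
                result = delegate.apply(input);
            } catch (RuntimeException e) {
                // warn-and-continue, as CompositeQueryLogicTransformer does
            }
        }
        return result;
    }
}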