Use of ddf.catalog.plugin.PluginExecutionException in project ddf by codice.
From the class InMemoryProcessingFramework, method submitUpdate:
@Override
public void submitUpdate(ProcessRequest<ProcessUpdateItem> input) {
  if (postProcessPlugins == null || postProcessPlugins.isEmpty()) {
    LOGGER.debug("postProcessPlugins is empty. Not starting post process thread");
  } else {
    threadPool.submit(
        () -> {
          ProcessRequest<ProcessUpdateItem> request = input;
          for (PostProcessPlugin plugin : postProcessPlugins) {
            try {
              request = plugin.processUpdate(request);
            } catch (PluginExecutionException e) {
              LOGGER.debug(
                  "Unable to process update request through plugin: {}",
                  plugin.getClass().getCanonicalName(),
                  e);
            }
          }
          storeProcessRequest(request);
          closeInputStream(request);
        });
  }
}
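The framework treats PluginExecutionException as recoverable: the failing plugin is logged and skipped, and the previous request is handed to the next plugin in the chain. Below is a minimal sketch of a plugin it could invoke. The processUpdate signature and the thrown exception match the call above; the class name NoopAuditPlugin is hypothetical, and the package paths and companion methods (processCreate, processDelete) are assumptions based on DDF's async processing API.

// Sketch of a post-process plugin; package paths below are assumptions.
import ddf.catalog.plugin.PluginExecutionException;
import org.codice.ddf.catalog.async.data.api.internal.ProcessCreateItem;
import org.codice.ddf.catalog.async.data.api.internal.ProcessDeleteItem;
import org.codice.ddf.catalog.async.data.api.internal.ProcessRequest;
import org.codice.ddf.catalog.async.data.api.internal.ProcessUpdateItem;
import org.codice.ddf.catalog.async.plugin.api.internal.PostProcessPlugin;

public class NoopAuditPlugin implements PostProcessPlugin {

  @Override
  public ProcessRequest<ProcessCreateItem> processCreate(ProcessRequest<ProcessCreateItem> input)
      throws PluginExecutionException {
    return input; // pass the request through unchanged
  }

  @Override
  public ProcessRequest<ProcessUpdateItem> processUpdate(ProcessRequest<ProcessUpdateItem> input)
      throws PluginExecutionException {
    if (input == null) {
      // A thrown PluginExecutionException is logged by InMemoryProcessingFramework,
      // which then continues the plugin chain with the previous request (see above).
      throw new PluginExecutionException("Unable to process a null update request");
    }
    return input;
  }

  @Override
  public ProcessRequest<ProcessDeleteItem> processDelete(ProcessRequest<ProcessDeleteItem> input)
      throws PluginExecutionException {
    return input;
  }
}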
Use of ddf.catalog.plugin.PluginExecutionException in project ddf by codice.
From the class CachingFederationStrategy, method sourceFederate:
private QueryResponse sourceFederate(List<Source> sources, final QueryRequest queryRequest) {
  if (LOGGER.isDebugEnabled()) {
    for (Source source : sources) {
      if (source != null) {
        LOGGER.debug("source to query: {}", source.getId());
      }
    }
  }
  Query originalQuery = queryRequest.getQuery();
  int offset = originalQuery.getStartIndex();
  final int pageSize = originalQuery.getPageSize();
  // limit offset to max value
  if (offset > this.maxStartIndex) {
    offset = this.maxStartIndex;
  }
  final QueryResponseImpl queryResponseQueue = new QueryResponseImpl(queryRequest, null);
  Map<Future<SourceResponse>, QueryRequest> futures = new HashMap<>();
  Query modifiedQuery = getModifiedQuery(originalQuery, sources.size(), offset, pageSize);
  QueryRequest modifiedQueryRequest =
      new QueryRequestImpl(
          modifiedQuery,
          queryRequest.isEnterprise(),
          queryRequest.getSourceIds(),
          queryRequest.getProperties());
  CompletionService<SourceResponse> queryCompletion =
      new ExecutorCompletionService<>(queryExecutorService);
  // Do NOT call source.isAvailable() when checking sources
  for (final Source source : sources) {
    if (source != null) {
      if (!futuresContainsSource(source, futures)) {
        LOGGER.debug("running query on source: {}", source.getId());
        QueryRequest sourceQueryRequest =
            new QueryRequestImpl(
                modifiedQuery,
                queryRequest.isEnterprise(),
                Collections.singleton(source.getId()),
                new HashMap<>(queryRequest.getProperties()));
        try {
          for (PreFederatedQueryPlugin service : preQuery) {
            try {
              sourceQueryRequest = service.process(source, sourceQueryRequest);
            } catch (PluginExecutionException e) {
              LOGGER.info("Error executing PreFederatedQueryPlugin", e);
            }
          }
        } catch (StopProcessingException e) {
          LOGGER.info("Plugin stopped processing", e);
        }
        if (source instanceof CatalogProvider && SystemInfo.getSiteName().equals(source.getId())) {
          // TODO RAP 12 Jul 16: DDF-2294 - Extract into a new PreFederatedQueryPlugin
          sourceQueryRequest =
              validationQueryFactory.getQueryRequestWithValidationFilter(
                  sourceQueryRequest, showErrors, showWarnings);
        }
        futures.put(
            queryCompletion.submit(new CallableSourceResponse(source, sourceQueryRequest)),
            sourceQueryRequest);
      } else {
        LOGGER.info("Duplicate source found with name {}. Ignoring second one.", source.getId());
      }
    }
  }
  QueryResponseImpl offsetResults = null;
  // If there is an offset and more than one source, all results must be collected
  // and re-windowed before returning; that is what the OffsetResultHandler does.
  if (offset > 1 && sources.size() > 1) {
    offsetResults = new QueryResponseImpl(queryRequest, null);
    queryExecutorService.submit(
        new OffsetResultHandler(queryResponseQueue, offsetResults, pageSize, offset));
  }
  queryExecutorService.submit(
      sortedQueryMonitorFactory.createMonitor(
          queryCompletion, futures, queryResponseQueue, modifiedQueryRequest, postQuery));
  QueryResponse queryResponse;
  if (offset > 1 && sources.size() > 1) {
    queryResponse = offsetResults;
    LOGGER.debug("returning offsetResults");
  } else {
    queryResponse = queryResponseQueue;
    LOGGER.debug("returning returnResults: {}", queryResponse);
  }
  LOGGER.debug("returning Query Results: {}", queryResponse);
  return queryResponse;
}
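The nested try blocks above give a PreFederatedQueryPlugin two distinct failure modes: a PluginExecutionException is logged and only that plugin is skipped, while a StopProcessingException breaks out of the whole plugin chain for that source (the query is still submitted afterwards). Here is a sketch under those rules; the class name and the blocked-id check are hypothetical.

// Sketch of a pre-federated-query plugin; process(Source, QueryRequest) and the
// two exception types match the loop in sourceFederate above.
import ddf.catalog.operation.QueryRequest;
import ddf.catalog.plugin.PluginExecutionException;
import ddf.catalog.plugin.PreFederatedQueryPlugin;
import ddf.catalog.plugin.StopProcessingException;
import ddf.catalog.source.Source;

public class BlockedSourcePlugin implements PreFederatedQueryPlugin {

  // Hypothetical identifier of a source this plugin refuses to query
  private static final String BLOCKED_SOURCE_ID = "untrusted-source";

  @Override
  public QueryRequest process(Source source, QueryRequest input)
      throws PluginExecutionException, StopProcessingException {
    if (source == null || input == null) {
      // Recoverable: sourceFederate logs this and moves on to the next plugin
      throw new PluginExecutionException("Source and request must not be null");
    }
    if (BLOCKED_SOURCE_ID.equals(source.getId())) {
      // Not recoverable: sourceFederate catches this outside the plugin loop,
      // so the remaining pre-query plugins are skipped for this source
      throw new StopProcessingException("Querying " + BLOCKED_SOURCE_ID + " is not permitted");
    }
    return input;
  }
}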
Use of ddf.catalog.plugin.PluginExecutionException in project ddf by codice.
From the class IdentificationPlugin, method process:
/**
 * For registry metacards, verifies that the update should take place by checking that the update
 * metacard is at least as up to date as the existing one. Also updates the tags, identifiers,
 * and transient attributes of the updated metacard.
 *
 * @param input the {@link UpdateRequest} to process
 * @return the processed {@link UpdateRequest}, with out-of-date registry updates removed
 * @throws PluginExecutionException
 * @throws StopProcessingException
 */
@Override
public UpdateRequest process(UpdateRequest input)
    throws PluginExecutionException, StopProcessingException {
  if (!Requests.isLocal(input)) {
    return input;
  }
  OperationTransaction operationTransaction =
      (OperationTransaction) input.getProperties().get(Constants.OPERATION_TRANSACTION_KEY);
  List<Metacard> previousMetacards = operationTransaction.getPreviousStateMetacards();
  Map<String, Metacard> previousMetacardsMap =
      previousMetacards.stream()
          .filter(
              e ->
                  RegistryUtility.isRegistryMetacard(e)
                      || RegistryUtility.isInternalRegistryMetacard(e))
          .collect(Collectors.toMap(RegistryUtility::getRegistryId, Function.identity()));
  List<Map.Entry<Serializable, Metacard>> entriesToRemove = new ArrayList<>();
  List<Map.Entry<Serializable, Metacard>> registryUpdates =
      input.getUpdates().stream()
          .filter(e -> RegistryUtility.isRegistryMetacard(e.getValue()))
          .collect(Collectors.toList());
  for (Map.Entry<Serializable, Metacard> entry : registryUpdates) {
    Metacard updateMetacard = entry.getValue();
    Metacard existingMetacard =
        previousMetacardsMap.get(RegistryUtility.getRegistryId(updateMetacard));
    if (existingMetacard == null) {
      continue;
    }
    if (updateMetacard.getMetadata() != null
        && !updateMetacard.getModifiedDate().before(existingMetacard.getModifiedDate())) {
      updateMetacard.setAttribute(new AttributeImpl(Metacard.ID, existingMetacard.getId()));
      copyTransientAttributes(updateMetacard, existingMetacard);
      updateTags(updateMetacard);
      if (isInternal(updateMetacard)) {
        updateMetacard.setAttribute(
            existingMetacard.getAttribute(RegistryObjectMetacardType.REMOTE_METACARD_ID));
        updateMetacard.setAttribute(
            existingMetacard.getAttribute(RegistryObjectMetacardType.REMOTE_REGISTRY_ID));
      }
      updateIdentifiers(updateMetacard, false);
    } else {
      entriesToRemove.add(entry);
    }
  }
  input.getUpdates().removeAll(entriesToRemove);
  return input;
}
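The guard in the loop above reduces to one rule: an update is applied only when it has metadata and its modified date is not before the existing metacard's; otherwise the entry is dropped from the request. A self-contained sketch of that comparison (names are illustrative):

import java.util.Date;

public final class StalenessCheck {

  // Mirrors the date guard in IdentificationPlugin.process: equal dates pass,
  // because only a strictly older update is considered stale.
  static boolean shouldApplyUpdate(Date updateModified, Date existingModified) {
    return !updateModified.before(existingModified);
  }

  public static void main(String[] args) {
    Date older = new Date(1_000L);
    Date newer = new Date(2_000L);
    System.out.println(shouldApplyUpdate(newer, older)); // true  -> update kept
    System.out.println(shouldApplyUpdate(older, newer)); // false -> entry removed
    System.out.println(shouldApplyUpdate(newer, newer)); // true  -> equal dates still pass
  }
}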