use of ddf.catalog.source.CatalogProvider in project ddf by codice.
the class MigrateCommand method executeWithSubject.
@Override
protected Object executeWithSubject() throws Exception {
  final List<CatalogProvider> providers = getCatalogProviders();
  if (listProviders) {
    if (providers.size() == 0) {
      console.println("There are no available Providers.");
      return null;
    }
    console.println("Available Providers:");
    providers
        .stream()
        .map(p -> p.getClass().getSimpleName())
        .forEach(id -> console.println("\t" + id));
    return null;
  }
  if (batchSize > MAX_BATCH_SIZE || batchSize < 1) {
    console.println("Batch Size must be between 1 and " + MAX_BATCH_SIZE + ".");
    return null;
  }
  if (providers.isEmpty() || providers.size() < 2) {
    console.println("Not enough CatalogProviders installed to migrate.");
    return null;
  }
  final CatalogProvider fromProvider = promptForProvider("FROM", fromProviderId, providers);
  if (fromProvider == null) {
    console.println("Invalid \"FROM\" Provider id.");
    return null;
  }
  console.println("FROM Provider ID: " + fromProvider.getClass().getSimpleName());
  final CatalogProvider toProvider = promptForProvider("TO", toProviderId, providers);
  if (toProvider == null) {
    console.println("Invalid \"TO\" Provider id.");
    return null;
  }
  console.println("TO Provider ID: " + toProvider.getClass().getSimpleName());
  CatalogFacade queryProvider = new Provider(fromProvider);
  CatalogFacade ingestProvider = new Provider(toProvider);
  start = System.currentTimeMillis();
  console.println("Starting migration.");
  duplicateInBatches(queryProvider, ingestProvider, getFilter());
  console.println();
  long end = System.currentTimeMillis();
  String completed =
      String.format(
          " %d record(s) migrated; %d record(s) failed; completed in %3.3f seconds.",
          ingestedCount.get(), failedCount.get(), (end - start) / MS_PER_SECOND);
  LOGGER.debug("Migration Complete: {}", completed);
  console.println(completed);
  return null;
}
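The command resolves the FROM and TO providers by the same simple class name it prints when listing providers. As a rough, hypothetical sketch of that id-matching step (the actual promptForProvider may also prompt interactively, which this snippet does not show), a lookup could be written as:

import java.util.List;

import ddf.catalog.source.CatalogProvider;

final class ProviderLookup {

  // Hypothetical helper, not the real promptForProvider: returns the provider whose
  // simple class name matches the requested id, or null so the caller can report an
  // invalid "FROM"/"TO" id as the command above does.
  static CatalogProvider findBySimpleName(String requestedId, List<CatalogProvider> providers) {
    if (requestedId == null) {
      return null;
    }
    return providers
        .stream()
        .filter(p -> requestedId.equals(p.getClass().getSimpleName()))
        .findFirst()
        .orElse(null);
  }
}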
use of ddf.catalog.source.CatalogProvider in project ddf by codice.
the class CachingFederationStrategy method sourceFederate.
private QueryResponse sourceFederate(List<Source> sources, final QueryRequest queryRequest) {
  if (LOGGER.isDebugEnabled()) {
    for (Source source : sources) {
      if (source != null) {
        LOGGER.debug("source to query: {}", source.getId());
      }
    }
  }
  Query originalQuery = queryRequest.getQuery();
  int offset = originalQuery.getStartIndex();
  final int pageSize = originalQuery.getPageSize();
  // limit offset to max value
  if (offset > this.maxStartIndex) {
    offset = this.maxStartIndex;
  }
  final QueryResponseImpl queryResponseQueue = new QueryResponseImpl(queryRequest, null);
  Map<Future<SourceResponse>, QueryRequest> futures = new HashMap<>();
  Query modifiedQuery = getModifiedQuery(originalQuery, sources.size(), offset, pageSize);
  QueryRequest modifiedQueryRequest =
      new QueryRequestImpl(
          modifiedQuery,
          queryRequest.isEnterprise(),
          queryRequest.getSourceIds(),
          queryRequest.getProperties());
  CompletionService<SourceResponse> queryCompletion =
      new ExecutorCompletionService<>(queryExecutorService);
  // Do NOT call source.isAvailable() when checking sources
  for (final Source source : sources) {
    if (source != null) {
      if (!futuresContainsSource(source, futures)) {
        LOGGER.debug("running query on source: {}", source.getId());
        QueryRequest sourceQueryRequest =
            new QueryRequestImpl(
                modifiedQuery,
                queryRequest.isEnterprise(),
                Collections.singleton(source.getId()),
                new HashMap<>(queryRequest.getProperties()));
        try {
          for (PreFederatedQueryPlugin service : preQuery) {
            try {
              sourceQueryRequest = service.process(source, sourceQueryRequest);
            } catch (PluginExecutionException e) {
              LOGGER.info("Error executing PreFederatedQueryPlugin", e);
            }
          }
        } catch (StopProcessingException e) {
          LOGGER.info("Plugin stopped processing", e);
        }
        if (source instanceof CatalogProvider && SystemInfo.getSiteName().equals(source.getId())) {
          // TODO RAP 12 Jul 16: DDF-2294 - Extract into a new PreFederatedQueryPlugin
          sourceQueryRequest =
              validationQueryFactory.getQueryRequestWithValidationFilter(
                  sourceQueryRequest, showErrors, showWarnings);
        }
        futures.put(
            queryCompletion.submit(new CallableSourceResponse(source, sourceQueryRequest)),
            sourceQueryRequest);
      } else {
        LOGGER.info("Duplicate source found with name {}. Ignoring second one.", source.getId());
      }
    }
  }
  QueryResponseImpl offsetResults = null;
  // If there are offsets and more than one source, all results must be gathered and then
  // transferred into a different queue; that is what the OffsetResultHandler does.
  if (offset > 1 && sources.size() > 1) {
    offsetResults = new QueryResponseImpl(queryRequest, null);
    queryExecutorService.submit(
        new OffsetResultHandler(queryResponseQueue, offsetResults, pageSize, offset));
  }
  queryExecutorService.submit(
      sortedQueryMonitorFactory.createMonitor(
          queryCompletion, futures, queryResponseQueue, modifiedQueryRequest, postQuery));
  QueryResponse queryResponse;
  if (offset > 1 && sources.size() > 1) {
    queryResponse = offsetResults;
    LOGGER.debug("returning offsetResults");
  } else {
    queryResponse = queryResponseQueue;
    LOGGER.debug("returning returnResults: {}", queryResponse);
  }
  LOGGER.debug("returning Query Results: {}", queryResponse);
  return queryResponse;
}
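The loop above skips a source when futuresContainsSource reports that a query for the same source id has already been submitted. That helper's implementation is not shown here; one plausible sketch (an assumption, using only the QueryRequest.getSourceIds() accessor already used above) is:

import java.util.Map;
import java.util.concurrent.Future;

import ddf.catalog.operation.QueryRequest;
import ddf.catalog.operation.SourceResponse;
import ddf.catalog.source.Source;

final class DuplicateSourceCheck {

  // Hypothetical sketch of a futuresContainsSource-style check: a source is treated as a
  // duplicate if a previously submitted request already targets the same source id.
  static boolean futuresContainsSource(
      Source source, Map<Future<SourceResponse>, QueryRequest> futures) {
    return futures
        .values()
        .stream()
        .anyMatch(
            request ->
                request.getSourceIds() != null
                    && request.getSourceIds().contains(source.getId()));
  }
}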
use of ddf.catalog.source.CatalogProvider in project ddf by codice.
the class MetacardValidityFilterPluginTest method mockCatalogProvider.
private static CatalogProvider mockCatalogProvider(String id) {
  CatalogProvider localSource = mock(CatalogProvider.class);
  when(localSource.getId()).thenReturn(id);
  return localSource;
}
use of ddf.catalog.source.CatalogProvider in project ddf by codice.
the class RemoteSolrCatalogProviderTest method testDescribableProperties.
@Test
public void testDescribableProperties() {
  CatalogProvider provider = new MockedRemoteSolrCatalogProvider(mock(SolrClient.class));
  assertNotNull(provider.getTitle());
  assertNotNull(provider.getDescription());
  assertNotNull(provider.getOrganization());
  assertNotNull(provider.getVersion());
}
use of ddf.catalog.source.CatalogProvider in project ddf by codice.
the class RemoteSolrCatalogProviderTest method testId.
@Test
public void testId() {
  CatalogProvider provider = new MockedRemoteSolrCatalogProvider(mock(SolrClient.class));
  provider.maskId("myId");
  assertEquals("myId", provider.getId());
}
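The test relies on maskId switching the id that the provider subsequently reports. As a minimal illustration of that masking contract (an assumption about the general pattern, not RemoteSolrCatalogProvider's actual code):

// Minimal sketch of the id-masking contract exercised above; an illustration only,
// not the actual RemoteSolrCatalogProvider implementation.
class MaskableIdExample {

  private String id = "original-id";

  // After maskId is called, getId() reports the externally assigned id,
  // which is what assertEquals("myId", provider.getId()) verifies above.
  public void maskId(String newId) {
    this.id = newId;
  }

  public String getId() {
    return id;
  }
}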