Use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.
From the class OpenSearchParserImplTest, method populateSearchOptionsNegativePageSize.
@Test
public void populateSearchOptionsNegativePageSize() {
  SortBy sortBy = new SortByImpl(Result.TEMPORAL, SortOrder.DESCENDING);
  Filter filter = mock(Filter.class);
  Query query = new QueryImpl(filter, 0, -1000, sortBy, true, 30000);
  QueryRequest queryRequest = new QueryRequestImpl(query);

  openSearchParser.populateSearchOptions(
      webClient,
      queryRequest,
      null,
      Arrays.asList(
          "q,src,mr,start,count,mt,dn,lat,lon,radius,bbox,polygon,dtstart,dtend,dateName,filter,sort"
              .split(",")));

  assertQueryParameterPopulated(OpenSearchConstants.COUNT);
  assertQueryParameterPopulated(
      OpenSearchConstants.MAX_RESULTS, OpenSearchParserImpl.DEFAULT_TOTAL_MAX.toString());
  assertQueryParameterPopulated(OpenSearchConstants.MAX_TIMEOUT, TIMEOUT);
  assertQueryParameterPopulated(OpenSearchConstants.SORT, DESCENDING_TEMPORAL_SORT);
}
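The assertQueryParameterPopulated helpers are defined elsewhere in OpenSearchParserImplTest and are not shown above. A minimal sketch of what they could look like, assuming webClient is an org.apache.cxf.jaxrs.client.WebClient whose current URI carries the populated query string (the project's actual helpers may differ):

  // Hypothetical sketch; assumes a CXF WebClient and Hamcrest's containsString.
  private void assertQueryParameterPopulated(final String queryParameterName) {
    assertThat(webClient.getCurrentURI().getQuery(), containsString(queryParameterName));
  }

  private void assertQueryParameterPopulated(
      final String queryParameterName, final String queryParameterValue) {
    assertThat(
        webClient.getCurrentURI().getQuery(),
        containsString(queryParameterName + "=" + queryParameterValue));
  }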
Use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.
From the class DuplicateCommands, method duplicateInBatches.
/**
 * Queries the queryFacade in batches and ingests each batch of metacards from the response into
 * the ingestFacade, until the queryFacade returns no more metacards or maxMetacards has been
 * reached.
 *
 * @param queryFacade - the CatalogFacade to duplicate from
 * @param ingestFacade - the CatalogFacade to duplicate to
 * @param filter - the filter to query with
 * @param sourceId - the id of the source to query
 */
protected void duplicateInBatches(
    CatalogFacade queryFacade, CatalogFacade ingestFacade, Filter filter, String sourceId)
    throws InterruptedException {
  AtomicInteger queryIndex = new AtomicInteger(1);

  final long originalQuerySize;
  if (maxMetacards > 0 && maxMetacards < batchSize) {
    originalQuerySize = maxMetacards;
  } else {
    originalQuerySize = batchSize;
  }

  Function<Integer, QueryRequest> queryTemplate =
      (index) ->
          new QueryRequestImpl(
              new QueryImpl(
                  filter,
                  index,
                  (int) originalQuerySize,
                  new SortByImpl(Metacard.EFFECTIVE, SortOrder.DESCENDING),
                  true,
                  TimeUnit.MINUTES.toMillis(5)),
              Collections.singletonList(sourceId));

  List<Metacard> initialMetacards =
      ResultIterable.resultIterable(
              (queryRequest -> {
                SourceResponse response = queryFacade.query(queryRequest);
                if (response.getHits() != -1) {
                  maxMetacards = (int) response.getHits();
                }
                return response;
              }),
              queryTemplate.apply(queryIndex.get()),
              (int) originalQuerySize)
          .stream()
          .map(Result::getMetacard)
          .collect(Collectors.toList());

  if (initialMetacards.isEmpty()) {
    LOGGER.debug("Query returned 0 results.");
    console.println(String.format("No results were returned by the source [%s]", sourceId));
    return;
  }

  ingestMetacards(ingestFacade, initialMetacards);

  if (initialMetacards.size() < originalQuerySize) {
    // all done if results exhausted in the first batch
    printProgressAndFlush(
        start, maxMetacards < 1 ? initialMetacards.size() : maxMetacards, ingestedCount.get());
    return;
  }

  final long totalWanted = maxMetacards;
  final AtomicBoolean done = new AtomicBoolean(false);

  if (multithreaded > 1) {
    BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(multithreaded);
    RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
    final ExecutorService executorService =
        new ThreadPoolExecutor(
            multithreaded,
            multithreaded,
            0L,
            TimeUnit.MILLISECONDS,
            blockingQueue,
            StandardThreadFactoryBuilder.newThreadFactory("duplicateCommandsThread"),
            rejectedExecutionHandler);
    console.printf("Running a maximum of %d threads during replication.%n", multithreaded);

    printProgressAndFlush(
        start, Math.max(totalWanted, initialMetacards.size()), ingestedCount.get());

    int index;
    while (!done.get()) {
      index = queryIndex.addAndGet(batchSize);
      final int taskIndex = index;

      executorService.submit(
          () -> {
            int querySize = (int) getQuerySizeFromIndex(totalWanted, taskIndex);
            if (querySize < 1) {
              // If we don't need any more metacards, we're finished
              done.set(true);
              return;
            }
            List<Metacard> metacards =
                ResultIterable.resultIterable(
                        queryFacade::query, queryTemplate.apply(taskIndex), querySize)
                    .stream()
                    .map(Result::getMetacard)
                    .collect(Collectors.toList());
            if (metacards.size() < querySize) {
              done.set(true);
            }
            if (!metacards.isEmpty()) {
              ingestMetacards(ingestFacade, metacards);
            }
            printProgressAndFlush(
                start, Math.max(totalWanted, ingestedCount.get()), ingestedCount.get());
          });
    }

    executorService.shutdown();
    try {
      executorService.awaitTermination(1, TimeUnit.SECONDS);
    } catch (InterruptedException e) {
      executorService.shutdownNow();
      throw e;
    }

    printProgressAndFlush(start, Math.max(totalWanted, ingestedCount.get()), ingestedCount.get());
  } else {
    // Single threaded
    ResultIterable iter;
    if (maxMetacards > 0) {
      iter =
          ResultIterable.resultIterable(
              queryFacade::query, queryTemplate.apply(1 + batchSize), maxMetacards);
    } else {
      iter =
          ResultIterable.resultIterable(queryFacade::query, queryTemplate.apply(1 + batchSize));
    }

    Iterables.partition(iter, batchSize)
        .forEach(
            (batch) -> {
              printProgressAndFlush(start, totalWanted, ingestedCount.get());
              if (batch.isEmpty()) {
                return;
              }
              ingestMetacards(
                  ingestFacade,
                  batch.stream().map(Result::getMetacard).collect(Collectors.toList()));
            });
  }

  printProgressAndFlush(start, totalWanted, ingestedCount.get());

  if (failedCount.get() > 0) {
    LOGGER.info("Not all records were ingested. [{}] failed", failedCount.get());
    if (StringUtils.isNotBlank(failedDir)) {
      try {
        writeFailedMetacards(failedMetacards);
      } catch (IOException e) {
        console.println("Error occurred while writing failed metacards to failedDir.");
      }
    }
  }
}
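The getQuerySizeFromIndex helper used in the multithreaded branch is not shown above. A plausible sketch, assuming it returns the number of metacards still wanted for the batch starting at a given 1-based query index, capped at batchSize (the project's actual implementation may differ):

  // Hypothetical sketch: how many metacards to request for the batch starting at `index`.
  // Assumes `index` is 1-based and that totalWanted <= 0 means "no upper bound".
  private long getQuerySizeFromIndex(final long totalWanted, final long index) {
    if (totalWanted > 0) {
      // Remaining metacards before hitting totalWanted, capped at one batch.
      return Math.min(totalWanted - (index - 1), batchSize);
    }
    return batchSize;
  }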
Use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.
From the class ConfluenceSourceTest, method testQueryWithSpace.
@Test
public void testQueryWithSpace() throws Exception {
  QueryRequest request =
      new QueryRequestImpl(
          new QueryImpl(
              builder.attribute("anyText").is().like().text("searchValue"),
              1,
              1,
              new SortByImpl("title", SortOrder.DESCENDING),
              false,
              1000));
  InputStream entity = new ByteArrayInputStream(JSON_RESPONSE.getBytes(StandardCharsets.UTF_8));
  confluence.setConfluenceSpaces(Collections.singletonList("DDF"));
  when(clientResponse.getEntity()).thenReturn(entity);
  when(clientResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode());

  SourceResponse response = confluence.query(request);

  assertThat(response.getHits(), is(1L));
  Metacard mcard = response.getResults().get(0).getMetacard();
  assertThat(mcard, notNullValue());
  assertThat(mcard.getAttribute("attrib1").getValue(), is("val1"));
  assertThat(mcard.getAttribute("attrib2").getValues().size(), is(3));
}
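This test and the two ConfluenceSourceTest methods below rely on fixture fields (builder, confluence, clientResponse, JSON_RESPONSE) set up elsewhere in the class. A minimal sketch of the kind of setup these tests assume, using Mockito and DDF's Geotools-backed filter builder; everything beyond the names visible in the snippets is an assumption:

  // Hypothetical fixture sketch for ConfluenceSourceTest.
  private final FilterBuilder builder = new GeotoolsFilterBuilder();
  private Response clientResponse; // javax.ws.rs.core.Response
  private ConfluenceSource confluence;

  @Before
  public void setup() {
    clientResponse = mock(Response.class);
    // `confluence` would be constructed with a client factory whose client
    // returns `clientResponse`; that wiring is project-specific and omitted here.
  }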
Use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.
From the class ConfluenceSourceTest, method verifyAllMappings.
@Test
public void verifyAllMappings() throws Exception {
  QueryRequest request =
      new QueryRequestImpl(
          new QueryImpl(
              builder.attribute("anyText").is().like().text("searchValue"),
              1,
              1,
              new SortByImpl("title", SortOrder.DESCENDING),
              false,
              1000));
  InputStream entity = new ByteArrayInputStream(JSON_RESPONSE.getBytes(StandardCharsets.UTF_8));
  when(clientResponse.getEntity()).thenReturn(entity);
  when(clientResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode());

  SourceResponse response = confluence.query(request);
  assertThat(response.getHits(), is(1L));

  Metacard mcard = response.getResults().get(0).getMetacard();
  assertThat(
      mcard.getCreatedDate(),
      is(DatatypeConverter.parseDateTime("2013-09-18T14:50:42.616-07:00").getTime()));
  assertThat(
      mcard.getModifiedDate(),
      is(DatatypeConverter.parseDateTime("2015-06-16T19:21:39.141-07:00").getTime()));
  assertThat(
      mcard.getAttribute(Core.METACARD_CREATED).getValue(),
      is(DatatypeConverter.parseDateTime("2013-09-18T14:50:42.616-07:00").getTime()));
  assertThat(
      mcard.getAttribute(Core.METACARD_MODIFIED).getValue(),
      is(DatatypeConverter.parseDateTime("2015-06-16T19:21:39.141-07:00").getTime()));
  assertThat(mcard.getTags(), contains("confluence", "resource"));
  assertThat(mcard.getId(), is("1179681"));
  assertThat(mcard.getTitle(), is("Formatting Source Code"));
  assertThat(
      mcard.getAttribute(Associations.EXTERNAL).getValues(),
      contains("https://codice.atlassian.net/wiki/display/DDF/Formatting+Source+Code"));
  assertThat(mcard.getAttribute(Contact.CREATOR_NAME).getValue(), is("another"));
  assertThat(mcard.getAttribute(Contact.CONTRIBUTOR_NAME).getValue(), is("first.last"));
  assertThat(mcard.getAttribute(Media.TYPE).getValue(), is("text/html"));
  assertThat(mcard.getAttribute(Security.ACCESS_GROUPS).getValue(), is("ddf-developers"));
  assertThat(mcard.getAttribute(Security.ACCESS_INDIVIDUALS).getValue(), is("first.last"));
  assertThat(mcard.getAttribute(Topic.CATEGORY).getValue(), is("page"));
  assertThat(
      mcard.getAttribute(Topic.VOCABULARY).getValue(),
      is(
          "https://developer.atlassian.com/confdev/confluence-server-rest-api/advanced-searching-using-cql/cql-field-reference#CQLFieldReference-titleTitleType"));
  assertThat(mcard.getAttribute(Topic.KEYWORD).getValue(), is("testlabel"));
}
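For reference, the kind of metacard population these mapping assertions read back can be sketched with ddf.catalog.data.impl.MetacardImpl. This illustrates the attribute API only, not the ConfluenceSource transformer itself:

  // Illustrative only: setting a few of the attributes the assertions above verify.
  MetacardImpl mcard = new MetacardImpl();
  mcard.setId("1179681");
  mcard.setTitle("Formatting Source Code");
  mcard.setAttribute(Media.TYPE, "text/html");
  mcard.setAttribute(Topic.CATEGORY, "page");
  mcard.setAttribute(Metacard.TAGS, new ArrayList<>(Arrays.asList("confluence", "resource")));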
Use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.
From the class ConfluenceSourceTest, method testNonConfluenceQuery.
@Test
public void testNonConfluenceQuery() throws Exception {
  QueryRequest request =
      new QueryRequestImpl(
          new QueryImpl(
              builder.attribute("metacard-tags").is().like().text("nonConfluecneTag"),
              1,
              1,
              new SortByImpl("title", SortOrder.DESCENDING),
              false,
              1000));
  SourceResponse response = confluence.query(request);
  assertThat(response.getHits(), is(0L));
}
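The zero-hit result reflects the source declining to dispatch a query whose metacard-tags filter matches no tag it handles. A hedged sketch of such a short circuit, using ddf.catalog.operation.impl.SourceResponseImpl; the actual ConfluenceSource logic may differ:

  // Hypothetical sketch: return an empty response when the query's tags do not
  // include any tag this source produces (e.g. "confluence").
  if (!queryHasConfluenceTag(request)) { // hypothetical helper
    return new SourceResponseImpl(request, Collections.emptyList(), 0L);
  }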