Search in sources :

Example 16 with SortByImpl

use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.

The class OpenSearchParserImplTest defines the method populateSearchOptionsNegativePageSize.

@Test
public void populateSearchOptionsNegativePageSize() {
    // Build a query whose page size is negative (-1000); the assertions below
    // expect the parser to fall back to DEFAULT_TOTAL_MAX for the max-results
    // parameter instead of propagating the negative value.
    final Filter filter = mock(Filter.class);
    final SortBy temporalDescending = new SortByImpl(Result.TEMPORAL, SortOrder.DESCENDING);
    final Query query = new QueryImpl(filter, 0, -1000, temporalDescending, true, 30000);
    final QueryRequest queryRequest = new QueryRequestImpl(query);

    // Full set of OpenSearch parameters this endpoint accepts.
    final String acceptedParameters =
        "q,src,mr,start,count,mt,dn,lat,lon,radius,bbox,polygon,dtstart,dtend,dateName,filter,sort";
    openSearchParser.populateSearchOptions(
        webClient, queryRequest, null, Arrays.asList(acceptedParameters.split(",")));

    assertQueryParameterPopulated(OpenSearchConstants.COUNT);
    assertQueryParameterPopulated(
        OpenSearchConstants.MAX_RESULTS, OpenSearchParserImpl.DEFAULT_TOTAL_MAX.toString());
    assertQueryParameterPopulated(OpenSearchConstants.MAX_TIMEOUT, TIMEOUT);
    assertQueryParameterPopulated(OpenSearchConstants.SORT, DESCENDING_TEMPORAL_SORT);
}
Also used : QueryImpl(ddf.catalog.operation.impl.QueryImpl) Query(ddf.catalog.operation.Query) QueryRequest(ddf.catalog.operation.QueryRequest) SortByImpl(ddf.catalog.filter.impl.SortByImpl) TemporalFilter(ddf.catalog.impl.filter.TemporalFilter) Filter(org.opengis.filter.Filter) SortBy(org.opengis.filter.sort.SortBy) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) Test(org.junit.Test)

Example 17 with SortByImpl

use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.

The class DuplicateCommands defines the method duplicateInBatches.

/**
 * In batches, loops through a query of the queryFacade and an ingest to the ingestFacade of the
 * metacards from the response until there are no more metacards from the queryFacade or the
 * maxMetacards has been reached.
 *
 * <p>Relies on mutable fields of the enclosing class: {@code batchSize}, {@code maxMetacards},
 * {@code multithreaded}, {@code start}, {@code ingestedCount}, {@code failedCount},
 * {@code failedDir}, {@code failedMetacards}, and {@code console}.
 *
 * @param queryFacade - the CatalogFacade to duplicate from
 * @param ingestFacade - the CatalogFacade to duplicate to
 * @param filter - the filter to query with
 * @param sourceId - id of the source to query; also used as the request's source list
 * @throws InterruptedException if interrupted while waiting for worker threads to stop
 */
protected void duplicateInBatches(CatalogFacade queryFacade, CatalogFacade ingestFacade, Filter filter, String sourceId) throws InterruptedException {
    // 1-based start index of the next page to request from the source.
    AtomicInteger queryIndex = new AtomicInteger(1);
    // Size of the first page: capped at maxMetacards when that cap is set and
    // smaller than a full batch, otherwise one full batch.
    final long originalQuerySize;
    if (maxMetacards > 0 && maxMetacards < batchSize) {
        originalQuerySize = maxMetacards;
    } else {
        originalQuerySize = batchSize;
    }
    // Builds a page request at the given start index: newest-first by effective
    // date, restricted to the one source, with a 5-minute query timeout.
    Function<Integer, QueryRequest> queryTemplate = (index) -> new QueryRequestImpl(new QueryImpl(filter, index, (int) originalQuerySize, new SortByImpl(Metacard.EFFECTIVE, SortOrder.DESCENDING), true, TimeUnit.MINUTES.toMillis(5)), Collections.singletonList(sourceId));
    // First page. The wrapped query also snapshots the source's total hit
    // count into maxMetacards (when the source reports one), so later batches
    // know when to stop.
    List<Metacard> initialMetacards = ResultIterable.resultIterable((queryRequest -> {
        SourceResponse response = queryFacade.query(queryRequest);
        if (response.getHits() != -1) {
            maxMetacards = (int) response.getHits();
        }
        return response;
    }), queryTemplate.apply(queryIndex.get()), (int) originalQuerySize).stream().map(Result::getMetacard).collect(Collectors.toList());
    if (initialMetacards.isEmpty()) {
        LOGGER.debug("Query returned 0 results.");
        console.println(String.format("No results were returned by the source [%s]", sourceId));
        return;
    }
    ingestMetacards(ingestFacade, initialMetacards);
    if (initialMetacards.size() < originalQuerySize) {
        // A short first page means the source is exhausted - nothing more to do.
        printProgressAndFlush(start, maxMetacards < 1 ? initialMetacards.size() : maxMetacards, ingestedCount.get());
        return;
    }
    // Total requested; set by the hit-count snapshot above when available.
    final long totalWanted = maxMetacards;
    final AtomicBoolean done = new AtomicBoolean(false);
    if (multithreaded > 1) {
        // Bounded queue + CallerRunsPolicy: when all workers are busy and the
        // queue is full, the submitting loop runs the task itself, which keeps
        // the while-loop below from racing ahead of the workers unboundedly.
        BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(multithreaded);
        RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
        final ExecutorService executorService = new ThreadPoolExecutor(multithreaded, multithreaded, 0L, TimeUnit.MILLISECONDS, blockingQueue, StandardThreadFactoryBuilder.newThreadFactory("duplicateCommandsThread"), rejectedExecutionHandler);
        console.printf("Running a maximum of %d threads during replication.%n", multithreaded);
        printProgressAndFlush(start, Math.max(totalWanted, initialMetacards.size()), ingestedCount.get());
        int index;
        // Keep handing out page-start indices until some task observes the
        // end of the result set (short page or index past totalWanted).
        while (!done.get()) {
            index = queryIndex.addAndGet(batchSize);
            final int taskIndex = index;
            executorService.submit(() -> {
                // Remaining metacards wanted at this index (helper defined
                // elsewhere in this class); <1 means the cap is reached.
                int querySize = (int) getQuerySizeFromIndex(totalWanted, taskIndex);
                if (querySize < 1) {
                    // If we don't need any more metacards, we're finished
                    done.set(true);
                    return;
                }
                List<Metacard> metacards = ResultIterable.resultIterable(queryFacade::query, queryTemplate.apply(taskIndex), querySize).stream().map(Result::getMetacard).collect(Collectors.toList());
                if (metacards.size() < querySize) {
                    // Short page: the source is exhausted.
                    done.set(true);
                }
                if (!metacards.isEmpty()) {
                    ingestMetacards(ingestFacade, metacards);
                }
                printProgressAndFlush(start, Math.max(totalWanted, ingestedCount.get()), ingestedCount.get());
            });
        }
        executorService.shutdown();
        try {
            // NOTE(review): waits at most 1 second for in-flight tasks; tasks
            // still running after that are abandoned without a warning.
            // Confirm this short grace period is intentional.
            executorService.awaitTermination(1, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            executorService.shutdownNow();
            throw e;
        }
        printProgressAndFlush(start, Math.max(totalWanted, ingestedCount.get()), ingestedCount.get());
    } else {
        // Single threaded
        // NOTE(review): resumes at 1 + batchSize, assuming the first page
        // covered indices 1..batchSize. When maxMetacards < batchSize the
        // first page was smaller than batchSize - confirm no records are
        // skipped or re-fetched in that edge case.
        ResultIterable iter;
        if (maxMetacards > 0) {
            iter = ResultIterable.resultIterable(queryFacade::query, queryTemplate.apply(1 + batchSize), maxMetacards);
        } else {
            iter = ResultIterable.resultIterable(queryFacade::query, queryTemplate.apply(1 + batchSize));
        }
        Iterables.partition(iter, batchSize).forEach((batch) -> {
            printProgressAndFlush(start, totalWanted, ingestedCount.get());
            if (batch.isEmpty()) {
                return;
            }
            ingestMetacards(ingestFacade, batch.stream().map(Result::getMetacard).collect(Collectors.toList()));
        });
    }
    printProgressAndFlush(start, totalWanted, ingestedCount.get());
    if (failedCount.get() > 0) {
        LOGGER.info("Not all records were ingested. [{}] failed", failedCount.get());
        if (StringUtils.isNotBlank(failedDir)) {
            try {
                writeFailedMetacards(failedMetacards);
            } catch (IOException e) {
                console.println("Error occurred while writing failed metacards to failedDir.");
            }
        }
    }
}
Also used : QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) Iterables(com.google.common.collect.Iterables) ResultIterable(ddf.catalog.util.impl.ResultIterable) Arrays(java.util.Arrays) StringUtils(org.apache.commons.lang.StringUtils) CreateRequest(ddf.catalog.operation.CreateRequest) SourceUnavailableException(ddf.catalog.source.SourceUnavailableException) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor) LoggerFactory(org.slf4j.LoggerFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Function(java.util.function.Function) ArrayList(java.util.ArrayList) HashSet(java.util.HashSet) CreateResponse(ddf.catalog.operation.CreateResponse) Metacard(ddf.catalog.data.Metacard) RejectedExecutionHandler(java.util.concurrent.RejectedExecutionHandler) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) MetacardImpl(ddf.catalog.data.impl.MetacardImpl) QueryRequest(ddf.catalog.operation.QueryRequest) SortByImpl(ddf.catalog.filter.impl.SortByImpl) ObjectOutputStream(java.io.ObjectOutputStream) Result(ddf.catalog.data.Result) ExecutorService(java.util.concurrent.ExecutorService) SortOrder(org.opengis.filter.sort.SortOrder) CreateRequestImpl(ddf.catalog.operation.impl.CreateRequestImpl) QueryImpl(ddf.catalog.operation.impl.QueryImpl) Logger(org.slf4j.Logger) StandardThreadFactoryBuilder(org.codice.ddf.platform.util.StandardThreadFactoryBuilder) IngestException(ddf.catalog.source.IngestException) FileOutputStream(java.io.FileOutputStream) Set(java.util.Set) IOException(java.io.IOException) BlockingQueue(java.util.concurrent.BlockingQueue) Collectors(java.util.stream.Collectors) File(java.io.File) TimeUnit(java.util.concurrent.TimeUnit) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) SourceResponse(ddf.catalog.operation.SourceResponse) List(java.util.List) CatalogFacade(org.codice.ddf.commands.catalog.facade.CatalogFacade) Filter(org.opengis.filter.Filter) Option(org.apache.karaf.shell.api.action.Option) 
Collections(java.util.Collections) Result(ddf.catalog.data.Result) QueryImpl(ddf.catalog.operation.impl.QueryImpl) ArrayBlockingQueue(java.util.concurrent.ArrayBlockingQueue) QueryRequest(ddf.catalog.operation.QueryRequest) SourceResponse(ddf.catalog.operation.SourceResponse) RejectedExecutionHandler(java.util.concurrent.RejectedExecutionHandler) IOException(java.io.IOException) ResultIterable(ddf.catalog.util.impl.ResultIterable) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Metacard(ddf.catalog.data.Metacard) AtomicInteger(java.util.concurrent.atomic.AtomicInteger) SortByImpl(ddf.catalog.filter.impl.SortByImpl) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) ExecutorService(java.util.concurrent.ExecutorService) ThreadPoolExecutor(java.util.concurrent.ThreadPoolExecutor)

Example 18 with SortByImpl

use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.

The class ConfluenceSourceTest defines the method testQueryWithSpace.

@Test
public void testQueryWithSpace() throws Exception {
    // Query with the source restricted to the "DDF" Confluence space. The
    // stubbed JSON response should still yield exactly one mapped metacard.
    QueryImpl anyTextQuery =
        new QueryImpl(
            builder.attribute("anyText").is().like().text("searchValue"),
            1,
            1,
            new SortByImpl("title", SortOrder.DESCENDING),
            false,
            1000);
    QueryRequest request = new QueryRequestImpl(anyTextQuery);

    confluence.setConfluenceSpaces(Collections.singletonList("DDF"));
    InputStream jsonBody = new ByteArrayInputStream(JSON_RESPONSE.getBytes(StandardCharsets.UTF_8));
    when(clientResponse.getEntity()).thenReturn(jsonBody);
    when(clientResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode());

    SourceResponse response = confluence.query(request);

    assertThat(response.getHits(), is(1L));
    Metacard mcard = response.getResults().get(0).getMetacard();
    assertThat(mcard, notNullValue());
    assertThat(mcard.getAttribute("attrib1").getValue(), is("val1"));
    assertThat(mcard.getAttribute("attrib2").getValues().size(), is(3));
}
Also used : QueryImpl(ddf.catalog.operation.impl.QueryImpl) Metacard(ddf.catalog.data.Metacard) QueryRequest(ddf.catalog.operation.QueryRequest) SourceResponse(ddf.catalog.operation.SourceResponse) SortByImpl(ddf.catalog.filter.impl.SortByImpl) ByteArrayInputStream(java.io.ByteArrayInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) Test(org.junit.Test)

Example 19 with SortByImpl

use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.

The class ConfluenceSourceTest defines the method verifyAllMappings.

@Test
public void verifyAllMappings() throws Exception {
    // Exercise a plain anyText query against the canned JSON response and
    // verify every Confluence-to-metacard attribute mapping.
    QueryRequest request =
        new QueryRequestImpl(
            new QueryImpl(
                builder.attribute("anyText").is().like().text("searchValue"),
                1,
                1,
                new SortByImpl("title", SortOrder.DESCENDING),
                false,
                1000));
    InputStream entity = new ByteArrayInputStream(JSON_RESPONSE.getBytes(StandardCharsets.UTF_8));
    when(clientResponse.getEntity()).thenReturn(entity);
    when(clientResponse.getStatus()).thenReturn(Response.Status.OK.getStatusCode());

    SourceResponse response = confluence.query(request);
    assertThat(response.getHits(), is(1L));
    Metacard mcard = response.getResults().get(0).getMetacard();

    // The fixture uses two fixed timestamps; the core dates and the
    // metacard.created/modified attributes must resolve to the same instants.
    String createdIso = "2013-09-18T14:50:42.616-07:00";
    String modifiedIso = "2015-06-16T19:21:39.141-07:00";
    java.util.Date created = DatatypeConverter.parseDateTime(createdIso).getTime();
    java.util.Date modified = DatatypeConverter.parseDateTime(modifiedIso).getTime();
    assertThat(mcard.getCreatedDate(), is(created));
    assertThat(mcard.getModifiedDate(), is(modified));
    assertThat(mcard.getAttribute(Core.METACARD_CREATED).getValue(), is(created));
    assertThat(mcard.getAttribute(Core.METACARD_MODIFIED).getValue(), is(modified));

    assertThat(mcard.getTags(), contains("confluence", "resource"));
    assertThat(mcard.getId(), is("1179681"));
    assertThat(mcard.getTitle(), is("Formatting Source Code"));
    assertThat(mcard.getAttribute(Associations.EXTERNAL).getValues(), contains("https://codice.atlassian.net/wiki/display/DDF/Formatting+Source+Code"));
    assertThat(mcard.getAttribute(Contact.CREATOR_NAME).getValue(), is("another"));
    assertThat(mcard.getAttribute(Contact.CONTRIBUTOR_NAME).getValue(), is("first.last"));
    assertThat(mcard.getAttribute(Media.TYPE).getValue(), is("text/html"));
    assertThat(mcard.getAttribute(Security.ACCESS_GROUPS).getValue(), is("ddf-developers"));
    assertThat(mcard.getAttribute(Security.ACCESS_INDIVIDUALS).getValue(), is("first.last"));
    assertThat(mcard.getAttribute(Topic.CATEGORY).getValue(), is("page"));
    assertThat(mcard.getAttribute(Topic.VOCABULARY).getValue(), is("https://developer.atlassian.com/confdev/confluence-server-rest-api/advanced-searching-using-cql/cql-field-reference#CQLFieldReference-titleTitleType"));
    assertThat(mcard.getAttribute(Topic.KEYWORD).getValue(), is("testlabel"));
}
Also used : QueryImpl(ddf.catalog.operation.impl.QueryImpl) Metacard(ddf.catalog.data.Metacard) QueryRequest(ddf.catalog.operation.QueryRequest) SourceResponse(ddf.catalog.operation.SourceResponse) SortByImpl(ddf.catalog.filter.impl.SortByImpl) ByteArrayInputStream(java.io.ByteArrayInputStream) ByteArrayInputStream(java.io.ByteArrayInputStream) InputStream(java.io.InputStream) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) Test(org.junit.Test)

Example 20 with SortByImpl

use of ddf.catalog.filter.impl.SortByImpl in project ddf by codice.

The class ConfluenceSourceTest defines the method testNonConfluenceQuery.

@Test
public void testNonConfluenceQuery() throws Exception {
    // A metacard-tags query that matches no Confluence tag: the source
    // reports zero hits (no REST response is stubbed here, so presumably the
    // query is rejected before any remote call - verify against the source).
    QueryImpl nonConfluenceQuery =
        new QueryImpl(
            builder.attribute("metacard-tags").is().like().text("nonConfluecneTag"),
            1,
            1,
            new SortByImpl("title", SortOrder.DESCENDING),
            false,
            1000);
    SourceResponse response = confluence.query(new QueryRequestImpl(nonConfluenceQuery));
    assertThat(response.getHits(), is(0L));
}
Also used : QueryImpl(ddf.catalog.operation.impl.QueryImpl) QueryRequest(ddf.catalog.operation.QueryRequest) SourceResponse(ddf.catalog.operation.SourceResponse) SortByImpl(ddf.catalog.filter.impl.SortByImpl) QueryRequestImpl(ddf.catalog.operation.impl.QueryRequestImpl) Test(org.junit.Test)

Aggregations

SortByImpl (ddf.catalog.filter.impl.SortByImpl)68 QueryImpl (ddf.catalog.operation.impl.QueryImpl)65 Test (org.junit.Test)56 QueryRequestImpl (ddf.catalog.operation.impl.QueryRequestImpl)47 SortBy (org.opengis.filter.sort.SortBy)45 SourceResponse (ddf.catalog.operation.SourceResponse)28 QueryRequest (ddf.catalog.operation.QueryRequest)23 Matchers.containsString (org.hamcrest.Matchers.containsString)21 Filter (org.opengis.filter.Filter)13 ArrayList (java.util.ArrayList)12 GetFeatureType (net.opengis.wfs.v_2_0_0.GetFeatureType)12 QueryType (net.opengis.wfs.v_2_0_0.QueryType)12 Query (ddf.catalog.operation.Query)9 TemporalFilter (ddf.catalog.impl.filter.TemporalFilter)8 InputStream (java.io.InputStream)8 GetRecordsType (net.opengis.cat.csw.v_2_0_2.GetRecordsType)8 QueryType (net.opengis.cat.csw.v_2_0_2.QueryType)8 CswException (org.codice.ddf.spatial.ogc.csw.catalog.common.CswException)8 MetacardMapper (org.codice.ddf.spatial.ogc.wfs.catalog.mapper.MetacardMapper)8 ByteArrayInputStream (java.io.ByteArrayInputStream)7