Example 6 with Query

use of com.google.datastore.v1.Query in project beam by apache.

the class DatastoreV1Test method testSplitQueryFnWithoutNumSplits.

/**
 * Tests {@link SplitQueryFn} when the number of query splits is not specified.
 */
@Test
public void testSplitQueryFnWithoutNumSplits() throws Exception {
    // Force SplitQueryFn to compute the number of query splits
    int numSplits = 0;
    int expectedNumSplits = 20;
    long entityBytes = expectedNumSplits * DEFAULT_BUNDLE_SIZE_BYTES;
    // In seconds
    long timestamp = 1234L;
    RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE), NAMESPACE);
    RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);
    // Per Kind statistics request and response
    RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
    RunQueryResponse statResponse = makeStatKindResponse(entityBytes);
    when(mockDatastore.runQuery(latestTimestampRequest)).thenReturn(latestTimestampResponse);
    when(mockDatastore.runQuery(statRequest)).thenReturn(statResponse);
    when(mockQuerySplitter.getSplits(eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class))).thenReturn(splitQuery(QUERY, expectedNumSplits));
    SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory);
    DoFnTester<Query, Query> doFnTester = DoFnTester.of(splitQueryFn);
    doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
    List<Query> queries = doFnTester.processBundle(QUERY);
    assertEquals(expectedNumSplits, queries.size());
    verify(mockQuerySplitter, times(1)).getSplits(eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class));
    verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
    verify(mockDatastore, times(1)).runQuery(statRequest);
}
Also used : Datastore(com.google.datastore.v1.client.Datastore) GqlQuery(com.google.datastore.v1.GqlQuery) Query(com.google.datastore.v1.Query) RunQueryResponse(com.google.datastore.v1.RunQueryResponse) RunQueryRequest(com.google.datastore.v1.RunQueryRequest) SplitQueryFn(org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.SplitQueryFn) PartitionId(com.google.datastore.v1.PartitionId) Test(org.junit.Test)
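For context, here is a minimal sketch of how such a query might be handed to the Datastore connector in a real pipeline, assuming the standard DatastoreIO.v1().read() builder; the project id, namespace, and kind name are placeholders, and leaving the split count unset takes the estimation path that the test above exercises.

import com.google.datastore.v1.Entity;
import com.google.datastore.v1.KindExpression;
import com.google.datastore.v1.Query;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.gcp.datastore.DatastoreIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.values.PCollection;

public class DatastoreReadSketch {
    public static void main(String[] args) {
        Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.fromArgs(args).create());

        // Query over a single kind; the kind name is a placeholder.
        Query query = Query.newBuilder()
            .addKind(KindExpression.newBuilder().setName("MyKind"))
            .build();

        // Not calling withNumQuerySplits lets SplitQueryFn estimate the split count
        // from the kind statistics, which is the code path tested above.
        PCollection<Entity> entities = pipeline.apply(
            DatastoreIO.v1().read()
                .withProjectId("my-project")     // placeholder project id
                .withNamespace("my-namespace")   // placeholder namespace
                .withQuery(query));

        pipeline.run().waitUntilFinish();
    }
}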

Example 7 with Query

use of com.google.datastore.v1.Query in project beam by apache.

the class DatastoreV1Test method testReadFnRetriesErrors.

/**
 * Tests that {@link ReadFn} retries after an error.
 */
@Test
public void testReadFnRetriesErrors() throws Exception {
    // An empty query to read entities.
    Query query = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(1)).build();
    // Use mockResponseForQuery to generate results.
    when(mockDatastore.runQuery(any(RunQueryRequest.class)))
        .thenThrow(new DatastoreException("RunQuery", Code.DEADLINE_EXCEEDED, "", null))
        .thenAnswer(invocationOnMock -> {
            Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
            return mockResponseForQuery(q);
        });
    ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
    DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
    doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
    doFnTester.processBundle(query);
    verifyMetricWasSet("BatchDatastoreRead", "ok", NAMESPACE, 1);
    verifyMetricWasSet("BatchDatastoreRead", "unknown", NAMESPACE, 1);
}
Also used : Entity(com.google.datastore.v1.Entity) DeleteEntity(org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.DeleteEntity) GqlQuery(com.google.datastore.v1.GqlQuery) Query(com.google.datastore.v1.Query) ReadFn(org.apache.beam.sdk.io.gcp.datastore.DatastoreV1.Read.ReadFn) RunQueryRequest(com.google.datastore.v1.RunQueryRequest) DatastoreException(com.google.datastore.v1.client.DatastoreException) Test(org.junit.Test)
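The retry behaviour is driven entirely by Mockito's stub chaining. A minimal, self-contained sketch of that pattern, using a hypothetical FlakyService interface in place of the Datastore client:

import static org.mockito.Mockito.*;

public class RetryStubbingSketch {

    // Hypothetical collaborator standing in for the Datastore client.
    interface FlakyService {
        String fetch(String key) throws Exception;
    }

    public static void main(String[] args) throws Exception {
        FlakyService service = mock(FlakyService.class);

        // The first call throws; later calls answer from the actual argument,
        // mirroring the thenThrow(...).thenAnswer(...) chain in the test above.
        when(service.fetch(anyString()))
            .thenThrow(new RuntimeException("transient failure"))
            .thenAnswer(invocation -> "value-for-" + invocation.getArguments()[0]);

        try {
            service.fetch("k1"); // fails once, as stubbed
        } catch (RuntimeException expected) {
            System.out.println("first call failed: " + expected.getMessage());
        }
        System.out.println(service.fetch("k1")); // prints value-for-k1
    }
}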

Example 8 with Query

use of org.skife.jdbi.v2.Query in project druid by druid-io.

the class SqlSegmentsMetadataQuery method retrieveSegments.

private CloseableIterator<DataSegment> retrieveSegments(final String dataSource, final Collection<Interval> intervals, final IntervalMode matchMode, final boolean used) {
    // Check if the intervals all support comparing as strings. If so, bake them into the SQL.
    final boolean compareAsString = intervals.stream().allMatch(Intervals::canCompareEndpointsAsStrings);
    final StringBuilder sb = new StringBuilder();
    sb.append("SELECT payload FROM %s WHERE used = :used AND dataSource = :dataSource");
    if (compareAsString && !intervals.isEmpty()) {
        sb.append(" AND (");
        for (int i = 0; i < intervals.size(); i++) {
            sb.append(matchMode.makeSqlCondition(connector.getQuoteString(), StringUtils.format(":start%d", i), StringUtils.format(":end%d", i)));
            if (i == intervals.size() - 1) {
                sb.append(")");
            } else {
                sb.append(" OR ");
            }
        }
    }
    final Query<Map<String, Object>> sql = handle
        .createQuery(StringUtils.format(sb.toString(), dbTables.getSegmentsTable()))
        .setFetchSize(connector.getStreamingFetchSize())
        .bind("used", used)
        .bind("dataSource", dataSource);
    if (compareAsString) {
        final Iterator<Interval> iterator = intervals.iterator();
        for (int i = 0; iterator.hasNext(); i++) {
            Interval interval = iterator.next();
            sql.bind(StringUtils.format("start%d", i), interval.getStart().toString())
               .bind(StringUtils.format("end%d", i), interval.getEnd().toString());
        }
    }
    final ResultIterator<DataSegment> resultIterator = sql
        .map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes(1), DataSegment.class))
        .iterator();
    return CloseableIterators.wrap(Iterators.filter(resultIterator, dataSegment -> {
        if (intervals.isEmpty()) {
            return true;
        } else {
            // Re-check that the interval actually matches, because the segment interval may not be
            // string-comparable. (Consider a query interval like "2000-01-01/3000-01-01" and a
            // segment interval like "20010/20011".)
            for (Interval interval : intervals) {
                if (matchMode.apply(interval, dataSegment.getInterval())) {
                    return true;
                }
            }
            return false;
        }
    }), resultIterator);
}
Also used : Intervals(org.apache.druid.java.util.common.Intervals) Logger(org.apache.druid.java.util.common.logger.Logger) JacksonUtils(org.apache.druid.java.util.common.jackson.JacksonUtils) Iterator(java.util.Iterator) Collection(java.util.Collection) ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper) StringUtils(org.apache.druid.java.util.common.StringUtils) Query(org.skife.jdbi.v2.Query) Collectors(java.util.stream.Collectors) Iterators(com.google.common.collect.Iterators) Interval(org.joda.time.Interval) List(java.util.List) CloseableIterators(org.apache.druid.java.util.common.CloseableIterators) Handle(org.skife.jdbi.v2.Handle) ImmutableList(com.google.common.collect.ImmutableList) Map(java.util.Map) DataSegment(org.apache.druid.timeline.DataSegment) ResultIterator(org.skife.jdbi.v2.ResultIterator) PreparedBatch(org.skife.jdbi.v2.PreparedBatch) IAE(org.apache.druid.java.util.common.IAE) SegmentId(org.apache.druid.timeline.SegmentId) CloseableIterator(org.apache.druid.java.util.common.parsers.CloseableIterator) Collections(java.util.Collections) Test(org.junit.Test)
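The Query object in this example is JDBI's org.skife.jdbi.v2.Query, built from a SQL string with named parameters. A minimal sketch of the same bind-and-iterate pattern against a hypothetical in-memory table (the table, column names, and H2 JDBC URL are assumptions):

import java.util.Iterator;
import java.util.Map;
import org.skife.jdbi.v2.DBI;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;

public class JdbiNamedBindingSketch {
    public static void main(String[] args) {
        // Assumes an in-memory H2 database (and its JDBC driver) on the classpath.
        DBI dbi = new DBI("jdbc:h2:mem:segments_example");
        Handle handle = dbi.open();
        try {
            handle.execute("CREATE TABLE segments (dataSource VARCHAR, used BOOLEAN, payload VARCHAR)");
            handle.execute("INSERT INTO segments VALUES ('wiki', TRUE, '{}')");

            // Named parameters (:used, :dataSource) are bound one by one, just like the
            // :start%d / :end%d placeholders generated in retrieveSegments above.
            Query<Map<String, Object>> query = handle
                .createQuery("SELECT payload FROM segments WHERE used = :used AND dataSource = :dataSource")
                .bind("used", true)
                .bind("dataSource", "wiki");

            Iterator<Map<String, Object>> rows = query.iterator();
            while (rows.hasNext()) {
                System.out.println(rows.next().get("payload"));
            }
        } finally {
            handle.close();
        }
    }
}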

Example 9 with Query

use of org.jpl7.Query in project packages-jpl by SWI-Prolog.

the class FamilyMT method run.

public void run() {
    Map<String, Term> solution;
    // Variable X = new Variable("X");
    // 
    Query q2 = new Query("child_of(joe,ralf)");
    System.err.println("child_of(joe,ralf) is " + (q2.hasSolution() ? "provable" : "not provable"));
    new Query("sleep(?)", new Term[] { new org.jpl7.Integer(delay) }).hasSolution();
    // 
    Query q3 = new Query("descendent_of(steve,ralf)");
    System.err.println("descendent_of(steve,ralf) is " + (q3.hasSolution() ? "provable" : "not provable"));
    delay();
    // 
    Query q4 = new Query("descendent_of(X, ralf)");
    solution = q4.oneSolution();
    System.err.println("first solution of descendent_of(X, ralf)");
    System.err.println("X = " + solution.get("X"));
    delay();
    // 
    Map<String, Term>[] solutions = q4.allSolutions();
    System.err.println("all solutions of descendent_of(X, ralf)");
    for (int i = 0; i < solutions.length; i++) {
        System.err.println("X = " + solutions[i].get("X"));
    }
    delay();
    // 
    System.err.println("each solution of descendent_of(X, ralf)");
    while (q4.hasMoreSolutions()) {
        solution = q4.nextSolution();
        System.err.println("X = " + solution.get("X"));
    }
    delay();
    // 
    Query q5 = new Query("descendent_of(X, Y)");
    System.err.println(id + ": each solution of descendent_of(X, Y)");
    while (q5.hasMoreSolutions()) {
        solution = q5.nextSolution();
        System.err.println(id + ": X = " + solution.get("X") + ", Y = " + solution.get("Y"));
        delay();
    }
}
Also used : Query(org.jpl7.Query) Term(org.jpl7.Term) Map(java.util.Map)
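The same goals can also be built from Term objects (Variable, Atom) instead of query strings. A minimal sketch, assuming the family example has already been written to a placeholder file name:

import java.util.Map;
import org.jpl7.Atom;
import org.jpl7.Query;
import org.jpl7.Term;
import org.jpl7.Variable;

public class FamilyQuerySketch {
    public static void main(String[] args) {
        // Load the family example; the file name is a placeholder.
        new Query("consult", new Term[] { new Atom("family.pl") }).hasSolution();

        // Build the goal from terms rather than parsing a string, binding X explicitly.
        Variable x = new Variable("X");
        Query q = new Query("descendent_of", new Term[] { x, new Atom("ralf") });

        // Enumerate solutions with the same hasMoreSolutions/nextSolution protocol as above.
        while (q.hasMoreSolutions()) {
            Map<String, Term> solution = q.nextSolution();
            System.out.println("X = " + solution.get("X"));
        }
    }
}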

Example 10 with Query

use of org.jpl7.Query in project packages-jpl by SWI-Prolog.

the class Test method test_3.

static void test_3() {
    System.out.print("test 3...");
    Query q3 = new Query("p", new Term[] { pair_a_b });
    if (!q3.hasSolution()) {
        System.out.println("p(a-b) failed");
    // System.exit(1);
    }
    System.out.println("passed");
}
Also used : Query(org.jpl7.Query)

Aggregations

Query (org.jpl7.Query): 88
Term (org.jpl7.Term): 52
Variable (org.jpl7.Variable): 32
Query (org.hypertrace.entity.data.service.v1.Query): 31
Map (java.util.Map): 27
Test (org.junit.jupiter.api.Test): 27
Atom (org.jpl7.Atom): 21
Compound (org.jpl7.Compound): 18
Filter (org.hypertrace.core.documentstore.Filter): 17
Test (org.junit.Test): 16
AttributeFilter (org.hypertrace.entity.data.service.v1.AttributeFilter): 15
Query (com.google.datastore.v1.Query): 14
ArrayList (java.util.ArrayList): 10
Entity (org.hypertrace.entity.data.service.v1.Entity): 9
EntityQueryRequest (org.hypertrace.entity.query.service.v1.EntityQueryRequest): 8
GqlQuery (com.google.datastore.v1.GqlQuery): 7
Integer (org.jpl7.Integer): 6
IOException (java.io.IOException): 5
Collections (java.util.Collections): 5
List (java.util.List): 5