use of com.google.datastore.v1.Query in project beam by apache.
the class DatastoreV1Test method testSplitQueryFnWithNumSplits.
/**
* Tests {@link SplitQueryFn} when number of query splits is specified.
*/
@Test
public void testSplitQueryFnWithNumSplits() throws Exception {
int numSplits = 100;
when(mockQuerySplitter.getSplits(eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class)))
    .thenReturn(splitQuery(QUERY, numSplits));
SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory);
DoFnTester<Query, Query> doFnTester = DoFnTester.of(splitQueryFn);
/**
* Although the Datastore client is marked transient in {@link SplitQueryFn}, it is not
* serializable when injected through the mock factory with a when clause for unit testing,
* because it doesn't have a no-arg constructor. Hence cloning is disabled to prevent the
* DoFn from being serialized.
*/
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<Query> queries = doFnTester.processBundle(QUERY);
assertEquals(numSplits, queries.size());
// Confirm that the sub-queries differ from the original query when there is more than one split.
for (Query subQuery : queries) {
assertNotEquals(subQuery, QUERY);
}
verify(mockQuerySplitter, times(1))
    .getSplits(eq(QUERY), any(PartitionId.class), eq(numSplits), any(Datastore.class));
verifyZeroInteractions(mockDatastore);
}
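The splitQuery helper that stubs the splitter's return value is not shown on this page. A minimal sketch of what such a helper might look like (illustrative only, not the actual Beam test helper, and assuming the same imports as the test class):
// Hypothetical stub: produce numSplits sub-queries, each differing from the original
// query (here by setting a distinct limit) so the "not equal to original" check above holds.
private static List<Query> splitQuery(Query query, int numSplits) {
  List<Query> splits = new ArrayList<>(numSplits);
  for (int i = 0; i < numSplits; i++) {
    splits.add(query.toBuilder()
        .setLimit(Int32Value.newBuilder().setValue(i + 1))
        .build());
  }
  return splits;
}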
use of com.google.datastore.v1.Query in project beam by apache.
the class DatastoreV1Test method testSplitQueryFnWithoutNumSplits.
/**
* Tests {@link SplitQueryFn} when the number of query splits is not specified.
*/
@Test
public void testSplitQueryFnWithoutNumSplits() throws Exception {
// Force SplitQueryFn to compute the number of query splits
int numSplits = 0;
int expectedNumSplits = 20;
long entityBytes = expectedNumSplits * DEFAULT_BUNDLE_SIZE_BYTES;
// In seconds
long timestamp = 1234L;
RunQueryRequest latestTimestampRequest = makeRequest(makeLatestTimestampQuery(NAMESPACE), NAMESPACE);
RunQueryResponse latestTimestampResponse = makeLatestTimestampResponse(timestamp);
// Per Kind statistics request and response
RunQueryRequest statRequest = makeRequest(makeStatKindQuery(NAMESPACE, timestamp), NAMESPACE);
RunQueryResponse statResponse = makeStatKindResponse(entityBytes);
when(mockDatastore.runQuery(latestTimestampRequest)).thenReturn(latestTimestampResponse);
when(mockDatastore.runQuery(statRequest)).thenReturn(statResponse);
when(mockQuerySplitter.getSplits(eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class)))
    .thenReturn(splitQuery(QUERY, expectedNumSplits));
SplitQueryFn splitQueryFn = new SplitQueryFn(V_1_OPTIONS, numSplits, mockDatastoreFactory);
DoFnTester<Query, Query> doFnTester = DoFnTester.of(splitQueryFn);
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
List<Query> queries = doFnTester.processBundle(QUERY);
assertEquals(expectedNumSplits, queries.size());
verify(mockQuerySplitter, times(1))
    .getSplits(eq(QUERY), any(PartitionId.class), eq(expectedNumSplits), any(Datastore.class));
verify(mockDatastore, times(1)).runQuery(latestTimestampRequest);
verify(mockDatastore, times(1)).runQuery(statRequest);
}
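The expected split count here is implied by the stubbed statistics: the stat response reports entityBytes equal to 20 default-bundle sizes, so a size-based estimate yields 20 splits. A rough sketch of that estimate (the actual computation inside SplitQueryFn is not shown on this page):
// Assumed estimate: one split per DEFAULT_BUNDLE_SIZE_BYTES of entity data.
long totalEntityBytes = expectedNumSplits * DEFAULT_BUNDLE_SIZE_BYTES; // as stubbed above
int estimatedNumSplits = (int) (totalEntityBytes / DEFAULT_BUNDLE_SIZE_BYTES); // = 20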
use of com.google.datastore.v1.Query in project beam by apache.
the class DatastoreV1Test method testReadFnRetriesErrors.
/**
* Tests that {@link ReadFn} retries after an error.
*/
@Test
public void testReadFnRetriesErrors() throws Exception {
// An empty query to read entities.
Query query = Query.newBuilder().setLimit(Int32Value.newBuilder().setValue(1)).build();
// Use mockResponseForQuery to generate results.
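// Consecutive stubbing: the first runQuery call throws DEADLINE_EXCEEDED, the retried call
// answers with a mock response built from the request's query.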
when(mockDatastore.runQuery(any(RunQueryRequest.class)))
    .thenThrow(new DatastoreException("RunQuery", Code.DEADLINE_EXCEEDED, "", null))
    .thenAnswer(invocationOnMock -> {
      Query q = ((RunQueryRequest) invocationOnMock.getArguments()[0]).getQuery();
      return mockResponseForQuery(q);
    });
ReadFn readFn = new ReadFn(V_1_OPTIONS, mockDatastoreFactory);
DoFnTester<Query, Entity> doFnTester = DoFnTester.of(readFn);
doFnTester.setCloningBehavior(CloningBehavior.DO_NOT_CLONE);
doFnTester.processBundle(query);
verifyMetricWasSet("BatchDatastoreRead", "ok", NAMESPACE, 1);
verifyMetricWasSet("BatchDatastoreRead", "unknown", NAMESPACE, 1);
}
use of org.skife.jdbi.v2.Query in project druid by druid-io.
the class SqlSegmentsMetadataQuery method retrieveSegments.
private CloseableIterator<DataSegment> retrieveSegments(
    final String dataSource,
    final Collection<Interval> intervals,
    final IntervalMode matchMode,
    final boolean used
) {
// Check if the intervals all support comparing as strings. If so, bake them into the SQL.
final boolean compareAsString = intervals.stream().allMatch(Intervals::canCompareEndpointsAsStrings);
final StringBuilder sb = new StringBuilder();
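// The %s placeholder is substituted with the segments table name when the query is created below.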
sb.append("SELECT payload FROM %s WHERE used = :used AND dataSource = :dataSource");
if (compareAsString && !intervals.isEmpty()) {
sb.append(" AND (");
for (int i = 0; i < intervals.size(); i++) {
sb.append(matchMode.makeSqlCondition(
    connector.getQuoteString(),
    StringUtils.format(":start%d", i),
    StringUtils.format(":end%d", i)
));
if (i == intervals.size() - 1) {
sb.append(")");
} else {
sb.append(" OR ");
}
}
}
final Query<Map<String, Object>> sql = handle
    .createQuery(StringUtils.format(sb.toString(), dbTables.getSegmentsTable()))
    .setFetchSize(connector.getStreamingFetchSize())
    .bind("used", used)
    .bind("dataSource", dataSource);
if (compareAsString) {
final Iterator<Interval> iterator = intervals.iterator();
for (int i = 0; iterator.hasNext(); i++) {
Interval interval = iterator.next();
sql.bind(StringUtils.format("start%d", i), interval.getStart().toString())
    .bind(StringUtils.format("end%d", i), interval.getEnd().toString());
}
}
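// Deserialize each row's payload column (JSON bytes) into a DataSegment.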
final ResultIterator<DataSegment> resultIterator = sql
    .map((index, r, ctx) -> JacksonUtils.readValue(jsonMapper, r.getBytes(1), DataSegment.class))
    .iterator();
return CloseableIterators.wrap(Iterators.filter(resultIterator, dataSegment -> {
if (intervals.isEmpty()) {
return true;
} else {
// Also filter in memory: intervals that could not be compared as strings were not filtered
// in the SQL above (e.g. a segment interval like "20010/20011".)
for (Interval interval : intervals) {
if (matchMode.apply(interval, dataSegment.getInterval())) {
return true;
}
}
return false;
}
}), resultIterator);
}
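For reference, the org.skife.jdbi.v2.Query pattern used above (createQuery, bind, setFetchSize, then fetch results) looks roughly like this in isolation; the table name and DataSource below are placeholders, not Druid's actual schema:
// Minimal sketch, assuming a javax.sql.DataSource named dataSource and a placeholder table name.
final DBI dbi = new DBI(dataSource);
final List<Map<String, Object>> rows = dbi.withHandle(
    handle -> handle.createQuery("SELECT payload FROM example_table WHERE used = :used")
        .bind("used", true)
        .setFetchSize(100)
        .list()
);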
use of org.skife.jdbi.v2.Query in project druid by druid-io.
the class JdbcCacheGenerator method lastUpdates.
@Nullable
private Long lastUpdates(CacheScheduler.EntryImpl<JdbcExtractionNamespace> key, JdbcExtractionNamespace namespace) {
final DBI dbi = ensureDBI(key, namespace);
final String table = namespace.getTable();
final String tsColumn = namespace.getTsColumn();
if (tsColumn == null) {
return null;
}
final Timestamp update = dbi.withHandle(handle -> {
final String query = StringUtils.format("SELECT MAX(%s) FROM %s", tsColumn, table);
return handle.createQuery(query).map(TimestampMapper.FIRST).first();
});
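// Note: update may be null (e.g. if the table has no rows, MAX returns NULL), in which case
// getTime() below would throw a NullPointerException.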
return update.getTime();
}