Use of com.datastax.driver.core.Row in project newts by OpenNMS: class CassandraSearcher, method searchForIds.
/**
 * Finds every resource id indexed under the supplied term query.
 *
 * @param context search context whose id scopes the lookup in the terms table
 * @param query term query supplying the field (falling back to the default
 *        term field when absent) and the value to match
 * @param readConsistency consistency level applied to the read
 * @return sorted set of matching resource ids; empty when nothing matches
 */
private Set<String> searchForIds(Context context, TermQuery query, ConsistencyLevel readConsistency) {
    BoundStatement bound = m_searchStatement.bind();
    bound.setString(Schema.C_TERMS_CONTEXT, context.getId());
    bound.setString(Schema.C_TERMS_FIELD, query.getTerm().getField(Constants.DEFAULT_TERM_FIELD));
    bound.setString(Schema.C_TERMS_VALUE, query.getTerm().getValue());
    bound.setConsistencyLevel(readConsistency);

    // TreeSet keeps the ids in natural (sorted) order.
    Set<String> matches = Sets.newTreeSet();
    for (Row row : m_session.execute(bound)) {
        matches.add(row.getString(Constants.Schema.C_TERMS_RESOURCE));
    }
    return matches;
}
Use of com.datastax.driver.core.Row in project javaee7-samples by javaee-samples: class PersonSessionBean, method getPersons.
/**
 * Loads all persons stored in Cassandra.
 *
 * @return a mutable list containing one {@link Person} per row; empty when
 *         the table holds no rows
 */
public List<Person> getPersons() {
    final List<Person> result = new ArrayList<>();
    // Iterate the result set directly; the driver pages rows on demand.
    for (Row row : session.execute(selectAllPersons.bind())) {
        result.add(new Person(row.getString("name"), row.getInt("age")));
    }
    return result;
}
Use of com.datastax.driver.core.Row in project zipkin by openzipkin: class CassandraSpanStore, method getSpanNames.
/**
 * Looks up the distinct span names recorded for a service.
 *
 * <p>Fix: the original called {@code checkNotNull(serviceName, "serviceName")}
 * after the null/empty guard had already returned, so the precondition could
 * never fire; the dead check is removed. Behavior is unchanged.
 *
 * @param serviceName service whose span names are wanted; matched
 *        case-insensitively (the index is queried lower-cased)
 * @return future completing with the span names in natural order, or
 *         {@code EMPTY_LIST} for a null/empty service name; query failures are
 *         surfaced through the returned future
 */
@Override
public ListenableFuture<List<String>> getSpanNames(String serviceName) {
    if (serviceName == null || serviceName.isEmpty()) {
        return EMPTY_LIST;
    }
    // The index stores names lower-cased, so normalize before querying.
    serviceName = serviceName.toLowerCase();
    // bucket is fixed at 0 for this query — presumably the span-name index
    // only writes bucket 0; TODO confirm against the schema.
    int bucket = 0;
    try {
        BoundStatement bound = CassandraUtil.bindWithName(selectSpanNames, "select-span-names")
            .setString("service_name", serviceName)
            .setInt("bucket", bucket)
            .setInt("limit_", 1000);
        return transform(session.executeAsync(bound), new Function<ResultSet, List<String>>() {
            @Override
            public List<String> apply(ResultSet input) {
                // De-duplicate with a set, then hand back a sorted copy.
                Set<String> spanNames = new HashSet<>();
                for (Row row : input) {
                    spanNames.add(row.getString("span_name"));
                }
                return Ordering.natural().sortedCopy(spanNames);
            }
        });
    } catch (RuntimeException ex) {
        return immediateFailedFuture(ex);
    }
}
Use of com.datastax.driver.core.Row in project zipkin by openzipkin: class CassandraSpanStore, method getSpansByTraceIds.
/**
 * Fetches the stored spans for the given trace ids.
 *
 * <p>Only spans that actually exist are returned, so the result may cover
 * fewer traces than were asked for. Spans in a trace are expected to be
 * ordered by the first annotation timestamp, earliest event first.
 *
 * @param traceIds trace ids to look up; must not be null, may be empty
 * @param limit maximum number of rows the query may return
 * @return future completing with the decoded spans (empty list for an empty
 *         id set); failures are reported through the future
 */
ListenableFuture<List<Span>> getSpansByTraceIds(Set<Long> traceIds, int limit) {
    checkNotNull(traceIds, "traceIds");
    if (traceIds.isEmpty()) {
        return immediateFuture(Collections.<Span>emptyList());
    }
    try {
        BoundStatement bound = CassandraUtil.bindWithName(selectTraces, "select-traces")
            .setSet("trace_id", traceIds)
            .setInt("limit_", limit);
        // Pull everything in one page; the row count is bounded by limit_.
        bound.setFetchSize(Integer.MAX_VALUE);
        return transform(session.executeAsync(bound), new Function<ResultSet, List<Span>>() {
            @Override
            public List<Span> apply(ResultSet rs) {
                // Presize from the rows already buffered by the driver.
                List<Span> spans = new ArrayList<>(rs.getAvailableWithoutFetching());
                for (Row row : rs) {
                    spans.add(Codec.THRIFT.readSpan(row.getBytes("span")));
                }
                return spans;
            }
        });
    } catch (RuntimeException ex) {
        return immediateFailedFuture(ex);
    }
}
Use of com.datastax.driver.core.Row in project zipkin by openzipkin: class CassandraSpanStore, method getTraceIdsByAnnotation.
/**
 * Resolves the trace ids carrying a given annotation inside a time window.
 *
 * @param annotationKey annotation to match, encoded via
 *        {@code CassandraUtil.toByteBuffer} for the index lookup
 * @param endTs upper bound of the window (inclusive of serialized end_ts)
 * @param lookback window size; the lower bound is {@code endTs - lookback}
 * @param limit maximum number of rows the query may return
 * @return future completing with a map of trace id to its timestamp, in row
 *         order; encoding and query failures are surfaced through the future
 */
ListenableFuture<Map<Long, Long>> getTraceIdsByAnnotation(String annotationKey, long endTs, long lookback, int limit) {
    // Lower bound of the queried time window.
    long startTs = endTs - lookback;
    try {
        BoundStatement bound = CassandraUtil.bindWithName(selectTraceIdsByAnnotation, "select-trace-ids-by-annotation")
            .setBytes("annotation", CassandraUtil.toByteBuffer(annotationKey))
            .setSet("bucket", buckets)
            .setBytesUnsafe("start_ts", timestampCodec.serialize(startTs))
            .setBytesUnsafe("end_ts", timestampCodec.serialize(endTs))
            .setInt("limit_", limit);
        // Fetch in one page; row count is bounded by limit_.
        bound.setFetchSize(Integer.MAX_VALUE);
        return transform(session.executeAsync(bound), new Function<ResultSet, Map<Long, Long>>() {
            @Override
            public Map<Long, Long> apply(ResultSet rs) {
                // LinkedHashMap preserves the order rows came back in.
                Map<Long, Long> tsByTraceId = new LinkedHashMap<>();
                for (Row row : rs) {
                    tsByTraceId.put(row.getLong("trace_id"), timestampCodec.deserialize(row, "ts"));
                }
                return tsByTraceId;
            }
        });
    } catch (CharacterCodingException | RuntimeException ex) {
        // toByteBuffer may throw CharacterCodingException; report both kinds
        // of failure through the returned future rather than propagating.
        return immediateFailedFuture(ex);
    }
}
Aggregations