Use of com.datastax.driver.core.ConsistencyLevel in project opennms by OpenNMS.
In class ContextConfigurationFactory, method getContextConfigurations:
public static ContextConfigurations getContextConfigurations() {
    // Defaults: a 604800-second (seven-day) resource shard, and the weakest
    // consistency levels the driver supports (ONE for reads, ANY for writes).
    String resourceShardStr = System.getProperty("org.opennms.newts.config.resource_shard", "604800");
    String readConsistencyStr = System.getProperty("org.opennms.newts.config.read_consistency", "ONE");
    String writeConsistencyStr = System.getProperty("org.opennms.newts.config.write_consistency", "ANY");

    Duration resourceShard = Duration.seconds(Long.parseLong(resourceShardStr));
    // valueOf() throws IllegalArgumentException if a property names an
    // unknown consistency level.
    ConsistencyLevel readConsistency = ConsistencyLevel.valueOf(readConsistencyStr);
    ConsistencyLevel writeConsistency = ConsistencyLevel.valueOf(writeConsistencyStr);

    ContextConfigurations contexts = new ContextConfigurations();
    contexts.addContextConfig(Context.DEFAULT_CONTEXT, resourceShard, readConsistency, writeConsistency);
    return contexts;
}
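A minimal caller sketch: the property names are the real ones read by the factory above, while the chosen consistency levels are illustrative overrides.

    // Override the defaults (ONE/ANY) before the factory reads the properties.
    System.setProperty("org.opennms.newts.config.read_consistency", "LOCAL_QUORUM");
    System.setProperty("org.opennms.newts.config.write_consistency", "QUORUM");

    ContextConfigurations contexts = ContextConfigurationFactory.getContextConfigurations();
    // Context.DEFAULT_CONTEXT is now registered with a seven-day (604800 s)
    // resource shard and the overridden consistency levels.
    ConsistencyLevel read = contexts.getReadConsistency(Context.DEFAULT_CONTEXT);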
Use of com.datastax.driver.core.ConsistencyLevel in project newts by OpenNMS.
In class CassandraIndexer, method delete:
@Override
public void delete(final Context context, final Resource resource) {
    final Timer.Context ctx = m_deleteTimer.time();
    final ConsistencyLevel writeConsistency = m_contextConfigurations.getWriteConsistency(context);

    // Collect the statements that unindex the resource, its attributes, and
    // its metric names, then execute them as a single batch.
    final List<RegularStatement> statements = Lists.newArrayList();
    definitelyUnindexResource(statements, context, resource, writeConsistency);
    definitelyUnindexResourceAttributes(statements, context, resource, writeConsistency);
    definitelyRemoveMetricName(statements, context, resource, writeConsistency);

    try {
        if (!statements.isEmpty()) {
            m_session.execute(batch(statements.toArray(new RegularStatement[statements.size()])));
        }
        m_cache.delete(context, resource);
    } finally {
        ctx.stop();
    }
}
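The bodies of the unindex helpers are not shown here. A hypothetical sketch of the pattern, assuming the DataStax QueryBuilder; the keyspace ("newts"), table ("resource_ids"), and column names are illustrative, not the real schema:

    private void definitelyUnindexResource(List<RegularStatement> statements, Context context,
            Resource resource, ConsistencyLevel writeConsistency) {
        // Hypothetical keyspace/table/columns; the real schema may differ.
        RegularStatement delete = QueryBuilder.delete()
                .from("newts", "resource_ids")
                .where(QueryBuilder.eq("context", context.getId()))
                .and(QueryBuilder.eq("resource", resource.getId()));
        // Attach the per-context write consistency before the statement joins the batch.
        delete.setConsistencyLevel(writeConsistency);
        statements.add(delete);
    }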
Use of com.datastax.driver.core.ConsistencyLevel in project newts by OpenNMS.
In class CassandraSearcher, method search:
@Override
public SearchResults search(Context context, Query query, boolean populateMetricsAndAttributes) {
    checkNotNull(context, "context argument");
    checkNotNull(query, "query argument");

    Timer.Context ctx = m_searchTimer.time();
    ConsistencyLevel readConsistency = m_contextConfigurations.getReadConsistency(context);

    SearchResults searchResults = new SearchResults();
    try {
        Set<String> ids;
        Query q = query.rewrite();
        if (q instanceof BooleanQuery) {
            ids = searchForIds(context, (BooleanQuery) q, readConsistency);
        } else if (q instanceof TermQuery) {
            ids = searchForIds(context, (TermQuery) q, readConsistency);
        } else {
            throw new IllegalStateException("Unsupported query: " + q);
        }

        for (final String id : ids) {
            if (!populateMetricsAndAttributes) {
                Resource resource = new Resource(id);
                List<String> emptyList = Collections.emptyList();
                searchResults.addResult(resource, emptyList);
            } else {
                // Fetch the metric names and attributes concurrently
                ResultSetFuture attrsFuture = fetchResourceAttributes(context, id, readConsistency);
                ResultSetFuture metricsFuture = fetchMetricNames(context, id, readConsistency);
                try {
                    Map<String, String> attrs = getResourceAttributesFromResults(attrsFuture);
                    Collection<String> metrics = getMetricNamesFromResults(metricsFuture);
                    Resource resource = attrs.size() > 0 ? new Resource(id, Optional.of(attrs)) : new Resource(id);
                    searchResults.addResult(resource, metrics);
                } catch (ExecutionException | InterruptedException e) {
                    throw Throwables.propagate(e);
                }
            }
        }
        return searchResults;
    } finally {
        ctx.stop();
    }
}
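A hedged usage sketch. The constructor arguments and the Term field/value pair are assumptions; search(), TermQuery, and the SearchResults iteration follow the newts API used above:

    // Hypothetical wiring; the real constructor arguments may differ.
    CassandraSearcher searcher = new CassandraSearcher(session, registry, contextConfigurations);
    SearchResults results = searcher.search(Context.DEFAULT_CONTEXT,
            new TermQuery(new Term("location", "DC1")), true);
    for (SearchResults.Result result : results) {
        // Each result pairs a resource with the metric names indexed under it.
        System.out.printf("%s -> %s%n", result.getResource().getId(), result.getMetrics());
    }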
Use of com.datastax.driver.core.ConsistencyLevel in project ignite by apache.
In class DatasourceSerializationTest, method serializationTest:
/**
 * Serialization test.
 */
@Test
public void serializationTest() {
    DataSource src = new DataSource();

    Credentials cred = new CassandraAdminCredentials();
    String[] points = new String[] { "127.0.0.1", "10.0.0.2", "10.0.0.3" };
    LoadBalancingPolicy plc = new MyLoadBalancingPolicy();

    src.setCredentials(cred);
    src.setContactPoints(points);
    src.setReadConsistency("ONE");
    src.setWriteConsistency("QUORUM");
    src.setLoadBalancingPolicy(plc);

    JavaSerializer serializer = new JavaSerializer();
    ByteBuffer buff = serializer.serialize(src);
    DataSource _src = (DataSource) serializer.deserialize(buff);

    Credentials _cred = (Credentials) getFieldValue(_src, "creds");
    List<InetAddress> _points = (List<InetAddress>) getFieldValue(_src, "contactPoints");
    ConsistencyLevel _readCons = (ConsistencyLevel) getFieldValue(_src, "readConsistency");
    ConsistencyLevel _writeCons = (ConsistencyLevel) getFieldValue(_src, "writeConsistency");
    LoadBalancingPolicy _plc = (LoadBalancingPolicy) getFieldValue(_src, "loadBalancingPlc");

    assertTrue("Incorrectly serialized/deserialized credentials for Cassandra DataSource",
            cred.getPassword().equals(_cred.getPassword()) && cred.getUser().equals(_cred.getUser()));
    assertTrue("Incorrectly serialized/deserialized contact points for Cassandra DataSource",
            "/127.0.0.1".equals(_points.get(0).toString())
            && "/10.0.0.2".equals(_points.get(1).toString())
            && "/10.0.0.3".equals(_points.get(2).toString()));
    assertTrue("Incorrectly serialized/deserialized consistency levels for Cassandra DataSource",
            ConsistencyLevel.ONE == _readCons && ConsistencyLevel.QUORUM == _writeCons);
    assertTrue("Incorrectly serialized/deserialized load balancing policy for Cassandra DataSource",
            _plc instanceof MyLoadBalancingPolicy);
}
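For context, a minimal sketch of the roundtrip a serializer like JavaSerializer typically performs, assuming it wraps the standard java.io object streams (the actual Ignite implementation may differ):

    import java.io.*;
    import java.nio.ByteBuffer;

    public class JavaSerializer {
        public ByteBuffer serialize(Object obj) {
            try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
                 ObjectOutputStream oos = new ObjectOutputStream(bos)) {
                oos.writeObject(obj);
                oos.flush(); // push buffered data into bos before copying it out
                return ByteBuffer.wrap(bos.toByteArray());
            } catch (IOException e) {
                throw new RuntimeException("Serialization failed", e);
            }
        }

        public Object deserialize(ByteBuffer buf) {
            byte[] bytes = new byte[buf.remaining()];
            buf.get(bytes);
            try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
                return ois.readObject();
            } catch (IOException | ClassNotFoundException e) {
                throw new RuntimeException("Deserialization failed", e);
            }
        }
    }

This is why the test can compare field values after the roundtrip: standard Java serialization preserves the DataSource's credentials, contact points, consistency levels, and load balancing policy, provided all of them are Serializable.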