Example use of com.datastax.driver.core.Cluster.Builder from the Apache Flink project:
the class CassandraPojoSinkExample, method main.
public static void main(String[] args) throws Exception {
	// Streaming job that writes the predefined POJO collection into Cassandra.
	final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	final DataStreamSource<Message> source = env.fromCollection(messages);

	// Driver configuration: a single local contact point.
	final ClusterBuilder clusterBuilder = new ClusterBuilder() {
		@Override
		protected Cluster buildCluster(Builder builder) {
			return builder.addContactPoint("127.0.0.1").build();
		}
	};
	CassandraSink.addSink(source).setClusterBuilder(clusterBuilder).build();

	env.execute("Cassandra Sink example");
}
Example use of com.datastax.driver.core.Cluster.Builder from the Apache Flink project:
the class CassandraTupleSinkExample, method main.
public static void main(String[] args) throws Exception {
	// Streaming job that writes the predefined tuples into Cassandra via the INSERT query.
	final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	final DataStreamSource<Tuple2<String, Integer>> source = env.fromCollection(collection);

	// Driver configuration: a single local contact point.
	final ClusterBuilder clusterBuilder = new ClusterBuilder() {
		@Override
		protected Cluster buildCluster(Builder builder) {
			return builder.addContactPoint("127.0.0.1").build();
		}
	};
	CassandraSink.addSink(source).setQuery(INSERT).setClusterBuilder(clusterBuilder).build();

	env.execute("WriteTupleIntoCassandra");
}
Example use of com.datastax.driver.core.Cluster.Builder from the Apache Flink project:
the class BatchExample, method main.
/*
 * table script: "CREATE TABLE test.batches (number int, strings text, PRIMARY KEY(number, strings));"
 */
public static void main(String[] args) throws Exception {
	// Batch job: write 20 tuples into Cassandra, then read them back and print them.
	final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
	env.setParallelism(1);

	// Build the (index, "string <index>") payload.
	final ArrayList<Tuple2<Integer, String>> collection = new ArrayList<>(20);
	for (int idx = 0; idx < 20; idx++) {
		collection.add(new Tuple2<>(idx, "string " + idx));
	}
	final DataSet<Tuple2<Integer, String>> dataSet = env.fromCollection(collection);

	// Write phase: each ClusterBuilder targets the local node.
	dataSet.output(new CassandraOutputFormat<Tuple2<Integer, String>>(INSERT_QUERY, new ClusterBuilder() {
		@Override
		protected Cluster buildCluster(Builder builder) {
			return builder.addContactPoints("127.0.0.1").build();
		}
	}));
	env.execute("Write");

	// Read phase: pull the rows back out with the SELECT query.
	final DataSet<Tuple2<Integer, String>> inputDS = env.createInput(new CassandraInputFormat<Tuple2<Integer, String>>(SELECT_QUERY, new ClusterBuilder() {
		@Override
		protected Cluster buildCluster(Builder builder) {
			return builder.addContactPoints("127.0.0.1").build();
		}
	}), TupleTypeInfo.of(new TypeHint<Tuple2<Integer, String>>() {
	}));
	inputDS.print();
}
Example use of com.datastax.driver.core.Cluster.Builder from the cassandra-driver-mapping project (by valchkou):
the class SchemaSyncTest, method init.
@BeforeClass
public static void init() {
	// Connect once for the whole test class; Builder calls are chained
	// since each returns the same Builder instance.
	final String contactPoint = "127.0.0.1";
	cluster = Cluster.builder()
			.addContactPoint(contactPoint)
			.withLoadBalancingPolicy(LatencyAwarePolicy.builder(new RoundRobinPolicy()).build())
			.withReconnectionPolicy(new ConstantReconnectionPolicy(1000L))
			.build();
	session = cluster.connect();

	// Deliberately tiny, fast-expiring statement cache (size 1, 1 ms TTL)
	// so cache-eviction behavior is exercised by the tests.
	final Cache<String, PreparedStatement> statementCache = CacheBuilder.newBuilder()
			.expireAfterAccess(1, TimeUnit.MILLISECONDS)
			.maximumSize(1)
			.concurrencyLevel(1)
			.build();
	MappingSession.setStatementCache(statementCache);
}
Example use of com.datastax.driver.core.Cluster.Builder from the cassandra-driver-mapping project (by valchkou):
the class MappingSessionAsyncTest, method init.
@BeforeClass
public static void init() {
	// One shared cluster/session for the test class; the Builder is fluent,
	// so the configuration calls are chained.
	final String contactPoint = "127.0.0.1";
	cluster = Cluster.builder()
			.addContactPoint(contactPoint)
			.withLoadBalancingPolicy(LatencyAwarePolicy.builder(new RoundRobinPolicy()).build())
			.withReconnectionPolicy(new ConstantReconnectionPolicy(1000L))
			.build();
	session = cluster.connect();

	// Minimal statement cache (capacity 1, 1 ms access expiry) so that
	// prepared-statement re-preparation paths get exercised.
	final Cache<String, PreparedStatement> statementCache = CacheBuilder.newBuilder()
			.expireAfterAccess(1, TimeUnit.MILLISECONDS)
			.maximumSize(1)
			.concurrencyLevel(1)
			.build();
	MappingSession.setStatementCache(statementCache);
}
Aggregations