Use of com.datastax.oss.dsbulk.workflow.commons.schema.ReadResultCounter in the dsbulk project by DataStax.
Class SchemaSettingsTest, method should_use_custom_query_when_mode_is_global: when the only stats mode is global and a custom query is configured, the prepared count query keeps the custom query's FROM and WHERE clauses but selects only the partition key column c1.
@Test
void should_use_custom_query_when_mode_is_global() {
  when(table.getClusteringColumns()).thenReturn(ImmutableMap.of(col2, ClusteringOrder.ASC));
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema", "query", "\"SELECT c1, c3 FROM ks.t1 WHERE c1 = 0\"");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_COUNT);
  settings.init(session, codecFactory, false, true);
  ReadResultCounter counter =
      settings.createReadResultCounter(session, codecFactory, EnumSet.of(global), 10);
  assertThat(counter).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue()).isEqualTo("SELECT c1 FROM ks.t1 WHERE c1 = 0");
}
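These snippets reference fixtures that SchemaSettingsTest sets up elsewhere: the mocked session, table, col1, col2, the codecFactory, and the statically imported READ_AND_COUNT and stats-mode constants (global, hosts, ranges, partitions). Below is a minimal sketch of what that setup could look like, assuming Mockito mocks of the Java driver 4.x metadata API and relying on the test class's existing imports; the real field names and stubbing may differ.

// Hypothetical fixture setup (not the project's actual code): mock the driver metadata
// chain so that SchemaSettings can resolve table ks.t1 and its partition key c1.
CqlSession session = mock(CqlSession.class);
Metadata metadata = mock(Metadata.class);
KeyspaceMetadata keyspace = mock(KeyspaceMetadata.class);
TableMetadata table = mock(TableMetadata.class);
ColumnMetadata col1 = mock(ColumnMetadata.class);
ColumnMetadata col2 = mock(ColumnMetadata.class);
when(session.getMetadata()).thenReturn(metadata);
when(metadata.getKeyspace(CqlIdentifier.fromInternal("ks"))).thenReturn(Optional.of(keyspace));
when(keyspace.getTable(CqlIdentifier.fromInternal("t1"))).thenReturn(Optional.of(table));
when(table.getPartitionKey()).thenReturn(ImmutableList.of(col1));
when(col1.getName()).thenReturn(CqlIdentifier.fromInternal("c1"));
when(col2.getName()).thenReturn(CqlIdentifier.fromInternal("c2"));
// The tests only capture the query string passed to prepare(), so a plain mock suffices here.
when(session.prepare(anyString())).thenReturn(mock(PreparedStatement.class));
// codecFactory is assumed to be a ConvertingCodecFactory; the real test likely uses an actual instance.
ConvertingCodecFactory codecFactory = mock(ConvertingCodecFactory.class);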
Class SchemaSettingsTest, method should_create_row_counter_for_hosts_stats: for per-host stats, the generated count query selects token(c1) restricted to a token range (:start, :end].
@Test
void should_create_row_counter_for_hosts_stats() {
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_COUNT);
  settings.init(session, codecFactory, false, true);
  ReadResultCounter counter =
      settings.createReadResultCounter(session, codecFactory, EnumSet.of(hosts), 10);
  assertThat(counter).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT token(c1) FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
}
Class SchemaSettingsTest, method should_create_row_counter_for_ranges_stats: for per-range stats, the generated count query is the same token(c1) query restricted to a token range.
@Test
void should_create_row_counter_for_ranges_stats() {
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_COUNT);
  settings.init(session, codecFactory, false, true);
  ReadResultCounter counter =
      settings.createReadResultCounter(session, codecFactory, EnumSet.of(ranges), 10);
  assertThat(counter).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT token(c1) FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
}
Class SchemaSettingsTest, method should_create_row_counter_for_partitions_and_ranges_stats: when both partitions and ranges stats are requested, the generated count query selects the partition key column c1 restricted to a token range.
@Test
void should_create_row_counter_for_partitions_and_ranges_stats() {
  when(table.getClusteringColumns()).thenReturn(ImmutableMap.of(col2, ClusteringOrder.ASC));
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_COUNT);
  settings.init(session, codecFactory, false, true);
  ReadResultCounter counter =
      settings.createReadResultCounter(session, codecFactory, EnumSet.of(partitions, ranges), 10);
  assertThat(counter).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT c1 FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
}
Class SchemaSettingsTest, method should_create_row_counter_for_partition_stats: for per-partition stats, the generated count query selects the partition key column c1 restricted to a token range.
@Test
void should_create_row_counter_for_partition_stats() {
  when(table.getClusteringColumns()).thenReturn(ImmutableMap.of(col2, ClusteringOrder.ASC));
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_COUNT);
  settings.init(session, codecFactory, false, true);
  ReadResultCounter counter =
      settings.createReadResultCounter(session, codecFactory, EnumSet.of(partitions), 10);
  assertThat(counter).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT c1 FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
}
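The four keyspace/table tests above differ only in the stats modes passed to createReadResultCounter and in the expected query string. As a sketch of one possible consolidation (not the project's actual code), they could be folded into a single JUnit 5 parameterized test; the StatisticsMode enum name, the EnumSet parameter type, and the shared fixtures are assumptions carried over from the snippets above, and the extra imports needed are org.junit.jupiter.params.ParameterizedTest, MethodSource, Arguments and java.util.stream.Stream.

@ParameterizedTest
@MethodSource("statsModesAndExpectedQueries")
void should_create_row_counter_for_stats(EnumSet<StatisticsMode> modes, String expectedQuery) {
  // The originals stub clustering columns only for the partitions variants; with strict
  // Mockito stubbing this blanket stub may need to be made conditional.
  when(table.getClusteringColumns()).thenReturn(ImmutableMap.of(col2, ClusteringOrder.ASC));
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_COUNT);
  settings.init(session, codecFactory, false, true);
  ReadResultCounter counter = settings.createReadResultCounter(session, codecFactory, modes, 10);
  assertThat(counter).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue()).isEqualTo(expectedQuery);
}

static Stream<Arguments> statsModesAndExpectedQueries() {
  // Expected query strings copied from the assertions above.
  String tokenRangeQuery =
      "SELECT token(c1) FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end";
  String partitionKeyQuery =
      "SELECT c1 FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end";
  return Stream.of(
      Arguments.of(EnumSet.of(hosts), tokenRangeQuery),
      Arguments.of(EnumSet.of(ranges), tokenRangeQuery),
      Arguments.of(EnumSet.of(partitions), partitionKeyQuery),
      Arguments.of(EnumSet.of(partitions, ranges), partitionKeyQuery));
}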