Use of com.datastax.oss.dsbulk.workflow.commons.schema.ReadResultMapper in project dsbulk by DataStax.
Class SchemaSettingsTest, method should_create_row_mapper_with_inferred_mapping_and_override:
@Test
void should_create_row_mapper_with_inferred_mapping_and_override() {
  // Infer mapping, but override to set c4 source field to C3 column.
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema", "keyspace", "ks", "table", "t1", "mapping", "\" *=*, c4 = c3 \"");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_MAP);
  settings.init(session, codecFactory, false, true);
  ReadResultMapper mapper =
      settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT c3, c1, \"COL 2\" FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
  assertMapping(mapper, C1, C1, C2, C2, C4, C3);
}
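These test methods rely on fixture fields (session, table, col1/col2/col3, the identifier constants C1, C2, C3, C4, plus codecFactory and recordMetadata) that are initialized elsewhere in SchemaSettingsTest and are not shown in this excerpt. Below is a minimal sketch of what such a setup could look like, following the same Mockito pattern used in the solr_query example further down. The field names come from the snippets, but the column types, the partition key choice, and the mockColumn helper are assumptions; the real fixture also stubs keyspace metadata, prepared statements, codecFactory and recordMetadata, which are omitted here.

private static final CqlIdentifier C1 = CqlIdentifier.fromInternal("c1");
private static final CqlIdentifier C2 = CqlIdentifier.fromInternal("COL 2");
private static final CqlIdentifier C3 = CqlIdentifier.fromInternal("c3");
private static final CqlIdentifier C4 = CqlIdentifier.fromInternal("c4"); // used only as a field name in the mappings

private CqlSession session;
private TableMetadata table;
private ColumnMetadata col1, col2, col3;

@BeforeEach
void setUpMockedSchema() {
  session = mock(CqlSession.class);
  table = mock(TableMetadata.class);
  // Three regular columns; the actual types are not shown in the excerpt and are assumed here.
  col1 = mockColumn(C1, DataTypes.INT);
  col2 = mockColumn(C2, DataTypes.TEXT);
  col3 = mockColumn(C3, DataTypes.TEXT);
  when(table.getColumns()).thenReturn(ImmutableMap.of(C1, col1, C2, col2, C3, col3));
  // c1 is assumed to be the partition key: the generated queries all restrict on token(c1).
  when(table.getPartitionKey()).thenReturn(ImmutableList.of(col1));
}

// Hypothetical helper mirroring the mocking pattern of the solr_query test below.
private static ColumnMetadata mockColumn(CqlIdentifier name, DataType type) {
  ColumnMetadata col = mock(ColumnMetadata.class);
  when(col.getName()).thenReturn(name);
  when(col.getType()).thenReturn(type);
  return col;
}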
Class SchemaSettingsTest, method should_create_row_mapper_with_inferred_mapping_and_skip:
@Test
void should_create_row_mapper_with_inferred_mapping_and_skip() {
  // Infer mapping, but skip C2.
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema", "keyspace", "ks", "table", "t1", "mapping", "\" *=-\\\"COL 2\\\" \"");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_MAP);
  settings.init(session, codecFactory, false, true);
  ReadResultMapper mapper =
      settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT c1, c3 FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
  assertMapping(mapper, C1, C1, C3, C3);
}
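The doubly escaped mapping literal above is easier to read once the two layers of quoting are unfolded: Java string escapes first, then the quoting of the config layer (dsbulk settings are Typesafe Config / HOCON). The small illustration below only unfolds the escaping; how TestConfigUtils assembles the config document is not shown in this excerpt and is an assumption.

public class MappingEscapingDemo {
  public static void main(String[] args) {
    // The Java literal passed as the "mapping" value in the test above:
    String javaLiteral = "\" *=-\\\"COL 2\\\" \"";
    // After Java unescaping, the runtime String is:  " *=-\"COL 2\" "
    // The config layer then strips the outer quotes and the \" escapes, leaving the
    // mapping text:  *=-"COL 2"
    // which means: infer every column (*) but exclude the double-quoted column COL 2.
    System.out.println(javaLiteral);
  }
}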
Class SchemaSettingsTest, method should_create_row_mapper_when_mapping_and_statement_provided:
@Test
void should_create_row_mapper_when_mapping_and_statement_provided() {
  // Explicit indexed mapping (field 0 -> COL 2, field 2 -> c1) combined with a
  // user-provided query; the token range restriction is appended to that query.
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema",
          "mapping",
          "\" 0 = \\\"COL 2\\\" , 2 = c1 \", ",
          "query",
          "\"select \\\"COL 2\\\", c1 from ks.t1\"");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_MAP);
  settings.init(session, codecFactory, true, false);
  ReadResultMapper mapper =
      settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("select \"COL 2\", c1 from ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
  assertMapping(mapper, "0", C2, "2", C1);
}
Class SchemaSettingsTest, method should_infer_select_query_with_solr_query_column_if_index_is_not_search_index:
@Test
void should_infer_select_query_with_solr_query_column_if_index_is_not_search_index() {
  // The table also has a solr_query column, but its only index is not a search index,
  // so solr_query is kept in the inferred projection.
  ColumnMetadata solrQueryCol = mock(ColumnMetadata.class);
  CqlIdentifier solrQueryColName = CqlIdentifier.fromInternal("solr_query");
  when(solrQueryCol.getName()).thenReturn(solrQueryColName);
  when(solrQueryCol.getType()).thenReturn(DataTypes.TEXT);
  when(table.getColumns())
      .thenReturn(ImmutableMap.of(C1, col1, C2, col2, C3, col3, solrQueryColName, solrQueryCol));
  IndexMetadata idx = mock(IndexMetadata.class);
  CqlIdentifier idxName = CqlIdentifier.fromInternal("idx");
  when(table.getIndexes()).thenReturn(ImmutableMap.of(idxName, idx));
  when(idx.getClassName()).thenReturn(Optional.of("not a search index"));
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_MAP);
  settings.init(session, codecFactory, false, true);
  ReadResultMapper mapper =
      settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT c1, \"COL 2\", c3, solr_query FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
  assertMapping(mapper, C1, C1, C2, C2, C3, C3, "solr_query", "solr_query");
}
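For contrast, here is a sketch of the complementary case: when the mocked index does report a DSE Search index implementation, solr_query would be expected to drop out of the inferred projection. This test is not part of the excerpt; the index class name (Cql3SolrSecondaryIndex) and the expected query are assumptions based on DSE Search, so treat the assertion below as illustrative rather than authoritative.

@Test
void should_infer_select_query_without_solr_query_column_if_index_is_search_index_sketch() {
  // Same setup as above, except the index class name identifies a DSE Search index (assumed name).
  ColumnMetadata solrQueryCol = mock(ColumnMetadata.class);
  CqlIdentifier solrQueryColName = CqlIdentifier.fromInternal("solr_query");
  when(solrQueryCol.getName()).thenReturn(solrQueryColName);
  when(solrQueryCol.getType()).thenReturn(DataTypes.TEXT);
  when(table.getColumns())
      .thenReturn(ImmutableMap.of(C1, col1, C2, col2, C3, col3, solrQueryColName, solrQueryCol));
  IndexMetadata idx = mock(IndexMetadata.class);
  CqlIdentifier idxName = CqlIdentifier.fromInternal("idx");
  when(table.getIndexes()).thenReturn(ImmutableMap.of(idxName, idx));
  when(idx.getClassName())
      .thenReturn(Optional.of("com.datastax.bdp.search.solr.Cql3SolrSecondaryIndex")); // assumed class name
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_MAP);
  settings.init(session, codecFactory, false, true);
  settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  // Expected (assumption): solr_query no longer appears in the inferred SELECT list.
  assertThat(argument.getValue())
      .isEqualTo("SELECT c1, \"COL 2\", c3 FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
}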
Class SchemaSettingsTest, method should_create_row_mapper_when_mapping_is_a_list_and_indexed:
@Test
void should_create_row_mapper_when_mapping_is_a_list_and_indexed() {
  // Mapping given as a plain list of columns: the resulting record fields are
  // addressed by index ("0" and "1"), in list order.
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema", "mapping", "\"\\\"COL 2\\\", c1\", ", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, READ_AND_MAP);
  settings.init(session, codecFactory, true, false);
  ReadResultMapper mapper =
      settings.createReadResultMapper(session, recordMetadata, codecFactory, true);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("SELECT \"COL 2\", c1 FROM ks.t1 WHERE token(c1) > :start AND token(c1) <= :end");
  assertMapping(mapper, "0", C2, "1", C1);
}
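Each of the snippets above ends with assertMapping, whose arguments alternate between a record field and the column it maps to. The real helper lives elsewhere in SchemaSettingsTest and presumably inspects the mapper's internal mapping; as a pure reading aid, the hypothetical pairing below shows how the varargs are meant to be read.

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical reading aid only: pair up the assertMapping varargs as field -> column.
// The actual assertMapping in SchemaSettingsTest may verify this very differently.
static Map<Object, Object> expectedMapping(Object... fieldsAndColumns) {
  Map<Object, Object> expected = new LinkedHashMap<>();
  for (int i = 0; i < fieldsAndColumns.length; i += 2) {
    expected.put(fieldsAndColumns[i], fieldsAndColumns[i + 1]);
  }
  return expected;
}
// Example: expectedMapping("0", C2, "1", C1) reads as
//   field "0" <- column "COL 2"  and  field "1" <- column c1,
// matching the indexed list mapping in the last test above.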