Uses of com.datastax.oss.dsbulk.workflow.commons.schema.RecordMapper in the dsbulk project by DataStax.
From the class SchemaSettingsTest, method should_create_record_mapper_when_mapping_is_a_list_and_mapped:
@Test
void should_create_record_mapper_when_mapping_is_a_list_and_mapped() {
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema",
          "mapping",
          "\"\\\"COL 2\\\", c1\", ",
          "keyspace",
          "ks",
          "table",
          "t1");
  SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
  settings.init(session, codecFactory, false, true);
  RecordMapper mapper = settings.createRecordMapper(session, recordMetadata, false);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("INSERT INTO ks.t1 (\"COL 2\", c1) VALUES (:\"COL 2\", :c1)");
  assertMapping(mapper, C1, C1, C2, C2);
}
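All of the snippets on this page rely on fixtures declared elsewhere in SchemaSettingsTest: a mocked CqlSession whose prepare(...) call is captured, a ConvertingCodecFactory, a RecordMetadata stub, mocked ColumnMetadata objects (col1, col2, col3), and the identifier constants C1 to C4 for the columns c1, "COL 2", c3 and c4. The sketch below shows one way such mocks could be built with Mockito and the Java driver 4 API; the class and helper names are hypothetical, and the project's actual setup code is more elaborate.

import static org.mockito.Mockito.*;

import com.datastax.oss.driver.api.core.CqlIdentifier;
import com.datastax.oss.driver.api.core.CqlSession;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;
import com.datastax.oss.driver.api.core.metadata.schema.ColumnMetadata;
import com.datastax.oss.driver.api.core.type.DataTypes;

// Hypothetical fixture sketch; the real fixtures live in SchemaSettingsTest's setup methods.
final class SchemaSettingsFixtureSketch {

  // A column mock exposing just a name and a type, which is what the mapping logic inspects.
  static ColumnMetadata mockColumn(String internalName) {
    ColumnMetadata col = mock(ColumnMetadata.class);
    when(col.getName()).thenReturn(CqlIdentifier.fromInternal(internalName));
    when(col.getType()).thenReturn(DataTypes.TEXT);
    return col;
  }

  // A session mock that returns a canned PreparedStatement, so that
  // verify(session).prepare(argument.capture()) can inspect the generated CQL string.
  static CqlSession mockSession() {
    CqlSession session = mock(CqlSession.class);
    PreparedStatement ps = mock(PreparedStatement.class, RETURNS_DEEP_STUBS);
    when(session.prepare(anyString())).thenReturn(ps);
    return session;
  }
}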
From the same class, method should_not_error_when_insert_query_does_not_contain_clustering_column_but_mutation_is_static_only:
@Test
void should_not_error_when_insert_query_does_not_contain_clustering_column_but_mutation_is_static_only() {
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema", "query", "\"INSERT INTO ks.t1 (c1, c3) VALUES (:c1, :c3)\"");
  when(table.getPrimaryKey()).thenReturn(newArrayList(col1, col2));
  when(table.getPartitionKey()).thenReturn(singletonList(col1));
  when(table.getClusteringColumns()).thenReturn(ImmutableMap.of(col2, ClusteringOrder.ASC));
  when(col3.isStatic()).thenReturn(true);
  SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
  settings.init(session, codecFactory, false, true);
  RecordMapper mapper = settings.createRecordMapper(session, recordMetadata, false);
  @SuppressWarnings("unchecked")
  Set<CQLWord> primaryKeyVariables =
      (Set<CQLWord>) getInternalState(mapper, "primaryKeyVariables");
  assertThat(primaryKeyVariables).doesNotContain(CQLWord.fromCqlIdentifier(C2));
}
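The stubs above describe a table where c1 is the partition key, the column behind col2 is the clustering column, and c3 is static. CQL allows an INSERT that writes only the partition key and static columns to omit the clustering columns entirely, because static values belong to the partition rather than to any single row, which is why this query must not be rejected. A hypothetical DDL for such a table is sketched below, assuming col2 is the "COL 2" column used in the other snippets; the column types are placeholders, not taken from the project.

// Hypothetical table shape implied by the mocks above (types are placeholders).
String assumedDdl =
    "CREATE TABLE ks.t1 ("
        + "c1 text, "
        + "\"COL 2\" text, "
        + "c3 text STATIC, "
        + "PRIMARY KEY (c1, \"COL 2\"))";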
From the same class, method should_create_record_mapper_when_keyspace_and_table_provided:
@Test
void should_create_record_mapper_when_keyspace_and_table_provided() {
  Config config =
      TestConfigUtils.createTestConfig("dsbulk.schema", "keyspace", "ks", "table", "t1");
  SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
  settings.init(session, codecFactory, true, true);
  RecordMapper mapper = settings.createRecordMapper(session, recordMetadata, false);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("INSERT INTO ks.t1 (c1, \"COL 2\", c3) VALUES (:c1, :\"COL 2\", :c3)");
  assertMapping(mapper, C1, C1, C2, C2, C3, C3);
}
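Note how the expected INSERT quotes "COL 2" but not c1 or c3: identifiers that cannot appear unquoted in CQL (here because of the space and the upper-case letters) must be double-quoted when rendered. The driver's CqlIdentifier class illustrates the rule; this standalone snippet is only an illustration and is not part of the test class.

import com.datastax.oss.driver.api.core.CqlIdentifier;

final class IdentifierQuotingSketch {
  public static void main(String[] args) {
    // asCql(true) adds double quotes only when the identifier cannot appear unquoted in CQL.
    System.out.println(CqlIdentifier.fromInternal("COL 2").asCql(true)); // prints "COL 2" (quoted)
    System.out.println(CqlIdentifier.fromInternal("c1").asCql(true));    // prints c1 (unquoted)
  }
}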
From the same class, method should_create_record_mapper_with_inferred_mapping_and_override:
@Test
void should_create_record_mapper_with_inferred_mapping_and_override() {
  // Infer the mapping, but override it so that the c4 source field feeds the c3 column.
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema", "keyspace", "ks", "table", "t1", "mapping", "\" *=*, c4 = c3 \"");
  SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
  settings.init(session, codecFactory, false, true);
  RecordMapper mapper = settings.createRecordMapper(session, recordMetadata, false);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue())
      .isEqualTo("INSERT INTO ks.t1 (c3, c1, \"COL 2\") VALUES (:c3, :c1, :\"COL 2\")");
  assertMapping(mapper, C1, C1, C2, C2, C4, C3);
}
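Stripped of HOCON and Java escaping (and of the surrounding whitespace the test deliberately includes), the mapping here is simply *=*, c4 = c3: infer a field-to-column mapping for every column, then map the record field c4 onto the column c3 instead of a field named c3. The decoded string and the resulting associations are spelled out below for reference; the comments restate what the test's own assertions check.

// The mapping option from the test config above, with escaping removed.
String inferredWithOverride = "*=*, c4 = c3";

// Field -> column associations checked by assertMapping(mapper, C1, C1, C2, C2, C4, C3):
//   field c1      -> column c1
//   field "COL 2" -> column "COL 2"
//   field c4      -> column c3   (the override; no field named c3 is mapped)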
From the same class, method should_create_record_mapper_with_inferred_mapping_and_skip_multiple:
@Test
void should_create_record_mapper_with_inferred_mapping_and_skip_multiple() {
  // Infer mapping, but skip C2 and C3.
  Config config =
      TestConfigUtils.createTestConfig(
          "dsbulk.schema",
          "keyspace",
          "ks",
          "table",
          "t1",
          "mapping",
          "\" *=[-\\\"COL 2\\\", -c3] \"");
  SchemaSettings settings = new SchemaSettings(config, MAP_AND_WRITE);
  settings.init(session, codecFactory, false, true);
  RecordMapper mapper = settings.createRecordMapper(session, recordMetadata, false);
  assertThat(mapper).isNotNull();
  ArgumentCaptor<String> argument = ArgumentCaptor.forClass(String.class);
  verify(session).prepare(argument.capture());
  assertThat(argument.getValue()).isEqualTo("INSERT INTO ks.t1 (c1) VALUES (:c1)");
  assertMapping(mapper, C1, C1);
}
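The escaped mapping in this last snippet unescapes to *=[-"COL 2", -c3]: infer the mapping for every column except those listed with a leading minus sign. With "COL 2" and c3 excluded, only c1 remains, which matches the single-column INSERT and the single mapping pair asserted above. The decoded string, with comments restating the test's expectations:

// The mapping option from the test config above, with escaping removed.
String inferredWithExclusions = "*=[-\"COL 2\", -c3]"; // infer all columns except "COL 2" and c3

// Only c1 is left to map, hence:
//   INSERT INTO ks.t1 (c1) VALUES (:c1)
//   assertMapping(mapper, C1, C1)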