Usage example of com.datastax.oss.dsbulk.workflow.commons.statement.UnmappableStatement in the dsbulk project by DataStax.
The following snippet is the DefaultRecordMapperTest method should_return_unmappable_statement_when_extra_field.
@Test
void should_return_unmappable_statement_when_extra_field() {
  // The mapping only declares F1 and F2, so the record's third field is extraneous
  // and (with allowExtraFields = false) must produce an UnmappableStatement.
  when(mapping.fields()).thenReturn(set(F1, F2));
  RecordMapper mapper =
      new DefaultRecordMapper(
          Collections.singletonList(insertStatement),
          set(C1),
          set(C2, C3),
          V4,
          mapping,
          recordMetadata,
          false,
          false,
          false,
          statement -> boundStatementBuilder);
  Statement<?> mapped = mapper.map(record).single().block();
  // The mapper must not emit the bound statement; it must wrap the failure instead.
  assertThat(mapped).isNotSameAs(boundStatement).isInstanceOf(UnmappableStatement.class);
  UnmappableStatement failure = (UnmappableStatement) mapped;
  assertThat(failure.getError())
      .isInstanceOf(InvalidMappingException.class)
      .hasMessageContaining(
          "Extraneous field field3 was found in record. "
              + "Please declare it explicitly in the mapping "
              + "or set schema.allowExtraFields to true.");
}
Usage example of com.datastax.oss.dsbulk.workflow.commons.statement.UnmappableStatement in the dsbulk project by DataStax.
The following snippet is the DefaultRecordMapperTest method should_return_unmappable_statement_when_missing_field.
@Test
void should_return_unmappable_statement_when_missing_field() {
  // The record only carries F1 and F2, so a mapped column is left without a source
  // field; with allowMissingFields = false this must yield an UnmappableStatement.
  when(record.fields()).thenReturn(set(F1, F2));
  RecordMapper mapper =
      new DefaultRecordMapper(
          Collections.singletonList(insertStatement),
          set(C1),
          set(C2, C3),
          V4,
          mapping,
          recordMetadata,
          false,
          true,
          false,
          statement -> boundStatementBuilder);
  Statement<?> mapped = mapper.map(record).single().block();
  // Expect a wrapped failure rather than the bound statement itself.
  assertThat(mapped).isNotSameAs(boundStatement).isInstanceOf(UnmappableStatement.class);
  UnmappableStatement failure = (UnmappableStatement) mapped;
  assertThat(failure.getError())
      .isInstanceOf(InvalidMappingException.class)
      .hasMessageContaining(
          "Required field field3 (mapped to column \"My Fancy Column Name\") was missing from record. "
              + "Please remove it from the mapping "
              + "or set schema.allowMissingFields to true.");
}
Usage example of com.datastax.oss.dsbulk.workflow.commons.statement.UnmappableStatement in the dsbulk project by DataStax.
The following snippet is the DefaultRecordMapperTest method should_return_unmappable_statement_when_pk_column_null.
@Test
void should_return_unmappable_statement_when_pk_column_null() {
  // F1 maps to a primary key column; a null value there is never bindable,
  // so the mapper must reject the record as unmappable.
  when(record.getFieldValue(F1)).thenReturn(null);
  RecordMapper mapper =
      new DefaultRecordMapper(
          Collections.singletonList(insertStatement),
          set(C1),
          set(C2, C3),
          V4,
          mapping,
          recordMetadata,
          false,
          true,
          false,
          statement -> boundStatementBuilder);
  Statement<?> mapped = mapper.map(record).single().block();
  assertThat(mapped).isNotSameAs(boundStatement).isInstanceOf(UnmappableStatement.class);
  UnmappableStatement failure = (UnmappableStatement) mapped;
  assertThat(failure.getError())
      .isInstanceOf(InvalidMappingException.class)
      .hasMessageContaining("Primary key column col1 cannot be set to null");
}
Usage example of com.datastax.oss.dsbulk.workflow.commons.statement.UnmappableStatement in the dsbulk project by DataStax.
The following snippet is the LogManagerTest method setUp.
@BeforeEach
void setUp() throws Exception {
  session = mockSession();

  // Three CSV resources, one failing record per resource.
  resource1 = new URI("file:///file1.csv");
  resource2 = new URI("file:///file2.csv");
  resource3 = new URI("file:///file3.csv");
  csvRecord1 = new DefaultErrorRecord(source1, resource1, 1, new RuntimeException("error 1"));
  csvRecord2 = new DefaultErrorRecord(source2, resource2, 2, new RuntimeException("error 2"));
  csvRecord3 = new DefaultErrorRecord(source3, resource3, 3, new RuntimeException("error 3"));

  // Statements that could not be mapped at all.
  unmappableStmt1 = new UnmappableStatement(csvRecord1, new RuntimeException("error 1"));
  unmappableStmt2 = new UnmappableStatement(csvRecord2, new RuntimeException("error 2"));
  unmappableStmt3 = new UnmappableStatement(csvRecord3, new RuntimeException("error 3"));

  // Write results that failed with a driver timeout.
  failedWriteResult1 =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 1"),
              new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1"))));
  failedWriteResult2 =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 2"),
              new MappedBoundStatement(csvRecord2, mockBoundStatement("INSERT 2"))));
  failedWriteResult3 =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 3"),
              new MappedBoundStatement(csvRecord3, mockBoundStatement("INSERT 3"))));

  // Read results that failed with a driver timeout.
  failedReadResult1 =
      new DefaultReadResult(
          new BulkExecutionException(new DriverTimeoutException("error 1"), mockBoundStatement("SELECT 1")));
  failedReadResult2 =
      new DefaultReadResult(
          new BulkExecutionException(new DriverTimeoutException("error 2"), mockBoundStatement("SELECT 2")));
  failedReadResult3 =
      new DefaultReadResult(
          new BulkExecutionException(new DriverTimeoutException("error 3"), mockBoundStatement("SELECT 3")));

  // A failed unlogged batch containing all three mapped bound statements.
  BatchStatement failedBatch =
      BatchStatement.newInstance(
          DefaultBatchType.UNLOGGED,
          new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1", "foo", 42)),
          new MappedBoundStatement(csvRecord2, mockBoundStatement("INSERT 2", "bar", 43)),
          new MappedBoundStatement(csvRecord3, mockBoundStatement("INSERT 3", "qix", 44)));
  batchWriteResult =
      new DefaultWriteResult(
          new BulkExecutionException(new DriverTimeoutException("error batch"), failedBatch));

  // Successful reads whose rows later fail to convert into records.
  ExecutionInfo executionInfo = mock(ExecutionInfo.class);
  row1 = mockRow(1);
  Row secondRow = mockRow(2);
  Row thirdRow = mockRow(3);
  Statement<?> select1 = SimpleStatement.newInstance("SELECT 1");
  Statement<?> select2 = SimpleStatement.newInstance("SELECT 2");
  Statement<?> select3 = SimpleStatement.newInstance("SELECT 3");
  successfulReadResult1 = new DefaultReadResult(select1, executionInfo, row1);
  ReadResult secondReadResult = new DefaultReadResult(select2, executionInfo, secondRow);
  ReadResult thirdReadResult = new DefaultReadResult(select3, executionInfo, thirdRow);
  rowRecord1 = new DefaultErrorRecord(successfulReadResult1, tableResource, 1, new RuntimeException("error 1"));
  rowRecord2 = new DefaultErrorRecord(secondReadResult, tableResource, 2, new RuntimeException("error 2"));
  rowRecord3 = new DefaultErrorRecord(thirdReadResult, tableResource, 3, new RuntimeException("error 3"));
}
Usage example of com.datastax.oss.dsbulk.workflow.commons.statement.UnmappableStatement in the dsbulk project by DataStax.
The following snippet is the LogManagerTest method should_handle_unmappable_statements_without_source.
@Test
void should_handle_unmappable_statements_without_source() throws Exception {
  Path logDir = Files.createTempDirectory("test");
  LogManager logManager =
      new LogManager(
          session,
          logDir,
          ErrorThreshold.forAbsoluteValue(1),
          ErrorThreshold.forAbsoluteValue(0),
          true,
          statementFormatter,
          EXTENDED,
          rowFormatter);
  logManager.init();

  // A record with a null source: the log entry must omit the "Source:" line.
  Record sourcelessRecord = DefaultRecord.indexed(null, resource1, 1, "foo", " bar");
  UnmappableStatement unmappable =
      new UnmappableStatement(sourcelessRecord, new RuntimeException("error 1"));
  Flux.<BatchableStatement<?>>just(unmappable)
      .transform(logManager.newUnmappableStatementsHandler())
      .blockLast();
  logManager.close();

  // Only the mapping-errors log and the positions file should have been written.
  Path errorFile = logManager.getOperationDirectory().resolve("mapping-errors.log");
  Path positionsFile = logManager.getOperationDirectory().resolve("positions.txt");
  assertThat(errorFile.toFile()).exists();
  assertThat(positionsFile.toFile()).exists();
  assertThat(FileUtils.listAllFilesInDirectory(logManager.getOperationDirectory()))
      .containsOnly(errorFile, positionsFile);

  List<String> errorLines = Files.readAllLines(errorFile, UTF_8);
  String errorContent = String.join("\n", errorLines);
  assertThat(errorContent)
      .doesNotContain("Source: ")
      .contains("Resource: " + resource1)
      .contains("Position: 1")
      .contains("java.lang.RuntimeException: error 1");
}
Aggregations