Use of com.datastax.oss.dsbulk.workflow.commons.statement.MappedBoundStatement in project dsbulk by DataStax.
From the class LogManagerTest, method should_stop_when_unrecoverable_error_writing:
@Test
void should_stop_when_unrecoverable_error_writing() throws Exception {
  Path outputDir = Files.createTempDirectory("test4");
  LogManager logManager =
      new LogManager(
          session,
          outputDir,
          ErrorThreshold.forAbsoluteValue(1000),
          ErrorThreshold.forAbsoluteValue(0),
          true,
          statementFormatter,
          EXTENDED,
          rowFormatter);
  logManager.init();
  // An IllegalArgumentException wrapped in a DriverExecutionException is unrecoverable:
  // it must abort the operation instead of being redirected to the bad file.
  DefaultWriteResult result =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverExecutionException(new IllegalArgumentException("error 1")),
              new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1"))));
  Flux<WriteResult> stmts = Flux.just(result);
  try {
    stmts.transform(logManager.newFailedWritesHandler()).blockLast();
    fail("Expecting DriverExecutionException to be thrown");
  } catch (DriverExecutionException e) {
    assertThat(e.getCause()).isInstanceOf(IllegalArgumentException.class).hasMessage("error 1");
  }
  logManager.close();
  // The failed record and its error details must have been persisted before the abort.
  Path bad = logManager.getOperationDirectory().resolve("load.bad");
  Path errors = logManager.getOperationDirectory().resolve("load-errors.log");
  Path positions = logManager.getOperationDirectory().resolve("positions.txt");
  assertThat(bad.toFile()).exists();
  assertThat(errors.toFile()).exists();
  assertThat(positions.toFile()).exists();
  List<String> badLines = Files.readAllLines(bad, UTF_8);
  assertThat(badLines).hasSize(1);
  assertThat(badLines.get(0)).isEqualTo(source1.trim());
  assertThat(FileUtils.listAllFilesInDirectory(logManager.getOperationDirectory()))
      .containsOnly(bad, errors, positions);
  List<String> lines = Files.readAllLines(errors, UTF_8);
  String content = String.join("\n", lines);
  assertThat(content)
      .containsOnlyOnce("Resource: " + resource1)
      .containsOnlyOnce("Source: " + LogManagerUtils.formatSingleLine(source1))
      .contains("Position: 1")
      .contains("INSERT 1")
      .contains("error 1")
      .containsOnlyOnce(
          "com.datastax.oss.dsbulk.executor.api.exception.BulkExecutionException: "
              + "Statement execution failed: INSERT 1");
  List<String> positionLines = Files.readAllLines(positions, UTF_8);
  assertThat(positionLines).contains("file:///file1.csv:1");
}
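The behavior pinned down here: LogManager's failed-writes handler redirects recoverable failures to load.bad and load-errors.log, but rethrows unrecoverable ones, such as an IllegalArgumentException surfacing through a DriverExecutionException, so that the stream and the whole operation stop. A minimal Reactor sketch of that dispatch, simplified to a Flux of Throwable, with hypothetical isUnrecoverable and appendToBadFile helpers standing in for dsbulk's actual logic:

import reactor.core.publisher.Flux;
import reactor.core.publisher.Mono;

final class FailedWritesDispatchSketch {

  // Hypothetical predicate: which failures must abort the whole operation.
  static boolean isUnrecoverable(Throwable error) {
    return error.getCause() instanceof IllegalArgumentException;
  }

  // Hypothetical stand-in for appending the offending record to load.bad
  // and its details to load-errors.log.
  static void appendToBadFile(Throwable error) {
    System.err.println("recoverable failure logged: " + error);
  }

  // Log recoverable failures and continue; rethrow unrecoverable ones, which
  // terminates the stream exactly as blockLast() observes in the test above.
  static Flux<Throwable> handleFailures(Flux<Throwable> failures) {
    return failures.concatMap(
        error -> {
          if (isUnrecoverable(error)) {
            return Mono.error(error); // aborts downstream processing
          }
          appendToBadFile(error);
          return Mono.empty(); // failure absorbed; processing continues
        });
  }
}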
Use of com.datastax.oss.dsbulk.workflow.commons.statement.MappedBoundStatement in project dsbulk by DataStax.
From the class LogManagerTest, method setUp:
@BeforeEach
void setUp() throws Exception {
  session = mockSession();
  resource1 = new URI("file:///file1.csv");
  resource2 = new URI("file:///file2.csv");
  resource3 = new URI("file:///file3.csv");
  // Records that failed during parsing or mapping.
  csvRecord1 = new DefaultErrorRecord(source1, resource1, 1, new RuntimeException("error 1"));
  csvRecord2 = new DefaultErrorRecord(source2, resource2, 2, new RuntimeException("error 2"));
  csvRecord3 = new DefaultErrorRecord(source3, resource3, 3, new RuntimeException("error 3"));
  unmappableStmt1 = new UnmappableStatement(csvRecord1, new RuntimeException("error 1"));
  unmappableStmt2 = new UnmappableStatement(csvRecord2, new RuntimeException("error 2"));
  unmappableStmt3 = new UnmappableStatement(csvRecord3, new RuntimeException("error 3"));
  // Failed writes whose statements carry the originating record (MappedBoundStatement).
  failedWriteResult1 =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 1"),
              new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1"))));
  failedWriteResult2 =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 2"),
              new MappedBoundStatement(csvRecord2, mockBoundStatement("INSERT 2"))));
  failedWriteResult3 =
      new DefaultWriteResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 3"),
              new MappedBoundStatement(csvRecord3, mockBoundStatement("INSERT 3"))));
  // Failed reads wrap plain bound statements (no source record to carry).
  failedReadResult1 =
      new DefaultReadResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 1"), mockBoundStatement("SELECT 1")));
  failedReadResult2 =
      new DefaultReadResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 2"), mockBoundStatement("SELECT 2")));
  failedReadResult3 =
      new DefaultReadResult(
          new BulkExecutionException(
              new DriverTimeoutException("error 3"), mockBoundStatement("SELECT 3")));
  BatchStatement batch =
      BatchStatement.newInstance(
          DefaultBatchType.UNLOGGED,
          new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1", "foo", 42)),
          new MappedBoundStatement(csvRecord2, mockBoundStatement("INSERT 2", "bar", 43)),
          new MappedBoundStatement(csvRecord3, mockBoundStatement("INSERT 3", "qix", 44)));
  batchWriteResult =
      new DefaultWriteResult(
          new BulkExecutionException(new DriverTimeoutException("error batch"), batch));
  // Successful reads, and error records produced from their rows.
  ExecutionInfo info = mock(ExecutionInfo.class);
  row1 = mockRow(1);
  Row row2 = mockRow(2);
  Row row3 = mockRow(3);
  Statement<?> stmt1 = SimpleStatement.newInstance("SELECT 1");
  Statement<?> stmt2 = SimpleStatement.newInstance("SELECT 2");
  Statement<?> stmt3 = SimpleStatement.newInstance("SELECT 3");
  successfulReadResult1 = new DefaultReadResult(stmt1, info, row1);
  ReadResult successfulReadResult2 = new DefaultReadResult(stmt2, info, row2);
  ReadResult successfulReadResult3 = new DefaultReadResult(stmt3, info, row3);
  rowRecord1 = new DefaultErrorRecord(successfulReadResult1, tableResource, 1, new RuntimeException("error 1"));
  rowRecord2 = new DefaultErrorRecord(successfulReadResult2, tableResource, 2, new RuntimeException("error 2"));
  rowRecord3 = new DefaultErrorRecord(successfulReadResult3, tableResource, 3, new RuntimeException("error 3"));
}
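mockSession, mockBoundStatement and mockRow are helpers defined elsewhere in this test class. For orientation only, a Mockito-based sketch of what the single-argument mockBoundStatement could look like (hypothetical; the real helper also handles the name/value pairs passed in the batch above):

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import com.datastax.oss.driver.api.core.cql.BoundStatement;
import com.datastax.oss.driver.api.core.cql.PreparedStatement;

// Hypothetical sketch, not dsbulk's actual test helper: a BoundStatement mock whose
// prepared statement reports the given query string, so log assertions such as
// contains("INSERT 1") can match the formatted statement.
static BoundStatement mockBoundStatement(String query) {
  PreparedStatement ps = mock(PreparedStatement.class);
  when(ps.getQuery()).thenReturn(query);
  BoundStatement bs = mock(BoundStatement.class);
  when(bs.getPreparedStatement()).thenReturn(ps);
  return bs;
}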
Use of com.datastax.oss.dsbulk.workflow.commons.statement.MappedBoundStatement in project dsbulk by DataStax.
From the class DefaultRecordMapper, method bindStatement:
private MappedBoundStatement bindStatement(Record record, PreparedStatement insertStatement) {
  BoundStatementBuilder builder = boundStatementBuilderFactory.apply(insertStatement);
  ColumnDefinitions variableDefinitions = insertStatement.getVariableDefinitions();
  for (Field field : record.fields()) {
    Set<CQLWord> variables = mapping.fieldToVariables(field);
    for (CQLWord variable : variables) {
      CqlIdentifier name = variable.asIdentifier();
      if (size == 1 || variableDefinitions.contains(name)) {
        DataType cqlType = variableDefinitions.get(name).getType();
        GenericType<?> fieldType = recordMetadata.getFieldType(field, cqlType);
        Object raw = record.getFieldValue(field);
        builder = bindColumn(builder, field, variable, raw, cqlType, fieldType);
      }
    }
  }
  ensurePrimaryKeySet(builder);
  // Native protocol versions before V4 cannot leave variables unset,
  // so every remaining variable must be bound explicitly.
  if (protocolVersion.getCode() < DefaultProtocolVersion.V4.getCode()) {
    ensureAllVariablesSet(builder, insertStatement);
  }
  BoundStatement bs = builder.build();
  // Wrap the bound statement so the originating record travels with it;
  // error handlers use this to trace a failed write back to its source.
  return new MappedBoundStatement(record, bs);
}
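The protocol-version guard exists because native protocol V4 introduced unset variables; under older protocol versions every variable of a bound statement must be bound explicitly. A sketch of the kind of pass ensureAllVariablesSet performs, with the bound-index tracking invented here for illustration (dsbulk's actual implementation differs):

import com.datastax.oss.driver.api.core.cql.BoundStatementBuilder;
import com.datastax.oss.driver.api.core.cql.ColumnDefinitions;
import java.util.Set;

// Hypothetical sketch: bind NULL to every variable that was not bound during
// mapping, since pre-V4 protocol versions cannot represent "unset" values.
static BoundStatementBuilder bindRemainingVariablesToNull(
    BoundStatementBuilder builder,
    ColumnDefinitions variableDefinitions,
    Set<Integer> boundIndices) { // indices bound so far; this tracking is illustrative
  for (int i = 0; i < variableDefinitions.size(); i++) {
    if (!boundIndices.contains(i)) {
      builder = builder.setToNull(i);
    }
  }
  return builder;
}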