Use of com.datastax.oss.dsbulk.connectors.api.DefaultErrorRecord in the dsbulk project by DataStax.
The class DefaultReadResultMapper, method map.
// Maps a successful read result to a connector record by converting each row
// column into one or more record fields according to the configured mapping.
//
// Never throws: any failure (missing row, codec error, …) is surfaced as a
// DefaultErrorRecord so downstream error handling can log it.
@NonNull
@Override
public Record map(@NonNull ReadResult result) {
  // Retain a reference to the originating result only when sources are kept.
  Object recordSource = retainRecordSources ? result : null;
  try {
    Row row = result.getRow().orElseThrow(IllegalStateException::new);
    ColumnDefinitions definitions = row.getColumnDefinitions();
    DefaultRecord record = new DefaultRecord(recordSource, resource, -1);
    for (ColumnDefinition definition : definitions) {
      CQLWord word = CQLWord.fromInternal(definition.getName().asInternal());
      CqlIdentifier identifier = word.asIdentifier();
      DataType columnType = definition.getType();
      // A single CQL variable may feed several record fields.
      for (Field field : mapping.variableToFields(word)) {
        GenericType<?> fieldType = null;
        try {
          fieldType = recordMetadata.getFieldType(field, columnType);
          TypeCodec<?> codec = mapping.codec(word, columnType, fieldType);
          record.setFieldValue(field, row.get(identifier, codec));
        } catch (Exception e) {
          // NOTE: fieldType can still be null here if getFieldType itself failed.
          String msg =
              String.format(
                  "Could not deserialize column %s of type %s as %s",
                  identifier.asCql(true), columnType, fieldType);
          throw new IllegalArgumentException(msg, e);
        }
      }
    }
    return record;
  } catch (Exception e) {
    return new DefaultErrorRecord(recordSource, resource, -1, e);
  }
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultErrorRecord in the dsbulk project by DataStax.
The class LogManagerTest, method should_handle_failed_records_without_source.
// Verifies that a failed record whose source is null is logged without a
// "Source: " line, while its resource and position are still recorded.
@Test
void should_handle_failed_records_without_source() throws Exception {
  Path outputDir = Files.createTempDirectory("test");
  LogManager logManager =
      new LogManager(
          session,
          outputDir,
          ErrorThreshold.forAbsoluteValue(1),
          ErrorThreshold.forAbsoluteValue(0),
          true,
          statementFormatter,
          EXTENDED,
          rowFormatter);
  logManager.init();
  Record record = new DefaultErrorRecord(null, resource1, 1, new RuntimeException("error 1"));
  Flux.just(record).transform(logManager.newFailedRecordsHandler()).blockLast();
  logManager.close();
  Path errorFile = logManager.getOperationDirectory().resolve("connector-errors.log");
  Path positionsFile = logManager.getOperationDirectory().resolve("positions.txt");
  assertThat(errorFile.toFile()).exists();
  assertThat(positionsFile.toFile()).exists();
  // Exactly these two files must be produced, nothing else.
  assertThat(FileUtils.listAllFilesInDirectory(logManager.getOperationDirectory()))
      .containsOnly(errorFile, positionsFile);
  String content = String.join("\n", Files.readAllLines(errorFile, UTF_8));
  assertThat(content)
      .doesNotContain("Source: ")
      .contains("Resource: " + resource1)
      .contains("java.lang.RuntimeException: error 1");
  assertThat(Files.readAllLines(positionsFile, UTF_8)).containsOnly("file:///file1.csv:1");
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultErrorRecord in the dsbulk project by DataStax.
The class LogManagerTest, method should_handle_unmappable_records_without_source.
// Verifies that an unmappable record with a null source is logged with neither
// "Source: ", "Resource: ", nor "Position: " lines, only the error itself.
@Test
void should_handle_unmappable_records_without_source() throws Exception {
  Path outputDir = Files.createTempDirectory("test");
  LogManager logManager =
      new LogManager(
          session,
          outputDir,
          ErrorThreshold.forAbsoluteValue(1),
          ErrorThreshold.forAbsoluteValue(0),
          true,
          statementFormatter,
          EXTENDED,
          rowFormatter);
  logManager.init();
  Record record = new DefaultErrorRecord(null, tableResource, 1, new RuntimeException("error 1"));
  Flux.just(record).transform(logManager.newUnmappableRecordsHandler()).blockLast();
  logManager.close();
  Path errorFile = logManager.getOperationDirectory().resolve("mapping-errors.log");
  assertThat(errorFile.toFile()).exists();
  // Only the mapping-errors log must be produced.
  assertThat(FileUtils.listAllFilesInDirectory(logManager.getOperationDirectory()))
      .containsOnly(errorFile);
  String content = String.join("\n", Files.readAllLines(errorFile, UTF_8));
  assertThat(content)
      .doesNotContain("Source: ")
      .doesNotContain("Resource: ")
      .doesNotContain("Position: ")
      .contains("java.lang.RuntimeException: error 1");
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultErrorRecord in the dsbulk project by DataStax.
The class LogManagerTest, method should_print_raw_bytes_when_column_cannot_be_properly_deserialized.
// Verifies that when a column value cannot be deserialized, the log falls back
// to printing the raw bytes of the column along with a "malformed buffer" note.
@Test
void should_print_raw_bytes_when_column_cannot_be_properly_deserialized() throws Exception {
  Path outputDir = Files.createTempDirectory("test");
  LogManager logManager =
      new LogManager(
          session,
          outputDir,
          ErrorThreshold.forAbsoluteValue(2),
          ErrorThreshold.forAbsoluteValue(0),
          true,
          statementFormatter,
          EXTENDED,
          rowFormatter);
  // Emulate bad row with corrupted data, see DefaultReadResultMapper
  IllegalArgumentException cause =
      new IllegalArgumentException("Invalid 32-bits integer value, expecting 4 bytes but got 5");
  IllegalArgumentException iae =
      new IllegalArgumentException(
          "Could not deserialize column c1 of type int as java.lang.Integer", cause);
  when(row1.getObject(0)).thenThrow(cause);
  // 5 bytes for an INT column: deliberately malformed.
  when(row1.getBytesUnsafe(0)).thenReturn(ByteBuffer.wrap(new byte[] {1, 2, 3, 4, 5}));
  rowRecord1 = new DefaultErrorRecord(successfulReadResult1, tableResource, 1, iae);
  logManager.init();
  Flux.just(rowRecord1).transform(logManager.newUnmappableRecordsHandler()).blockLast();
  logManager.close();
  Path errorFile = logManager.getOperationDirectory().resolve("mapping-errors.log");
  assertThat(errorFile.toFile()).exists();
  assertThat(FileUtils.listAllFilesInDirectory(logManager.getOperationDirectory()))
      .containsOnly(errorFile);
  String content = String.join("\n", Files.readAllLines(errorFile, UTF_8));
  assertThat(content)
      .doesNotContain("Resource: ")
      .doesNotContain("Position: ")
      .contains("SELECT 1")
      .contains("c1: 0x0102030405 (malformed buffer for type INT)")
      .contains(iae.getMessage())
      .contains(cause.getMessage());
}
Use of com.datastax.oss.dsbulk.connectors.api.DefaultErrorRecord in the dsbulk project by DataStax.
The class LogManagerTest, method setUp.
// Builds the shared fixture graph used by the LogManager tests: per-resource CSV
// error records, unmappable statements, failed write/read results, a batch write
// result, and row-based error records wrapping successful reads. Later fixtures
// reference earlier ones, so assignment order matters.
@BeforeEach
void setUp() throws Exception {
session = mockSession();
// URIs identifying the (fake) input files each record came from.
resource1 = new URI("file:///file1.csv");
resource2 = new URI("file:///file2.csv");
resource3 = new URI("file:///file3.csv");
// Connector-level error records, one per resource, at positions 1..3.
csvRecord1 = new DefaultErrorRecord(source1, resource1, 1, new RuntimeException("error 1"));
csvRecord2 = new DefaultErrorRecord(source2, resource2, 2, new RuntimeException("error 2"));
csvRecord3 = new DefaultErrorRecord(source3, resource3, 3, new RuntimeException("error 3"));
// Statements that could not be mapped, wrapping the CSV records above.
unmappableStmt1 = new UnmappableStatement(csvRecord1, new RuntimeException("error 1"));
unmappableStmt2 = new UnmappableStatement(csvRecord2, new RuntimeException("error 2"));
unmappableStmt3 = new UnmappableStatement(csvRecord3, new RuntimeException("error 3"));
// Write results that failed with a driver timeout, tied back to the CSV records.
failedWriteResult1 = new DefaultWriteResult(new BulkExecutionException(new DriverTimeoutException("error 1"), new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1"))));
failedWriteResult2 = new DefaultWriteResult(new BulkExecutionException(new DriverTimeoutException("error 2"), new MappedBoundStatement(csvRecord2, mockBoundStatement("INSERT 2"))));
failedWriteResult3 = new DefaultWriteResult(new BulkExecutionException(new DriverTimeoutException("error 3"), new MappedBoundStatement(csvRecord3, mockBoundStatement("INSERT 3"))));
// Read results that failed with a driver timeout (no record attached).
failedReadResult1 = new DefaultReadResult(new BulkExecutionException(new DriverTimeoutException("error 1"), mockBoundStatement("SELECT 1")));
failedReadResult2 = new DefaultReadResult(new BulkExecutionException(new DriverTimeoutException("error 2"), mockBoundStatement("SELECT 2")));
failedReadResult3 = new DefaultReadResult(new BulkExecutionException(new DriverTimeoutException("error 3"), mockBoundStatement("SELECT 3")));
// A failed unlogged batch containing all three mapped bound statements.
BatchStatement batch = BatchStatement.newInstance(DefaultBatchType.UNLOGGED, new MappedBoundStatement(csvRecord1, mockBoundStatement("INSERT 1", "foo", 42)), new MappedBoundStatement(csvRecord2, mockBoundStatement("INSERT 2", "bar", 43)), new MappedBoundStatement(csvRecord3, mockBoundStatement("INSERT 3", "qix", 44)));
batchWriteResult = new DefaultWriteResult(new BulkExecutionException(new DriverTimeoutException("error batch"), batch));
ExecutionInfo info = mock(ExecutionInfo.class);
// Mocked rows backing the successful read results below.
row1 = mockRow(1);
Row row2 = mockRow(2);
Row row3 = mockRow(3);
Statement<?> stmt1 = SimpleStatement.newInstance("SELECT 1");
Statement<?> stmt2 = SimpleStatement.newInstance("SELECT 2");
Statement<?> stmt3 = SimpleStatement.newInstance("SELECT 3");
// Successful reads; only successfulReadResult1/row1 are fields (used in tests that
// emulate deserialization failures), the rest stay local to this method.
successfulReadResult1 = new DefaultReadResult(stmt1, info, row1);
ReadResult successfulReadResult2 = new DefaultReadResult(stmt2, info, row2);
ReadResult successfulReadResult3 = new DefaultReadResult(stmt3, info, row3);
// Row-level error records wrapping the successful reads as their source.
rowRecord1 = new DefaultErrorRecord(successfulReadResult1, tableResource, 1, new RuntimeException("error 1"));
rowRecord2 = new DefaultErrorRecord(successfulReadResult2, tableResource, 2, new RuntimeException("error 2"));
rowRecord3 = new DefaultErrorRecord(successfulReadResult3, tableResource, 3, new RuntimeException("error 3"));
}
Aggregations