Use of com.github.jcustenborder.vertica.QueryBuilder in project kafka-connect-vertica by jcustenborder.
The example below shows the put method of the VerticaSinkTask class.
@Override
public void put(Collection<SinkRecord> records) {
  // Group the incoming records by destination table (the topic name is the table name).
  Multimap<String, SinkRecord> recordsByTable = HashMultimap.create(this.config.expectedTopics, this.config.expectedRecords);
  Multiset<String> countsByTable = HashMultiset.create(this.config.expectedTopics);
  for (SinkRecord record : records) {
    String table = record.topic();
    countsByTable.add(table);
    recordsByTable.put(table, record);
  }
  for (String table : countsByTable.elementSet()) {
    log.trace("put() - Writing {} record(s) to {}", countsByTable.count(table), table);
  }

  DataSource dataSource = PoolOfPools.get(this.config);

  try (Connection connection = dataSource.getConnection()) {
    VerticaConnection verticaConnection = connection.unwrap(VerticaConnection.class);

    try {
      for (final String tableName : recordsByTable.keySet()) {
        log.trace("put() - Processing records for table '{}'", tableName);
        Collection<SinkRecord> tableRecords = recordsByTable.get(tableName);

        // Build the COPY statement for this table and open a copy stream for it.
        VerticaStreamWriterBuilder builder = configureBuilder(verticaConnection, tableName);
        final String statement = new QueryBuilder(builder).toString();
        log.info("put() - Creating VerticaCopyStream with statement:\n{}", statement);
        VerticaCopyStream copyStream = new VerticaCopyStream(verticaConnection, statement);
        copyStream.start();

        // The writer produces rows into outputStream; a background task feeds the
        // connected inputStream to the copy stream.
        final PipedInputStream inputStream = new PipedInputStream(this.config.inputBufferSize);
        final PipedOutputStream outputStream = new PipedOutputStream(inputStream);

        try {
          Stopwatch stopwatch = Stopwatch.createStarted();
          Future<?> importFuture = executorService.submit(() -> {
            try {
              copyStream.addStream(inputStream);
              copyStream.execute();
            } catch (SQLException e) {
              throw new IllegalStateException(e);
            }
          });

          int count = 0;
          try (VerticaStreamWriter writer = builder.build(outputStream)) {
            for (SinkRecord record : tableRecords) {
              Struct value = (Struct) record.value();
              // Emit the record's fields in the column order the writer expects.
              int i = 0;
              Object[] values = new Object[writer.columns().size()];
              for (VerticaColumnInfo columnInfo : writer.columns()) {
                values[i] = value.get(columnInfo.name());
                i++;
              }
              log.trace("Writing row");
              writer.write(values);
              count++;
            }
            log.info("Wrote {} record(s) to stream", count);
          }
          // Close the pipe so the import task sees end-of-stream.
          outputStream.close();

          log.info("Waiting for import to complete.");
          try {
            importFuture.get(this.config.streamTimeoutMs, TimeUnit.MILLISECONDS);
          } catch (TimeoutException e) {
            log.warn("Import exceeded timeout of {} ms. Rolling back", this.config.streamTimeoutMs);
            connection.rollback();
          }
          log.info("put() - Imported {} record(s) in {} millisecond(s).", count, stopwatch.elapsed(TimeUnit.MILLISECONDS));

          final int rejectedCount = copyStream.getRejects().size();
          if (rejectedCount > 0) {
            log.warn("put() - Rejected {} record(s).", rejectedCount);
            for (Long l : copyStream.getRejects()) {
              log.warn("Rejected row {}", l);
            }
          }
        } catch (InterruptedException | ExecutionException e) {
          log.error("Exception thrown", e);
        } finally {
          inputStream.close();
        }
      }
    } catch (IOException | ExecutionException ex) {
      throw new RetriableException(ex);
    }

    log.trace("put() - committing transaction");
    connection.commit();
  } catch (SQLException ex) {
    throw new RetriableException(ex);
  }
}
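The grouping at the top of put leans on Guava's Multimap and Multiset. Below is a minimal, self-contained sketch of that pattern, with plain strings standing in for SinkRecords; the topic names are made up for illustration.

import com.google.common.collect.HashMultimap;
import com.google.common.collect.HashMultiset;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multiset;

import java.util.List;

public class GroupingSketch {
  public static void main(String[] args) {
    // Hypothetical input: topic names standing in for incoming records.
    List<String> topics = List.of("orders", "orders", "customers");

    // The multimap keeps every record per key; the multiset only keeps counts.
    Multimap<String, String> recordsByTable = HashMultimap.create();
    Multiset<String> countsByTable = HashMultiset.create();

    int i = 0;
    for (String topic : topics) {
      recordsByTable.put(topic, "record-" + i++);
      countsByTable.add(topic);
    }

    for (String table : countsByTable.elementSet()) {
      System.out.printf("%d record(s) for %s: %s%n",
          countsByTable.count(table), table, recordsByTable.get(table));
    }
  }
}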
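The interesting mechanics in the method are the piped streams: the task writes rows into a PipedOutputStream on the calling thread while a background task hands the connected PipedInputStream to the copy stream, then bounds the wait with streamTimeoutMs. Here is a stripped-down sketch of that producer/consumer handoff using only the JDK, with a byte-counting consumer standing in for the Vertica import.

import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

public class PipeSketch {
  public static void main(String[] args) throws Exception {
    ExecutorService executor = Executors.newSingleThreadExecutor();
    PipedInputStream in = new PipedInputStream(8192);
    PipedOutputStream out = new PipedOutputStream(in);

    // Consumer: stands in for copyStream.addStream(in) / copyStream.execute().
    Future<Long> consumed = executor.submit(() -> {
      long total = 0;
      byte[] buf = new byte[1024];
      int n;
      while ((n = in.read(buf)) != -1) {
        total += n;
      }
      return total;
    });

    // Producer: stands in for the VerticaStreamWriter writing rows.
    try (out) {
      for (int row = 0; row < 3; row++) {
        out.write(("row-" + row + "\n").getBytes(StandardCharsets.UTF_8));
      }
    } // closing the pipe signals end-of-stream to the consumer

    // Bound the wait, as the sink does with streamTimeoutMs.
    System.out.println("bytes consumed: " + consumed.get(10, TimeUnit.SECONDS));
    executor.shutdown();
  }
}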
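For context, the VerticaCopyStream lifecycle used above (start, addStream, execute, then inspect rejects) comes from Vertica's JDBC driver. The following is a rough standalone sketch, assuming Vertica's documented streaming COPY API; the connection URL, credentials, table, and CSV payload are placeholders.

import com.vertica.jdbc.VerticaConnection;
import com.vertica.jdbc.VerticaCopyStream;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.sql.Connection;
import java.sql.DriverManager;

public class CopyStreamSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder connection details.
    try (Connection conn = DriverManager.getConnection(
        "jdbc:vertica://localhost:5433/db", "dbadmin", "")) {
      conn.setAutoCommit(false);
      VerticaConnection vconn = conn.unwrap(VerticaConnection.class);

      // Hypothetical table and CSV payload.
      String copy = "COPY example_table FROM STDIN DELIMITER ',' ABORT ON ERROR";
      InputStream data = new ByteArrayInputStream(
          "1,alice\n2,bob\n".getBytes(StandardCharsets.UTF_8));

      VerticaCopyStream stream = new VerticaCopyStream(vconn, copy);
      stream.start();              // issues the COPY statement
      stream.addStream(data);      // queue a stream of rows
      stream.execute();            // send the queued streams to the server
      long rows = stream.finish(); // returns the number of rows loaded
      System.out.printf("loaded %d row(s), %d reject(s)%n",
          rows, stream.getRejects().size());

      conn.commit();
    }
  }
}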