Use of `org.thingsboard.server.common.data.exception.ThingsboardKafkaClientError` in project thingsboard (by thingsboard).
Class `TbKafkaNode`, method `init`:
@Override
public void init(TbContext ctx, TbNodeConfiguration configuration) throws TbNodeException {
    this.config = TbNodeUtils.convert(configuration, TbKafkaNodeConfiguration.class);
    this.initError = null;
    Properties properties = new Properties();
    // Unique client id per rule node instance and service so brokers can distinguish producers.
    properties.put(ProducerConfig.CLIENT_ID_CONFIG, "producer-tb-kafka-node-" + ctx.getSelfId().getId().toString() + "-" + ctx.getServiceId());
    properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getBootstrapServers());
    properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, config.getValueSerializer());
    properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, config.getKeySerializer());
    properties.put(ProducerConfig.ACKS_CONFIG, config.getAcks());
    properties.put(ProducerConfig.RETRIES_CONFIG, config.getRetries());
    properties.put(ProducerConfig.BATCH_SIZE_CONFIG, config.getBatchSize());
    properties.put(ProducerConfig.LINGER_MS_CONFIG, config.getLinger());
    properties.put(ProducerConfig.BUFFER_MEMORY_CONFIG, config.getBufferMemory());
    // User-supplied extras are applied last and therefore override the explicit settings above.
    if (config.getOtherProperties() != null) {
        config.getOtherProperties().forEach(properties::put);
    }
    addMetadataKeyValuesAsKafkaHeaders = BooleanUtils.toBooleanDefaultIfNull(config.isAddMetadataKeyValuesAsKafkaHeaders(), false);
    // Charset.forName throws IllegalCharsetNameException/UnsupportedCharsetException for a bad
    // configured name; that propagates out of init uncaught (same as original behavior).
    toBytesCharset = config.getKafkaHeadersCharset() != null ? Charset.forName(config.getKafkaHeadersCharset()) : StandardCharsets.UTF_8;
    try {
        this.producer = new KafkaProducer<>(properties);
        // Hook the producer's network I/O thread (obtained via reflection) so that fatal
        // ThingsboardKafkaClientError failures mark this node as broken and tear it down.
        Thread ioThread = (Thread) ReflectionUtils.getField(IO_THREAD_FIELD, producer);
        if (ioThread == null) {
            // Field lookup can fail across Kafka client versions; fail fast with a clear
            // message instead of an opaque NullPointerException.
            throw new IllegalStateException("Failed to access Kafka producer I/O thread via reflection; cannot install error handler");
        }
        ioThread.setUncaughtExceptionHandler((thread, throwable) -> {
            if (throwable instanceof ThingsboardKafkaClientError) {
                initError = throwable;
                destroy();
            }
        });
    } catch (Exception e) {
        // Preserve the cause so the caller sees why producer initialization failed.
        throw new TbNodeException(e);
    }
}
Use of `org.thingsboard.server.common.data.exception.ThingsboardKafkaClientError` in project thingsboard (by thingsboard).
Class `NetworkReceive`, method `readFrom`:
/**
 * Reads bytes from the channel into this receive: first the 4-byte size prefix,
 * then the payload buffer. May be called repeatedly until the receive is complete.
 *
 * @param channel source channel to read from
 * @return the number of bytes consumed from the channel during this call (can be 0)
 * @throws IOException             on channel read failure
 * @throws EOFException            if the channel reaches end-of-stream mid-receive
 * @throws InvalidReceiveException if the decoded size prefix is negative
 * @throws ThingsboardKafkaClientError if the decoded size exceeds {@code maxSize}
 *         — a ThingsBoard-specific fatal error type so the producer's uncaught-exception
 *         handler can detect it and shut the node down
 */
public long readFrom(ScatteringByteChannel channel) throws IOException {
    int read = 0;
    if (size.hasRemaining()) {
        // Still reading the 4-byte size prefix.
        int bytesRead = channel.read(size);
        if (bytesRead < 0)
            throw new EOFException();
        read += bytesRead;
        if (!size.hasRemaining()) {
            // Prefix complete: rewind and decode the payload size.
            size.rewind();
            int receiveSize = size.getInt();
            if (receiveSize < 0)
                throw new InvalidReceiveException("Invalid receive (size = " + receiveSize + ")");
            if (maxSize != UNLIMITED && receiveSize > maxSize) {
                // Oversized frame: thrown as ThingsboardKafkaClientError (instead of upstream
                // Kafka's InvalidReceiveException) so it is treated as a fatal client error.
                throw new ThingsboardKafkaClientError("Invalid receive (size = " + receiveSize + " larger than " + maxSize + ")");
            }
            // may be 0 for some payloads (SASL)
            requestedBufferSize = receiveSize;
            if (receiveSize == 0) {
                buffer = EMPTY_BUFFER;
            }
        }
    }
    if (buffer == null && requestedBufferSize != -1) {
        // We know the size we want but haven't been able to allocate it yet.
        if (requestedBufferSize > TB_LOG_REQUESTED_BUFFER_SIZE) {
            // Unusually large allocation request: log it with the full call stack
            // (joined on '|') to aid diagnosing oversized receives.
            String stackTrace = Arrays.stream(Thread.currentThread().getStackTrace()).map(StackTraceElement::toString).collect(Collectors.joining("|"));
            log.error("Allocating buffer of size {} for source {}", requestedBufferSize, source);
            log.error("Stack Trace: {}", stackTrace);
        }
        // Allocation may fail under memory pressure; retried on the next readFrom call.
        buffer = memoryPool.tryAllocate(requestedBufferSize);
        if (buffer == null)
            log.trace("Broker low on memory - could not allocate buffer of size {} for source {}", requestedBufferSize, source);
    }
    if (buffer != null) {
        // Read (possibly partial) payload into the allocated buffer.
        int bytesRead = channel.read(buffer);
        if (bytesRead < 0)
            throw new EOFException();
        read += bytesRead;
    }
    return read;
}
Aggregations