Use of org.apache.kafka.common.KafkaException in the Apache Kafka project.
Class MemoryRecordsBuilder, method appendUncheckedWithOffset.
/**
 * Append a legacy record at the given offset, skipping the usual offset/magic
 * validation. Intended for test code only.
 *
 * @param offset The offset at which to place the record
 * @param record The legacy record to append
 * @throws KafkaException if writing to the underlying append stream fails
 */
public void appendUncheckedWithOffset(long offset, LegacyRecord record) {
    ensureOpenForRecordAppend();
    try {
        final int recordSize = record.sizeInBytes();
        // Write the legacy batch header first, using the relative (inner) offset.
        AbstractLegacyRecordBatch.writeHeader(appendStream, toInnerOffset(offset), recordSize);
        // Duplicate so the record's own buffer position/limit are left untouched.
        ByteBuffer payload = record.buffer().duplicate();
        appendStream.write(payload.array(), payload.arrayOffset(), payload.limit());
        recordWritten(offset, record.timestamp(), recordSize + Records.LOG_OVERHEAD);
    } catch (IOException e) {
        throw new KafkaException("I/O exception when writing to the append stream, closing", e);
    }
}
Use of org.apache.kafka.common.KafkaException in the Apache Kafka project.
Class MultiRecordsSend, method writeTo.
/**
 * Writes as many of the queued sends to the channel as it will accept in this
 * call, advancing to the next send whenever the current one completes.
 *
 * @param channel the channel to write to
 * @return the number of bytes written during this call
 * @throws IOException if the underlying channel write fails
 * @throws KafkaException if invoked after the multi-send has already completed
 */
@Override
public long writeTo(TransferableChannel channel) throws IOException {
    if (completed())
        throw new KafkaException("This operation cannot be invoked on a complete request.");

    // Must be long: Send.writeTo returns long, and the previous `int` accumulator
    // relied on compound assignment's silent long->int narrowing, which could
    // truncate/overflow on very large writes. The method itself returns long.
    long totalWrittenPerCall = 0;
    boolean sendComplete;
    do {
        long written = current.writeTo(channel);
        totalWrittenPerCall += written;
        sendComplete = current.completed();
        if (sendComplete) {
            updateRecordConversionStats(current);
            // Move on to the next send in the queue (null once drained).
            current = sendQueue.poll();
        }
    } while (!completed() && sendComplete);

    totalWritten += totalWrittenPerCall;

    // Sanity check: once everything has been sent, the byte count must match
    // the precomputed total size.
    if (completed() && totalWritten != size)
        log.error("mismatch in sending bytes over socket; expected: {} actual: {}", size, totalWritten);

    log.trace("Bytes written as part of multi-send call: {}, total bytes written so far: {}, expected bytes to write: {}", totalWrittenPerCall, totalWritten, size);

    return totalWrittenPerCall;
}
Use of org.apache.kafka.common.KafkaException in the Apache Kafka project.
Class SslChannelBuilder, method configure.
/**
 * Configures this channel builder: installs the optional SSL principal mapping
 * rules (if present in the configs) and creates and configures the SslFactory.
 * KafkaExceptions propagate as-is; any other failure is wrapped in one.
 */
public void configure(Map<String, ?> configs) throws KafkaException {
    try {
        this.configs = configs;
        String mappingRules = (String) configs.get(BrokerSecurityConfigs.SSL_PRINCIPAL_MAPPING_RULES_CONFIG);
        if (mappingRules != null)
            sslPrincipalMapper = SslPrincipalMapper.fromRules(mappingRules);
        // Assign the factory before configuring it, preserving the original
        // state on a configure() failure.
        this.sslFactory = new SslFactory(mode, null, isInterBrokerListener);
        this.sslFactory.configure(this.configs);
    } catch (KafkaException e) {
        // Already the declared type; rethrow without double-wrapping.
        throw e;
    } catch (Exception e) {
        throw new KafkaException(e);
    }
}
Use of org.apache.kafka.common.KafkaException in the Apache Kafka project.
Class ScramFormatter, method generateCredential.
/**
 * Creates a new SCRAM credential from a plaintext password, using a freshly
 * generated random salt and the supplied iteration count.
 *
 * @param password   the plaintext password to derive the credential from
 * @param iterations the PBKDF iteration count
 * @throws KafkaException if the underlying MAC rejects the derived key
 */
public ScramCredential generateCredential(String password, int iterations) {
    try {
        byte[] salt = secureRandomBytes();
        return generateCredential(salt, saltedPassword(password, salt, iterations), iterations);
    } catch (InvalidKeyException e) {
        throw new KafkaException("Could not create credential", e);
    }
}
Use of org.apache.kafka.common.KafkaException in the Apache Kafka project.
Class OAuthBearerLoginCallbackHandler, method init.
/*
 * Package-visible for testing. Installs the retriever/validator pair,
 * initializes the retriever, and marks the handler initialized.
 */
void init(AccessTokenRetriever accessTokenRetriever, AccessTokenValidator accessTokenValidator) {
    this.accessTokenRetriever = accessTokenRetriever;
    this.accessTokenValidator = accessTokenValidator;

    try {
        // Retriever initialization may perform I/O, hence the checked IOException.
        accessTokenRetriever.init();
    } catch (IOException e) {
        throw new KafkaException("The OAuth login configuration encountered an error when initializing the AccessTokenRetriever", e);
    }

    isInitialized = true;
}
Aggregations