Use of org.folio.kafka.KafkaConfig in project mod-inventory by folio-org.
The class QuickMarcConsumerVerticle, method getKafkaConfig.
private KafkaConfig getKafkaConfig(JsonObject config) {
  KafkaConfig kafkaConfig = KafkaConfig.builder()
    .envId(config.getString(KAFKA_ENV))
    .kafkaHost(config.getString(KAFKA_HOST))
    .kafkaPort(config.getString(KAFKA_PORT))
    .okapiUrl(config.getString(OKAPI_URL))
    .replicationFactor(Integer.parseInt(config.getString(KAFKA_REPLICATION_FACTOR)))
    .maxRequestSize(Integer.parseInt(config.getString(KAFKA_MAX_REQUEST_SIZE)))
    .build();
  LOGGER.info("kafkaConfig: {}", kafkaConfig);
  return kafkaConfig;
}
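As a minimal sketch of how a deployer could supply the configuration that getKafkaConfig reads, the snippet below builds a DeploymentOptions whose JSON config carries the same keys. The QuickMarcDeploymentSketch class, the key string values, and the literal settings are illustrative assumptions only; the real constants are defined elsewhere in the module.

import io.vertx.core.DeploymentOptions;
import io.vertx.core.json.JsonObject;

public class QuickMarcDeploymentSketch {
  // Hypothetical key names chosen for illustration; the module defines its own constants.
  private static final String KAFKA_ENV = "ENV";
  private static final String KAFKA_HOST = "KAFKA_HOST";
  private static final String KAFKA_PORT = "KAFKA_PORT";
  private static final String OKAPI_URL = "OKAPI_URL";
  private static final String KAFKA_REPLICATION_FACTOR = "REPLICATION_FACTOR";
  private static final String KAFKA_MAX_REQUEST_SIZE = "MAX_REQUEST_SIZE";

  public static DeploymentOptions quickMarcOptions() {
    // All values are String-typed because getKafkaConfig parses the numeric ones itself.
    JsonObject config = new JsonObject()
      .put(KAFKA_ENV, "folio")
      .put(KAFKA_HOST, "kafka")
      .put(KAFKA_PORT, "9092")
      .put(OKAPI_URL, "http://okapi:9130")
      .put(KAFKA_REPLICATION_FACTOR, "1")
      .put(KAFKA_MAX_REQUEST_SIZE, "1048576");
    return new DeploymentOptions().setConfig(config);
  }
}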
Use of org.folio.kafka.KafkaConfig in project mod-inventory by folio-org.
The class DataImportKafkaHandlerTest, method setUp.
@Before
public void setUp() {
  MockitoAnnotations.openMocks(this);
  String[] hostAndPort = cluster.getBrokerList().split(":");
  WireMock.stubFor(get(new UrlPathPattern(new RegexPattern(JOB_PROFILE_URL + "/.*"), true))
    .willReturn(WireMock.ok().withBody(Json.encode(profileSnapshotWrapper))));
  KafkaConfig kafkaConfig = KafkaConfig.builder()
    .kafkaHost(hostAndPort[0])
    .kafkaPort(hostAndPort[1])
    .maxRequestSize(1048576)
    .build();
  HttpClient client = vertx.createHttpClient(new HttpClientOptions().setConnectTimeout(3000));
  dataImportKafkaHandler = new DataImportKafkaHandler(vertx, mockedStorage, client,
    new ProfileSnapshotCache(vertx, client, 3600), kafkaConfig,
    new MappingMetadataCache(vertx, client, 3600));
  EventManager.clearEventHandlers();
  EventManager.registerKafkaEventPublisher(kafkaConfig, vertx, 1);
}
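The host/port parsing above could be lifted into a small helper so other tests build their KafkaConfig the same way. The following is a sketch under that assumption: the KafkaTestUtil class name and the DEFAULT_MAX_REQUEST_SIZE constant are introduced here for illustration, while the builder methods are exactly the ones shown in the snippet.

import org.folio.kafka.KafkaConfig;

public final class KafkaTestUtil {
  // 1 MiB, matching the maxRequestSize used in the test above.
  private static final int DEFAULT_MAX_REQUEST_SIZE = 1048576;

  private KafkaTestUtil() {
  }

  public static KafkaConfig configFromBrokerList(String brokerList) {
    // Expects a single "host:port" entry, e.g. the broker list of an embedded test cluster.
    String[] hostAndPort = brokerList.split(":");
    return KafkaConfig.builder()
      .kafkaHost(hostAndPort[0])
      .kafkaPort(hostAndPort[1])
      .maxRequestSize(DEFAULT_MAX_REQUEST_SIZE)
      .build();
  }
}

A test could then call KafkaTestUtil.configFromBrokerList(cluster.getBrokerList()) instead of repeating the split-and-build sequence.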
Use of org.folio.kafka.KafkaConfig in project mod-source-record-storage by folio-org.
The class ApplicationConfig, method kafkaConfigBean.
@Bean(name = "newKafkaConfig")
public KafkaConfig kafkaConfigBean() {
  KafkaConfig kafkaConfig = KafkaConfig.builder()
    .envId(envId)
    .kafkaHost(kafkaHost)
    .kafkaPort(kafkaPort)
    .okapiUrl(okapiUrl)
    .replicationFactor(replicationFactor)
    .maxRequestSize(maxRequestSize)
    .build();
  LOGGER.debug("kafkaConfig: {}", kafkaConfig);
  return kafkaConfig;
}
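A minimal sketch of the surrounding Spring configuration class follows, assuming the builder arguments are injected with @Value; the property names and default values here are illustrative assumptions rather than the module's actual ones.

import org.folio.kafka.KafkaConfig;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class KafkaConfigSketch {

  // Property names and defaults below are assumptions for the sketch.
  @Value("${ENV:folio}")
  private String envId;

  @Value("${KAFKA_HOST:kafka}")
  private String kafkaHost;

  @Value("${KAFKA_PORT:9092}")
  private String kafkaPort;

  @Value("${OKAPI_URL:http://okapi:9130}")
  private String okapiUrl;

  @Value("${REPLICATION_FACTOR:1}")
  private int replicationFactor;

  @Value("${MAX_REQUEST_SIZE:1048576}")
  private int maxRequestSize;

  @Bean(name = "newKafkaConfig")
  public KafkaConfig kafkaConfigBean() {
    return KafkaConfig.builder()
      .envId(envId)
      .kafkaHost(kafkaHost)
      .kafkaPort(kafkaPort)
      .okapiUrl(okapiUrl)
      .replicationFactor(replicationFactor)
      .maxRequestSize(maxRequestSize)
      .build();
  }
}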
Use of org.folio.kafka.KafkaConfig in project mod-source-record-manager by folio-org.
The class ApplicationConfig, method kafkaConfigBean.
@Bean(name = "newKafkaConfig")
public KafkaConfig kafkaConfigBean() {
  KafkaConfig kafkaConfig = KafkaConfig.builder()
    .envId(envId)
    .kafkaHost(kafkaHost)
    .kafkaPort(kafkaPort)
    .okapiUrl(okapiUrl)
    .replicationFactor(replicationFactor)
    .maxRequestSize(maxRequestSize)
    .build();
  LOGGER.info("kafkaConfig: " + kafkaConfig);
  return kafkaConfig;
}
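Because the bean is registered under the name newKafkaConfig, consumers can request it explicitly with @Qualifier. Below is a sketch of that injection; the KafkaConfigConsumer class name is hypothetical, while the bean name comes from the @Bean annotation above.

import org.folio.kafka.KafkaConfig;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;

@Component
public class KafkaConfigConsumer {

  private final KafkaConfig kafkaConfig;

  // Constructor injection of the named bean defined in ApplicationConfig above.
  public KafkaConfigConsumer(@Qualifier("newKafkaConfig") KafkaConfig kafkaConfig) {
    this.kafkaConfig = kafkaConfig;
  }

  public KafkaConfig getKafkaConfig() {
    return kafkaConfig;
  }
}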
Use of org.folio.kafka.KafkaConfig in project mod-inventory by folio-org.
The class DataImportConsumerVerticleTest, method setUpClass.
@BeforeClass
public static void setUpClass(TestContext context) {
  Async async = context.async();
  String[] hostAndPort = cluster.getBrokerList().split(":");
  KafkaConfig kafkaConfig = KafkaConfig.builder()
    .kafkaHost(hostAndPort[0])
    .kafkaPort(hostAndPort[1])
    .build();
  EventManager.registerKafkaEventPublisher(kafkaConfig, vertx, 1);
  vertx = Vertx.vertx();
  DeploymentOptions options = new DeploymentOptions().setConfig(new JsonObject()
    .put(KAFKA_HOST, hostAndPort[0])
    .put(KAFKA_PORT, hostAndPort[1])
    .put(KAFKA_REPLICATION_FACTOR, "1")
    .put(KAFKA_ENV, KAFKA_ENV_NAME)
    .put(KAFKA_MAX_REQUEST_SIZE, "1048576"));
  vertx.deployVerticle(DataImportConsumerVerticle.class.getName(), options, deployAr -> async.complete());
}
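A matching tear-down for the set-up above might look like the following sketch, added as a further method on the same test class. It assumes the test owns the Vertx instance created in setUpClass and reuses EventManager.clearEventHandlers(), which appears in the DataImportKafkaHandlerTest snippet earlier; org.junit.AfterClass would need to be imported alongside BeforeClass.

@AfterClass
public static void tearDownClass(TestContext context) {
  Async async = context.async();
  // Reset EventManager state, mirroring the clearEventHandlers() call used in DataImportKafkaHandlerTest.
  EventManager.clearEventHandlers();
  // Close the Vertx instance the test created, completing the async when shutdown finishes.
  vertx.close(ar -> async.complete());
}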