Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache:
the class ConnectorFactory, method getConnectorClass.
/**
 * Resolves a connector configuration value to a concrete {@link Connector} class.
 * Accepts either a fully-qualified class name, the simple class name, or a short
 * alias with the trailing "Connector" suffix omitted.
 *
 * @param connectorClassOrAlias fully-qualified name, simple name, or alias
 * @return the matching Connector subclass
 * @throws ConnectException if no class matches, more than one class matches,
 *         or the named class does not implement Connector
 */
@SuppressWarnings("unchecked")
private static Class<? extends Connector> getConnectorClass(String connectorClassOrAlias) {
    // Fast path: the value may already be a fully-qualified class name,
    // in which case no classpath scan is necessary.
    try {
        Class<?> candidate = Class.forName(connectorClassOrAlias);
        if (!Connector.class.isAssignableFrom(candidate))
            throw new ConnectException("Class " + connectorClassOrAlias + " does not implement Connector");
        return (Class<? extends Connector>) candidate;
    } catch (ClassNotFoundException e) {
        // Not a full class name; fall through and treat it as an alias.
    }
    // Slow path: scan the entire classpath for Connector implementations and
    // match the alias from the connector configuration against their names.
    Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(ClasspathHelper.forJavaClassPath()));
    Set<Class<? extends Connector>> connectors = reflections.getSubTypesOf(Connector.class);
    List<Class<? extends Connector>> matches = new ArrayList<>();
    for (Class<? extends Connector> candidate : connectors) {
        String simpleName = candidate.getSimpleName();
        // Either the bare class name without its package, or the short alias
        // form (e.g. "FileStreamSink" for "FileStreamSinkConnector"). The two
        // conditions are mutually exclusive, so at most one add per candidate.
        if (simpleName.equals(connectorClassOrAlias) || simpleName.equals(connectorClassOrAlias + "Connector"))
            matches.add(candidate);
    }
    if (matches.isEmpty())
        throw new ConnectException("Failed to find any class that implements Connector and which name matches " + connectorClassOrAlias + ", available connectors are: " + connectorNames(connectors));
    if (matches.size() > 1) {
        throw new ConnectException("More than one connector matches alias " + connectorClassOrAlias + ". Please use full package and class name instead. Classes found: " + connectorNames(matches));
    }
    // Exactly one match was validated above, so this is safe.
    return matches.get(0);
}
Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache:
the class RestServer, method start.
/**
 * Starts the embedded Jetty REST server, registering the Connect REST
 * resources, optional CORS filtering, request logging, and a statistics
 * handler needed for graceful shutdown.
 *
 * @param herder the Herder the REST resources delegate to
 * @throws ConnectException if the underlying Jetty server fails to start
 */
public void start(Herder herder) {
    log.info("Starting REST server");
    ResourceConfig resourceConfig = new ResourceConfig();
    resourceConfig.register(new JacksonJsonProvider());
    resourceConfig.register(RootResource.class);
    resourceConfig.register(new ConnectorsResource(herder));
    resourceConfig.register(new ConnectorPluginsResource(herder));
    resourceConfig.register(ConnectExceptionMapper.class);
    ServletContainer servletContainer = new ServletContainer(resourceConfig);
    ServletHolder servletHolder = new ServletHolder(servletContainer);
    ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
    context.setContextPath("/");
    context.addServlet(servletHolder, "/*");
    // CORS support is enabled only when an allowed-origins value is configured.
    String allowedOrigins = config.getString(WorkerConfig.ACCESS_CONTROL_ALLOW_ORIGIN_CONFIG);
    if (allowedOrigins != null && !allowedOrigins.trim().isEmpty()) {
        FilterHolder filterHolder = new FilterHolder(new CrossOriginFilter());
        filterHolder.setName("cross-origin");
        filterHolder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM, allowedOrigins);
        String allowedMethods = config.getString(WorkerConfig.ACCESS_CONTROL_ALLOW_METHODS_CONFIG);
        // BUG FIX: the guard previously re-tested allowedOrigins instead of
        // allowedMethods, so a blank ALLOWED_METHODS value was still installed.
        if (allowedMethods != null && !allowedMethods.trim().isEmpty()) {
            filterHolder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM, allowedMethods);
        }
        context.addFilter(filterHolder, "/*", EnumSet.of(DispatcherType.REQUEST));
    }
    RequestLogHandler requestLogHandler = new RequestLogHandler();
    Slf4jRequestLog requestLog = new Slf4jRequestLog();
    requestLog.setLoggerName(RestServer.class.getCanonicalName());
    requestLog.setLogLatency(true);
    requestLogHandler.setRequestLog(requestLog);
    HandlerCollection handlers = new HandlerCollection();
    handlers.setHandlers(new Handler[] { context, new DefaultHandler(), requestLogHandler });
    /* Needed for graceful shutdown as per `setStopTimeout` documentation */
    StatisticsHandler statsHandler = new StatisticsHandler();
    statsHandler.setHandler(handlers);
    jettyServer.setHandler(statsHandler);
    jettyServer.setStopTimeout(GRACEFUL_SHUTDOWN_TIMEOUT_MS);
    jettyServer.setStopAtShutdown(true);
    try {
        jettyServer.start();
    } catch (Exception e) {
        // Preserve the cause so the startup failure is diagnosable.
        throw new ConnectException("Unable to start REST server", e);
    }
    log.info("REST server listening at " + jettyServer.getURI() + ", advertising URL " + advertisedUrl());
}
Use of org.apache.kafka.connect.errors.ConnectException in project kafka by apache:
the class Worker, method connectorTaskConfigs.
/**
 * Get a list of updated task properties for the tasks of this connector.
 *
 * @param connName the connector name.
 * @param maxTasks the maximum number of tasks.
 * @param sinkTopics a list of sink topics.
 * @return a list of updated tasks properties.
 * @throws ConnectException if no connector with the given name runs on this worker.
 */
public List<Map<String, String>> connectorTaskConfigs(String connName, int maxTasks, List<String> sinkTopics) {
    log.trace("Reconfiguring connector tasks for {}", connName);
    WorkerConnector managed = connectors.get(connName);
    if (managed == null)
        throw new ConnectException("Connector " + connName + " not found in this worker.");
    Connector connector = managed.connector();
    String className = connector.taskClass().getName();
    List<Map<String, String>> taskConfigs = new ArrayList<>();
    for (Map<String, String> props : connector.taskConfigs(maxTasks)) {
        // Copy first so the connector's own config map is never mutated.
        Map<String, String> config = new HashMap<>(props);
        config.put(TaskConfig.TASK_CLASS_CONFIG, className);
        // Sink connectors additionally need the topic list injected.
        if (sinkTopics != null)
            config.put(SinkTask.TOPICS_CONFIG, Utils.join(sinkTopics, ","));
        taskConfigs.add(config);
    }
    return taskConfigs;
}
Use of org.apache.kafka.connect.errors.ConnectException in project ignite by apache:
the class IgniteSinkTask, method put.
/**
 * Buffers records.
 *
 * @param records Records to inject into grid.
 * @throws ConnectException if streaming a record into the grid fails.
 */
@SuppressWarnings("unchecked")
@Override
public void put(Collection<SinkRecord> records) {
    try {
        for (SinkRecord record : records) {
            // Data is flushed asynchronously when CACHE_PER_NODE_DATA_SIZE is reached.
            if (extractor != null) {
                // A custom extractor supplies the cache key/value pair.
                Map.Entry<Object, Object> entry = extractor.extract(record);
                StreamerContext.getStreamer().addData(entry.getKey(), entry.getValue());
            } else {
                if (record.key() != null) {
                    StreamerContext.getStreamer().addData(record.key(), record.value());
                } else {
                    // Without an extractor a null key cannot be streamed; skip the record.
                    log.error("Failed to stream a record with null key!");
                }
            }
        }
    } catch (ConnectException e) {
        log.error("Failed adding record", e);
        // FIX: rethrow the original exception instead of wrapping a
        // ConnectException inside another ConnectException, which buried the
        // original message one cause-level deeper for no benefit.
        throw e;
    }
}
Use of org.apache.kafka.connect.errors.ConnectException in project ignite by apache:
the class IgniteSourceConnector, method start.
/**
 * {@inheritDoc}
 */
@Override
public void start(Map<String, String> props) {
    // Required settings and the human-readable description used in the
    // validation error message, checked in declaration order.
    String[][] required = {
        {IgniteSourceConstants.CACHE_NAME, "cache name"},
        {IgniteSourceConstants.CACHE_CFG_PATH, "path to cache config file"},
        {IgniteSourceConstants.CACHE_EVENTS, "Registered cache events"},
        {IgniteSourceConstants.TOPIC_NAMES, "Kafka topics"}
    };
    try {
        for (String[] entry : required)
            A.notNullOrEmpty(props.get(entry[0]), entry[1]);
    } catch (IllegalArgumentException e) {
        // Surface misconfiguration as a ConnectException with its cause attached.
        throw new ConnectException("Cannot start IgniteSourceConnector due to configuration error", e);
    }
    configProps = props;
}
Aggregations