Use of org.apache.accumulo.start.spi.KeywordExecutable in project accumulo by apache.
Class Main, method main.
public static void main(final String[] args) throws Exception {
  // Preload classes that cause a deadlock between the ServiceLoader and the DFSClient when
  // using the VFSClassLoader with jars in HDFS.
  ClassLoader loader = getClassLoader();
  Class<?> confClass = null;
  try {
    @SuppressWarnings("deprecation")
    var deprecatedConfClass = org.apache.accumulo.start.classloader.AccumuloClassLoader
        .getClassLoader().loadClass("org.apache.hadoop.conf.Configuration");
    confClass = deprecatedConfClass;
  } catch (ClassNotFoundException e) {
    log.error("Unable to find Hadoop Configuration class on classpath, check configuration.", e);
    throw e;
  }
  Object conf = null;
  try {
    conf = confClass.getDeclaredConstructor().newInstance();
  } catch (Exception e) {
    log.error("Error creating new instance of Hadoop Configuration", e);
    throw e;
  }
  try {
    Method getClassByNameOrNullMethod =
        conf.getClass().getMethod("getClassByNameOrNull", String.class);
    getClassByNameOrNullMethod.invoke(conf, "org.apache.hadoop.mapred.JobConf");
    getClassByNameOrNullMethod.invoke(conf, "org.apache.hadoop.mapred.JobConfigurable");
  } catch (Exception e) {
    log.error("Error pre-loading JobConf and JobConfigurable classes, VFS classloader with "
        + "system classes in HDFS may not work correctly", e);
    throw e;
  }
  if (args.length == 0) {
    printUsage();
    System.exit(1);
  }
  if (args[0].equals("-h") || args[0].equals("-help") || args[0].equals("--help")) {
    printUsage();
    return;
  }
  // determine whether a keyword was used or a class name, and execute it with the remaining args
  String keywordOrClassName = args[0];
  KeywordExecutable keywordExec = getExecutables(loader).get(keywordOrClassName);
  if (keywordExec != null) {
    execKeyword(keywordExec, stripArgs(args, 1));
  } else {
    execMainClassName(keywordOrClassName, stripArgs(args, 1));
  }
}
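For illustration only, here is a minimal sketch of a KeywordExecutable implementation that the keyword dispatch above could resolve. Only keyword() appears in this listing; the execute(String[]) signature, the comment about registration, and the package and class names are assumptions based on the standard java.util.ServiceLoader SPI pattern, not taken from the code above.

// Hypothetical example (not part of the Accumulo code above): a command that would be
// discovered by getExecutables(loader) and invoked when args[0] matches its keyword.
// Discovery is assumed to be ServiceLoader-based, i.e. a provider file named
// META-INF/services/org.apache.accumulo.start.spi.KeywordExecutable listing
// "com.example.HelloKeyword".
package com.example;

import org.apache.accumulo.start.spi.KeywordExecutable;

public class HelloKeyword implements KeywordExecutable {

  @Override
  public String keyword() {
    // the token users pass as args[0] to select this command
    return "hello";
  }

  @Override
  public void execute(String[] args) throws Exception {
    // the arguments after the keyword are forwarded by execKeyword(...)
    System.out.println("hello " + String.join(" ", args));
  }

  // If the SPI version in use declares further abstract methods (e.g. a description()),
  // they would need to be implemented here as well.
}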
Use of org.apache.accumulo.start.spi.KeywordExecutable in project accumulo by apache.
Class Main, method checkDuplicates.
public static Map<String, KeywordExecutable>
    checkDuplicates(final Iterable<? extends KeywordExecutable> services) {
  TreeSet<String> banList = new TreeSet<>();
  TreeMap<String, KeywordExecutable> results = new TreeMap<>();
  for (KeywordExecutable service : services) {
    String keyword = service.keyword();
    if (banList.contains(keyword)) {
      // subsequent times a duplicate is found, just warn and exclude it
      warnDuplicate(service);
    } else if (results.containsKey(keyword)) {
      // the first time a duplicate is found, ban-list it and warn
      banList.add(keyword);
      warnDuplicate(results.remove(keyword));
      warnDuplicate(service);
    } else {
      // first observance of this keyword, so just add it to the results
      results.put(service.keyword(), service);
    }
  }
  return Collections.unmodifiableSortedMap(results);
}
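A hedged sketch of how checkDuplicates can pair with the JDK ServiceLoader: ServiceLoader<KeywordExecutable> is an Iterable, so it can be passed straight in to build the de-duplicated keyword map that main consults. Whether getExecutables does exactly this internally, and the package of Main, are assumptions; the "hello" keyword is the hypothetical example from above.

// Sketch only (assumed wiring, not the actual getExecutables implementation):
import java.util.Map;
import java.util.ServiceLoader;

import org.apache.accumulo.start.Main; // the Main class shown above (package assumed)
import org.apache.accumulo.start.spi.KeywordExecutable;

public class KeywordLookupExample {
  public static void main(String[] args) {
    ClassLoader loader = KeywordLookupExample.class.getClassLoader();
    // discover all registered KeywordExecutable providers visible to this classloader
    ServiceLoader<KeywordExecutable> services =
        ServiceLoader.load(KeywordExecutable.class, loader);
    // checkDuplicates drops any keyword claimed by more than one provider and
    // returns an unmodifiable, sorted keyword -> executable map
    Map<String, KeywordExecutable> executables = Main.checkDuplicates(services);
    KeywordExecutable exec = executables.get("hello"); // null if nothing registered "hello"
    System.out.println(exec == null ? "no such keyword" : "found: " + exec.keyword());
  }
}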