use of com.google.inject.ProvisionException in project gerrit by GerritCodeReview.
the class AbstractVersionManager method fail.
private ProvisionException fail(Throwable t) {
  ProvisionException e = new ProvisionException("Error scanning indexes");
  e.initCause(t);
  return e;
}
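A minimal sketch of how such a helper is typically called, converting a lower-level failure into the ProvisionException built by fail(); the scanIndexes() method below is hypothetical and stands in for the real scan logic:

import com.google.inject.ProvisionException;

public class FailExample {
  public static void main(String[] args) {
    try {
      scanIndexes();
    } catch (Exception e) {
      // Wrap the checked exception; initCause() preserves the original failure.
      throw fail(e);
    }
  }

  // Hypothetical stand-in for the real index scanning logic.
  private static void scanIndexes() throws Exception {
    throw new Exception("simulated scan failure");
  }

  private static ProvisionException fail(Throwable t) {
    ProvisionException e = new ProvisionException("Error scanning indexes");
    e.initCause(t);
    return e;
  }
}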
use of com.google.inject.ProvisionException in project gerrit by GerritCodeReview.
the class DataSourceProvider method intercept.
private DataSource intercept(String interceptor, DataSource ds) {
  if (interceptor == null) {
    return ds;
  }
  try {
    Constructor<?> c = Class.forName(interceptor).getConstructor();
    DataSourceInterceptor datasourceInterceptor = (DataSourceInterceptor) c.newInstance();
    return datasourceInterceptor.intercept("reviewDb", ds);
  } catch (ClassNotFoundException
      | SecurityException
      | NoSuchMethodException
      | IllegalArgumentException
      | InstantiationException
      | IllegalAccessException
      | InvocationTargetException e) {
    throw new ProvisionException("Cannot intercept datasource", e);
  }
}
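Because intercept() loads the class by name and invokes its no-argument constructor reflectively, a custom interceptor must expose a public zero-arg constructor. A sketch, assuming DataSourceInterceptor declares only the intercept(String, DataSource) method used above; the class name here is illustrative:

import javax.sql.DataSource;

// Logs each hand-out and returns the datasource unchanged; a real
// interceptor could return a wrapping DataSource instead.
public class LoggingDataSourceInterceptor implements DataSourceInterceptor {
  @Override
  public DataSource intercept(String name, DataSource ds) {
    System.out.println("Handing out datasource: " + name);
    return ds;
  }
}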
use of com.google.inject.ProvisionException in project cdap by caskdata.
the class HBaseCheck method run.
@Override
public void run() {
  LOG.info("Checking HBase version.");
  HBaseTableUtil hBaseTableUtil;
  try {
    hBaseTableUtil = new HBaseTableUtilFactory(cConf).get();
  } catch (ProvisionException e) {
    throw new RuntimeException("Unsupported HBase version " + HBaseVersion.getVersionString());
  }
  LOG.info(" HBase version successfully verified.");
  LOG.info("Checking HBase availability.");
  try (HConnection hbaseConnection = HConnectionManager.createConnection(hConf)) {
    hbaseConnection.listTables();
    LOG.info(" HBase availability successfully verified.");
  } catch (IOException e) {
    throw new RuntimeException("Unable to connect to HBase. "
        + "Please check that HBase is running and that the correct HBase configuration (hbase-site.xml) "
        + "and libraries are included in the CDAP master classpath.", e);
  }
  if (hConf.getBoolean("hbase.security.authorization", false)) {
    if (cConf.getBoolean(TxConstants.TransactionPruning.PRUNE_ENABLE)) {
      LOG.info("HBase authorization and transaction pruning are enabled. Checking global admin privileges for cdap.");
      try {
        boolean isGlobalAdmin = hBaseTableUtil.isGlobalAdmin(hConf);
        LOG.info("Global admin privileges check status: {}", isGlobalAdmin);
        if (isGlobalAdmin) {
          return;
        }
        // If global admin was false, fall back to the TX_PRUNE_ACL_CHECK value.
        if (cConf.getBoolean(Constants.Startup.TX_PRUNE_ACL_CHECK, false)) {
          LOG.info("Found {} to be set to true. Continuing with cdap master startup even though "
              + "the global admin check returned false.", Constants.Startup.TX_PRUNE_ACL_CHECK);
          return;
        }
        StringBuilder builder = new StringBuilder(
            "Transaction pruning is enabled and cdap does not have global admin privileges in HBase. "
            + "Global admin privileges for cdap are required for transaction pruning. "
            + "Either disable transaction pruning, grant the global admin privilege to cdap in HBase, "
            + "or override this check by setting ");
        builder.append(Constants.Startup.TX_PRUNE_ACL_CHECK);
        builder.append(" in cdap-site.xml.");
        if (HBaseVersion.get().equals(HBaseVersion.Version.HBASE_96)
            || HBaseVersion.get().equals(HBaseVersion.Version.HBASE_98)) {
          builder.append(" Detected HBase version ");
          builder.append(HBaseVersion.get());
          builder.append(". CDAP will not be able to determine if it has the global admin privilege in HBase.");
          builder.append(" After granting the global admin privilege please set ");
          builder.append(Constants.Startup.TX_PRUNE_ACL_CHECK);
        }
        throw new RuntimeException(builder.toString());
      } catch (IOException e) {
        throw new RuntimeException("Unable to determine cdap privileges as global admin in HBase.");
      }
    }
  }
  LOG.info("HBase authorization is disabled. Skipping global admin check for transaction pruning.");
}
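The first try/catch treats a ProvisionException from the factory purely as an "unsupported version" signal and converts it into a clearer startup failure. A condensed sketch of that probe pattern, with illustrative names:

import com.google.inject.ProvisionException;

// Generic version-probe pattern: a Guice-style factory throws
// ProvisionException (unchecked) when no implementation fits the
// detected version; the caller fails fast with a clearer message.
public class VersionProbe {
  interface Factory<T> {
    T get();
  }

  static <T> T getOrFail(Factory<T> factory, String versionString) {
    try {
      return factory.get();
    } catch (ProvisionException e) {
      throw new RuntimeException("Unsupported HBase version " + versionString, e);
    }
  }
}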
use of com.google.inject.ProvisionException in project cdap by caskdata.
the class HBaseVersionSpecificFactory method get.
@Override
public T get() {
  T instance = null;
  boolean useLatestVersionForUnsupported =
      Constants.HBase.HBASE_AUTO_LATEST_VERSION.equals(cConf.get(Constants.HBase.HBASE_VERSION_RESOLUTION_STRATEGY));
  try {
    HBaseVersion.Version hbaseVersion = HBaseVersion.get();
    switch (hbaseVersion) {
      case HBASE_94:
        throw new ProvisionException("HBase 0.94 is no longer supported. Please upgrade to HBase 0.96 or newer.");
      case HBASE_96:
        instance = createInstance(getHBase96Classname());
        break;
      case HBASE_98:
        instance = createInstance(getHBase98Classname());
        break;
      case HBASE_10:
        instance = createInstance(getHBase10Classname());
        break;
      case HBASE_10_CDH:
        instance = createInstance(getHBase10CDHClassname());
        break;
      case HBASE_10_CDH55:
      case HBASE_10_CDH56:
        instance = createInstance(getHBase10CHD550ClassName());
        break;
      case HBASE_11:
        instance = createInstance(getHBase11Classname());
        break;
      case HBASE_12_CDH57:
        instance = createInstance(getHBase12CHD570ClassName());
        break;
      case UNKNOWN_CDH:
        if (useLatestVersionForUnsupported) {
          instance = createInstance(getLatestHBaseCDHClassName());
          LOG.info("CDH HBase version '{}' is unsupported. Continuing with latest CDH HBase version '{}'.",
              hbaseVersion.getMajorVersion(), getLatestHBaseCDHClassName());
          break;
        } else {
          throw new ProvisionException("Unknown HBase version: " + HBaseVersion.getVersionString());
        }
      case UNKNOWN:
        if (useLatestVersionForUnsupported) {
          instance = createInstance(getLatestHBaseClassName());
          LOG.info("HBase version '{}' is unsupported. Continuing with latest HBase version '{}'.",
              hbaseVersion.getMajorVersion(), getLatestHBaseClassName());
          break;
        } else {
          throw new ProvisionException("Unknown HBase version: " + HBaseVersion.getVersionString());
        }
    }
  } catch (ClassNotFoundException cnfe) {
    throw new ProvisionException(cnfe.getMessage(), cnfe);
  }
  return instance;
}
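createInstance(String) is not shown here; since the surrounding catch handles ClassNotFoundException, it plausibly loads the version-specific class by name and instantiates it reflectively. A hypothetical sketch under that assumption, not the actual CDAP implementation:

@SuppressWarnings("unchecked")
private T createInstance(String className) throws ClassNotFoundException {
  Class<?> clz = Class.forName(className);
  try {
    // Version-specific implementations are assumed to have a no-arg constructor.
    return (T) clz.getConstructor().newInstance();
  } catch (ReflectiveOperationException e) {
    throw new ProvisionException("Failed to instantiate " + className, e);
  }
}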
use of com.google.inject.ProvisionException in project cdap by caskdata.
the class MapReduceRuntimeService method buildJobJar.
/**
 * Creates a jar that contains everything needed for running the MapReduce program by Hadoop.
 *
 * @return a new {@link File} containing the job jar
 */
private File buildJobJar(Job job, File tempDir) throws IOException, URISyntaxException {
  File jobJar = new File(tempDir, "job.jar");
  LOG.debug("Creating Job jar: {}", jobJar);
  // For local mode, nothing is needed in the job jar since we use the classloader in the configuration object.
  if (MapReduceTaskContextProvider.isLocal(job.getConfiguration())) {
    try (JarOutputStream output = new JarOutputStream(new FileOutputStream(jobJar))) {
      // An empty jar is sufficient in local mode.
    }
    return jobJar;
  }
  // Exclude libraries that are known not to be needed:
  // Hadoop - available from the cluster
  // Spark  - MR never uses Spark
  final HadoopClassExcluder hadoopClassExcluder = new HadoopClassExcluder();
  ApplicationBundler appBundler = new ApplicationBundler(new ClassAcceptor() {
    @Override
    public boolean accept(String className, URL classUrl, URL classPathUrl) {
      if (className.startsWith("org.apache.spark") || classPathUrl.toString().contains("spark-assembly")) {
        return false;
      }
      return hadoopClassExcluder.accept(className, classUrl, classPathUrl);
    }
  });
  Set<Class<?>> classes = Sets.newHashSet();
  classes.add(MapReduce.class);
  classes.add(MapperWrapper.class);
  classes.add(ReducerWrapper.class);
  classes.add(SLF4JBridgeHandler.class);
  // Include the job's input/output format classes when custom classes take over the classloading.
  if (cConf.getBoolean(Constants.AppFabric.MAPREDUCE_INCLUDE_CUSTOM_CLASSES)) {
    try {
      Class<? extends InputFormat<?, ?>> inputFormatClass = job.getInputFormatClass();
      classes.add(inputFormatClass);
      // If it is StreamInputFormat, also add the StreamEventCodec class.
      if (MapReduceStreamInputFormat.class.isAssignableFrom(inputFormatClass)) {
        Class<? extends StreamEventDecoder> decoderType =
            MapReduceStreamInputFormat.getDecoderClass(job.getConfiguration());
        if (decoderType != null) {
          classes.add(decoderType);
        }
      }
    } catch (Throwable t) {
      // Ignore: the InputFormat may not be available at submission time.
      LOG.debug("InputFormat class not found: {}", t.getMessage(), t);
    }
    try {
      Class<? extends OutputFormat<?, ?>> outputFormatClass = job.getOutputFormatClass();
      classes.add(outputFormatClass);
    } catch (Throwable t) {
      // Ignore: the OutputFormat may not be available at submission time.
      LOG.debug("OutputFormat class not found: {}", t.getMessage(), t);
    }
  }
  // Add the KMS class if the secure store is KMS backed.
  if (SecureStoreUtils.isKMSBacked(cConf) && SecureStoreUtils.isKMSCapable()) {
    classes.add(SecureStoreUtils.getKMSSecureStore());
  }
  Class<? extends HBaseDDLExecutor> ddlExecutorClass = new HBaseDDLExecutorFactory(cConf, hConf).get().getClass();
  try {
    Class<?> hbaseTableUtilClass = HBaseTableUtilFactory.getHBaseTableUtilClass(cConf);
    classes.add(hbaseTableUtilClass);
    classes.add(ddlExecutorClass);
  } catch (ProvisionException e) {
    LOG.warn("Not including HBaseTableUtil classes in the submitted job jar since they are not available.");
  }
  ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(
      new CombineClassLoader(getClass().getClassLoader(), Collections.singleton(ddlExecutorClass.getClassLoader())));
  try {
    appBundler.createBundle(Locations.toLocation(jobJar), classes);
  } finally {
    ClassLoaders.setContextClassLoader(oldClassLoader);
  }
  LOG.debug("Built MapReduce Job Jar at {}", jobJar.toURI());
  return jobJar;
}
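The anonymous ClassAcceptor above chains a package-prefix exclusion in front of the Hadoop excluder. A standalone sketch of that filtering idea against Twill's ClassAcceptor; the PackageExcluder name is illustrative:

import java.net.URL;

import org.apache.twill.api.ClassAcceptor;

// Rejects every class under a given package prefix; everything else is bundled.
public class PackageExcluder extends ClassAcceptor {
  private final String excludedPrefix;

  public PackageExcluder(String excludedPrefix) {
    this.excludedPrefix = excludedPrefix;
  }

  @Override
  public boolean accept(String className, URL classUrl, URL classPathUrl) {
    return !className.startsWith(excludedPrefix);
  }
}

For example, new PackageExcluder("org.apache.spark") reproduces the Spark package exclusion above, minus the additional spark-assembly classpath-URL check.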