Use of org.smartdata.metastore.MetaStoreException in project SSM by Intel-bigdata.
The class StatesManager, method getStorageUtilization.
public Utilization getStorageUtilization(String resourceName) throws IOException {
    try {
        long now = System.currentTimeMillis();
        if (!resourceName.equals("cache")) {
            long capacity = serverContext.getMetaStore()
                .getStoreCapacityOfDifferentStorageType(resourceName);
            long free = serverContext.getMetaStore()
                .getStoreFreeOfDifferentStorageType(resourceName);
            return new Utilization(now, capacity, capacity - free);
        } else {
            StorageCapacity storageCapacity =
                serverContext.getMetaStore().getStorageCapacity("cache");
            return new Utilization(now, storageCapacity.getCapacity(),
                storageCapacity.getCapacity() - storageCapacity.getFree());
        }
    } catch (MetaStoreException e) {
        throw new IOException(e);
    }
}
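A minimal calling sketch (not from the source): it assumes an initialized StatesManager named statesManager and an SLF4J-style LOG, and relies only on the method signature shown above.
try {
    // "cache" takes the dedicated cache branch; any other name (e.g. "ssd")
    // is resolved as a storage type through the metastore.
    Utilization cacheUtil = statesManager.getStorageUtilization("cache");
    Utilization ssdUtil = statesManager.getStorageUtilization("ssd");
    LOG.info("cache utilization: " + cacheUtil + ", ssd utilization: " + ssdUtil);
} catch (IOException e) {
    // The wrapped MetaStoreException surfaces here as an IOException.
    LOG.error("Failed to read storage utilization", e);
}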
Use of org.smartdata.metastore.MetaStoreException in project SSM by Intel-bigdata.
The class MetaStoreUtils, method dropAllTablesSqlite.
public static void dropAllTablesSqlite(Connection conn) throws MetaStoreException {
    try {
        Statement s = conn.createStatement();
        ResultSet rs = s.executeQuery("SELECT tbl_name FROM sqlite_master;");
        List<String> list = new ArrayList<>();
        while (rs.next()) {
            list.add(rs.getString(1));
        }
        for (String tb : list) {
            if (!"sqlite_sequence".equals(tb)) {
                s.execute("DROP TABLE IF EXISTS '" + tb + "';");
            }
        }
    } catch (Exception e) {
        throw new MetaStoreException(e);
    }
}
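A hedged usage sketch for resetting a throwaway SQLite metastore; it assumes MetaStoreUtils.createSqliteConnection(String), used elsewhere in this class, returns a plain java.sql.Connection for the given file, and that the caller handles MetaStoreException and SQLException.
// Illustrative only: drop every user table in a test SQLite file.
String dbFile = System.getProperty("java.io.tmpdir") + "/ssm-test.db";
Connection conn = MetaStoreUtils.createSqliteConnection(dbFile);
try {
    MetaStoreUtils.dropAllTablesSqlite(conn);
} finally {
    conn.close();
}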
Use of org.smartdata.metastore.MetaStoreException in project SSM by Intel-bigdata.
The class MetaStoreUtils, method getDBAdapter.
public static MetaStore getDBAdapter(SmartConf conf) throws MetaStoreException {
    URL pathUrl = ClassLoader.getSystemResource("");
    String path = pathUrl.getPath();
    characterTakeUpBytes = conf.getInt(
        SmartConfKeys.SMART_METASTORE_CHARACTER_TAKEUP_BYTES_KEY,
        SmartConfKeys.SMART_METASTORE_CHARACTER_TAKEUP_BYTES_DEFAULT);
    String fileName = "druid.xml";
    String expectedCpPath = path + fileName;
    LOG.info("Expected DB connection pool configuration path = " + expectedCpPath);
    File cpConfigFile = new File(expectedCpPath);
    if (cpConfigFile.exists()) {
        LOG.info("Using pool configure file: " + expectedCpPath);
        Properties p = new Properties();
        try {
            p.loadFromXML(new FileInputStream(cpConfigFile));
            String url = conf.get(SmartConfKeys.SMART_METASTORE_DB_URL_KEY);
            if (url != null) {
                p.setProperty("url", url);
            }
            String purl = p.getProperty("url");
            if (purl == null || purl.length() == 0) {
                // For testing
                purl = getDefaultSqliteDB();
                p.setProperty("url", purl);
                LOG.warn("Database URL not specified, using " + purl);
            }
            if (purl.startsWith(MetaStoreUtils.MYSQL_URL_PREFIX)) {
                String dbName = getMysqlDBName(purl);
                for (String name : DB_NAME_NOT_ALLOWED) {
                    if (dbName.equals(name)) {
                        throw new MetaStoreException(String.format(
                            "The database %s in MySQL is reserved for system use, "
                                + "please specify another database in druid.xml.", name));
                    }
                }
            }
            try {
                String pw = conf.getPasswordFromHadoop(SmartConfKeys.SMART_METASTORE_PASSWORD);
                if (pw != null && !pw.isEmpty()) {
                    p.setProperty("password", pw);
                }
            } catch (IOException e) {
                LOG.info("Cannot get metastore password from Hadoop credential provider,"
                    + " using the one configured in druid.xml.");
            }
            for (String key : p.stringPropertyNames()) {
                if (key.equals("password")) {
                    LOG.info("\t" + key + " = **********");
                } else {
                    LOG.info("\t" + key + " = " + p.getProperty(key));
                }
            }
            return new MetaStore(new DruidPool(p));
        } catch (Exception e) {
            if (e instanceof InvalidPropertiesFormatException) {
                throw new MetaStoreException("Malformed druid.xml, please check the file.", e);
            } else {
                throw new MetaStoreException(e);
            }
        }
    } else {
        LOG.info("DB connection pool config file " + expectedCpPath + " NOT found.");
    }
    // Fall back to the default configuration from druid-template.xml
    fileName = "druid-template.xml";
    expectedCpPath = path + fileName;
    LOG.info("Expected DB connection pool configuration path = " + expectedCpPath);
    cpConfigFile = new File(expectedCpPath);
    LOG.info("Using pool configure file: " + expectedCpPath);
    Properties p = new Properties();
    try {
        p.loadFromXML(new FileInputStream(cpConfigFile));
    } catch (Exception e) {
        throw new MetaStoreException(e);
    }
    String url = conf.get(SmartConfKeys.SMART_METASTORE_DB_URL_KEY);
    if (url != null) {
        p.setProperty("url", url);
    }
    for (String key : p.stringPropertyNames()) {
        LOG.info("\t" + key + " = " + p.getProperty(key));
    }
    return new MetaStore(new DruidPool(p));
}
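A minimal sketch of obtaining a MetaStore through this factory. The JDBC URL below is a placeholder; in practice the url usually comes from druid.xml and the password from the Hadoop credential provider, so setting the URL in code is only one option.
SmartConf conf = new SmartConf();
// Overrides the "url" property read from druid.xml (or druid-template.xml).
conf.set(SmartConfKeys.SMART_METASTORE_DB_URL_KEY, "jdbc:mysql://localhost:3306/ssm");
// Throws MetaStoreException if the pool configuration cannot be loaded.
MetaStore metaStore = MetaStoreUtils.getDBAdapter(conf);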
Use of org.smartdata.metastore.MetaStoreException in project SSM by Intel-bigdata.
The class MetaStoreUtils, method getDefaultSqliteDB.
/**
 * The default behavior provided here is mainly for convenience.
 *
 * @return the JDBC URL of a default SQLite database file under the user's home directory
 */
private static String getDefaultSqliteDB() throws MetaStoreException {
    String absFilePath = System.getProperty("user.home") + "/smart-test-default.db";
    File file = new File(absFilePath);
    if (file.exists()) {
        return MetaStoreUtils.SQLITE_URL_PREFIX + absFilePath;
    }
    try {
        Connection conn = MetaStoreUtils.createSqliteConnection(absFilePath);
        MetaStoreUtils.initializeDataBase(conn);
        conn.close();
    } catch (Exception e) {
        throw new MetaStoreException(e);
    }
    return MetaStoreUtils.SQLITE_URL_PREFIX + absFilePath;
}
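getDefaultSqliteDB is private, so it is only reached through getDBAdapter when no URL is configured anywhere. Assuming SQLITE_URL_PREFIX is the standard "jdbc:sqlite:" JDBC prefix (an assumption, not verified here), the fallback URL has this shape:
// e.g. jdbc:sqlite:/home/ssm/smart-test-default.db
String fallbackUrl = MetaStoreUtils.SQLITE_URL_PREFIX
    + System.getProperty("user.home") + "/smart-test-default.db";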
Use of org.smartdata.metastore.MetaStoreException in project SSM by Intel-bigdata.
The class MetaStoreUtils, method dropAllTablesMysql.
public static void dropAllTablesMysql(Connection conn, String url) throws MetaStoreException {
    try {
        Statement stat = conn.createStatement();
        String dbName = getMysqlDBName(url);
        LOG.info("Drop All tables of Current DBname: " + dbName);
        ResultSet rs = stat.executeQuery("SELECT TABLE_NAME FROM "
            + "INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = '" + dbName + "';");
        List<String> tbList = new ArrayList<>();
        while (rs.next()) {
            tbList.add(rs.getString(1));
        }
        for (String tb : tbList) {
            LOG.info(tb);
            stat.execute("DROP TABLE IF EXISTS " + tb + ";");
        }
    } catch (Exception e) {
        throw new MetaStoreException(e);
    }
}
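A hedged usage sketch; the JDBC URL and credentials are placeholders, the connection is obtained with plain java.sql.DriverManager rather than an SSM helper, and the caller is assumed to handle SQLException and MetaStoreException.
// Illustrative only: wipe all tables in the configured SSM MySQL database.
String url = "jdbc:mysql://localhost:3306/ssm";
try (Connection conn = DriverManager.getConnection(url, "ssm", "ssm-password")) {
    MetaStoreUtils.dropAllTablesMysql(conn, url);
}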