Use of doitincloud.commons.exceptions.ServerErrorException in project rdbcache by rdbcache.
Class DbaseOps, method fetchTablesAutoIncrementColumns.
// get auto increment column per table
//
private Map<String, Object> fetchTablesAutoIncrementColumns(Context context) {
    if (databaseType.equals("mysql")) {
        Map<String, Object> autoIncMap = new LinkedHashMap<>();
        JdbcTemplate jdbcTemplate = AppCtx.getJdbcTemplate();
        fetchMysqlTableAutoIncrementColumns(context, jdbcTemplate, autoIncMap);
        JdbcTemplate systemJdbcTemplate = AppCtx.getSystemJdbcTemplate();
        if (jdbcTemplate != systemJdbcTemplate) {
            fetchMysqlTableAutoIncrementColumns(context, systemJdbcTemplate, autoIncMap);
        }
        return autoIncMap;
    }
    if (databaseType.equals("h2")) {
        Map<String, Object> autoIncMap = new LinkedHashMap<>();
        JdbcTemplate jdbcTemplate = AppCtx.getJdbcTemplate();
        fetchH2TableAutoIncrementColumns(context, jdbcTemplate, autoIncMap);
        JdbcTemplate systemJdbcTemplate = AppCtx.getSystemJdbcTemplate();
        if (jdbcTemplate != systemJdbcTemplate) {
            fetchH2TableAutoIncrementColumns(context, systemJdbcTemplate, autoIncMap);
        }
        return autoIncMap;
    }
    throw new ServerErrorException("database type not supported");
}
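The per-database helpers fetchMysqlTableAutoIncrementColumns and fetchH2TableAutoIncrementColumns are not part of this snippet. A minimal sketch of what the MySQL variant might look like, assuming it queries information_schema.COLUMNS for auto-increment columns; the SQL, result column names, and map layout below are illustrative assumptions, not the project's actual code:

// Hypothetical sketch, not taken from rdbcache: records one auto-increment
// column name per table for the schema the given JdbcTemplate points at.
private void fetchMysqlTableAutoIncrementColumns(Context context, JdbcTemplate jdbcTemplate, Map<String, Object> autoIncMap) {
    String sql = "SELECT TABLE_NAME, COLUMN_NAME FROM information_schema.COLUMNS " +
            "WHERE TABLE_SCHEMA = DATABASE() AND EXTRA LIKE '%auto_increment%'";
    for (Map<String, Object> row : jdbcTemplate.queryForList(sql)) {
        autoIncMap.put((String) row.get("TABLE_NAME"), row.get("COLUMN_NAME"));
    }
}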
Use of doitincloud.commons.exceptions.ServerErrorException in project rdbcache by rdbcache.
Class DbaseOps, method handleApplicationReadyEvent.
@EventListener
public void handleApplicationReadyEvent(ApplicationReadyEvent event) {
    try {
        String driverName = AppCtx.getJdbcDataSource().getConnection().getMetaData().getDriverName();
        if (driverName.indexOf("MySQL") >= 0) {
            databaseType = "mysql";
        } else if (driverName.indexOf("H2") >= 0) {
            databaseType = "h2";
        } else {
            throw new ServerErrorException("database driver not supported");
        }
    } catch (Exception e) {
        e.printStackTrace();
        throw new ServerErrorException(e.getCause().getMessage());
    }
    setDefaultToDbTimeZone();
    cacheAllTablesInfo();
}
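Note that e.getCause() returns null when an exception has no cause; in particular, the ServerErrorException thrown from the else branch above is caught by this same catch block, and e.getCause().getMessage() would then fail with a NullPointerException (assuming ServerErrorException does not set a cause). A small defensive helper, shown purely for illustration and not part of rdbcache, could fall back to the exception's own message:

// Illustrative helper, not part of rdbcache: prefer the cause's message,
// falling back to the exception's own message when there is no cause.
private static String rootMessage(Exception e) {
    Throwable cause = e.getCause();
    return (cause != null && cause.getMessage() != null) ? cause.getMessage() : e.getMessage();
}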
Use of doitincloud.commons.exceptions.ServerErrorException in project rdbcache by rdbcache.
Class RedisRepoImpl, method update.
@Override
public boolean update(final Context context, final KvPairs pairs, final AnyKey anyKey) {
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("update pairs(" + pairs.size() + "): " + pairs.printKey() + " anyKey(" + anyKey.size() + "): " + anyKey.printTable());
    }
    boolean foundAll = true;
    for (int i = 0; i < pairs.size(); i++) {
        KvPair pair = pairs.get(i);
        if (enableDataCache) {
            AppCtx.getCacheOps().updateData(pair);
        }
        String key = pair.getId();
        String type = pair.getType();
        String hashKey = hdataPrefix + "::" + type + ":" + key;
        Map<String, Object> map = pair.getData();
        StopWatch stopWatch = context.startStopWatch("redis", "hashOps.putAll");
        try {
            hashOps.putAll(hashKey, map);
            if (stopWatch != null)
                stopWatch.stopNow();
            LOGGER.debug("update redis for " + key);
        } catch (Exception e) {
            if (stopWatch != null)
                stopWatch.stopNow();
            foundAll = false;
            String msg = e.getCause().getMessage();
            LOGGER.error(msg);
            e.printStackTrace();
            throw new ServerErrorException(context, msg);
        }
    }
    LOGGER.debug("update returns " + foundAll);
    return foundAll;
}
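The snippets use two constructor shapes: ServerErrorException(String message) in DbaseOps and ServerErrorException(Context context, String message) here. The class itself is not shown; a plausible sketch inferred from those call sites, with the Context handling being an assumption, would be an unchecked exception along these lines:

// Hypothetical sketch of the exception's shape, inferred from its call sites above.
public class ServerErrorException extends RuntimeException {

    public ServerErrorException(String message) {
        super(message);
    }

    public ServerErrorException(Context context, String message) {
        super(message);
        // the real class presumably records or cleans up the request context here
    }
}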
Use of doitincloud.commons.exceptions.ServerErrorException in project rdbcache by rdbcache.
Class RedisRepoImpl, method update.
@Override
public boolean update(final Context context, final KvPair pair, final KeyInfo keyInfo) {
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("update: " + pair.printKey() + " " + keyInfo.toString());
    }
    if (enableDataCache) {
        AppCtx.getCacheOps().updateData(pair);
    }
    String key = pair.getId();
    String type = pair.getType();
    String hashKey = hdataPrefix + "::" + type + ":" + key;
    Map<String, Object> map = pair.getData();
    StopWatch stopWatch = context.startStopWatch("redis", "hashOps.putAll");
    try {
        hashOps.putAll(hashKey, map);
        if (stopWatch != null)
            stopWatch.stopNow();
        LOGGER.debug("update redis for " + key);
    } catch (Exception e) {
        if (stopWatch != null)
            stopWatch.stopNow();
        String msg = e.getCause().getMessage();
        LOGGER.error(msg);
        e.printStackTrace();
        throw new ServerErrorException(context, msg);
    }
    LOGGER.debug("update returns true");
    return true;
}
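hashOps is not declared in these excerpts. Given the putAll and entries calls, it is presumably a Spring Data Redis HashOperations obtained from a RedisTemplate; a minimal sketch of that wiring, with the field names and injection style being assumptions rather than the project's actual setup:

// Hypothetical wiring, inferred from the hashOps.putAll/entries calls above.
@Autowired
private RedisTemplate<String, Object> redisTemplate;

private HashOperations<String, String, Object> hashOps;

@PostConstruct
public void init() {
    hashOps = redisTemplate.opsForHash();
}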
Use of doitincloud.commons.exceptions.ServerErrorException in project rdbcache by rdbcache.
Class RedisRepoImpl, method findAndSave.
@Override
public boolean findAndSave(final Context context, final KvPair pair, final KeyInfo keyInfo) {
    if (LOGGER.isTraceEnabled()) {
        LOGGER.trace("findAndSave: " + pair.printKey() + " " + keyInfo.toString());
    }
    boolean allOk = true;
    String key = pair.getId();
    String type = pair.getType();
    String hashKey = hdataPrefix + "::" + type + ":" + key;
    Map<String, Object> map = pair.getData();
    Map<String, Object> fmap = null;
    if (enableDataCache) {
        fmap = AppCtx.getCacheOps().getData(pair.getIdType());
        if (fmap != null && fmap.size() > 0) {
            LOGGER.debug("findAndSave - found from cache " + key);
        }
    }
    StopWatch stopWatch = null;
    if (fmap == null) {
        try {
            stopWatch = context.startStopWatch("redis", "hashOps.entries");
            fmap = hashOps.entries(hashKey);
            if (stopWatch != null)
                stopWatch.stopNow();
            if (fmap != null && fmap.size() > 0) {
                LOGGER.debug("findAndSave - found from redis " + key);
            }
        } catch (Exception e) {
            if (stopWatch != null)
                stopWatch.stopNow();
            String msg = e.getCause().getMessage();
            LOGGER.error(msg);
            e.printStackTrace();
            throw new ServerErrorException(context, msg);
        }
    }
    if (enableDataCache) {
        AppCtx.getCacheOps().putData(pair, keyInfo);
    }
    pair.setData(fmap);
    try {
        stopWatch = context.startStopWatch("redis", "hashOps.putAll");
        hashOps.putAll(hashKey, map);
        if (stopWatch != null)
            stopWatch.stopNow();
        LOGGER.debug("findAndSave - save " + key);
    } catch (Exception e) {
        if (stopWatch != null)
            stopWatch.stopNow();
        if (enableDataCache) {
            AppCtx.getCacheOps().removeData(pair.getIdType());
        }
        allOk = false;
        String msg = e.getCause().getMessage();
        LOGGER.error(msg);
        e.printStackTrace();
    }
    if (allOk) {
        allOk = fmap != null && fmap.size() > 0;
    }
    if (LOGGER.isTraceEnabled())
        LOGGER.trace("findAndSave returns " + allOk);
    return allOk;
}
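Stripped of caching, stopwatch, and error handling, findAndSave is essentially a get-and-set on a Redis hash: read the previous entries, overwrite them with the pair's current data, and hand the previous entries back on the pair. The core sequence in isolation, shown purely for illustration:

// Illustrative reduction of findAndSave's core steps, not the project's code.
Map<String, Object> previous = hashOps.entries(hashKey);    // read existing entries
hashOps.putAll(hashKey, pair.getData());                    // overwrite with the new data
pair.setData(previous);                                     // return old entries on the pair
boolean found = previous != null && previous.size() > 0;    // report whether anything was there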