Usage of com.developmentontheedge.dbms.DbmsConnector in project be5 by DevelopmentOnTheEdge:
class OracleSchemaReader, method readColumns.
/**
 * Reads column metadata for every entity table from the Oracle data dictionary.
 * <p>
 * Runs four separate queries: basic column attributes, default values (queried
 * separately because {@code data_default} is a streaming LONG column that is
 * transmitted slowly), check constraints (to recover enum value lists), and
 * BEFORE-EACH-ROW triggers (to recover generated-column definitions).
 *
 * @param sql        executor providing the database connector
 * @param defSchema  default schema name (unused here; Oracle user_* views are
 *                   already scoped to the current user — TODO confirm intent)
 * @param controller progress controller (not consulted in this implementation)
 * @return map from lower-cased table name to its columns, in column-id order
 * @throws SQLException if any of the dictionary queries fail
 */
@Override
public Map<String, List<SqlColumnInfo>> readColumns(SqlExecutor sql, String defSchema, ProcessController controller) throws SQLException {
    DbmsConnector connector = sql.getConnector();
    Map<String, List<SqlColumnInfo>> result = new HashMap<>();
    ResultSet rs = connector.executeQuery("SELECT " + "c.table_name," + "c.column_name," + "c.data_type," + "c.char_length," + "c.data_precision," + "c.data_scale," + "c.nullable " + "from user_tab_cols c" + " JOIN entities e ON (UPPER(e.name)=c.table_name)" + " WHERE NOT(c.column_id IS NULL) ORDER BY c.table_name,c.column_id");
    try {
        while (rs.next()) {
            String tableName = rs.getString(1).toLowerCase();
            List<SqlColumnInfo> list = result.computeIfAbsent(tableName, k -> new ArrayList<>());
            SqlColumnInfo info = new SqlColumnInfo();
            list.add(info);
            info.setName(rs.getString(2));
            info.setType(rs.getString(3));
            info.setCanBeNull("Y".equals(rs.getString(7)));
            // Prefer numeric precision (data_precision); fall back to char_length
            // when data_precision was SQL NULL (i.e. a character-typed column).
            info.setSize(rs.getInt(5));
            if (rs.wasNull()) {
                info.setSize(rs.getInt(4));
            }
            info.setPrecision(rs.getInt(6));
        }
    } finally {
        connector.close(rs);
    }
    // Read default values as separate query, because it's LONG column which is streaming and
    // transmitted slowly
    rs = connector.executeQuery("SELECT " + "c.data_default," + "c.table_name," + "c.column_name " + "from user_tab_cols c" + " JOIN entities e ON (UPPER(e.name)=c.table_name)" + " WHERE NOT(c.column_id IS NULL) AND NOT (data_default IS NULL) ORDER BY c.table_name");
    try {
        while (rs.next()) {
            // Read streaming column at first (Oracle requires LONG columns to be
            // read before any other column of the row).
            String defaultValue = rs.getString(1);
            String tableName = rs.getString(2);
            String columnName = rs.getString(3);
            SqlColumnInfo column = findColumn(result, tableName, columnName);
            if (column == null)
                continue;
            // data_default is never null here (filtered in the WHERE clause).
            defaultValue = defaultValue.trim();
            // Normalize Oracle DATE literals into a quoted form.
            defaultValue = DEFAULT_DATE_PATTERN.matcher(defaultValue).replaceFirst("'$1'");
            if ("'auto-identity'".equals(defaultValue)) {
                // Marker default used by be5 to denote auto-incremented columns.
                column.setAutoIncrement(true);
            } else {
                column.setDefaultValue(defaultValue);
            }
        }
    } finally {
        connector.close(rs);
    }
    /*rs = connector.executeQuery( "SELECT uc.SEARCH_CONDITION,uc.TABLE_NAME FROM user_constraints uc "
            + " JOIN entities e ON (UPPER(e.name)=uc.table_name)"
            + " WHERE uc.CONSTRAINT_TYPE = 'C'" );*/
    // The following query works faster (much faster!) as it doesn't return "NOT NULL" constraints
    // though it's probably Oracle version specific (at least undocumented)
    // tested on Oracle 11r2
    rs = connector.executeQuery("SELECT c.condition,o.name " + "FROM sys.cdef$ c, sys.\"_CURRENT_EDITION_OBJ\" o,entities e " + "WHERE c.type#=1 AND c.obj# = o.obj# " + "AND o.owner# = userenv('SCHEMAID') " + "AND UPPER(e.name)=o.name");
    try {
        while (rs.next()) {
            String constr = rs.getString(1);
            String table = rs.getString(2);
            // ENUM VALUES
            // Copied from OperationSupport.loadEntityEnums
            StringTokenizer st = new StringTokenizer(constr.trim());
            int nTok = st.countTokens();
            if (nTok < 3) {
                continue;
            }
            String colName = st.nextToken().toUpperCase();
            String in = st.nextToken();
            // Only "<column> IN (...)" constraints encode enum value lists.
            if (!"IN".equalsIgnoreCase(in)) {
                continue;
            }
            SqlColumnInfo column = findColumn(result, table, colName);
            if (column == null) {
                continue;
            }
            List<String> values = new ArrayList<>();
            try {
                do {
                    // Split the remaining "('a','b',...)" list on the quote/paren/comma chars.
                    String val = st.nextToken("(,')");
                    if (!val.trim().isEmpty()) {
                        values.add(val);
                    }
                } while (st.hasMoreTokens());
            } catch (NoSuchElementException ignore) {
                // Tokenizer exhausted mid-parse: keep whatever values were collected.
            }
            if (!values.isEmpty()) {
                column.setEnumValues(values.toArray(new String[0]));
            }
        }
    } finally {
        connector.close(rs);
    }
    rs = connector.executeQuery("SELECT trigger_name,table_name,trigger_body FROM user_triggers " + "WHERE triggering_event='INSERT OR UPDATE' " + "AND TRIGGER_TYPE='BEFORE EACH ROW'");
    try {
        while (rs.next()) {
            // Read streaming column as first (trigger_body is a LONG column).
            String triggerBody = rs.getString(3);
            String tableName = rs.getString(2);
            Matcher matcher = GENERATED_TRIGGER_PATTERN.matcher(triggerBody);
            if (matcher.find()) {
                String columnName = matcher.group(1);
                String targetName = matcher.group(3);
                SqlColumnInfo column = findColumn(result, tableName, columnName);
                if (column == null)
                    continue;
                // Represent the trigger-maintained column as a generic column function.
                column.setDefaultValue(new ColumnFunction(targetName, ColumnFunction.TRANSFORM_GENERIC).toString());
            }
        }
    } finally {
        connector.close(rs);
    }
    return result;
}
Usage of com.developmentontheedge.dbms.DbmsConnector in project be5 by DevelopmentOnTheEdge:
class OracleSchemaReader, method readIndices.
/**
 * Reads index metadata for every entity table from the Oracle data dictionary.
 * <p>
 * A first query collects index/column membership in column-position order; a
 * second query (run separately because {@code data_default} is a slow streaming
 * LONG column) rewrites functional-index columns — virtual columns backed by
 * expressions such as generic refs or upper/lower wrappers — back into their
 * logical form.
 *
 * @param sql        executor providing the database connector
 * @param defSchema  default schema name (unused here; user_* views are already
 *                   scoped to the current user — TODO confirm intent)
 * @param controller progress controller (not consulted in this implementation)
 * @return map from lower-cased table name to its indices
 * @throws SQLException              if a dictionary query fails
 * @throws ProcessInterruptedException declared for interface compatibility
 */
@Override
public Map<String, List<IndexInfo>> readIndices(SqlExecutor sql, String defSchema, ProcessController controller) throws SQLException, ProcessInterruptedException {
    DbmsConnector connector = sql.getConnector();
    Map<String, List<IndexInfo>> result = new HashMap<>();
    ResultSet rs = connector.executeQuery("SELECT " + "i.table_name,i.index_name,ic.column_name,i.uniqueness " + "FROM user_indexes i " + "JOIN user_ind_columns ic ON i.index_name=ic.index_name " + "JOIN entities e ON (UPPER(e.name)=i.table_name) " + "ORDER BY i.table_name,i.index_name,ic.column_position");
    try {
        IndexInfo curIndex = null;
        String lastTable = null;
        while (rs.next()) {
            String tableName = rs.getString(1).toLowerCase();
            String indexName = rs.getString(2);
            // Rows arrive ordered by table/index/position, so a change of table
            // or index name starts a new IndexInfo.
            if (!tableName.equals(lastTable) || curIndex == null || !curIndex.getName().equals(indexName)) {
                List<IndexInfo> list = result.computeIfAbsent(tableName, k -> new ArrayList<>());
                curIndex = new IndexInfo();
                lastTable = tableName;
                list.add(curIndex);
                curIndex.setName(indexName);
                curIndex.setUnique("UNIQUE".equals(rs.getString(4)));
            }
            String column = rs.getString(3);
            curIndex.addColumn(column);
        }
    } finally {
        connector.close(rs);
    }
    // Read functional indices separately
    // as this query contains streaming column which is read slowly
    rs = connector.executeQuery("SELECT " + "i.table_name,i.index_name,ic.column_position,c.data_default " + "FROM user_indexes i " + "JOIN user_ind_columns ic ON i.index_name=ic.index_name " + "JOIN entities e ON (UPPER(e.name)=i.table_name) " + "JOIN user_tab_cols c ON (c.column_name=ic.column_name AND c.table_name=ic.table_name) " + "WHERE c.virtual_column='YES' " + "ORDER BY i.table_name,i.index_name,ic.column_position");
    try {
        while (rs.next()) {
            // Read streaming column at first (Oracle requires LONG columns to be
            // read before any other column of the row).
            String defaultValue = rs.getString(4);
            String tableName = rs.getString(1).toLowerCase();
            String indexName = rs.getString(2);
            // column_position is 1-based in the dictionary; our column list is 0-based.
            int pos = rs.getInt(3) - 1;
            List<IndexInfo> list = result.get(tableName);
            if (list == null)
                continue;
            for (IndexInfo indexInfo : list) {
                if (indexInfo.getName().equals(indexName)) {
                    // Rewrite the virtual-column expression into its logical be5 form.
                    defaultValue = GENERIC_REF_INDEX_PATTERN.matcher(defaultValue).replaceFirst("generic($2)");
                    defaultValue = UPPER_INDEX_PATTERN.matcher(defaultValue).replaceFirst("upper($1)");
                    defaultValue = LOWER_INDEX_PATTERN.matcher(defaultValue).replaceFirst("lower($1)");
                    indexInfo.getColumns().set(pos, defaultValue);
                }
            }
        }
    } finally {
        connector.close(rs);
    }
    return result;
}
Usage of com.developmentontheedge.dbms.DbmsConnector in project be5 by DevelopmentOnTheEdge:
class PostgresSchemaReader, method readIndices.
/**
 * Reads index metadata from the PostgreSQL system catalogs.
 * <p>
 * Uses {@code pg_get_indexdef} per key position to obtain each index column
 * (or expression), then normalizes known functional-index expressions
 * (generic refs, upper/lower, quoting) back into their logical be5 form.
 *
 * @param sql        executor providing the database connector
 * @param defSchema  schema to restrict the scan to; {@code null} means all schemas.
 *                   NOTE(review): interpolated directly into the SQL — safe only for
 *                   trusted, internally-supplied schema names; consider escaping.
 * @param controller progress controller, pinged per row to allow interruption
 * @return map from table name to its indices
 * @throws SQLException                if the catalog query fails
 * @throws ProcessInterruptedException if the controller signals interruption
 */
@Override
public Map<String, List<IndexInfo>> readIndices(SqlExecutor sql, String defSchema, ProcessController controller) throws SQLException, ProcessInterruptedException {
    DbmsConnector connector = sql.getConnector();
    Map<String, List<IndexInfo>> result = new HashMap<>();
    ResultSet rs = connector.executeQuery("SELECT ct.relname AS TABLE_NAME, i.indisunique AS IS_UNIQUE, ci.relname AS INDEX_NAME, " + "pg_catalog.pg_get_indexdef(ci.oid, (i.keys).n, false) AS COLUMN_NAME, (i.keys).n AS ORDINAL " + "FROM pg_catalog.pg_class ct " + "JOIN pg_catalog.pg_namespace n " + "ON (ct.relnamespace = n.oid)" + "JOIN (" + "SELECT i.indexrelid, i.indrelid, i.indisunique, " + "information_schema._pg_expandarray(i.indkey) AS keys FROM pg_catalog.pg_index i " + ") i " + "ON (ct.oid = i.indrelid) " + "JOIN pg_catalog.pg_class ci ON (ci.oid = i.indexrelid) " + (defSchema == null ? "" : "AND n.nspname = '" + defSchema + "' ") + " ORDER BY 1,3,5");
    try {
        IndexInfo curIndex = null;
        String lastTable = null;
        while (rs.next()) {
            String tableName = rs.getString(1);
            String indexName = rs.getString(3);
            if (indexName == null)
                continue;
            // Rows arrive ordered by table/index/ordinal, so a change of table
            // or index name starts a new IndexInfo.
            if (!tableName.equals(lastTable) || curIndex == null || !curIndex.getName().equals(indexName)) {
                List<IndexInfo> list = result.computeIfAbsent(tableName, k -> new ArrayList<>());
                curIndex = new IndexInfo();
                lastTable = tableName;
                list.add(curIndex);
                curIndex.setName(indexName);
                curIndex.setUnique(rs.getBoolean(2));
            }
            String column = rs.getString(4);
            // Normalize functional-index expressions into their logical be5 form.
            column = GENERIC_REF_INDEX_PATTERN.matcher(column).replaceFirst("generic($2)");
            column = UPPER_INDEX_PATTERN.matcher(column).replaceFirst("upper($1)");
            column = LOWER_INDEX_PATTERN.matcher(column).replaceFirst("lower($1)");
            column = QUOTE_INDEX_PATTERN.matcher(column).replaceFirst("$1");
            curIndex.addColumn(column);
            // Just to check for interrupts
            controller.setProgress(0);
        }
    } finally {
        connector.close(rs);
    }
    return result;
}
Usage of com.developmentontheedge.dbms.DbmsConnector in project be5 by DevelopmentOnTheEdge:
class PostgresSchemaReader, method readColumns.
/**
 * Reads column metadata from the PostgreSQL information schema.
 * <p>
 * Joins check constraints onto each column so enum value lists can be parsed
 * from CHECK clauses; tables whose names are not fully lower-case are skipped.
 *
 * @param sql        executor providing the database connector
 * @param defSchema  schema to restrict the scan to; {@code null} means all schemas.
 *                   NOTE(review): interpolated directly into the SQL — safe only for
 *                   trusted, internally-supplied schema names; consider escaping.
 * @param controller progress controller, pinged per row to allow interruption
 * @return map from table name to its columns, in ordinal order
 * @throws SQLException                if the information-schema query fails
 * @throws ProcessInterruptedException if the controller signals interruption
 */
@Override
public Map<String, List<SqlColumnInfo>> readColumns(SqlExecutor sql, String defSchema, ProcessController controller) throws SQLException, ProcessInterruptedException {
    DbmsConnector connector = sql.getConnector();
    Map<String, List<SqlColumnInfo>> result = new HashMap<>();
    ResultSet rs = connector.executeQuery("SELECT " + "c.table_name, " + "c.column_name, " + "c.column_default, " + "c.udt_name, " + "c.character_maximum_length, " + "c.numeric_precision, " + "c.numeric_scale, " + "c.is_nullable, " + "ch.check_clause " + "FROM information_schema.columns c " + "LEFT JOIN information_schema.constraint_column_usage cc ON (cc.table_name=c.table_name AND cc.table_schema=c.table_schema AND cc.column_name=c.column_name) " + "LEFT JOIN information_schema.table_constraints tc ON (cc.constraint_name = tc.constraint_name) " + "LEFT JOIN information_schema.check_constraints ch ON (cc.constraint_name = ch.constraint_name) " + "WHERE (ch.check_clause IS NULL OR tc.constraint_type = 'CHECK') " + (defSchema == null ? "" : "AND c.table_schema='" + defSchema + "' ") + "ORDER BY c.table_name,c.ordinal_position");
    try {
        while (rs.next()) {
            String tableName = rs.getString(1);
            // Skip tables with mixed/upper-case names: be5 entities are lower-case.
            if (!tableName.equals(tableName.toLowerCase()))
                continue;
            List<SqlColumnInfo> list = result.computeIfAbsent(tableName, k -> new ArrayList<>());
            SqlColumnInfo info = new SqlColumnInfo();
            list.add(info);
            info.setName(rs.getString(2));
            info.setType(rs.getString(4));
            info.setCanBeNull("YES".equals(rs.getString(8)));
            String defaultValue = rs.getString(3);
            if (defaultValue != null) {
                // Strip known PostgreSQL cast suffixes (e.g. "::text") from the default.
                for (String suffix : SUFFICES) {
                    if (defaultValue.endsWith(suffix))
                        defaultValue = defaultValue.substring(0, defaultValue.length() - suffix.length());
                }
                defaultValue = DEFAULT_DATE_PATTERN.matcher(defaultValue).replaceFirst("'$1'");
            }
            info.setDefaultValue(defaultValue);
            // Prefer character_maximum_length; fall back to numeric_precision when NULL.
            info.setSize(rs.getInt(5));
            if (rs.wasNull()) {
                info.setSize(rs.getInt(6));
            }
            info.setPrecision(rs.getInt(7));
            // Sequence-backed defaults ("nextval(...)") denote auto-incremented columns.
            info.setAutoIncrement(defaultValue != null && defaultValue.startsWith("nextval"));
            String check = rs.getString(9);
            if (check == null)
                continue;
            // Extract enum value lists from CHECK (... IN ('a','b',...)) clauses.
            Matcher matcher = ENUM_VALUES_PATTERN.matcher(check);
            List<String> vals = new ArrayList<>();
            while (matcher.find()) {
                vals.add(matcher.group(1));
            }
            info.setEnumValues(vals.toArray(new String[0]));
            // Just to check for interrupts
            controller.setProgress(0);
        }
    } finally {
        connector.close(rs);
    }
    return result;
}
Usage of com.developmentontheedge.dbms.DbmsConnector in project be5 by DevelopmentOnTheEdge:
class SqlServerSchemaReader, method readTableNames.
/**
 * Lists tables and views of the given schema from the SQL Server information schema.
 *
 * @param sql        executor providing the database connector
 * @param defSchema  schema whose tables should be listed (interpolated into the query)
 * @param controller progress controller (not consulted in this implementation)
 * @return map from lower-cased table name to its type: {@code "TABLE"} or {@code "VIEW"}
 * @throws SQLException if the information-schema query fails
 */
@Override
public Map<String, String> readTableNames(SqlExecutor sql, String defSchema, ProcessController controller) throws SQLException {
    DbmsConnector connector = sql.getConnector();
    Map<String, String> result = new HashMap<>();
    ResultSet rs = connector.executeQuery("SELECT table_name,table_type FROM information_schema.tables t WHERE table_schema='" + defSchema + "' AND table_type IN ('BASE TABLE','VIEW')");
    try {
        while (rs.next()) {
            String name = rs.getString(1).toLowerCase();
            String tableType = rs.getString(2);
            // Normalize the information-schema kind "BASE TABLE" to plain "TABLE".
            result.put(name, "BASE TABLE".equals(tableType) ? "TABLE" : tableType);
        }
    } finally {
        connector.close(rs);
    }
    return result;
}
Aggregations