Use of henplus.sqlmodel.Table in the project henplus by neurolabs.
The class DumpCommand, method execute.
/**
 * Execute the given dump command.
 *
 * Dispatches on the command name to one of: "dump-select",
 * "dump-conditional", "dump-out", "dump-in" or "verify-dump".
 *
 * @param session the current SQL session; may be {@code null}, in which case
 *                only "verify-dump" can proceed.
 * @param cmd     the command name that triggered this call.
 * @param param   the raw argument string following the command.
 * @return {@code SUCCESS}, {@code SYNTAX_ERROR} or {@code EXEC_FAILED}.
 */
@Override
public int execute(final SQLSession session, final String cmd, final String param) {
    final StringTokenizer st = new StringTokenizer(param);
    final int argc = st.countTokens();
    if ("dump-select".equals(cmd)) {
        return executeDumpSelect(session, st, argc);
    } else if ("dump-conditional".equals(cmd)) {
        return executeDumpConditional(session, st, argc);
    } else if ("dump-out".equals(cmd)) {
        return executeDumpOut(session, st, argc);
    } else if ("dump-in".equals(cmd)) {
        return executeDumpIn(session, st, argc);
    } else if ("verify-dump".equals(cmd)) {
        return executeVerifyDump(st, argc);
    }
    return SYNTAX_ERROR;
}

/** Dump the result of a free-form SELECT statement to a file. */
private int executeDumpSelect(final SQLSession session, final StringTokenizer st, final int argc) {
    if (session == null) {
        Logger.error("not connected.");
        return EXEC_FAILED;
    }
    if (argc < 4) {
        return SYNTAX_ERROR;
    }
    final String fileName = st.nextToken();
    final String tabName = st.nextToken();
    final String select = st.nextToken();
    if (!"SELECT".equalsIgnoreCase(select)) {
        Logger.error("'select' expected.");
        return SYNTAX_ERROR;
    }
    // re-assemble the remainder of the line into the select statement
    final StringBuilder statement = new StringBuilder("select");
    while (st.hasMoreElements()) {
        statement.append(" ").append(st.nextToken());
    }
    PrintStream out = null;
    beginInterruptableSection();
    try {
        out = openOutputStream(fileName, FILE_ENCODING);
        return dumpSelect(session, tabName, statement.toString(), out, FILE_ENCODING);
    } catch (final Exception e) {
        Logger.error("failed: ", e);
        return EXEC_FAILED;
    } finally {
        if (out != null) {
            out.close();
        }
        endInterruptableSection();
    }
}

/** Dump a single table, optionally restricted by a trailing WHERE clause. */
private int executeDumpConditional(final SQLSession session, final StringTokenizer st, final int argc) {
    if (session == null) {
        Logger.error("not connected.");
        return EXEC_FAILED;
    }
    if (argc < 2) {
        return SYNTAX_ERROR;
    }
    final String fileName = st.nextToken();
    final String tabName = st.nextToken();
    String whereClause = null;
    if (argc >= 3) {
        // consume everything up to the end of the line
        whereClause = st.nextToken("\n").trim();
        // a leading WHERE keyword is optional; strip it if present
        if (whereClause.toUpperCase().startsWith("WHERE")) {
            whereClause = whereClause.substring(5).trim();
        }
    }
    PrintStream out = null;
    beginInterruptableSection();
    try {
        out = openOutputStream(fileName, FILE_ENCODING);
        return dumpTable(session, tabName, whereClause, out, FILE_ENCODING);
    } catch (final Exception e) {
        // Logger.error already logs the stack trace; the former additional
        // printStackTrace() was redundant.
        Logger.error("failed: ", e);
        return EXEC_FAILED;
    } finally {
        if (out != null) {
            out.close();
        }
        endInterruptableSection();
    }
}

/** Dump one or more tables ('*' wildcards supported) in dependency order. */
private int executeDumpOut(final SQLSession session, final StringTokenizer st, final int argc) {
    if (session == null) {
        Logger.error("not connected.");
        return EXEC_FAILED;
    }
    if (argc < 2) {
        return SYNTAX_ERROR;
    }
    final String fileName = st.nextToken();
    PrintStream out = null;
    beginInterruptableSection();
    try {
        final long startTime = System.currentTimeMillis();
        // tables that have already been written to the dump
        final Set<String> alreadyDumped = new HashSet<String>();
        out = openOutputStream(fileName, FILE_ENCODING);
        final Set<String> tableSet = new LinkedHashSet<String>();
        /*
         * Right now we only sort if any '*' is found in the table
         * arguments. Probably we might want to make this an option to
         * dump-in.
         */
        boolean needsSort = false;
        int dumpResult = SUCCESS;
        /* 1) collect tables */
        while (st.hasMoreElements()) {
            final String nextToken = st.nextToken();
            if (nextToken.indexOf('*') > -1) {
                needsSort = true;
                final Iterator<String> iter;
                if ("*".equals(nextToken)) {
                    iter = _tableCompleter.getTableNamesIteratorForSession(session);
                } else {
                    // expand a trailing-'*' prefix pattern
                    final String tablePrefix = nextToken.substring(0, nextToken.length() - 1);
                    final SortedSet<String> tableNames = _tableCompleter.getTableNamesForSession(session);
                    final NameCompleter compl = new NameCompleter(tableNames);
                    iter = compl.getAlternatives(tablePrefix);
                }
                while (iter.hasNext()) {
                    tableSet.add(iter.next());
                }
            } else {
                tableSet.add(nextToken);
            }
        }
        /* 2) resolve dependencies */
        ResolverResult resolverResult = null;
        List<String> tableSequence;
        if (needsSort) {
            tableSequence = new ArrayList<String>();
            Logger.info("Retrieving and sorting tables. This may take a while, please be patient.");
            // get the tables in foreign-key dependency order
            final SQLMetaData meta = new SQLMetaDataBuilder().getMetaData(session, tableSet.iterator());
            final DependencyResolver dr = new DependencyResolver(meta.getTables());
            resolverResult = dr.sortTables();
            for (final Table table : resolverResult.getTables()) {
                tableSequence.add(table.getName());
            }
        } else {
            tableSequence = new ArrayList<String>(tableSet);
        }
        /* 3) dump out */
        if (tableSequence.size() > 1) {
            Logger.info("%s tables to dump.", tableSequence.size());
        }
        final Iterator<String> it = tableSequence.iterator();
        while (_running && it.hasNext()) {
            final String table = it.next();
            if (!alreadyDumped.contains(table)) {
                final int result = dumpTable(session, table, null, out, FILE_ENCODING, alreadyDumped);
                if (result != SUCCESS) {
                    dumpResult = result;
                }
            }
        }
        if (tableSequence.size() > 1) {
            final long duration = System.currentTimeMillis() - startTime;
            // TODO: move to Logger once TimeRenderer returns strings.
            HenPlus.msg().print("Dumping " + tableSequence.size() + " tables took ");
            TimeRenderer.printTime(duration, HenPlus.msg());
            HenPlus.msg().println();
        }
        /* 4) warn about cycles */
        if (resolverResult != null && resolverResult.getCyclicDependencies() != null
                && resolverResult.getCyclicDependencies().size() > 0) {
            HenPlus.msg().println("-----------\n" + "NOTE: There have been cyclic dependencies between several tables detected.\n" + "These may cause trouble when dumping in the currently dumped data.");
            // fixed: count was declared final and never incremented, so
            // every cycle used to be labelled "Cycle 0".
            int count = 0;
            final StringBuilder sb = new StringBuilder();
            for (final Collection<Table> tables : resolverResult.getCyclicDependencies()) {
                sb.append("Cycle ").append(count++).append(": ");
                for (final Table table : tables) {
                    sb.append(table.getName()).append(" -> ");
                }
                sb.delete(sb.length() - 4, sb.length()).append('\n');
            }
            HenPlus.msg().print(sb.toString());
            /* todo: print out, what constraint to disable */
        }
        return dumpResult;
    } catch (final Exception e) {
        // fixed: the former message interpolated a local tabName that was
        // always null ("dump table 'null' failed").
        Logger.error("dump-out failed: ", e);
        return EXEC_FAILED;
    } finally {
        if (out != null) {
            out.close();
        }
        endInterruptableSection();
    }
}

/** Read a previously written dump file into the database. */
private int executeDumpIn(final SQLSession session, final StringTokenizer st, final int argc) {
    if (session == null) {
        HenPlus.msg().println("not connected. Only verify-dump possible.");
        return EXEC_FAILED;
    }
    if (argc < 1 || argc > 2) {
        return SYNTAX_ERROR;
    }
    final String fileName = st.nextToken();
    int commitPoint = -1;
    if (argc == 2) {
        try {
            commitPoint = Integer.parseInt(st.nextToken());
        } catch (final NumberFormatException e) {
            HenPlus.msg().println("commit point number expected: " + e);
            return SYNTAX_ERROR;
        }
    }
    return retryReadDump(fileName, session, commitPoint);
}

/** Verify a dump file without writing anything to a database. */
private int executeVerifyDump(final StringTokenizer st, final int argc) {
    if (argc != 1) {
        return SYNTAX_ERROR;
    }
    return retryReadDump(st.nextToken(), null, -1);
}
Use of henplus.sqlmodel.Table in the project henplus by neurolabs.
The class DependencyResolver, method sortTables.
/**
 * Sort the tables delivered by {@code _tableIter} so that tables come
 * before the tables that reference them via foreign keys.
 *
 * @return a {@code ResolverResult} holding the sorted tables plus any
 *         cyclic dependencies collected in {@code _cyclicDependencies};
 *         tables whose dependencies could not be resolved are appended
 *         at the end.
 */
public ResolverResult sortTables() {
    final LinkedHashMap<String, Table> resolved = new LinkedHashMap<String, Table>();
    Map<String, Table> unresolved = null;
    // first run: separate tables with and without (fulfilled) dependencies
    while (_tableIter.hasNext()) {
        final Table t = _tableIter.next();
        if (t == null) {
            continue;
        }
        final Set<ColumnFkInfo> fks = t.getForeignKeys();
        if (fks == null) {
            // no foreign keys at all -> trivially resolved
            Logger.debug("[sortTables] put %s to resolved.", t);
            resolved.put(t.getName(), t);
        } else {
            // resolved only if every referenced pk-table is already resolved
            boolean nodep = true;
            final Iterator<ColumnFkInfo> iter2 = fks.iterator();
            while (iter2.hasNext() && nodep) {
                final ColumnFkInfo fk = iter2.next();
                if (!resolved.containsKey(fk.getPkTable())) {
                    nodep = false;
                }
            }
            if (nodep) {
                // fixed: this debug line used to be logged before the branch,
                // claiming "resolved" even for tables put into unresolved.
                Logger.debug("[sortTables] put %s to resolved.", t);
                resolved.put(t.getName(), t);
            } else {
                if (unresolved == null) {
                    unresolved = new HashMap<String, Table>();
                }
                unresolved.put(t.getName(), t);
            }
        }
    }
    // second run: recursively check the remaining dependencies
    if (unresolved != null) {
        for (final Table t : unresolved.values()) {
            resolveDep(t, null, resolved, unresolved);
        }
    }
    // do we need a second run?
    // unresolved = cleanUnresolved( resolved, unresolved );
    // Copy into a mutable list: LinkedHashMap.values() is a view that does
    // not support add(), so appending leftover unresolved tables to it
    // would throw UnsupportedOperationException.
    final Collection<Table> result = new ArrayList<Table>(resolved.values());
    if (unresolved != null) {
        for (final Table table : unresolved.values()) {
            if (!result.contains(table)) {
                result.add(table);
            }
        }
    }
    return new ResolverResult(result, _cyclicDependencies);
}
Use of henplus.sqlmodel.Table in the project henplus by neurolabs.
The class SQLMetaDataBuilder, method getMetaData.
/**
 * Collect column meta data for all tables named by {@code tableNamesIter}
 * through the session's JDBC connection.
 *
 * @param session        the session whose connection is queried.
 * @param tableNamesIter iterator over the table names to inspect.
 * @return the collected {@code SQLMetaData}; {@code null} only if the
 *         operation was interrupted right after reading the catalog.
 */
public SQLMetaData getMetaData(final SQLSession session, final Iterator<String> tableNamesIter) {
    final SQLMetaData result = new SQLMetaData();
    ResultSet rset = null;
    try {
        _interrupted = false;
        final String catalog = session.getConnection().getCatalog();
        if (_interrupted) {
            return null;
        }
        final DatabaseMetaData meta = session.getConnection().getMetaData();
        while (tableNamesIter.hasNext() && !_interrupted) {
            final String tableName = tableNamesIter.next();
            rset = meta.getColumns(catalog, null, tableName, null);
            try {
                final Table table = buildTable(catalog, meta, tableName, rset);
                result.addTable(table);
            } finally {
                // close each per-table result set; previously only the last
                // one was closed, leaking one ResultSet per table.
                rset.close();
                rset = null;
            }
        }
    } catch (final Exception e) {
        if (VERBOSE) {
            e.printStackTrace();
        }
        // e.getMessage() can be null (e.g. for NPEs) -> guard the trim().
        final String msg = e.getMessage();
        HenPlus.msg().println("Database problem reading meta data: " + (msg != null ? msg.trim() : e.toString()));
    } finally {
        if (rset != null) {
            try {
                rset.close();
            } catch (final Exception ignored) {
                // best effort cleanup; nothing sensible to do here
            }
        }
    }
    return result;
}
Use of henplus.sqlmodel.Table in the project henplus by neurolabs.
The class TableDiffCommand, method diffTable.
/**
 * Compare the definition of {@code tableName} in both sessions and print
 * the diff result, if any.
 *
 * @param first             session providing the reference table.
 * @param second            session providing the table to compare against.
 * @param tableName         name of the table to diff.
 * @param colNameIgnoreCase whether column names are compared case-insensitively.
 */
private void diffTable(final SQLSession first, final SQLSession second, final String tableName, final boolean colNameIgnoreCase) {
    final Table ref = first.getTable(tableName);
    final Table diff = second.getTable(tableName);
    final TableDiffResult diffResult = TableDiffer.diffTables(ref, diff, colNameIgnoreCase);
    if (diffResult == null) {
        // parameterized logging, consistent with the other Logger calls
        // in this code base ("%s tables to dump.", ...).
        Logger.info("No diff for table %s", tableName);
    } else {
        Logger.info("Diff result for table %s:", tableName);
        ResultTablePrinter.printResult(diffResult);
    }
}
Use of henplus.sqlmodel.Table in the project henplus by neurolabs.
The class SQLMetaDataBuilder, method buildTable.
/**
 * Build a {@code Table} model from the column {@code ResultSet} obtained
 * via {@code DatabaseMetaData.getColumns(..)}.
 *
 * @param catalog   the catalog name (currently unused here; kept for the
 *                  caller's signature symmetry).
 * @param meta      database meta data used to look up primary/foreign keys.
 * @param tableName name of the table being built.
 * @param rset      column result set; closed by this method when non-null.
 * @return the populated {@code Table}, or {@code null} if {@code rset} is null.
 * @throws SQLException if reading the result set fails.
 */
private Table buildTable(final String catalog, final DatabaseMetaData meta, final String tableName, final ResultSet rset) throws SQLException {
    Table table = null;
    if (rset != null) {
        table = new Table(tableName);
        final PrimaryKey pk = getPrimaryKey(meta, tableName);
        final Map<String, ColumnFkInfo> fks = getForeignKeys(meta, tableName);
        // stop early if the user interrupted the meta data scan
        while (!_interrupted && rset.next()) {
            final String colname = rset.getString(COLUMN_NAME);
            final Column column = new Column(colname);
            column.setType(rset.getString(TYPE_NAME));
            column.setSize(rset.getInt(COLUMN_SIZE));
            // the former '== columnNullable ? true : false' was a redundant ternary
            column.setNullable(rset.getInt(NULLABLE) == DatabaseMetaData.columnNullable);
            final String defaultVal = rset.getString(COLUMN_DEF);
            column.setDefault(defaultVal != null ? defaultVal.trim() : null);
            column.setPosition(rset.getInt(ORDINAL_POSITION));
            column.setPkInfo(pk.getColumnPkInfo(colname));
            column.setFkInfo(fks.get(colname));
            table.addColumn(column);
        }
        rset.close();
    }
    return table;
}
Aggregations