Use of org.voltdb.catalog.Catalog in the VoltDB project — class RegressionSuite, method tearDown:
/**
 * JUnit teardown hook. Either shuts the whole server down (complete-shutdown
 * mode) or, under the regression-suite optimization, verifies the catalog was
 * not changed by the test and truncates every non-export table so the next
 * test starts from a clean database. In both cases all clients and client
 * socket channels are closed afterwards.
 */
@Override
public void tearDown() throws Exception {
    if (m_completeShutdown) {
        m_config.shutDown();
    } else {
        final Catalog catalogNow = getCurrentCatalog();
        if (catalogNow != null) {
            final CatalogDiffEngine diff =
                    new CatalogDiffEngine(m_config.getInitialCatalog(), catalogNow);
            // A single diff-command line counts as "no change"; anything more
            // means the test mutated the schema while the optimization was on.
            if (diff.commands().split("\n").length > 1) {
                fail("Catalog changed in test " + getName()
                        + " while the regression suite optimization is on: \n"
                        + diff.getDescriptionOfChanges(false));
            }
        }
        final Client client = getClient();
        final VoltTable tables =
                client.callProcedure("@SystemCatalog", "TABLES").getResults()[0];
        final int nameCol = tables.getColumnIndex("TABLE_NAME");
        final int typeCol = tables.getColumnIndex("TABLE_TYPE");
        final ArrayList<String> toClear = new ArrayList<>(tables.getRowCount());
        while (tables.advanceRow()) {
            final String tableType = tables.getString(typeCol);
            // Export tables are skipped during the between-test cleanup.
            if (!tableType.equalsIgnoreCase("EXPORT")) {
                toClear.add(tables.getString(nameCol));
            }
        }
        for (final String tableName : toClear) {
            try {
                client.callProcedure("@AdHoc", "DELETE FROM " + tableName);
            } catch (final ProcCallException pce) {
                // Materialized views cannot be deleted from directly; they are
                // emptied via their source tables, so that error is expected.
                if (!pce.getMessage().contains("Illegal to modify a materialized view.")) {
                    fail("Hit an exception when cleaning up tables between tests: "
                            + pce.getMessage());
                }
            }
        }
        client.drain();
    }
    for (final Client c : m_clients) {
        c.close();
    }
    synchronized (m_clientChannels) {
        for (final SocketChannel sc : m_clientChannels) {
            try {
                ConnectionUtil.closeConnection(sc);
            } catch (final IOException e) {
                e.printStackTrace();
            }
        }
        m_clientChannels.clear();
    }
    m_clients.clear();
}
Use of org.voltdb.catalog.Catalog in the VoltDB project — class VoltCompiler, method compileInternal:
/**
 * Internal method for compiling with and without a project.xml file or DDL files.
 *
 * @param cannonicalDDLIfAny Reader for the canonical DDL, or null if none.
 * @param previousCatalogIfAny The prior catalog to diff/compile against, or null.
 * @param ddlReaderList The list of DDL readers to compile; must be non-null and non-empty.
 * @param jarOutputRet The in-memory jar to populate or null if the caller doesn't provide one.
 * @return The InMemoryJarfile containing the compiled catalog if
 * successful, null if not. If the caller provided an InMemoryJarfile, the
 * return value will be the same object, not a copy.
 */
private InMemoryJarfile compileInternal(
        final VoltCompilerReader cannonicalDDLIfAny,
        final Catalog previousCatalogIfAny,
        final List<VoltCompilerReader> ddlReaderList,
        final InMemoryJarfile jarOutputRet) {
    // Validate the DDL list BEFORE dereferencing it. The original code ran
    // "assert (ddlReaderList.size() > 0)" first, which NPEs on null under -ea
    // and made the null/empty guard below unreachable for the null case.
    if (ddlReaderList == null || ddlReaderList.isEmpty()) {
        addErr("One or more DDL files are required.");
        return null;
    }
    // Make a temporary local output jar if one wasn't provided.
    final InMemoryJarfile jarOutput = (jarOutputRet != null) ? jarOutputRet : new InMemoryJarfile();
    // Clear out the warnings, infos and errors from any previous compilation.
    m_warnings.clear();
    m_infos.clear();
    m_errors.clear();
    // Do all the work to get the catalog.
    final Catalog catalog =
            compileCatalogInternal(cannonicalDDLIfAny, previousCatalogIfAny, ddlReaderList, jarOutput);
    if (catalog == null) {
        return null;
    }
    Cluster cluster = catalog.getClusters().get("cluster");
    assert (cluster != null);
    Database database = cluster.getDatabases().get("database");
    assert (database != null);
    // Build DDL from catalog data and keep the canonical (batch-free) form.
    String ddlWithBatchSupport = CatalogSchemaTools.toSchema(catalog, m_importLines);
    m_canonicalDDL = CatalogSchemaTools.toSchemaWithoutInlineBatches(ddlWithBatchSupport);
    // Generate the catalog report and write it to disk.
    try {
        generateCatalogReport(ddlWithBatchSupport);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    jarOutput.put(AUTOGEN_DDL_FILE_NAME, m_canonicalDDL.getBytes(Constants.UTF8ENCODING));
    if (DEBUG_VERIFY_CATALOG) {
        debugVerifyCatalog(jarOutput, catalog);
    }
    // Serialize the catalog and write it into the jar.
    final String catalogCommands = catalog.serialize();
    byte[] catalogBytes = catalogCommands.getBytes(Constants.UTF8ENCODING);
    try {
        // Note when upgrading the version has already been updated by the caller.
        if (!jarOutput.containsKey(CatalogUtil.CATALOG_BUILDINFO_FILENAME)) {
            addBuildInfo(jarOutput);
        }
        jarOutput.put(CatalogUtil.CATALOG_FILENAME, catalogBytes);
        // Put the compiler report into the jarfile too.
        jarOutput.put("catalog-report.html", m_report.getBytes(Constants.UTF8ENCODING));
    } catch (final Exception e) {
        e.printStackTrace();
        return null;
    }
    // Belt-and-braces: assert in debug builds, still bail out cleanly when
    // assertions are disabled and errors slipped through.
    assert (!hasErrors());
    if (hasErrors()) {
        return null;
    }
    return jarOutput;
}
Use of org.voltdb.catalog.Catalog in the VoltDB project — class VoltCompiler, method loadSchema:
/**
 * Simplified interface for loading a ddl file with full support for VoltDB
 * extensions (partitioning, procedures, export), but no support for "project file" input.
 * This is, at least initially, only a back door to create a fully functional catalog for
 * the purposes of planner unit testing.
 *
 * @param hsql an interface to the hsql frontend, initialized and potentially reused by the caller.
 * @param whichProcs indicates which ddl-defined procedures to load: none, single-statement, or all
 * @param ddlFilePaths schema file paths
 * @return the freshly compiled catalog (also stored in {@code m_catalog})
 * @throws VoltCompilerException on compilation failure
 */
public Catalog loadSchema(HSQLInterface hsql, DdlProceduresToLoad whichProcs, String... ddlFilePaths) throws VoltCompilerException {
    // Start from an empty catalog containing a single cluster.
    m_catalog = new Catalog();
    m_catalog.execute("add / clusters cluster");
    final Database db = initCatalogDatabase(m_catalog);
    final List<VoltCompilerReader> readers = DDLPathsToReaderList(ddlFilePaths);
    final VoltDDLElementTracker ddlTracker = new VoltDDLElementTracker(this);
    final InMemoryJarfile scratchJar = new InMemoryJarfile();
    compileDatabase(db, hsql, ddlTracker, null, null, readers, null, whichProcs, scratchJar);
    return m_catalog;
}
Use of org.voltdb.catalog.Catalog in the VoltDB project — class VoltCompiler, method compileCatalogInternal:
/**
 * Internal method for compiling the catalog.
 *
 * @param cannonicalDDLIfAny Reader for the canonical DDL, or null if none.
 * @param previousCatalogIfAny prior catalog whose database is compiled against, or null.
 * @param ddlReaderList Reader objects for ddl files.
 * @param jarOutput The in-memory jar to populate or null if the caller doesn't provide one.
 * @return the compiled catalog, or null on compilation failure
 */
private Catalog compileCatalogInternal(final VoltCompilerReader cannonicalDDLIfAny, final Catalog previousCatalogIfAny, final List<VoltCompilerReader> ddlReaderList, final InMemoryJarfile jarOutput) {
    // Initialize a fresh catalog holding exactly one cluster.
    m_catalog = new Catalog();
    m_catalog.execute("add / clusters cluster");
    m_catalog.getClusters().get("cluster").setSecurityenabled(false);
    // Shutdown and make a new hsqldb, then compile the database node.
    try {
        Database priorDb = null;
        if (previousCatalogIfAny != null) {
            priorDb = previousCatalogIfAny.getClusters().get("cluster").getDatabases().get("database");
        }
        compileDatabaseNode(cannonicalDDLIfAny, priorDb, ddlReaderList, jarOutput);
    } catch (final VoltCompilerException e) {
        // NOTE(review): failure details are presumably accumulated in the
        // compiler's error list during compilation — null just signals failure.
        return null;
    }
    assert (m_catalog != null);
    // Stamp the catalog with the transaction-id epoch (in seconds).
    final int epoch = (int) (TransactionIdManager.getEpoch() / 1000);
    m_catalog.getClusters().get("cluster").setLocalepoch(epoch);
    return m_catalog;
}
Use of org.voltdb.catalog.Catalog in the VoltDB project — class VoltCompiler, method debugVerifyCatalog:
/**
 * Internal method that takes the generated DDL from the catalog and builds a new catalog.
 * The generated catalog is diffed with the original catalog to verify compilation and
 * catalog generation consistency. Crashes the local VoltDB on a non-empty diff.
 *
 * @param origJarFile the jar containing the auto-generated DDL and procedure classes
 * @param origCatalog the catalog the rebuilt one is diffed against
 */
private void debugVerifyCatalog(InMemoryJarfile origJarFile, Catalog origCatalog) {
    final VoltCompiler autoGenCompiler = new VoltCompiler(m_isXDCR);
    // Make the new compiler use the original jarfile's classloader so it can
    // pull in the class files for procedures and imports.
    autoGenCompiler.m_classLoader = origJarFile.getLoader();
    List<VoltCompilerReader> autogenReaderList = new ArrayList<>(1);
    autogenReaderList.add(new VoltCompilerJarFileReader(origJarFile, AUTOGEN_DDL_FILE_NAME));
    InMemoryJarfile autoGenJarOutput = new InMemoryJarfile();
    autoGenCompiler.m_currentFilename = AUTOGEN_DDL_FILE_NAME;
    // This call is purposely replicated in retryFailedCatalogRebuildUnderDebug,
    // where it provides an opportunity to set a breakpoint on a do-over when this
    // mainline call produces a flawed catalog that fails the catalog diff.
    // Keep the two calls in synch to allow debugging under the same exact conditions.
    Catalog autoGenCatalog = autoGenCompiler.compileCatalogInternal(null, null, autogenReaderList, autoGenJarOutput);
    if (autoGenCatalog == null) {
        Log.info("Did not verify catalog because it could not be compiled.");
        return;
    }
    FilteredCatalogDiffEngine diffEng = new FilteredCatalogDiffEngine(origCatalog, autoGenCatalog, false);
    String diffCmds = diffEng.commands();
    if (diffCmds != null && !diffCmds.equals("")) {
        // The debug replay path is only triggered in hopeless cases.
        if (RETRY_FAILED_CATALOG_REBUILD_UNDER_DEBUG) {
            autoGenCatalog = replayFailedCatalogRebuildUnderDebug(autoGenCompiler, autogenReaderList, autoGenJarOutput);
        }
        // Re-run a failed diff more verbosely as a pre-crash test diagnostic.
        diffEng = new FilteredCatalogDiffEngine(origCatalog, autoGenCatalog, true);
        diffCmds = diffEng.commands();
        String crashAdvice = "Catalog Verification from Generated DDL failed! "
                + "VoltDB dev: Consider"
                + (RETRY_FAILED_CATALOG_REBUILD_UNDER_DEBUG
                        ? ""
                        : " setting VoltCompiler.RETRY_FAILED_CATALOG_REBUILD_UNDER_DEBUG = true and")
                + " setting a breakpoint in VoltCompiler.replayFailedCatalogRebuildUnderDebug"
                + " to debug a replay of the faulty catalog rebuild roundtrip. ";
        VoltDB.crashLocalVoltDB(crashAdvice + "The offending diffcmds were: " + diffCmds);
    } else {
        // Fixed log-message typo: "successfuly" -> "successfully".
        Log.info("Catalog verification completed successfully.");
    }
}
Aggregations