Use of org.voltdb.CatalogContext in project voltdb by VoltDB.
In class TestAdHocPlans, method setUp:
@Before
public void setUp() throws Exception {
    // For planner-only testing, we shouldn't care about IV2
    VoltDB.Configuration config = setUpSPDB();
    byte[] bytes = MiscUtils.fileToBytes(new File(config.m_pathToCatalog));
    String serializedCatalog =
            CatalogUtil.getSerializedCatalogStringFromJar(CatalogUtil.loadAndUpgradeCatalogFromJar(bytes, false).getFirst());
    Catalog catalog = new Catalog();
    catalog.execute(serializedCatalog);
    DbSettings dbSettings = new DbSettings(config.asClusterSettings().asSupplier(),
            NodeSettings.create(config.asPathSettingsMap()));
    CatalogContext context = new CatalogContext(0, 0, catalog, dbSettings, bytes, null, new byte[] {}, 0,
            mock(HostMessenger.class));
    m_pt = new PlannerTool(context.database, context.getCatalogHash());
}
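Once setUp has wired the PlannerTool to the freshly built CatalogContext, a test can drive plan generation directly, with no running cluster. A minimal sketch, assuming PlannerTool exposes the planSqlForTest(String) helper used elsewhere in TestAdHocPlans; the table and column names are hypothetical:

// Minimal sketch: SP_TABLE/id are hypothetical names, and planSqlForTest is assumed
// to be the PlannerTool entry point this test class uses for ad hoc planning.
@Test
public void testPlanSimpleLookup() throws Exception {
    AdHocPlannedStatement result = m_pt.planSqlForTest("select * from SP_TABLE where id = 0;");
    assertNotNull(result);
}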
Use of org.voltdb.CatalogContext in project voltdb by VoltDB.
In class ExplainProc, method run:
public CompletableFuture<ClientResponse> run(String procedureNames) {
    // Go to the catalog and fetch all the "explain plan" strings of the queries in the procedure.
    CatalogContext context = VoltDB.instance().getCatalogContext();
    /*
     * TODO: We don't actually support multiple proc names in an ExplainProc call,
     * so I THINK that the string is always a single procname symbol and all this
     * splitting and iterating is a no-op.
     */
    List<String> procNames = SQLLexer.splitStatements(procedureNames);
    int size = procNames.size();
    VoltTable[] vt = new VoltTable[size];
    for (int i = 0; i < size; i++) {
        String procName = procNames.get(i);
        // look in the catalog
        Procedure proc = context.procedures.get(procName);
        if (proc == null) {
            // check default procs and send them off to be explained using the regular
            // adhoc explain process
            proc = context.m_defaultProcs.checkForDefaultProcedure(procName);
            if (proc != null) {
                String sql = DefaultProcedureManager.sqlForDefaultProc(proc);
                List<String> sqlStatements = new ArrayList<>(1);
                sqlStatements.add(sql);
                return runNonDDLAdHoc(context, sqlStatements, true, null, ExplainMode.EXPLAIN_DEFAULT_PROC, false, new Object[0]);
            }
            return makeQuickResponse(ClientResponse.GRACEFUL_FAILURE, "Procedure " + procName + " not in catalog");
        }
        vt[i] = new VoltTable(new VoltTable.ColumnInfo("SQL_STATEMENT", VoltType.STRING),
                new VoltTable.ColumnInfo("EXECUTION_PLAN", VoltType.STRING));
        for (Statement stmt : proc.getStatements()) {
            vt[i].addRow(stmt.getSqltext(), Encoder.hexDecodeToString(stmt.getExplainplan()));
        }
    }
    ClientResponseImpl response = new ClientResponseImpl(ClientResponseImpl.SUCCESS,
            ClientResponse.UNINITIALIZED_APP_STATUS_CODE, null, vt, null);
    CompletableFuture<ClientResponse> fut = new CompletableFuture<>();
    fut.complete(response);
    return fut;
}
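This code backs the @ExplainProc system procedure, so the columns built above (SQL_STATEMENT, EXECUTION_PLAN) are exactly what a client receives. A minimal client-side sketch using the public client API; the host and procedure name are hypothetical:

// Sketch only: "localhost" and "MyStoredProc" are placeholders.
Client client = ClientFactory.createClient();
client.createConnection("localhost");
ClientResponse response = client.callProcedure("@ExplainProc", "MyStoredProc");
for (VoltTable table : response.getResults()) {
    while (table.advanceRow()) {
        System.out.println(table.getString("SQL_STATEMENT"));
        System.out.println(table.getString("EXECUTION_PLAN"));
    }
}
client.close();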
Use of org.voltdb.CatalogContext in project voltdb by VoltDB.
In class ExplainView, method run:
public CompletableFuture<ClientResponse> run(String fullViewNames) {
    CatalogContext context = VoltDB.instance().getCatalogContext();
    /*
     * TODO: We don't actually support multiple view names in an ExplainView call,
     * so I THINK that the string is always a single view symbol and all this
     * splitting and iterating is a no-op.
     */
    List<String> viewNames = SQLLexer.splitStatements(fullViewNames);
    int size = viewNames.size();
    VoltTable[] vt = new VoltTable[size];
    CatalogMap<Table> tables = context.database.getTables();
    for (int i = 0; i < size; i++) {
        String viewName = viewNames.get(i);
        // get the view table from the catalog
        Table viewTable = tables.getIgnoreCase(viewName);
        if (viewTable == null) {
            return makeQuickResponse(ClientResponse.GRACEFUL_FAILURE, "View " + viewName + " does not exist.");
        }
        vt[i] = new VoltTable(new VoltTable.ColumnInfo("TASK", VoltType.STRING),
                new VoltTable.ColumnInfo("EXECUTION_PLAN", VoltType.STRING));
        try {
            ArrayList<String[]> viewExplanation = ViewExplainer.explain(viewTable);
            for (String[] row : viewExplanation) {
                vt[i].addRow(row[0], row[1]);
            }
        } catch (Exception e) {
            return makeQuickResponse(ClientResponse.GRACEFUL_FAILURE, e.getMessage());
        }
    }
    ClientResponseImpl response = new ClientResponseImpl(ClientResponseImpl.SUCCESS,
            ClientResponse.UNINITIALIZED_APP_STATUS_CODE, null, vt, null);
    CompletableFuture<ClientResponse> fut = new CompletableFuture<>();
    fut.complete(response);
    return fut;
}
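The same pattern applies from the client through the @ExplainView system procedure, which returns the TASK/EXECUTION_PLAN pairs assembled above. A minimal sketch; the host and view name are hypothetical:

// Sketch only: "localhost" and "V_ORDER_TOTALS" are placeholders.
Client client = ClientFactory.createClient();
client.createConnection("localhost");
ClientResponse response = client.callProcedure("@ExplainView", "V_ORDER_TOTALS");
VoltTable plan = response.getResults()[0];
while (plan.advanceRow()) {
    System.out.println(plan.getString("TASK") + ": " + plan.getString("EXECUTION_PLAN"));
}
client.close();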
Use of org.voltdb.CatalogContext in project voltdb by VoltDB.
In class UpdateApplicationBase, method prepareApplicationCatalogDiff:
/**
 * @param operationBytes The bytes for the catalog operation, if any. May be null in all cases.
 *                       For UpdateApplicationCatalog, this will contain the compiled catalog jarfile bytes.
 *                       For UpdateClasses, this will contain the class jarfile bytes.
 *                       For AdHoc DDL work, this will be null.
 * @param operationString The string for the catalog operation, if any. May be null in all cases.
 *                        For UpdateApplicationCatalog, this will contain the deployment string to apply.
 *                        For UpdateClasses, this will contain the class deletion patterns.
 *                        For AdHoc DDL work, this will be null.
 */
public static CatalogChangeResult prepareApplicationCatalogDiff(String invocationName,
        final byte[] operationBytes,
        final String operationString,
        final String[] adhocDDLStmts,
        final byte[] replayHashOverride,
        final boolean isPromotion,
        final DrRoleType drRole,
        final boolean useAdhocDDL,
        boolean adminConnection,
        String hostname,
        String user) throws PrepareDiffFailureException {
    // create the change result and set up all the boilerplate
    CatalogChangeResult retval = new CatalogChangeResult();
    // ensure non-null
    retval.tablesThatMustBeEmpty = new String[0];
    retval.hasSchemaChange = true;
    if (replayHashOverride != null) {
        retval.isForReplay = true;
    }
    try {
        // catalog-change-specific boilerplate
        CatalogContext context = VoltDB.instance().getCatalogContext();
        // Start by assuming we're doing an @UpdateApplicationCatalog. The if-ladder below
        // will complete with newCatalogJar actually containing the bytes of the
        // catalog to be applied, and deploymentString will contain an actual deployment string,
        // or null if it still needs to be filled in.
        InMemoryJarfile newCatalogJar = null;
        InMemoryJarfile oldJar = context.getCatalogJar().deepCopy();
        boolean updatedClass = false;
        String deploymentString = operationString;
        if ("@UpdateApplicationCatalog".equals(invocationName)) {
            // Grab the current catalog bytes if @UAC had a null catalog from a deployment-only update
            if ((operationBytes == null) || (operationBytes.length == 0)) {
                newCatalogJar = oldJar;
            } else {
                newCatalogJar = CatalogUtil.loadInMemoryJarFile(operationBytes);
            }
            // If the deploymentString is null, we'll fill it in with the current deployment later.
            // Otherwise, deploymentString has the right contents; don't touch it.
        } else if ("@UpdateClasses".equals(invocationName)) {
            // the provided newCatalogJar is the jarfile with the new classes
            if (operationBytes != null) {
                newCatalogJar = new InMemoryJarfile(operationBytes);
            }
            try {
                InMemoryJarfile modifiedJar = modifyCatalogClasses(context.catalog, oldJar, operationString,
                        newCatalogJar, drRole == DrRoleType.XDCR);
                if (modifiedJar == null) {
                    newCatalogJar = oldJar;
                } else {
                    newCatalogJar = modifiedJar;
                    updatedClass = true;
                }
            } catch (ClassNotFoundException e) {
                throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE,
                        "Unexpected error in @UpdateClasses modifying classes from catalog: " + e.getMessage());
            }
            // The real deploymentString should be the current deployment; just set it to null
            // here and let it get filled in correctly later.
            deploymentString = null;
            // mark it as a non-schema change
            retval.hasSchemaChange = false;
        } else if ("@AdHoc".equals(invocationName)) {
            // adhocDDLStmts should be applied to the current catalog
            try {
                newCatalogJar = addDDLToCatalog(context.catalog, oldJar, adhocDDLStmts, drRole == DrRoleType.XDCR);
            } catch (VoltCompilerException vce) {
                throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE, vce.getMessage());
            } catch (IOException ioe) {
                throw new PrepareDiffFailureException(ClientResponse.UNEXPECTED_FAILURE, ioe.getMessage());
            } catch (Throwable t) {
                String msg = "Unexpected condition occurred applying DDL statements: " + t.toString();
                compilerLog.error(msg);
                throw new PrepareDiffFailureException(ClientResponse.UNEXPECTED_FAILURE, msg);
            }
            assert (newCatalogJar != null);
            if (newCatalogJar == null) {
                // Shouldn't ever get here
                String msg = "Unexpected failure in applying DDL statements to original catalog";
                compilerLog.error(msg);
                throw new PrepareDiffFailureException(ClientResponse.UNEXPECTED_FAILURE, msg);
            }
            // The real deploymentString should be the current deployment; just set it to null
            // here and let it get filled in correctly later.
            deploymentString = null;
        } else {
            // TODO: this if-chain doesn't feel like it even should exist
            assert (false);
        }
        // get the diff between catalogs
        // try to get the new catalog from the params
        Pair<InMemoryJarfile, String> loadResults = null;
        try {
            loadResults = CatalogUtil.loadAndUpgradeCatalogFromJar(newCatalogJar, drRole == DrRoleType.XDCR);
        } catch (IOException ioe) {
            // falling through to the ZOMG message in the big catch
            throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE, ioe.getMessage());
        }
        retval.catalogBytes = loadResults.getFirst().getFullJarBytes();
        if (!retval.isForReplay) {
            retval.catalogHash = loadResults.getFirst().getSha1Hash();
        } else {
            retval.catalogHash = replayHashOverride;
        }
        String newCatalogCommands = CatalogUtil.getSerializedCatalogStringFromJar(loadResults.getFirst());
        retval.upgradedFromVersion = loadResults.getSecond();
        if (newCatalogCommands == null) {
            throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE, "Unable to read from catalog bytes");
        }
        Catalog newCatalog = new Catalog();
        newCatalog.execute(newCatalogCommands);
        // Retrieve the original deployment string, if necessary
        if (deploymentString == null) {
            // Go get the deployment string from the current catalog context
            byte[] deploymentBytes = context.getDeploymentBytes();
            if (deploymentBytes != null) {
                deploymentString = new String(deploymentBytes, Constants.UTF8ENCODING);
            }
            if (deploymentBytes == null || deploymentString == null) {
                throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE,
                        "No deployment file provided and unable to recover previous deployment settings.");
            }
        }
        DeploymentType dt = CatalogUtil.parseDeploymentFromString(deploymentString);
        if (dt == null) {
            throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE,
                    "Unable to update deployment configuration: Error parsing deployment string");
        }
        if (isPromotion && drRole == DrRoleType.REPLICA) {
            assert dt.getDr().getRole() == DrRoleType.REPLICA;
            dt.getDr().setRole(DrRoleType.MASTER);
        }
        String result = CatalogUtil.compileDeployment(newCatalog, dt, false);
        if (result != null) {
            throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE,
                    "Unable to update deployment configuration: " + result);
        }
        // In non-legacy mode, discard the path element.
        if (!VoltDB.instance().isRunningWithOldVerbs()) {
            dt.setPaths(null);
        }
        // Always get the deployment after it's been adjusted.
        retval.deploymentString = CatalogUtil.getDeployment(dt, true);
        retval.deploymentHash = CatalogUtil.makeDeploymentHash(retval.deploymentString.getBytes(Constants.UTF8ENCODING));
        // Store the version of the catalog the diffs were created against.
        // Verified when / if the update procedure runs, in order to verify
        // that catalogs only move forward.
        retval.expectedCatalogVersion = context.catalogVersion;
        // compute the diff
        CatalogDiffEngine diff = new CatalogDiffEngine(context.catalog, newCatalog);
        if (!diff.supported()) {
            throw new PrepareDiffFailureException(ClientResponse.GRACEFUL_FAILURE,
                    "The requested catalog change(s) are not supported:\n" + diff.errors());
        }
        String commands = diff.commands();
        compilerLog.info(diff.getDescriptionOfChanges(updatedClass));
        retval.requireCatalogDiffCmdsApplyToEE = diff.requiresCatalogDiffCmdsApplyToEE();
        // since diff commands can be stupidly big, compress them here
        retval.encodedDiffCommands = Encoder.compressAndBase64Encode(commands);
        retval.diffCommandsLength = commands.length();
        String[][] emptyTablesAndReasons = diff.tablesThatMustBeEmpty();
        assert (emptyTablesAndReasons.length == 2);
        assert (emptyTablesAndReasons[0].length == emptyTablesAndReasons[1].length);
        retval.tablesThatMustBeEmpty = emptyTablesAndReasons[0];
        retval.reasonsForEmptyTables = emptyTablesAndReasons[1];
        retval.requiresSnapshotIsolation = diff.requiresSnapshotIsolation();
        retval.requiresNewExportGeneration = diff.requiresNewExportGeneration();
        retval.worksWithElastic = diff.worksWithElastic();
    } catch (PrepareDiffFailureException e) {
        throw e;
    } catch (Exception e) {
        String msg = "Unexpected error in adhoc or catalog update: " + e.getClass() + ", " + e.getMessage();
        compilerLog.warn(msg, e);
        throw new PrepareDiffFailureException(ClientResponse.UNEXPECTED_FAILURE, msg);
    }
    return retval;
}
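The invocationName branches above correspond to three client-visible entry points: @UpdateApplicationCatalog, @UpdateClasses, and @AdHoc DDL. A minimal sketch of the latter two from the public client API; the host, jar path, and DDL are hypothetical:

// Sketch only: host name, jar path, and table definition are placeholders.
Client client = ClientFactory.createClient();
client.createConnection("localhost");
// @AdHoc DDL: operationBytes/operationString arrive as null and adhocDDLStmts carries the statement(s).
client.callProcedure("@AdHoc", "create table example_t (id integer not null, primary key (id));");
// @UpdateClasses: operationBytes is the class jar, operationString the (optional) deletion pattern.
client.updateClasses(new File("/tmp/procedures.jar"), null);
client.close();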
Use of org.voltdb.CatalogContext in project voltdb by VoltDB.
In class RegressionSuite, method getCurrentCatalog:
private static Catalog getCurrentCatalog() {
    CatalogContext context = VoltDB.instance().getCatalogContext();
    if (context == null) {
        return null;
    }
    InMemoryJarfile currentCatalogJar = context.getCatalogJar();
    String serializedCatalogString = CatalogUtil.getSerializedCatalogStringFromJar(currentCatalogJar);
    assertNotNull(serializedCatalogString);
    Catalog c = new Catalog();
    c.execute(serializedCatalogString);
    return c;
}
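A test can pair this with the CatalogDiffEngine seen in prepareApplicationCatalogDiff above to check that the running catalog matches an expected one. A minimal sketch, assuming diff.commands() returns an empty string when the two catalogs are identical:

// Sketch only: assertCatalogMatches is a hypothetical helper, and the empty-commands
// check assumes identical catalogs produce no diff commands.
private static void assertCatalogMatches(Catalog expected) {
    Catalog current = getCurrentCatalog();
    assertNotNull(current);
    CatalogDiffEngine diff = new CatalogDiffEngine(expected, current);
    assertTrue("Unexpected catalog differences:\n" + diff.commands(), diff.commands().isEmpty());
}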