use of org.voltdb.compiler.VoltCompiler.VoltCompilerException in project voltdb by VoltDB.
the class DDLCompiler method addIndexToCatalog.
private static void addIndexToCatalog(Database db, Table table, VoltXMLElement node, Map<String, String> indexReplacementMap, HashMap<String, Index> indexMap, HashMap<String, Column> columnMap, VoltCompiler compiler) throws VoltCompilerException {
assert node.name.equals("index");
String name = node.attributes.get("name");
boolean unique = Boolean.parseBoolean(node.attributes.get("unique"));
boolean assumeUnique = Boolean.parseBoolean(node.attributes.get("assumeunique"));
AbstractParsedStmt dummy = new ParsedSelectStmt(null, db);
dummy.setDDLIndexedTable(table);
StringBuffer msg = new StringBuffer(String.format("Index \"%s\" ", name));
// "parse" the expression trees for an expression-based index (vs. a simple column value index)
List<AbstractExpression> exprs = null;
// "parse" the WHERE expression for partial index if any
AbstractExpression predicate = null;
// Some expressions require special validation when used in indexes; not every
// expression can be indexed. Scan each expression's result type first and reject
// those that cannot be indexed (e.g. boolean, geography). The remaining expressions
// are gathered into the checkExpressions list and validated together afterwards.
List<AbstractExpression> checkExpressions = new ArrayList<>();
for (VoltXMLElement subNode : node.children) {
if (subNode.name.equals("exprs")) {
exprs = new ArrayList<>();
for (VoltXMLElement exprNode : subNode.children) {
AbstractExpression expr = dummy.parseExpressionTree(exprNode);
expr.resolveForTable(table);
expr.finalizeValueTypes();
// exprMsg will be populated with the expression's details when
// its value type is not indexable
StringBuffer exprMsg = new StringBuffer();
if (!expr.isValueTypeIndexable(exprMsg)) {
// indexing on an expression whose result type is not indexable (e.g. boolean) is not supported.
throw compiler.new VoltCompilerException("Cannot create index \"" + name + "\" because it contains " + exprMsg + ", which is not supported.");
}
if ((unique || assumeUnique) && !expr.isValueTypeUniqueIndexable(exprMsg)) {
// a unique index on an expression whose result type is not uniquely indexable is not supported.
throw compiler.new VoltCompilerException("Cannot create unique index \"" + name + "\" because it contains " + exprMsg + ", which is not supported.");
}
// rest of the validity guards will be evaluated after collecting all the expressions.
checkExpressions.add(expr);
exprs.add(expr);
}
} else if (subNode.name.equals("predicate")) {
assert (subNode.children.size() == 1);
VoltXMLElement predicateXML = subNode.children.get(0);
assert (predicateXML != null);
predicate = buildPartialIndexPredicate(dummy, name, predicateXML, table, compiler);
}
}
// Check all the subexpressions we gathered up.
if (!AbstractExpression.validateExprsForIndexesAndMVs(checkExpressions, msg)) {
// The error message will be in the StringBuffer msg.
throw compiler.new VoltCompilerException(msg.toString());
}
String colList = node.attributes.get("columns");
String[] colNames = colList.split(",");
Column[] columns = new Column[colNames.length];
boolean has_nonint_col = false;
boolean has_geo_col = false;
String nonint_col_name = null;
for (int i = 0; i < colNames.length; i++) {
columns[i] = columnMap.get(colNames[i]);
if (columns[i] == null) {
return;
}
}
UnsafeOperatorsForDDL unsafeOps = new UnsafeOperatorsForDDL();
if (exprs == null) {
for (int i = 0; i < colNames.length; i++) {
VoltType colType = VoltType.get((byte) columns[i].getType());
if (!colType.isIndexable()) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " values are not currently supported as index keys: \"" + colNames[i] + "\"";
throw compiler.new VoltCompilerException(emsg);
}
if ((unique || assumeUnique) && !colType.isUniqueIndexable()) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " values are not currently supported as unique index keys: \"" + colNames[i] + "\"";
throw compiler.new VoltCompilerException(emsg);
}
if (!colType.isBackendIntegerType()) {
has_nonint_col = true;
nonint_col_name = colNames[i];
has_geo_col = colType.equals(VoltType.GEOGRAPHY);
if (has_geo_col && colNames.length > 1) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " values must be the only component of an index key: \"" + nonint_col_name + "\"";
throw compiler.new VoltCompilerException(emsg);
}
}
}
} else {
for (AbstractExpression expression : exprs) {
VoltType colType = expression.getValueType();
if (!colType.isIndexable()) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " valued expressions are not currently supported as index keys.";
throw compiler.new VoltCompilerException(emsg);
}
if ((unique || assumeUnique) && !colType.isUniqueIndexable()) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " valued expressions are not currently supported as unique index keys.";
throw compiler.new VoltCompilerException(emsg);
}
if (!colType.isBackendIntegerType()) {
has_nonint_col = true;
nonint_col_name = "<expression>";
has_geo_col = colType.equals(VoltType.GEOGRAPHY);
if (has_geo_col) {
if (exprs.size() > 1) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " values must be the only component of an index key.";
throw compiler.new VoltCompilerException(emsg);
} else if (!(expression instanceof TupleValueExpression)) {
String emsg = "Cannot create index \"" + name + "\" because " + colType.getName() + " expressions must be simple column expressions.";
throw compiler.new VoltCompilerException(emsg);
}
}
}
expression.findUnsafeOperatorsForDDL(unsafeOps);
}
}
Index index = table.getIndexes().add(name);
index.setCountable(false);
index.setIssafewithnonemptysources(!unsafeOps.isUnsafe());
// Set the index type. It will be one of:
// - Covering cell index (geo index for CONTAINS predicates)
// - HASH index (set in HSQL because "hash" is in the name of the
// constraint or the index)
// - TREE index, which is the default
boolean isHashIndex = node.attributes.get("ishashindex").equals("true");
if (has_geo_col) {
index.setType(IndexType.COVERING_CELL_INDEX.getValue());
} else if (isHashIndex) {
// warn user that hash index will be deprecated
compiler.addWarn("Hash indexes are deprecated. In a future release, VoltDB will only support tree indexes, even if the index name contains the string \"hash\"");
// make the index a hash.
if (has_nonint_col) {
String emsg = "Index " + name + " in table " + table.getTypeName() + " uses a non-hashable column " + nonint_col_name;
throw compiler.new VoltCompilerException(emsg);
}
index.setType(IndexType.HASH_TABLE.getValue());
} else {
index.setType(IndexType.BALANCED_TREE.getValue());
index.setCountable(true);
}
// For an expression index, the column refs added here are not the actual index keys, but they still represent the columns that will trigger an index update when their values change.
for (int i = 0; i < columns.length; i++) {
ColumnRef cref = index.getColumns().add(columns[i].getTypeName());
cref.setColumn(columns[i]);
cref.setIndex(i);
}
if (exprs != null) {
try {
index.setExpressionsjson(convertToJSONArray(exprs));
} catch (JSONException e) {
throw compiler.new VoltCompilerException("Unexpected error serializing non-column expressions for index '" + name + "' on type '" + table.getTypeName() + "': " + e.toString());
}
}
index.setUnique(unique);
if (assumeUnique) {
index.setUnique(true);
}
index.setAssumeunique(assumeUnique);
if (predicate != null) {
try {
index.setPredicatejson(convertToJSONObject(predicate));
} catch (JSONException e) {
throw compiler.new VoltCompilerException("Unexpected error serializing predicate for partial index '" + name + "' on type '" + table.getTypeName() + "': " + e.toString());
}
}
// Check whether this index duplicates an existing index on the table; uniqueness, countability, and index type all make two indexes different.
for (Index existingIndex : table.getIndexes()) {
// skip the index itself
if (existingIndex == index) {
continue;
}
if (indexesAreDups(existingIndex, index)) {
// replace any constraints using one index with the other
//for () TODO
// get ready for replacements from constraints created later
indexReplacementMap.put(index.getTypeName(), existingIndex.getTypeName());
// if the index is a user-named index...
if (index.getTypeName().startsWith(HSQLInterface.AUTO_GEN_PREFIX) == false) {
// on dup-detection, add a warning but don't fail
String emsg = String.format("Dropping index %s on table %s because it duplicates index %s.", index.getTypeName(), table.getTypeName(), existingIndex.getTypeName());
compiler.addWarn(emsg);
}
// drop the index and GTFO
table.getIndexes().delete(index.getTypeName());
return;
}
}
String smsg = "Created index: " + name + " on table: " + table.getTypeName() + " of type: " + IndexType.get(index.getType()).name();
compiler.addInfo(smsg);
indexMap.put(name, index);
}
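The index-type selection at the end of this method can be read in isolation: a GEOGRAPHY key always becomes a covering cell index, a name containing "hash" requests a hash index that is only legal over backend integer key columns, and everything else becomes a countable balanced tree. A minimal standalone sketch of that decision (not VoltDB code; IndexKind and chooseIndexKind are hypothetical names used purely for illustration):

enum IndexKind { COVERING_CELL, HASH_TABLE, BALANCED_TREE }

static IndexKind chooseIndexKind(boolean hasGeoColumn, boolean nameRequestsHash, boolean hasNonIntegerColumn) {
    if (hasGeoColumn) {
        // GEOGRAPHY keys are always backed by a covering cell index.
        return IndexKind.COVERING_CELL;
    }
    if (nameRequestsHash) {
        // A hash index is only legal when every key column is a backend integer type.
        if (hasNonIntegerColumn) {
            throw new IllegalArgumentException("hash index requested over a non-hashable column");
        }
        return IndexKind.HASH_TABLE;
    }
    // Default: a countable balanced tree index.
    return IndexKind.BALANCED_TREE;
}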
use of org.voltdb.compiler.VoltCompiler.VoltCompilerException in project voltdb by VoltDB.
the class DDLCompiler method parseCreateProcedureClauses.
/**
* Parse and validate the substring containing ALLOW and PARTITION
* clauses for CREATE PROCEDURE.
* @param descriptor procedure descriptor populated with role names from the ALLOW clause
* @param clauses the substring to parse
* @return parsed and validated partition data or null if there was no PARTITION clause
* @throws VoltCompilerException
*/
private CreateProcedurePartitionData parseCreateProcedureClauses(ProcedureDescriptor descriptor, String clauses) throws VoltCompilerException {
// Nothing to do if there are no clauses: there is no partition data to return and no roles to add.
if (clauses == null || clauses.isEmpty()) {
return null;
}
CreateProcedurePartitionData data = null;
Matcher matcher = SQLParser.matchAnyCreateProcedureStatementClause(clauses);
int start = 0;
while (matcher.find(start)) {
start = matcher.end();
if (matcher.group(1) != null) {
// Add roles if it's an ALLOW clause. More than one ALLOW clause is okay.
for (String roleName : StringUtils.split(matcher.group(1), ',')) {
// Don't put the same role in the list more than once.
String roleNameFixed = roleName.trim().toLowerCase();
if (!descriptor.m_authGroups.contains(roleNameFixed)) {
descriptor.m_authGroups.add(roleNameFixed);
}
}
} else {
// Add partition info if it's a PARTITION clause. Only one is allowed.
if (data != null) {
throw m_compiler.new VoltCompilerException("Only one PARTITION clause is allowed for CREATE PROCEDURE.");
}
data = new CreateProcedurePartitionData();
data.tableName = matcher.group(2);
data.columnName = matcher.group(3);
data.parameterNo = matcher.group(4);
}
}
return data;
}
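The clause scan above repeatedly applies one combined regular expression to the text after CREATE PROCEDURE, consuming either an ALLOW role list or a single PARTITION clause per match. The real pattern lives in SQLParser.matchAnyCreateProcedureStatementClause; the sketch below uses a simplified, assumed pattern purely to illustrate the group-based dispatch in the loop.

import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class ClauseScanSketch {
    // Simplified stand-in for SQLParser.matchAnyCreateProcedureStatementClause.
    private static final Pattern CLAUSE = Pattern.compile(
            "ALLOW\\s+([\\w$]+(?:\\s*,\\s*[\\w$]+)*)" +                    // group 1: comma-separated role list
            "|PARTITION\\s+ON\\s+TABLE\\s+(\\w+)\\s+COLUMN\\s+(\\w+)" +    // groups 2, 3: table and column
            "(?:\\s+PARAMETER\\s+(\\d+))?",                                // group 4: optional parameter index
            Pattern.CASE_INSENSITIVE);

    static void scan(String clauses) {
        Matcher matcher = CLAUSE.matcher(clauses);
        int start = 0;
        while (matcher.find(start)) {
            start = matcher.end();
            if (matcher.group(1) != null) {
                System.out.println("roles: " + matcher.group(1));
            } else {
                System.out.println("partition on " + matcher.group(2) + "." + matcher.group(3)
                        + ", parameter " + matcher.group(4));
            }
        }
    }

    public static void main(String[] args) {
        // Prints the role list first, then the partition target.
        scan("ALLOW admin, operator PARTITION ON TABLE orders COLUMN cust_id PARAMETER 0");
    }
}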
use of org.voltdb.compiler.VoltCompiler.VoltCompilerException in project voltdb by VoltDB.
the class DDLCompiler method getNextStatement.
private DDLStatement getNextStatement(Reader reader, VoltCompiler compiler) throws VoltCompiler.VoltCompilerException {
int state = kStateInvalid;
char[] nchar = new char[1];
@SuppressWarnings("synthetic-access") DDLStatement retval = new DDLStatement();
retval.lineNo = m_currLineNo;
try {
// Read until the start of a statement, or return null if there is no next statement to be found.
do {
if (reader.read(nchar) == -1) {
return null;
}
// trim leading whitespace outside of a statement
if (nchar[0] == '\n') {
m_currLineNo++;
} else if (nchar[0] == '\r') {
} else if (nchar[0] == ' ') {
} else // trim leading comments outside of a statement
if (nchar[0] == '-') {
// A '-' was found; if the next character is also '-', this is a comment to be read through the next newline.
if (reader.read(nchar) == -1) {
// garbage at the end of the file, but easy enough to tolerate
return null;
}
if (nchar[0] != '-') {
String msg = "Invalid content before or between DDL statements.";
throw compiler.new VoltCompilerException(msg, m_currLineNo);
} else {
do {
if (reader.read(nchar) == -1) {
// a comment extending to EOF means no statement
return null;
}
} while (nchar[0] != '\n');
// process the newline and loop
m_currLineNo++;
}
} else // not whitespace or comment: start of a statement.
{
retval.statement += nchar[0];
state = kStateReading;
// Set the line number to the start of the real statement.
retval.lineNo = m_currLineNo;
break;
}
} while (true);
while (state != kStateCompleteStatement) {
if (reader.read(nchar) == -1) {
String msg = "Schema file ended mid-statement (no semicolon found).";
throw compiler.new VoltCompilerException(msg, retval.lineNo);
}
if (state == kStateReading) {
state = readingState(nchar, retval);
} else if (state == kStateReadingCommentDelim) {
state = readingCommentDelimState(nchar, retval);
} else if (state == kStateReadingComment) {
state = readingCommentState(nchar, retval);
} else if (state == kStateReadingStringLiteral) {
state = readingStringLiteralState(nchar, retval);
} else if (state == kStateReadingStringLiteralSpecialChar) {
state = readingStringLiteralSpecialChar(nchar, retval);
} else if (state == kStateReadingCodeBlockDelim) {
state = readingCodeBlockStateDelim(nchar, retval);
} else if (state == kStateReadingCodeBlockNextDelim) {
state = readingCodeBlockStateNextDelim(nchar, retval);
} else if (state == kStateReadingCodeBlock) {
state = readingCodeBlock(nchar, retval);
} else if (state == kStateReadingEndCodeBlockDelim) {
state = readingEndCodeBlockStateDelim(nchar, retval);
} else if (state == kStateReadingEndCodeBlockNextDelim) {
state = readingEndCodeBlockStateNextDelim(nchar, retval);
} else {
throw compiler.new VoltCompilerException("Unrecoverable error parsing DDL.");
}
}
return retval;
} catch (IOException e) {
throw compiler.new VoltCompilerException("Unable to read from file");
}
}
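getNextStatement is a character-at-a-time state machine. The stripped-down sketch below keeps only its basic shape, assuming nothing but "--" comments and ';' terminators; the real method also tracks string literals, block comments, and procedure code blocks, and reports an error for a statement cut off at end of file rather than returning it.

import java.io.IOException;
import java.io.Reader;

final class StatementSplitterSketch {
    // Returns the next ';'-terminated statement, or null when the input is exhausted.
    static String nextStatement(Reader reader) throws IOException {
        StringBuilder stmt = new StringBuilder();
        int c;
        while ((c = reader.read()) != -1) {
            if (c == '-') {
                int next = reader.read();
                if (next == '-') {
                    // "--" comment: discard through the end of the line (or EOF)
                    while ((c = reader.read()) != -1 && c != '\n') { /* skip */ }
                    continue;
                }
                stmt.append('-');
                if (next == -1) {
                    break;
                }
                c = next;
            }
            stmt.append((char) c);
            if (c == ';') {
                return stmt.toString();
            }
        }
        // Unterminated trailing text is returned as-is here; the real code treats it as an error.
        String leftover = stmt.toString().trim();
        return leftover.isEmpty() ? null : leftover;
    }
}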
use of org.voltdb.compiler.VoltCompiler.VoltCompilerException in project voltdb by VoltDB.
the class DDLCompiler method validateTupleLimitDeleteStmt.
/** Makes sure that the DELETE statement on a LIMIT PARTITION ROWS EXECUTE (DELETE ...)
* - Contains no parse errors
* - Is actually a DELETE statement
* - Targets the table being constrained
* Throws VoltCompilerException if any of these does not hold
* @param catStmt The catalog statement whose sql text field is the DELETE to be validated
**/
private void validateTupleLimitDeleteStmt(Statement catStmt) throws VoltCompilerException {
String tableName = catStmt.getParent().getTypeName();
String msgPrefix = "Error: Table " + tableName + " has invalid DELETE statement for LIMIT PARTITION ROWS constraint: ";
VoltXMLElement deleteXml = null;
try {
// We parse the statement here and cache the XML below if the statement passes
// validation.
deleteXml = m_hsql.getXMLCompiledStatement(catStmt.getSqltext());
} catch (HSQLInterface.HSQLParseException e) {
throw m_compiler.new VoltCompilerException(msgPrefix + "parse error: " + e.getMessage());
}
if (!deleteXml.name.equals("delete")) {
// Could in theory allow TRUNCATE TABLE here too.
throw m_compiler.new VoltCompilerException(msgPrefix + "not a DELETE statement");
}
String deleteTarget = deleteXml.attributes.get("table");
if (!deleteTarget.equals(tableName)) {
throw m_compiler.new VoltCompilerException(msgPrefix + "target of DELETE must be " + tableName);
}
m_limitDeleteStmtToXml.put(catStmt, deleteXml);
}
use of org.voltdb.compiler.VoltCompiler.VoltCompilerException in project voltdb by VoltDB.
the class DDLCompiler method processVoltDBStatements.
private void processVoltDBStatements(final Database db, final DdlProceduresToLoad whichProcs, DDLStatement stmt) throws VoltCompilerException {
boolean processed = false;
try {
// Process a VoltDB-specific DDL statement, like PARTITION, REPLICATE,
// CREATE PROCEDURE, CREATE FUNCTION, and CREATE ROLE.
processed = m_voltStatementProcessor.process(stmt, db, whichProcs);
} catch (VoltCompilerException e) {
// Reformat the message thrown by VoltDB DDL processing to have a line number.
String msg = "VoltDB DDL Error: \"" + e.getMessage() + "\" in statement starting on lineno: " + stmt.lineNo;
throw m_compiler.new VoltCompilerException(msg);
}
if (!processed) {
try {
//* enable to debug */ System.out.println("DEBUG: " + stmt.statement);
// Kind of ugly: we hex-encode each statement to avoid embedded
// newlines, so that statements can be delimited with newlines.
m_fullDDL += Encoder.hexEncode(stmt.statement) + "\n";
// figure out what table this DDL might affect to minimize diff processing
HSQLDDLInfo ddlStmtInfo = HSQLLexer.preprocessHSQLDDL(stmt.statement);
// Get the diff that results from applying this statement and apply it
// to our local tree (with Volt-specific additions)
VoltXMLDiff thisStmtDiff = m_hsql.runDDLCommandAndDiff(ddlStmtInfo, stmt.statement);
// null diff means no change (usually drop if exists for non-existent thing)
if (thisStmtDiff != null) {
applyDiff(thisStmtDiff);
}
// special treatment for stream syntax
if (ddlStmtInfo.creatStream) {
processCreateStreamStatement(stmt, db, whichProcs);
}
} catch (HSQLParseException e) {
String msg = "DDL Error: \"" + e.getMessage() + "\" in statement starting on lineno: " + stmt.lineNo;
throw m_compiler.new VoltCompilerException(msg, stmt.lineNo);
}
}
}
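The hex-encoding in the fall-through branch keeps the accumulated m_fullDDL newline-delimited even when individual statements span multiple lines. A plain-JDK sketch of the same round trip (VoltDB uses its own Encoder.hexEncode utility; java.util.HexFormat needs Java 17 or later):

import java.nio.charset.StandardCharsets;
import java.util.HexFormat;

final class DdlEncodingSketch {
    // Append one statement as a single hex line so the aggregate stays newline-delimited.
    static String appendEncoded(String fullDdl, String statement) {
        return fullDdl + HexFormat.of().formatHex(statement.getBytes(StandardCharsets.UTF_8)) + "\n";
    }

    // Recover the original statement text from one hex line.
    static String decodeLine(String hexLine) {
        return new String(HexFormat.of().parseHex(hexLine), StandardCharsets.UTF_8);
    }
}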