Use of org.apache.ignite.internal.sql.command.SqlAnalyzeCommand in project ignite by apache.
The class SqlParserAnalyzeSelfTest, method validate.
/**
* Validate command.
*
* @param cmd Command to validate.
* @param targets Expected targets.
*/
private static void validate(SqlAnalyzeCommand cmd, StatisticsTarget... targets) {
    assertEquals(cmd.configurations().size(), targets.length);

    Set<StatisticsTarget> cmdTargets = cmd.configurations().stream()
        .map(c -> new StatisticsTarget(c.key(), c.columns().keySet().toArray(new String[0])))
        .collect(Collectors.toSet());

    for (StatisticsTarget target : targets)
        assertTrue(cmdTargets.contains(target));
}
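For context, a minimal sketch of the kind of expected target this helper compares against. The statement text, schema and column names are illustrative assumptions (not taken from the Ignite test suite), only constructors already visible on this page are used, and StatisticsTarget equality is assumed to be defined over its key and column set.

// Illustrative sketch (assumed names): an expected target for a command parsed from
// "ANALYZE PUBLIC.TEST(A, B)", built the same way validate(...) derives targets from the command.
StatisticsTarget expected = new StatisticsTarget(new StatisticsKey("PUBLIC", "TEST"), "A", "B");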
Use of org.apache.ignite.internal.sql.command.SqlAnalyzeCommand in project ignite by apache.
The class QueryParser, method parseNative.
/**
* Tries to parse the SQL query text using the native parser. Only the first (leading) SQL command of a
* multi-statement query is actually parsed.
*
* @param schemaName Schema name.
* @param qry Query whose SQL text should be parsed.
* @param remainingAllowed Whether multiple statements are allowed.
* @return Command or {@code null} if the query cannot be parsed by the native parser.
*/
@SuppressWarnings("IfMayBeConditional")
@Nullable
private QueryParserResult parseNative(String schemaName, SqlFieldsQuery qry, boolean remainingAllowed) {
    String sql = qry.getSql();

    // Heuristic check for fast return.
    if (!INTERNAL_CMD_RE.matcher(sql.trim()).find())
        return null;

    try {
        SqlParser parser = new SqlParser(schemaName, sql);

        SqlCommand nativeCmd = parser.nextCommand();

        assert nativeCmd != null : "Empty query. Parser met end of data";

        if (!(nativeCmd instanceof SqlCreateIndexCommand || nativeCmd instanceof SqlDropIndexCommand
            || nativeCmd instanceof SqlBeginTransactionCommand || nativeCmd instanceof SqlCommitTransactionCommand
            || nativeCmd instanceof SqlRollbackTransactionCommand || nativeCmd instanceof SqlBulkLoadCommand
            || nativeCmd instanceof SqlAlterTableCommand || nativeCmd instanceof SqlSetStreamingCommand
            || nativeCmd instanceof SqlCreateUserCommand || nativeCmd instanceof SqlAlterUserCommand
            || nativeCmd instanceof SqlDropUserCommand || nativeCmd instanceof SqlKillQueryCommand
            || nativeCmd instanceof SqlKillComputeTaskCommand || nativeCmd instanceof SqlKillServiceCommand
            || nativeCmd instanceof SqlKillTransactionCommand || nativeCmd instanceof SqlKillScanQueryCommand
            || nativeCmd instanceof SqlKillContinuousQueryCommand || nativeCmd instanceof SqlAnalyzeCommand
            || nativeCmd instanceof SqlRefreshStatitsicsCommand || nativeCmd instanceof SqlDropStatisticsCommand))
            return null;

        SqlFieldsQuery newQry = cloneFieldsQuery(qry).setSql(parser.lastCommandSql());

        QueryDescriptor newPlanKey = queryDescriptor(schemaName, newQry);

        SqlFieldsQuery remainingQry = null;

        if (!F.isEmpty(parser.remainingSql())) {
            checkRemainingAllowed(remainingAllowed);

            remainingQry = cloneFieldsQuery(qry).setSql(parser.remainingSql()).setArgs(qry.getArgs());
        }

        QueryParserResultCommand cmd = new QueryParserResultCommand(nativeCmd, null, false);

        return new QueryParserResult(newPlanKey, queryParameters(newQry), remainingQry,
            // Currently none of native statements supports parameters.
            Collections.emptyList(), null, null, cmd);
    }
    catch (SqlStrictParseException e) {
        throw new IgniteSQLException(e.getMessage(), e.errorCode(), e);
    }
    catch (Exception e) {
        // Cannot parse, return.
        if (log.isDebugEnabled())
            log.debug("Failed to parse SQL with native parser [qry=" + sql + ", err=" + e + ']');

        if (!IgniteSystemProperties.getBoolean(IgniteSystemProperties.IGNITE_SQL_PARSER_DISABLE_H2_FALLBACK))
            return null;

        int code = IgniteQueryErrorCode.PARSING;

        if (e instanceof SqlParseException)
            code = ((SqlParseException)e).code();

        throw new IgniteSQLException("Failed to parse DDL statement: " + sql + ": " + e.getMessage(), code, e);
    }
}
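As a rough illustration of why ANALYZE ends up on this code path: the native parser recognizes the statement and returns a SqlAnalyzeCommand, which is on the instanceof whitelist above. The statement text below is an assumption used only for illustration; the SqlParser constructor and nextCommand() call are the same ones used in the method.

// Minimal sketch, not Ignite production code: what the whitelist check boils down to for ANALYZE.
SqlParser parser = new SqlParser("PUBLIC", "ANALYZE PUBLIC.TEST(A, B)");

SqlCommand nativeCmd = parser.nextCommand();

// A SqlAnalyzeCommand passes the instanceof whitelist, so parseNative handles the statement
// itself instead of returning null and deferring to the H2 parser.
assert nativeCmd instanceof SqlAnalyzeCommand;

As shown in the method above, parser.lastCommandSql() and parser.remainingSql() then split the leading command from whatever follows it in a multi-statement string.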
Use of org.apache.ignite.internal.sql.command.SqlAnalyzeCommand in project ignite by apache.
The class CommandProcessor, method runCommandNativeDdl.
/**
* Run DDL statement.
*
* @param sql Original SQL.
* @param cmd Command.
*/
private void runCommandNativeDdl(String sql, SqlCommand cmd) {
    IgniteInternalFuture fut = null;

    try {
        isDdlOnSchemaSupported(cmd.schemaName());

        finishActiveTxIfNecessary();

        if (cmd instanceof SqlCreateIndexCommand) {
            SqlCreateIndexCommand cmd0 = (SqlCreateIndexCommand)cmd;

            GridH2Table tbl = schemaMgr.dataTable(cmd0.schemaName(), cmd0.tableName());

            if (tbl == null)
                throw new SchemaOperationException(SchemaOperationException.CODE_TABLE_NOT_FOUND, cmd0.tableName());

            assert tbl.rowDescriptor() != null;

            ensureDdlSupported(tbl);

            QueryIndex newIdx = new QueryIndex();

            newIdx.setName(cmd0.indexName());
            newIdx.setIndexType(cmd0.spatial() ? QueryIndexType.GEOSPATIAL : QueryIndexType.SORTED);

            LinkedHashMap<String, Boolean> flds = new LinkedHashMap<>();

            // Let's replace H2's table and property names by those operated by GridQueryProcessor.
            GridQueryTypeDescriptor typeDesc = tbl.rowDescriptor().type();

            for (SqlIndexColumn col : cmd0.columns()) {
                GridQueryProperty prop = typeDesc.property(col.name());

                if (prop == null)
                    throw new SchemaOperationException(SchemaOperationException.CODE_COLUMN_NOT_FOUND, col.name());

                flds.put(prop.name(), !col.descending());
            }

            newIdx.setFields(flds);
            newIdx.setInlineSize(cmd0.inlineSize());

            fut = ctx.query().dynamicIndexCreate(tbl.cacheName(), cmd.schemaName(), typeDesc.tableName(),
                newIdx, cmd0.ifNotExists(), cmd0.parallel());
        }
        else if (cmd instanceof SqlDropIndexCommand) {
            SqlDropIndexCommand cmd0 = (SqlDropIndexCommand)cmd;

            GridH2Table tbl = schemaMgr.dataTableForIndex(cmd0.schemaName(), cmd0.indexName());

            if (tbl != null) {
                ensureDdlSupported(tbl);

                fut = ctx.query().dynamicIndexDrop(tbl.cacheName(), cmd0.schemaName(), cmd0.indexName(), cmd0.ifExists());
            }
            else {
                if (cmd0.ifExists())
                    fut = new GridFinishedFuture();
                else
                    throw new SchemaOperationException(SchemaOperationException.CODE_INDEX_NOT_FOUND, cmd0.indexName());
            }
        }
        else if (cmd instanceof SqlAlterTableCommand) {
            SqlAlterTableCommand cmd0 = (SqlAlterTableCommand)cmd;

            GridH2Table tbl = schemaMgr.dataTable(cmd0.schemaName(), cmd0.tableName());

            if (tbl == null) {
                throw new SchemaOperationException(SchemaOperationException.CODE_TABLE_NOT_FOUND, cmd0.tableName());
            }

            Boolean logging = cmd0.logging();

            assert logging != null : "Only LOGGING/NOLOGGING are supported at the moment.";

            IgniteCluster cluster = ctx.grid().cluster();

            if (logging) {
                boolean res = cluster.enableWal(tbl.cacheName());

                if (!res)
                    throw new IgniteSQLException("Logging already enabled for table: " + cmd0.tableName());
            }
            else {
                boolean res = cluster.disableWal(tbl.cacheName());

                if (!res)
                    throw new IgniteSQLException("Logging already disabled for table: " + cmd0.tableName());
            }

            fut = new GridFinishedFuture();
        }
        else if (cmd instanceof SqlCreateUserCommand) {
            SqlCreateUserCommand addCmd = (SqlCreateUserCommand)cmd;

            ctx.security().createUser(addCmd.userName(), addCmd.password().toCharArray());
        }
        else if (cmd instanceof SqlAlterUserCommand) {
            SqlAlterUserCommand altCmd = (SqlAlterUserCommand)cmd;

            ctx.security().alterUser(altCmd.userName(), altCmd.password().toCharArray());
        }
        else if (cmd instanceof SqlDropUserCommand) {
            SqlDropUserCommand dropCmd = (SqlDropUserCommand)cmd;

            ctx.security().dropUser(dropCmd.userName());
        }
        else if (cmd instanceof SqlAnalyzeCommand)
            processAnalyzeCommand((SqlAnalyzeCommand)cmd);
        else if (cmd instanceof SqlRefreshStatitsicsCommand)
            processRefreshStatisticsCommand((SqlRefreshStatitsicsCommand)cmd);
        else if (cmd instanceof SqlDropStatisticsCommand)
            processDropStatisticsCommand((SqlDropStatisticsCommand)cmd);
        else
            throw new IgniteSQLException("Unsupported DDL operation: " + sql, IgniteQueryErrorCode.UNSUPPORTED_OPERATION);

        if (fut != null)
            fut.get();
    }
    catch (SchemaOperationException e) {
        throw convert(e);
    }
    catch (IgniteSQLException e) {
        throw e;
    }
    catch (Exception e) {
        throw new IgniteSQLException(e.getMessage(), e);
    }
}
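To make the CREATE INDEX branch above more concrete, here is a hedged sketch of the QueryIndex it would assemble for a hypothetical CREATE INDEX IDX ON TEST(A ASC, B DESC) statement. The index, table and column names are assumptions, and the upper-cased property names assume default (unquoted) identifier handling; only QueryIndex calls already used above appear.

// Illustrative only: the field map keeps declaration order and maps each column to
// "ascending?" via !col.descending(), so A ASC -> true and B DESC -> false.
LinkedHashMap<String, Boolean> flds = new LinkedHashMap<>();

flds.put("A", true);
flds.put("B", false);

QueryIndex newIdx = new QueryIndex();

newIdx.setName("IDX");
newIdx.setIndexType(QueryIndexType.SORTED);
newIdx.setFields(flds);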
Use of org.apache.ignite.internal.sql.command.SqlAnalyzeCommand in project ignite by apache.
The class CommandProcessor, method processAnalyzeCommand.
/**
* Process analyze command.
*
* @param cmd Sql analyze command.
*/
private void processAnalyzeCommand(SqlAnalyzeCommand cmd) throws IgniteCheckedException {
    ctx.security().authorize(SecurityPermission.CHANGE_STATISTICS);

    IgniteH2Indexing indexing = (IgniteH2Indexing)ctx.query().getIndexing();

    StatisticsObjectConfiguration[] objCfgs = cmd.configurations().stream().map(t -> {
        if (t.key().schema() == null) {
            StatisticsKey key = new StatisticsKey(cmd.schemaName(), t.key().obj());

            return new StatisticsObjectConfiguration(key, t.columns().values(), t.maxPartitionObsolescencePercent());
        }
        else
            return t;
    }).toArray(StatisticsObjectConfiguration[]::new);

    indexing.statsManager().collectStatistics(objCfgs);
}
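The stream lambda above is compact; rewritten as a named helper it reads as follows. This is a readability sketch only (the helper name is invented here), using exactly the same calls as the original: a target configuration parsed without an explicit schema inherits the schema of the ANALYZE command itself.

// Readability sketch of the mapping above; withCommandSchema is a hypothetical name, not Ignite code.
private static StatisticsObjectConfiguration withCommandSchema(SqlAnalyzeCommand cmd, StatisticsObjectConfiguration t) {
    if (t.key().schema() != null)
        return t;

    StatisticsKey key = new StatisticsKey(cmd.schemaName(), t.key().obj());

    return new StatisticsObjectConfiguration(key, t.columns().values(), t.maxPartitionObsolescencePercent());
}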
Use of org.apache.ignite.internal.sql.command.SqlAnalyzeCommand in project ignite by apache.
The class SqlParserAnalyzeSelfTest, method parseValidate.
/**
* Parse command and validate it.
*
* @param schema Schema.
* @param sql SQL text.
* @param targets Expected targets.
*/
private void parseValidate(String schema, String sql, StatisticsTarget... targets) {
    SqlAnalyzeCommand cmd = (SqlAnalyzeCommand)new SqlParser(schema, sql).nextCommand();

    validate(cmd, targets);
}
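A hedged example of how this helper could be invoked; the statement text and the expected target are illustrative assumptions rather than lines copied from SqlParserAnalyzeSelfTest, and they rely on the schema and column names surviving parsing in upper-cased form.

// Illustrative call only (assumed statement and target).
parseValidate("PUBLIC", "ANALYZE PUBLIC.TEST(A, B)",
    new StatisticsTarget(new StatisticsKey("PUBLIC", "TEST"), "A", "B"));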