Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class DeleteResourceProcessor, method run.
@Override
public CommandProcessorResponse run(String command) throws CommandProcessorException {
  SessionState ss = SessionState.get();
  command = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(ss.getConf(), command);
  String[] tokens = command.split("\\s+");
  SessionState.ResourceType t;
  if (tokens.length < 1 || (t = SessionState.find_resource_type(tokens[0])) == null) {
    console.printError("Usage: delete ["
        + StringUtils.join(SessionState.ResourceType.values(), "|") + "] <value> [<value>]*");
    throw new CommandProcessorException(1);
  }
  CommandProcessorResponse authErrResp =
      CommandUtil.authorizeCommand(ss, HiveOperationType.DELETE, Arrays.asList(tokens));
  if (authErrResp != null) {
    // there was an authorization issue
    return authErrResp;
  }
  if (tokens.length >= 2) {
    ss.delete_resources(t, Arrays.asList(Arrays.copyOfRange(tokens, 1, tokens.length)));
  } else {
    ss.delete_resources(t);
  }
  return new CommandProcessorResponse();
}
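The anonymous HiveVariableSource above simply exposes the session's hive variables to the substitution engine. For reference, here is a minimal, self-contained sketch of the same pattern in isolation; the variable map, the default HiveConf, and the ${hivevar:target_db} reference are illustrative assumptions, not taken from the snippet:

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.VariableSubstitution;

public class VariableSubstitutionSketch {
  public static void main(String[] args) {
    // Stand-in for SessionState.get().getHiveVariables(): a plain in-memory map.
    Map<String, String> hiveVars = new HashMap<>();
    hiveVars.put("target_db", "sales");

    HiveConf conf = new HiveConf();
    // The lambda plays the role of the anonymous HiveVariableSource used in the processors.
    VariableSubstitution substitution = new VariableSubstitution(() -> hiveVars);

    // With default substitution settings this should resolve to "use sales".
    System.out.println(substitution.substitute(conf, "use ${hivevar:target_db}"));
  }
}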
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class ColumnStatsSemanticAnalyzer, method genRewrittenQuery.
private static String genRewrittenQuery(Table tbl, List<String> colNames, List<String> colTypes,
    HiveConf conf, Map<String, String> partSpec, boolean isPartitionStats, boolean useTableValues)
    throws SemanticException {
  StringBuilder rewrittenQueryBuilder = new StringBuilder("select ");
  StringBuilder columnNamesBuilder = new StringBuilder();
  StringBuilder columnDummyValuesBuilder = new StringBuilder();
  for (int i = 0; i < colNames.size(); i++) {
    if (i > 0) {
      rewrittenQueryBuilder.append(" , ");
      columnNamesBuilder.append(" , ");
      columnDummyValuesBuilder.append(" , ");
    }
    final String columnName = unparseIdentifier(colNames.get(i), conf);
    final TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(colTypes.get(i));
    genComputeStats(rewrittenQueryBuilder, conf, i, columnName, typeInfo);
    columnNamesBuilder.append(unparseIdentifier(columnName, conf));
    columnDummyValuesBuilder.append("cast(null as " + typeInfo.toString() + ")");
  }
  if (isPartitionStats) {
    for (FieldSchema fs : tbl.getPartCols()) {
      String identifier = unparseIdentifier(fs.getName(), conf);
      rewrittenQueryBuilder.append(" , ").append(identifier);
      columnNamesBuilder.append(" , ").append(identifier);
      columnDummyValuesBuilder.append(" , cast(null as ")
          .append(TypeInfoUtils.getTypeInfoFromTypeString(fs.getType()).toString()).append(")");
    }
  }
  rewrittenQueryBuilder.append(" from ");
  if (useTableValues) {
    // TABLE(VALUES(cast(null as int),cast(null as string))) AS tablename(col1,col2)
    rewrittenQueryBuilder.append("table(values(");
    // Values
    rewrittenQueryBuilder.append(columnDummyValuesBuilder.toString());
    rewrittenQueryBuilder.append(")) as ");
    rewrittenQueryBuilder.append(unparseIdentifier(tbl.getTableName(), conf));
    rewrittenQueryBuilder.append("(");
    // Columns
    rewrittenQueryBuilder.append(columnNamesBuilder.toString());
    rewrittenQueryBuilder.append(")");
  } else {
    rewrittenQueryBuilder.append(unparseIdentifier(tbl.getDbName(), conf));
    rewrittenQueryBuilder.append(".");
    rewrittenQueryBuilder.append(unparseIdentifier(tbl.getTableName(), conf));
  }
  // query
  if (isPartitionStats) {
    rewrittenQueryBuilder.append(genPartitionClause(tbl, partSpec, conf));
  }
  String rewrittenQuery = rewrittenQueryBuilder.toString();
  rewrittenQuery = new VariableSubstitution(() -> SessionState.get().getHiveVariables())
      .substitute(conf, rewrittenQuery);
  return rewrittenQuery;
}
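The comment inside the useTableValues branch already sketches the FROM clause shape. The following standalone sketch reproduces that shape for a hypothetical table t with columns a int and b string; the backtick quoting is an assumption about what unparseIdentifier emits, and nothing here comes from a real run:

public class FromClauseShapeSketch {
  public static void main(String[] args) {
    // Mirrors columnNamesBuilder and columnDummyValuesBuilder for two hypothetical columns.
    String columnNames = "`a` , `b`";
    String columnDummyValues = "cast(null as int) , cast(null as string)";
    // Mirrors the appends in the useTableValues branch above.
    String from = " from table(values(" + columnDummyValues + ")) as `t`(" + columnNames + ")";
    // Prints:  from table(values(cast(null as int) , cast(null as string))) as `t`(`a` , `b`)
    System.out.println(from);
  }
}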
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class LlapClusterResourceProcessor, method run.
@Override
public CommandProcessorResponse run(String command) throws CommandProcessorException {
  SessionState ss = SessionState.get();
  command = new VariableSubstitution(() -> SessionState.get().getHiveVariables())
      .substitute(ss.getConf(), command);
  String[] tokens = command.split("\\s+");
  if (tokens.length < 1) {
    throw new CommandProcessorException("LLAP Cluster Processor Helper Failed: Command arguments are empty.");
  }
  String[] params = Arrays.copyOfRange(tokens, 1, tokens.length);
  try {
    return llapClusterCommandHandler(ss, params);
  } catch (Exception e) {
    throw new CommandProcessorException("LLAP Cluster Processor Helper Failed: " + e.getMessage());
  }
}
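The substitution step here is the same lambda-based pattern as above; the rest of the method only tokenizes the command and forwards everything after the leading keyword to llapClusterCommandHandler. A trivial sketch of that split, using a made-up command string since the real LLAP subcommand grammar is not shown in this snippet:

import java.util.Arrays;

public class TokenSplitSketch {
  public static void main(String[] args) {
    // Hypothetical command text, for illustration only.
    String command = "llap cluster status";
    String[] tokens = command.split("\\s+");
    // Everything after the first token is passed on as params.
    String[] params = Arrays.copyOfRange(tokens, 1, tokens.length);
    System.out.println(Arrays.toString(params)); // [cluster, status]
  }
}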
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class SetProcessor, method setVariable.
public static CommandProcessorResponse setVariable(String varname, String varvalue) throws Exception {
  SessionState ss = SessionState.get();
  if (varvalue.contains("\n")) {
    ss.err.println("Warning: Value had a \\n character in it.");
  }
  varname = varname.trim();
  String nonErrorMessage = null;
  if (varname.startsWith(ENV_PREFIX)) {
    ss.err.println("env:* variables can not be set.");
    // Should we propagate the error message properly?
    throw new CommandProcessorException(1);
  } else if (varname.startsWith(SYSTEM_PREFIX)) {
    String propName = varname.substring(SYSTEM_PREFIX.length());
    System.getProperties().setProperty(propName, new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), varvalue));
  } else if (varname.startsWith(HIVECONF_PREFIX)) {
    String propName = varname.substring(HIVECONF_PREFIX.length());
    nonErrorMessage = setConf(varname, propName, varvalue, false);
  } else if (varname.startsWith(HIVEVAR_PREFIX)) {
    String propName = varname.substring(HIVEVAR_PREFIX.length());
    ss.getHiveVariables().put(propName, new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), varvalue));
  } else if (varname.startsWith(METACONF_PREFIX)) {
    String propName = varname.substring(METACONF_PREFIX.length());
    Hive hive = Hive.get(ss.getConf());
    hive.setMetaConf(propName, new VariableSubstitution(new HiveVariableSource() {
      @Override
      public Map<String, String> getHiveVariable() {
        return SessionState.get().getHiveVariables();
      }
    }).substitute(ss.getConf(), varvalue));
  } else {
    nonErrorMessage = setConf(varname, varname, varvalue, true);
    if (varname.equals(HiveConf.ConfVars.HIVE_SESSION_HISTORY_ENABLED.toString())) {
      SessionState.get().updateHistory(Boolean.parseBoolean(varvalue), ss);
    }
  }
  return new CommandProcessorResponse(null, nonErrorMessage);
}
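A hedged usage sketch of the prefix routing above, assuming a locally started SessionState and the stock prefix values ("hivevar:", "system:"). Note that the value handed to setVariable is itself run through VariableSubstitution before being stored, which is what the second call below relies on:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.processors.SetProcessor;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SetVariableSketch {
  public static void main(String[] args) throws Exception {
    // Assumption: starting a SessionState from a default HiveConf is enough context here.
    SessionState.start(new HiveConf());

    // HIVEVAR_PREFIX branch: stored in SessionState.get().getHiveVariables().
    SetProcessor.setVariable("hivevar:region", "us-west-2");

    // SYSTEM_PREFIX branch: the value is substituted first, so the system
    // property should end up as "us-west-2".
    SetProcessor.setVariable("system:example.region", "${hivevar:region}");

    System.out.println(System.getProperty("example.region"));
  }
}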
Use of org.apache.hadoop.hive.conf.VariableSubstitution in project hive by apache.
The class ColumnStatsSemanticAnalyzer, method genRewrittenQuery (older overload using compute_stats).
private String genRewrittenQuery(List<String> colNames, int numBitVectors,
    Map<String, String> partSpec, boolean isPartitionStats) throws SemanticException {
  StringBuilder rewrittenQueryBuilder = new StringBuilder("select ");
  String rewrittenQuery;
  for (int i = 0; i < colNames.size(); i++) {
    if (i > 0) {
      rewrittenQueryBuilder.append(" , ");
    }
    rewrittenQueryBuilder.append("compute_stats(`");
    rewrittenQueryBuilder.append(colNames.get(i));
    rewrittenQueryBuilder.append("` , ");
    rewrittenQueryBuilder.append(numBitVectors);
    rewrittenQueryBuilder.append(" )");
  }
  if (isPartitionStats) {
    for (FieldSchema fs : tbl.getPartCols()) {
      rewrittenQueryBuilder.append(" , `" + fs.getName() + "`");
    }
  }
  rewrittenQueryBuilder.append(" from `");
  rewrittenQueryBuilder.append(tbl.getDbName());
  rewrittenQueryBuilder.append("`.");
  rewrittenQueryBuilder.append("`" + tbl.getTableName() + "`");
  isRewritten = true;
  // query
  if (isPartitionStats) {
    rewrittenQueryBuilder.append(genPartitionClause(partSpec));
  }
  rewrittenQuery = rewrittenQueryBuilder.toString();
  rewrittenQuery = new VariableSubstitution(new HiveVariableSource() {
    @Override
    public Map<String, String> getHiveVariable() {
      return SessionState.get().getHiveVariables();
    }
  }).substitute(conf, rewrittenQuery);
  return rewrittenQuery;
}
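For a concrete feel of the output, the following sketch reproduces the string the builder above assembles for a hypothetical table `sales`.`t` with a single column c and numBitVectors = 16; the names are made up, and the spacing mirrors the appends in the method:

public class ComputeStatsQueryShapeSketch {
  public static void main(String[] args) {
    // Mirrors the appends in genRewrittenQuery for one column, with no partition stats.
    String rewritten = "select " + "compute_stats(`" + "c" + "` , " + 16 + " )"
        + " from `" + "sales" + "`." + "`" + "t" + "`";
    // Prints: select compute_stats(`c` , 16 ) from `sales`.`t`
    System.out.println(rewritten);
  }
}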