Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache: the class CreateDatabaseAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String databaseName = unescapeIdentifier(root.getChild(0).getText());

  boolean ifNotExists = false;
  String comment = null;
  String locationUri = null;
  String managedLocationUri = null;
  String type = DatabaseType.NATIVE.name();
  String connectorName = null;
  Map<String, String> props = null;

  // Child 0 is the database name; the remaining children are optional clauses
  // identified by their token type.
  for (int i = 1; i < root.getChildCount(); i++) {
    ASTNode childNode = (ASTNode) root.getChild(i);
    switch (childNode.getToken().getType()) {
    case HiveParser.TOK_IFNOTEXISTS:
      ifNotExists = true;
      break;
    case HiveParser.TOK_DATABASECOMMENT:
      comment = unescapeSQLString(childNode.getChild(0).getText());
      break;
    case HiveParser.TOK_DATABASEPROPERTIES:
      props = getProps((ASTNode) childNode.getChild(0));
      break;
    case HiveParser.TOK_DATABASELOCATION:
      locationUri = unescapeSQLString(childNode.getChild(0).getText());
      outputs.add(toWriteEntity(locationUri));
      break;
    case HiveParser.TOK_DATABASE_MANAGEDLOCATION:
      managedLocationUri = unescapeSQLString(childNode.getChild(0).getText());
      outputs.add(toWriteEntity(managedLocationUri));
      break;
    case HiveParser.TOK_DATACONNECTOR:
      type = DatabaseType.REMOTE.name();
      connectorName = childNode.getChild(0).getText();
      DataConnector connector = getDataConnector(connectorName, true);
      if (connector == null) {
        throw new SemanticException("Cannot retrieve connector with name: " + connectorName);
      }
      inputs.add(new ReadEntity(connector));
      break;
    default:
      throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
    }
  }

  CreateDatabaseDesc desc = null;
  Database database = new Database(databaseName, comment, locationUri, props);
  if (type.equalsIgnoreCase(DatabaseType.NATIVE.name())) {
    desc = new CreateDatabaseDesc(databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
    database.setType(DatabaseType.NATIVE);
    if (managedLocationUri != null) {
      database.setManagedLocationUri(managedLocationUri);
    }
  } else {
    // TODO finalize the property name
    String remoteDbName = databaseName;
    if (props != null && props.get("connector.remoteDbName") != null) {
      remoteDbName = props.get("connector.remoteDbName");
    }
    desc = new CreateDatabaseDesc(databaseName, comment, locationUri, null, ifNotExists, props, type, connectorName, remoteDbName);
    database.setConnector_name(connectorName);
    database.setType(DatabaseType.REMOTE);
    database.setRemote_dbname(remoteDbName);
  }
  rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
  outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
}
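For context on the tree shape this analyzer walks, here is a standalone sketch (the class name DumpCreateDatabaseAst is illustrative) that parses a CREATE DATABASE statement and prints the children of the resulting ASTNode. It assumes the Hive 3.x ParseDriver API, where parse(String) returns the root ASTNode directly; other versions expose parsing differently, so treat it as a sketch rather than a drop-in test.

import org.antlr.runtime.tree.Tree;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class DumpCreateDatabaseAst {
  public static void main(String[] args) throws Exception {
    ParseDriver pd = new ParseDriver();
    // Assumes the Hive 3.x signature: parse(String) returns the root ASTNode.
    ASTNode root = pd.parse("CREATE DATABASE IF NOT EXISTS demo COMMENT 'example' LOCATION '/tmp/demo'");
    // The parser may wrap the statement node; dump() prints the whole tree for inspection.
    System.out.println(root.dump());
    // The same child-iteration pattern the analyzer uses: inspect each child's token type.
    for (int i = 0; i < root.getChildCount(); i++) {
      Tree child = root.getChild(i);
      System.out.println(i + ": type=" + child.getType() + " text=" + child.getText());
    }
  }
}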
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache: the class AlterDataConnectorSetPropertiesAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  String connectorName = unescapeIdentifier(root.getChild(0).getText());
  Map<String, String> dbProps = null;
  // Child 0 is the connector name; the only other expected child is the property list.
  for (int i = 1; i < root.getChildCount(); i++) {
    ASTNode childNode = (ASTNode) root.getChild(i);
    if (childNode.getToken().getType() == HiveParser.TOK_DATACONNECTORPROPERTIES) {
      dbProps = getProps((ASTNode) childNode.getChild(0));
      break;
    } else {
      throw new SemanticException("Unrecognized token in ALTER CONNECTOR statement");
    }
  }
  AlterDataConnectorSetPropertiesDesc desc = new AlterDataConnectorSetPropertiesDesc(connectorName, dbProps);
  addAlterDataConnectorDesc(desc);
}
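getProps is inherited from BaseSemanticAnalyzer and is not shown in these snippets. As a rough guide to the subtree it consumes, here is a minimal sketch (PropListReader and readProps are hypothetical names) under the assumption that each child of the property-list node is a key-value pair node with the quoted key at child 0 and an optional quoted value at child 1, which matches how the analyzers above hand childNode.getChild(0) to getProps.

import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import static org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.unescapeSQLString;

public final class PropListReader {
  private PropListReader() {}

  // Sketch: read a ('key'='value', ...) property list from its AST subtree.
  public static Map<String, String> readProps(ASTNode propList) {
    Map<String, String> props = new HashMap<>();
    for (int i = 0; i < propList.getChildCount(); i++) {
      ASTNode pair = (ASTNode) propList.getChild(i);
      String key = unescapeSQLString(pair.getChild(0).getText());
      // Assumption: a pair without a second child means a key with no value.
      String value = pair.getChildCount() > 1 ? unescapeSQLString(pair.getChild(1).getText()) : null;
      props.put(key, value);
    }
    return props;
  }
}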
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache: the class TruncateTableAnalyzer, method analyzeInternal.
@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
  // Child 0 is the TOK_TABLE_PARTITION node: the table name plus an optional partition spec.
  ASTNode tableNode = (ASTNode) root.getChild(0);
  String tableNameString = getUnescapedName((ASTNode) tableNode.getChild(0));
  Table table = getTable(tableNameString, true);
  TableName tableName = HiveTableName.of(table);
  checkTruncateEligibility(root, tableNode, tableNameString, table);

  Map<String, String> partitionSpec = getPartSpec((ASTNode) tableNode.getChild(1));
  addTruncateTableOutputs(tableNode, table, partitionSpec);

  Task<?> truncateTask = null;
  ASTNode colNamesNode = (ASTNode) root.getFirstChildWithType(HiveParser.TOK_TABCOLNAME);
  if (colNamesNode == null) {
    truncateTask = getTruncateTaskWithoutColumnNames(tableName, partitionSpec, table);
  } else {
    truncateTask = getTruncateTaskWithColumnNames(tableNode, tableName, table, partitionSpec, colNamesNode);
  }
  rootTasks.add(truncateTask);
}
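When the optional column-name list is present, its TOK_TABCOLNAME subtree is handed to getTruncateTaskWithColumnNames. A minimal sketch of extracting the names from such a node, assuming its children are plain identifier tokens (ColumnNameList is a hypothetical helper):

import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import static org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.unescapeIdentifier;

public final class ColumnNameList {
  private ColumnNameList() {}

  // Sketch: collect the identifier text of each child of a TOK_TABCOLNAME node.
  public static List<String> columnNames(ASTNode colNamesNode) {
    List<String> names = new ArrayList<>(colNamesNode.getChildCount());
    for (int i = 0; i < colNamesNode.getChildCount(); i++) {
      names.add(unescapeIdentifier(colNamesNode.getChild(i).getText()));
    }
    return names;
  }
}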
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache: the class AbstractAddPartitionAnalyzer, method createPartitions.
private List<AlterTableAddPartitionDesc.PartitionDesc> createPartitions(ASTNode command, Table table,
    boolean ifNotExists) throws SemanticException {
  String currentLocation = null;
  Map<String, String> currentPart = null;
  List<AlterTableAddPartitionDesc.PartitionDesc> partitions = new ArrayList<>();
  // If IF NOT EXISTS was given it occupies child 0, so the partition clauses start at child 1.
  for (int num = ifNotExists ? 1 : 0; num < command.getChildCount(); num++) {
    ASTNode child = (ASTNode) command.getChild(num);
    switch (child.getToken().getType()) {
    case HiveParser.TOK_PARTSPEC:
      if (currentPart != null) {
        // A new spec starts, so flush the previous partition with its optional location.
        partitions.add(createPartitionDesc(table, currentLocation, currentPart));
        currentLocation = null;
      }
      currentPart = getValidatedPartSpec(table, child, conf, true);
      // validate reserved values
      PartitionUtils.validatePartitions(conf, currentPart);
      break;
    case HiveParser.TOK_PARTITIONLOCATION:
      // if a location is specified, set it on the pending partition
      if (!allowLocation()) {
        throw new SemanticException("LOCATION clause illegal for view partition");
      }
      currentLocation = unescapeSQLString(child.getChild(0).getText());
      inputs.add(toReadEntity(currentLocation));
      break;
    default:
      throw new SemanticException("Unknown child: " + child);
    }
  }
  if (currentPart != null) {
    // add the last one
    partitions.add(createPartitionDesc(table, currentLocation, currentPart));
  }
  return partitions;
}
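getValidatedPartSpec layers type validation on top of reading the raw TOK_PARTSPEC subtree. As a rough sketch of that raw read (PartSpecReader is a hypothetical name), assuming each child is a TOK_PARTVAL with the column identifier at child 0 and an optional quoted literal at child 1:

import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import static org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.stripQuotes;
import static org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.unescapeIdentifier;

public final class PartSpecReader {
  private PartSpecReader() {}

  // Sketch: read a TOK_PARTSPEC node into an ordered column -> value map.
  public static Map<String, String> readPartSpec(ASTNode partSpec) {
    Map<String, String> spec = new LinkedHashMap<>();
    for (int i = 0; i < partSpec.getChildCount(); i++) {
      ASTNode partVal = (ASTNode) partSpec.getChild(i);
      String col = unescapeIdentifier(partVal.getChild(0).getText().toLowerCase());
      // Assumption: a TOK_PARTVAL without a value is a dynamic-partition column.
      String val = partVal.getChildCount() > 1 ? stripQuotes(partVal.getChild(1).getText()) : null;
      spec.put(col, val);
    }
    return spec;
  }
}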
Use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache: the class ShowPartitionAnalyzer, method getShowPartitionsFilter.
@VisibleForTesting
ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws SemanticException {
  ExprNodeDesc showFilter = null;
  for (int childIndex = 0; childIndex < command.getChildCount(); childIndex++) {
    ASTNode astChild = (ASTNode) command.getChild(childIndex);
    if (astChild.getType() == HiveParser.TOK_WHERE) {
      // Expose the partition columns (typed as strings) to the expression type-checker.
      RowResolver rwsch = new RowResolver();
      Map<String, String> colTypes = new HashMap<String, String>();
      for (FieldSchema fs : table.getPartCols()) {
        rwsch.put(table.getTableName(), fs.getName(),
            new ColumnInfo(fs.getName(), TypeInfoFactory.stringTypeInfo, null, true));
        colTypes.put(fs.getName().toLowerCase(), fs.getType());
      }
      TypeCheckCtx tcCtx = new TypeCheckCtx(rwsch);
      ASTNode conds = (ASTNode) astChild.getChild(0);
      Map<ASTNode, ExprNodeDesc> nodeOutputs = ExprNodeTypeCheck.genExprNode(conds, tcCtx);
      ExprNodeDesc target = nodeOutputs.get(conds);
      // The WHERE clause must type-check to a boolean expression.
      if (!(target instanceof ExprNodeGenericFuncDesc) || !target.getTypeInfo().equals(TypeInfoFactory.booleanTypeInfo)) {
        String errorMsg = tcCtx.getError() != null ? ". " + tcCtx.getError() : "";
        throw new SemanticException("Not a filter expr: " + (target == null ? "null" : target.getExprString()) + errorMsg);
      }
      showFilter = replaceDefaultPartNameAndCastType(target, colTypes,
          HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME));
    }
  }
  return showFilter;
}
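The expression type-checking above needs a live analyzer context (RowResolver, conf), but the ASTNode navigation at its core can be shown in isolation. A minimal sketch (WhereClauseFinder is a hypothetical helper) that locates the optional WHERE clause and returns its condition subtree:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;

public final class WhereClauseFinder {
  private WhereClauseFinder() {}

  // Sketch: find the optional WHERE clause on a SHOW PARTITIONS command node and
  // return its condition subtree, or null when no WHERE clause is present.
  public static ASTNode findWhereCondition(ASTNode command) {
    ASTNode where = (ASTNode) command.getFirstChildWithType(HiveParser.TOK_WHERE);
    // TOK_WHERE has a single child: the boolean condition expression.
    return where == null ? null : (ASTNode) where.getChild(0);
  }
}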