Search in sources :

Example 96 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class CreateDatabaseAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // Analyzes: CREATE [REMOTE] DATABASE [IF NOT EXISTS] name
    //   [COMMENT ...] [LOCATION ...] [MANAGEDLOCATION ...] [USING connector] [WITH DBPROPERTIES ...]
    // Child 0 is always the database name; remaining children are optional clauses in any order.
    String databaseName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifNotExists = false;
    String comment = null;
    String locationUri = null;
    String managedLocationUri = null;
    String type = DatabaseType.NATIVE.name();
    String connectorName = null;
    Map<String, String> props = null;
    for (int i = 1; i < root.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) root.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.TOK_DATABASECOMMENT:
                comment = unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveParser.TOK_DATABASEPROPERTIES:
                props = getProps((ASTNode) childNode.getChild(0));
                break;
            case HiveParser.TOK_DATABASELOCATION:
                locationUri = unescapeSQLString(childNode.getChild(0).getText());
                outputs.add(toWriteEntity(locationUri));
                break;
            case HiveParser.TOK_DATABASE_MANAGEDLOCATION:
                managedLocationUri = unescapeSQLString(childNode.getChild(0).getText());
                outputs.add(toWriteEntity(managedLocationUri));
                break;
            case HiveParser.TOK_DATACONNECTOR:
                type = DatabaseType.REMOTE.name();
                // childNode already is root.getChild(i); no need to re-fetch and re-cast it.
                connectorName = childNode.getChild(0).getText();
                // Fail fast if the referenced connector does not exist in the metastore.
                DataConnector connector = getDataConnector(connectorName, true);
                if (connector == null) {
                    throw new SemanticException("Cannot retrieve connector with name: " + connectorName);
                }
                inputs.add(new ReadEntity(connector));
                break;
            default:
                throw new SemanticException("Unrecognized token in CREATE DATABASE statement");
        }
    }
    CreateDatabaseDesc desc = null;
    Database database = new Database(databaseName, comment, locationUri, props);
    if (type.equalsIgnoreCase(DatabaseType.NATIVE.name())) {
        desc = new CreateDatabaseDesc(databaseName, comment, locationUri, managedLocationUri, ifNotExists, props);
        database.setType(DatabaseType.NATIVE);
        if (managedLocationUri != null) {
            database.setManagedLocationUri(managedLocationUri);
        }
    } else {
        // REMOTE database: the name on the remote side may differ from the local name.
        // TODO finalize the property name
        String remoteDbName = databaseName;
        if (props != null && props.get("connector.remoteDbName") != null) {
            remoteDbName = props.get("connector.remoteDbName");
        }
        desc = new CreateDatabaseDesc(databaseName, comment, locationUri, null, ifNotExists, props, type, connectorName, remoteDbName);
        database.setConnector_name(connectorName);
        database.setType(DatabaseType.REMOTE);
        database.setRemote_dbname(remoteDbName);
    }
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    // The new database itself is a DDL output (no lock needed at analysis time).
    outputs.add(new WriteEntity(database, WriteEntity.WriteType.DDL_NO_LOCK));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Database(org.apache.hadoop.hive.metastore.api.Database) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 97 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class AlterDataConnectorSetPropertiesAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // Analyzes: ALTER CONNECTOR name SET DCPROPERTIES (...).
    // Child 0 is the connector name; the first remaining child must be the properties token.
    String connectorName = unescapeIdentifier(root.getChild(0).getText());
    Map<String, String> properties = null;
    for (int idx = 1; idx < root.getChildCount(); idx++) {
        ASTNode child = (ASTNode) root.getChild(idx);
        if (child.getToken().getType() != HiveParser.TOK_DATACONNECTORPROPERTIES) {
            throw new SemanticException("Unrecognized token in ALTER CONNECTOR statement");
        }
        properties = getProps((ASTNode) child.getChild(0));
        // Only the first properties clause is consumed.
        break;
    }
    addAlterDataConnectorDesc(new AlterDataConnectorSetPropertiesDesc(connectorName, properties));
}
Also used : ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 98 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class TruncateTableAnalyzer, method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    // First child is TOK_TABLE_PARTITION: the table reference plus an optional partition spec.
    ASTNode tablePartitionNode = (ASTNode) root.getChild(0);
    String tableNameString = getUnescapedName((ASTNode) tablePartitionNode.getChild(0));
    Table table = getTable(tableNameString, true);
    TableName tableName = HiveTableName.of(table);
    checkTruncateEligibility(root, tablePartitionNode, tableNameString, table);

    Map<String, String> partitionSpec = getPartSpec((ASTNode) tablePartitionNode.getChild(1));
    addTruncateTableOutputs(tablePartitionNode, table, partitionSpec);

    // A column-name list switches to the column-truncation path; otherwise truncate whole rows.
    ASTNode columnNamesNode = (ASTNode) root.getFirstChildWithType(HiveParser.TOK_TABCOLNAME);
    Task<?> truncateTask = (columnNamesNode == null)
        ? getTruncateTaskWithoutColumnNames(tableName, partitionSpec, table)
        : getTruncateTaskWithColumnNames(tablePartitionNode, tableName, table, partitionSpec, columnNamesNode);
    rootTasks.add(truncateTask);
}
Also used : TableName(org.apache.hadoop.hive.common.TableName) HiveTableName(org.apache.hadoop.hive.ql.parse.HiveTableName) Table(org.apache.hadoop.hive.ql.metadata.Table) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode)

Example 99 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class AbstractAddPartitionAnalyzer, method createPartitions.

private List<AlterTableAddPartitionDesc.PartitionDesc> createPartitions(ASTNode command, Table table, boolean ifNotExists) throws SemanticException {
    // Walks the PARTSPEC / PARTITIONLOCATION children in order. When a location is given,
    // it always follows the partition spec it belongs to, so a spec is flushed either when
    // the next spec begins or when the child list ends.
    List<AlterTableAddPartitionDesc.PartitionDesc> result = new ArrayList<>();
    Map<String, String> pendingSpec = null;
    String pendingLocation = null;
    // With IF NOT EXISTS, child 0 is the TOK_IFNOTEXISTS token and is skipped.
    for (int idx = ifNotExists ? 1 : 0; idx < command.getChildCount(); idx++) {
        ASTNode node = (ASTNode) command.getChild(idx);
        int tokenType = node.getToken().getType();
        if (tokenType == HiveParser.TOK_PARTSPEC) {
            // A new spec starts: flush the previous one first.
            if (pendingSpec != null) {
                result.add(createPartitionDesc(table, pendingLocation, pendingSpec));
                pendingLocation = null;
            }
            pendingSpec = getValidatedPartSpec(table, node, conf, true);
            // validate reserved values
            PartitionUtils.validatePartitions(conf, pendingSpec);
        } else if (tokenType == HiveParser.TOK_PARTITIONLOCATION) {
            // If a location is specified, attach it to the current partition.
            if (!allowLocation()) {
                throw new SemanticException("LOCATION clause illegal for view partition");
            }
            pendingLocation = unescapeSQLString(node.getChild(0).getText());
            inputs.add(toReadEntity(pendingLocation));
        } else {
            throw new SemanticException("Unknown child: " + node);
        }
    }
    if (pendingSpec != null) {
        // Flush the trailing spec.
        result.add(createPartitionDesc(table, pendingLocation, pendingSpec));
    }
    return result;
}
Also used : ArrayList(java.util.ArrayList) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 100 with ASTNode

use of org.apache.hadoop.hive.ql.parse.ASTNode in project hive by apache.

From the class ShowPartitionAnalyzer, method getShowPartitionsFilter.

@VisibleForTesting
ExprNodeDesc getShowPartitionsFilter(Table table, ASTNode command) throws SemanticException {
    // Extracts and type-checks the optional WHERE clause of SHOW PARTITIONS;
    // returns null when no WHERE clause is present.
    ExprNodeDesc showFilter = null;
    for (int i = 0; i < command.getChildCount(); i++) {
        ASTNode child = (ASTNode) command.getChild(i);
        if (child.getType() != HiveParser.TOK_WHERE) {
            continue;
        }
        // Expose every partition column to the type checker as a string column; the
        // declared types are kept aside for the later cast rewrite.
        RowResolver resolver = new RowResolver();
        Map<String, String> partColTypes = new HashMap<String, String>();
        for (FieldSchema partCol : table.getPartCols()) {
            resolver.put(table.getTableName(), partCol.getName(),
                new ColumnInfo(partCol.getName(), TypeInfoFactory.stringTypeInfo, null, true));
            partColTypes.put(partCol.getName().toLowerCase(), partCol.getType());
        }
        TypeCheckCtx typeCheckCtx = new TypeCheckCtx(resolver);
        ASTNode condition = (ASTNode) child.getChild(0);
        Map<ASTNode, ExprNodeDesc> exprByNode = ExprNodeTypeCheck.genExprNode(condition, typeCheckCtx);
        ExprNodeDesc filterExpr = exprByNode.get(condition);
        // The WHERE clause must type-check to a boolean function expression.
        if (!(filterExpr instanceof ExprNodeGenericFuncDesc) || !filterExpr.getTypeInfo().equals(TypeInfoFactory.booleanTypeInfo)) {
            String errorMsg = typeCheckCtx.getError() != null ? ". " + typeCheckCtx.getError() : "";
            throw new SemanticException("Not a filter expr: " + (filterExpr == null ? "null" : filterExpr.getExprString()) + errorMsg);
        }
        showFilter = replaceDefaultPartNameAndCastType(filterExpr, partColTypes, HiveConf.getVar(conf, HiveConf.ConfVars.DEFAULTPARTITIONNAME));
    }
    return showFilter;
}
Also used : TypeCheckCtx(org.apache.hadoop.hive.ql.parse.type.TypeCheckCtx) HashMap(java.util.HashMap) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ColumnInfo(org.apache.hadoop.hive.ql.exec.ColumnInfo) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) RowResolver(org.apache.hadoop.hive.ql.parse.RowResolver) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) ExprNodeDesc(org.apache.hadoop.hive.ql.plan.ExprNodeDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) VisibleForTesting(com.google.common.annotations.VisibleForTesting)

Aggregations

ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)116 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)37 DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)24 ArrayList (java.util.ArrayList)21 ExprNodeDesc (org.apache.hadoop.hive.ql.plan.ExprNodeDesc)13 HashMap (java.util.HashMap)11 FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)11 Table (org.apache.hadoop.hive.ql.metadata.Table)10 Node (org.apache.hadoop.hive.ql.lib.Node)9 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)8 TableName (org.apache.hadoop.hive.common.TableName)7 ColumnInfo (org.apache.hadoop.hive.ql.exec.ColumnInfo)7 RowResolver (org.apache.hadoop.hive.ql.parse.RowResolver)7 ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity)6 RelNode (org.apache.calcite.rel.RelNode)5 SQLCheckConstraint (org.apache.hadoop.hive.metastore.api.SQLCheckConstraint)5 Context (org.apache.hadoop.hive.ql.Context)5 ParseDriver (org.apache.hadoop.hive.ql.parse.ParseDriver)5 SemanticAnalyzer (org.apache.hadoop.hive.ql.parse.SemanticAnalyzer)5 WindowingException (com.sap.hadoop.windowing.WindowingException)4