Search in sources:

Example 1 with AbstractAlterTableDesc

Use of org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc in the Apache Hive project.

The following snippet shows the analyzeCommand method of the AlterTableSetSkewedLocationAnalyzer class.

/**
 * Analyzes an ALTER TABLE ... SET SKEWED LOCATION command: walks the AST to
 * collect the (skewed column values -> directory) mapping, validates each
 * location, and queues the resulting DDL task.
 *
 * @param tableName     table being altered
 * @param partitionSpec partition spec of the command, if any
 * @param command       AST subtree holding the skewed-location list
 * @throws SemanticException if the mapping is missing, malformed, or a
 *         location string fails validation
 */
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    List<Node> locationChildren = command.getChildren();
    if (locationChildren == null) {
        throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
    }

    // Accumulates skewed column values -> new location, in AST order.
    Map<List<String>, String> skewedValueToLocation = new HashMap<>();
    for (Node locationChild : locationChildren) {
        List<Node> locationLists = ((ASTNode) locationChild).getChildren();
        if (locationLists == null) {
            throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
        }
        for (Node locationList : locationLists) {
            List<Node> mappings = ((ASTNode) locationList).getChildren();
            if (mappings == null) {
                throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
            }
            for (Node mapping : mappings) {
                // Each mapping node must be an exact (key, location) pair.
                List<Node> pair = ((ASTNode) mapping).getChildren();
                if (pair == null || pair.size() != 2) {
                    throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg());
                }
                ASTNode keyNode = (ASTNode) pair.get(0);
                List<String> skewedValues = new LinkedList<>();
                if (keyNode.getToken().getType() == HiveParser.TOK_TABCOLVALUES) {
                    // Multi-column skew key: delegate list extraction.
                    skewedValues = SkewedTableUtils.getSkewedValuesFromASTNode(keyNode);
                } else if (isConstant(keyNode)) {
                    // Single constant value.
                    skewedValues.add(PlanUtils.stripQuotes(keyNode.getText()));
                } else {
                    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
                }
                String location = PlanUtils.stripQuotes(unescapeSQLString(((ASTNode) pair.get(1)).getText()));
                validateSkewedLocationString(location);
                skewedValueToLocation.put(skewedValues, location);
                // Each target directory becomes a write entity for authorization/locking.
                outputs.add(toWriteEntity(location));
            }
        }
    }

    AbstractAlterTableDesc desc = new AlterTableSetSkewedLocationDesc(tableName, partitionSpec, skewedValueToLocation);
    setAcidDdlDesc(getTable(tableName), desc);
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_SKEWED_LOCATION, false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : HashMap(java.util.HashMap) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Node(org.apache.hadoop.hive.ql.lib.Node) LinkedList(java.util.LinkedList) AbstractAlterTableDesc(org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) List(java.util.List) LinkedList(java.util.LinkedList) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 2 with AbstractAlterTableDesc

Use of org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc in the Apache Hive project.

The following snippet shows the analyzeCommand method of the AbstractAlterTablePropertiesAnalyzer class.

/**
 * Analyzes an ALTER TABLE ... SET TBLPROPERTIES-style command: extracts the
 * property map, decides whether this is a stats update and/or a transactional
 * conversion, and queues the DDL task (with a lock when converting to ACID).
 *
 * @param tableName     table being altered
 * @param partitionSpec partition spec of the command, if any
 * @param command       AST subtree holding the property list
 * @throws SemanticException if the properties fail validation
 */
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    Map<String, String> properties = getProps((ASTNode) (command.getChild(0)).getChild(0));
    boolean statsUpdated = validate(tableName, properties);

    // Mark user-generated stats so the metastore does not overwrite them.
    EnvironmentContext envContext = null;
    if (statsUpdated) {
        envContext = new EnvironmentContext();
        envContext.putToProperties(StatsSetupConst.STATS_GENERATED, StatsSetupConst.USER);
    }

    boolean convertsToTxn = AcidUtils.isTablePropertyTransactional(properties)
            || properties.containsKey(hive_metastoreConstants.TABLE_TRANSACTIONAL_PROPERTIES);
    boolean explicitStatsUpdate = statsUpdated && AcidUtils.isTransactionalTable(getTable(tableName, true));

    AbstractAlterTableDesc desc =
            createDesc(command, tableName, partitionSpec, properties, convertsToTxn, explicitStatsUpdate, envContext);
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), convertsToTxn);

    DDLWork ddlWork = new DDLWork(getInputs(), getOutputs(), desc);
    if (convertsToTxn) {
        // NOTE(original): unclear why only the transactional conversion
        // requires a lock while most other operations here do not.
        ddlWork.setNeedLock(true);
    }
    if (convertsToTxn || explicitStatsUpdate) {
        setAcidDdlDesc(desc);
    }
    rootTasks.add(TaskFactory.get(ddlWork));
}
Also used : EnvironmentContext(org.apache.hadoop.hive.metastore.api.EnvironmentContext) AbstractAlterTableDesc(org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork)

Example 3 with AbstractAlterTableDesc

Use of org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc in the Apache Hive project.

The following snippet shows the analyzeCommand method of the AlterTableClusterSortAnalyzer class.

/**
 * Analyzes ALTER TABLE clustering commands (NOT CLUSTERED, NOT SORTED, and
 * CLUSTERED BY ... [SORTED BY ...] INTO n BUCKETS), builds the matching
 * descriptor, and queues the DDL task.
 *
 * @param tableName     table being altered
 * @param partitionSpec partition spec of the command, if any
 * @param command       AST whose first child identifies the operation
 * @throws SemanticException on an unknown operation or non-positive bucket count
 */
@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    int operation = command.getChild(0).getType();
    AbstractAlterTableDesc desc;
    switch (operation) {
        case HiveParser.TOK_NOT_CLUSTERED:
            desc = new AlterTableNotClusteredDesc(tableName, partitionSpec);
            break;
        case HiveParser.TOK_NOT_SORTED:
            desc = new AlterTableNotSortedDesc(tableName, partitionSpec);
            break;
        case HiveParser.TOK_ALTERTABLE_BUCKETS: {
            ASTNode bucketsNode = (ASTNode) command.getChild(0);
            List<String> bucketCols = getColumnNames((ASTNode) bucketsNode.getChild(0));
            List<Order> sortCols;
            int numBuckets;
            if (bucketsNode.getChildCount() == 2) {
                // No SORTED BY clause: children are (columns, bucket count).
                sortCols = new ArrayList<>();
                numBuckets = Integer.parseInt(bucketsNode.getChild(1).getText());
            } else {
                // With SORTED BY: children are (columns, sort order, bucket count).
                sortCols = getColumnNamesOrder((ASTNode) bucketsNode.getChild(1));
                numBuckets = Integer.parseInt(bucketsNode.getChild(2).getText());
            }
            if (numBuckets <= 0) {
                throw new SemanticException(ErrorMsg.INVALID_BUCKET_NUMBER.getMsg());
            }
            desc = new AlterTableClusteredByDesc(tableName, partitionSpec, numBuckets, bucketCols, sortCols);
            break;
        }
        default:
            throw new SemanticException("Invalid operation " + operation);
    }

    addInputsOutputsAlterTable(tableName, partitionSpec, desc, desc.getType(), false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    setAcidDdlDesc(getTable(tableName), desc);
}
Also used : Order(org.apache.hadoop.hive.metastore.api.Order) ArrayList(java.util.ArrayList) AbstractAlterTableDesc(org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork)3 AbstractAlterTableDesc (org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc)3 ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)2 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)2 ArrayList (java.util.ArrayList)1 HashMap (java.util.HashMap)1 LinkedList (java.util.LinkedList)1 List (java.util.List)1 EnvironmentContext (org.apache.hadoop.hive.metastore.api.EnvironmentContext)1 Order (org.apache.hadoop.hive.metastore.api.Order)1 Node (org.apache.hadoop.hive.ql.lib.Node)1