Example 41 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The analyzeCommand method of the AlterTableSetLocationAnalyzer class.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    String newLocation = unescapeSQLString(command.getChild(0).getText());
    try {
        // Only to make sure the host/port pair is valid; the status of the location does not matter
        FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation));
    } catch (FileNotFoundException e) {
    // Only checks that the host/port pair is valid; whether the file exists does not matter
    } catch (Exception e) {
        throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e);
    }
    outputs.add(toWriteEntity(newLocation));
    AlterTableSetLocationDesc desc = new AlterTableSetLocationDesc(tableName, partitionSpec, newLocation);
    Table table = getTable(tableName);
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
    }
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.ALTERLOCATION, false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : Path(org.apache.hadoop.fs.Path) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) FileNotFoundException(java.io.FileNotFoundException) URI(java.net.URI) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
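For context, here is a hedged sketch (not part of the Hive source) of the kind of statement that reaches this analyzer: an ALTER TABLE ... SET LOCATION submitted to HiveServer2 over JDBC. The connection URL, table name, and HDFS path are illustrative placeholders.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class SetLocationExample {
    public static void main(String[] args) throws SQLException {
        // Assumes the hive-jdbc driver is on the classpath and a HiveServer2
        // instance is reachable; endpoint, table name and URI are placeholders.
        // AlterTableSetLocationAnalyzer.analyzeCommand receives the new location,
        // verifies that its namenode is reachable, and queues a DDLWork task.
        try (Connection conn = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
             Statement stmt = conn.createStatement()) {
            stmt.execute("ALTER TABLE web_logs SET LOCATION 'hdfs://namenode:8020/warehouse/web_logs'");
        }
    }
}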

Example 42 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The analyzeCommand method of the AlterTableSkewedByAnalyzer class.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    Table table = getTable(tableName);
    validateAlterTableType(table, AlterTableType.SKEWED_BY, false);
    inputs.add(new ReadEntity(table));
    outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_EXCLUSIVE));
    DDLDescWithWriteId desc = null;
    if (command.getChildCount() == 0) {
        desc = new AlterTableNotSkewedDesc(tableName);
        setAcidDdlDesc(table, desc);
    } else {
        switch(((ASTNode) command.getChild(0)).getToken().getType()) {
            case HiveParser.TOK_TABLESKEWED:
                desc = handleAlterTableSkewedBy(command, tableName, table);
                setAcidDdlDesc(table, desc);
                break;
            case HiveParser.TOK_STOREDASDIRS:
                desc = handleAlterTableDisableStoredAsDirs(tableName, table);
                setAcidDdlDesc(table, desc);
                break;
            default:
                assert false;
        }
    }
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) DDLDescWithWriteId(org.apache.hadoop.hive.ql.ddl.DDLDesc.DDLDescWithWriteId) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
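As a hedged illustration (not from the Hive source), the statements below correspond to the three branches handled above; the table and column names are placeholders.

// Hedged sketch: HiveQL statements that exercise AlterTableSkewedByAnalyzer, one per branch.
// Table and column names (sales, region) are illustrative only.
public class SkewedByStatements {
    public static final String[] STATEMENTS = {
        // TOK_TABLESKEWED branch: define skewed columns and values
        "ALTER TABLE sales SKEWED BY (region) ON ('US', 'EU') STORED AS DIRECTORIES",
        // TOK_STOREDASDIRS branch: keep the skew definition but stop storing skewed values as directories
        "ALTER TABLE sales NOT STORED AS DIRECTORIES",
        // command node with no children: drop the skew definition entirely
        "ALTER TABLE sales NOT SKEWED"
    };
}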

Example 43 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The analyzeCommand method of the AlterTableCompactAnalyzer class.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    String type = unescapeSQLString(command.getChild(0).getText()).toLowerCase();
    try {
        CompactionType.valueOf(type.toUpperCase());
    } catch (IllegalArgumentException e) {
        throw new SemanticException(ErrorMsg.INVALID_COMPACTION_TYPE.getMsg());
    }
    Map<String, String> mapProp = null;
    boolean isBlocking = false;
    for (int i = 0; i < command.getChildCount(); i++) {
        switch(command.getChild(i).getType()) {
            case HiveParser.TOK_TABLEPROPERTIES:
                mapProp = getProps((ASTNode) (command.getChild(i)).getChild(0));
                break;
            case HiveParser.TOK_BLOCKING:
                isBlocking = true;
                break;
            default:
                break;
        }
    }
    AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, partitionSpec, type, isBlocking, mapProp);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
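A hedged example (table, partition, and property values are placeholders) showing how the optional clauses of a compaction request map onto the tokens handled above: AND WAIT is parsed as TOK_BLOCKING, and WITH OVERWRITE TBLPROPERTIES as TOK_TABLEPROPERTIES.

// Hedged sketch: a compaction statement exercising both optional clauses.
// Table name, partition and property values are illustrative only.
public class CompactStatementExample {
    public static final String COMPACT_DDL =
        "ALTER TABLE web_logs PARTITION (ds = '2024-01-01') "
        + "COMPACT 'major' "   // must parse as a CompactionType, otherwise INVALID_COMPACTION_TYPE is raised
        + "AND WAIT "          // TOK_BLOCKING -> isBlocking = true
        + "WITH OVERWRITE TBLPROPERTIES ('compactor.mapreduce.map.memory.mb' = '2048')"; // TOK_TABLEPROPERTIES -> mapProp
}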

Example 44 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The compactAcidTable method of the AlterTableConcatenateAnalyzer class.

private void compactAcidTable(TableName tableName, Map<String, String> partitionSpec) throws SemanticException {
    boolean isBlocking = !HiveConf.getBoolVar(conf, ConfVars.TRANSACTIONAL_CONCATENATE_NOBLOCK, false);
    AlterTableCompactDesc desc = new AlterTableCompactDesc(tableName, partitionSpec, "MAJOR", isBlocking, null);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    setAcidDdlDesc(getTable(tableName), desc);
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) AlterTableCompactDesc(org.apache.hadoop.hive.ql.ddl.table.storage.compact.AlterTableCompactDesc)
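In other words, when the target is a transactional (ACID) table, ALTER TABLE ... CONCATENATE is rewritten into a MAJOR compaction request instead of a file-merge task, and the compaction blocks by default unless TRANSACTIONAL_CONCATENATE_NOBLOCK is enabled. A hedged sketch of such a statement (the table name and partition spec are placeholders):

// Hedged sketch: on a transactional table this statement is routed through
// compactAcidTable above, producing a blocking MAJOR compaction DDLWork.
// Table name and partition spec are illustrative only.
public class ConcatenateExample {
    public static final String CONCATENATE_DDL =
        "ALTER TABLE acid_events PARTITION (ds = '2024-01-01') CONCATENATE";
}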

Example 45 with DDLWork

Use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

The analyzeCommand method of the AlterTableSetSkewedLocationAnalyzer class.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    List<Node> locationNodes = command.getChildren();
    if (locationNodes == null) {
        throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
    }
    Map<List<String>, String> locations = new HashMap<>();
    for (Node locationNode : locationNodes) {
        List<Node> locationListNodes = ((ASTNode) locationNode).getChildren();
        if (locationListNodes == null) {
            throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
        }
        for (Node locationListNode : locationListNodes) {
            List<Node> locationMapNodes = ((ASTNode) locationListNode).getChildren();
            if (locationMapNodes == null) {
                throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
            }
            for (Node locationMapNode : locationMapNodes) {
                List<Node> locationMapNodeMaps = ((ASTNode) locationMapNode).getChildren();
                if ((locationMapNodeMaps == null) || (locationMapNodeMaps.size() != 2)) {
                    throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg());
                }
                List<String> keyList = new LinkedList<String>();
                ASTNode node = (ASTNode) locationMapNodeMaps.get(0);
                if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) {
                    keyList = SkewedTableUtils.getSkewedValuesFromASTNode(node);
                } else if (isConstant(node)) {
                    keyList.add(PlanUtils.stripQuotes(node.getText()));
                } else {
                    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
                }
                String newLocation = PlanUtils.stripQuotes(unescapeSQLString(((ASTNode) locationMapNodeMaps.get(1)).getText()));
                validateSkewedLocationString(newLocation);
                locations.put(keyList, newLocation);
                outputs.add(toWriteEntity(newLocation));
            }
        }
    }
    AbstractAlterTableDesc desc = new AlterTableSetSkewedLocationDesc(tableName, partitionSpec, locations);
    setAcidDdlDesc(getTable(tableName), desc);
    addInputsOutputsAlterTable(tableName, partitionSpec, desc, AlterTableType.SET_SKEWED_LOCATION, false);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : HashMap(java.util.HashMap) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) Node(org.apache.hadoop.hive.ql.lib.Node) LinkedList(java.util.LinkedList) AbstractAlterTableDesc(org.apache.hadoop.hive.ql.ddl.table.AbstractAlterTableDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) List(java.util.List) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
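For reference, a hedged example (not from the Hive source) of the statement this analyzer handles; the table name, skewed values, and HDFS paths are placeholders.

// Hedged sketch: a SET SKEWED LOCATION statement mapping skewed values to directories.
// Each value/location pair becomes one entry in the `locations` map built above.
// Table name, values and paths are illustrative only.
public class SetSkewedLocationExample {
    public static final String SET_SKEWED_LOCATION_DDL =
        "ALTER TABLE sales SET SKEWED LOCATION "
        + "('US' = 'hdfs://namenode:8020/warehouse/sales/us', "
        + "'EU' = 'hdfs://namenode:8020/warehouse/sales/eu')";
}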

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork): 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 61
Table (org.apache.hadoop.hive.ql.metadata.Table): 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity): 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode): 24
TableName (org.apache.hadoop.hive.common.TableName): 23
Test (org.junit.Test): 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity): 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc): 21
ArrayList (java.util.ArrayList): 18
Path (org.apache.hadoop.fs.Path): 15
HashMap (java.util.HashMap): 14
Database (org.apache.hadoop.hive.metastore.api.Database): 12
Task (org.apache.hadoop.hive.ql.exec.Task): 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema): 11
Tree (org.antlr.runtime.tree.Tree): 10
HashSet (java.util.HashSet): 9
Context (org.apache.hadoop.hive.ql.Context): 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc): 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc): 8