Example 16 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache, in class DDLSemanticAnalyzer, method analyzeAlterTableSkewedLocation.

/**
 * Analyzes ALTER TABLE ... SET SKEWED LOCATION and collects the
 * skewed-value-to-location mappings into an AlterTableDesc.
 *
 * @param ast the root AST node of the skewed-location clause
 * @param tableName the name of the table being altered
 * @param partSpec the partition specification, if the alter targets a partition
 * @throws SemanticException if the location mappings are missing or malformed
 */
private void analyzeAlterTableSkewedLocation(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    /**
     * Intended to reject this DDL when hive.internal.ddl.list.bucketing.enable
     * is false, but no such check follows; hiveConf is unused beyond this point.
     */
    HiveConf hiveConf = SessionState.get().getConf();
    /**
     * Retrieve mappings from parser
     */
    Map<List<String>, String> locations = new HashMap<List<String>, String>();
    ArrayList<Node> locNodes = ast.getChildren();
    if (null == locNodes) {
        throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
    } else {
        for (Node locNode : locNodes) {
            // TOK_SKEWED_LOCATIONS
            ASTNode locAstNode = (ASTNode) locNode;
            ArrayList<Node> locListNodes = locAstNode.getChildren();
            if (null == locListNodes) {
                throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
            } else {
                for (Node locListNode : locListNodes) {
                    // TOK_SKEWED_LOCATION_LIST
                    ASTNode locListAstNode = (ASTNode) locListNode;
                    ArrayList<Node> locMapNodes = locListAstNode.getChildren();
                    if (null == locMapNodes) {
                        throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_LOC.getMsg());
                    } else {
                        for (Node locMapNode : locMapNodes) {
                            // TOK_SKEWED_LOCATION_MAP
                            ASTNode locMapAstNode = (ASTNode) locMapNode;
                            ArrayList<Node> locMapAstNodeMaps = locMapAstNode.getChildren();
                            if ((null == locMapAstNodeMaps) || (locMapAstNodeMaps.size() != 2)) {
                                throw new SemanticException(ErrorMsg.ALTER_TBL_SKEWED_LOC_NO_MAP.getMsg());
                            } else {
                                List<String> keyList = new LinkedList<String>();
                                ASTNode node = (ASTNode) locMapAstNodeMaps.get(0);
                                if (node.getToken().getType() == HiveParser.TOK_TABCOLVALUES) {
                                    keyList = getSkewedValuesFromASTNode(node);
                                } else if (isConstant(node)) {
                                    keyList.add(PlanUtils.stripQuotes(node.getText()));
                                } else {
                                    throw new SemanticException(ErrorMsg.SKEWED_TABLE_NO_COLUMN_VALUE.getMsg());
                                }
                                String newLocation = PlanUtils.stripQuotes(unescapeSQLString(((ASTNode) locMapAstNodeMaps.get(1)).getText()));
                                validateSkewedLocationString(newLocation);
                                locations.put(keyList, newLocation);
                                addLocationToOutputs(newLocation);
                            }
                        }
                    }
                }
            }
        }
    }
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, locations, partSpec);
    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), HiveConf (org.apache.hadoop.hive.conf.HiveConf), Node (org.apache.hadoop.hive.ql.lib.Node), ArrayList (java.util.ArrayList), HashMap (java.util.HashMap), LinkedHashMap (java.util.LinkedHashMap), LinkedList (java.util.LinkedList), List (java.util.List), ImmutableList (com.google.common.collect.ImmutableList)
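
For context, this analyzer backs statements of the form ALTER TABLE ... SET SKEWED LOCATION (...). Below is a minimal, hypothetical sketch of feeding the same AlterTableDesc constructor a skewed-value-to-location map; the table name, skewed value, and path are invented for illustration, and hive-exec is assumed to be on the classpath:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

public class SkewedLocationSketch {
    public static void main(String[] args) {
        // Corresponds to: ALTER TABLE demo_tbl SET SKEWED LOCATION ('hot'='/warehouse/skew/hot');
        Map<List<String>, String> locations = new HashMap<List<String>, String>();
        // One skewed-value key list mapped to its dedicated directory,
        // the same shape the parser loop above accumulates.
        locations.put(Arrays.asList("hot"), "/warehouse/skew/hot");
        // Same constructor as in analyzeAlterTableSkewedLocation; a null
        // partition spec makes this a table-level alter.
        AlterTableDesc desc = new AlterTableDesc("demo_tbl", locations, null);
    }
}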

Example 17 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache, in class DDLSemanticAnalyzer, method analyzeAlterTableLocation.

private void analyzeAlterTableLocation(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    String newLocation = unescapeSQLString(ast.getChild(0).getText());
    try {
        // Validate that the host/port pair is reachable; the status of the
        // location itself does not matter.
        FileSystem.get(new URI(newLocation), conf).getFileStatus(new Path(newLocation));
    } catch (FileNotFoundException e) {
    // The host/port pair is valid; whether the file exists does not matter.
    } catch (Exception e) {
        throw new SemanticException("Cannot connect to namenode, please check if host/port pair for " + newLocation + " is valid", e);
    }
    addLocationToOutputs(newLocation);
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, newLocation, partSpec);
    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: Path (org.apache.hadoop.fs.Path), AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), FileNotFoundException (java.io.FileNotFoundException), URI (java.net.URI), URISyntaxException (java.net.URISyntaxException), InvocationTargetException (java.lang.reflect.InvocationTargetException), LockException (org.apache.hadoop.hive.ql.lockmgr.LockException), MetaException (org.apache.hadoop.hive.metastore.api.MetaException), HiveException (org.apache.hadoop.hive.ql.metadata.HiveException), InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)
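
This analyzer corresponds to ALTER TABLE ... SET LOCATION '...'. A minimal sketch of the same check-connectivity-then-describe flow; the URI and table name are invented, and hive-exec plus hadoop-common are assumed to be on the classpath:

import java.io.FileNotFoundException;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

public class SetLocationSketch {
    public static void main(String[] args) throws Exception {
        // Corresponds to: ALTER TABLE demo_tbl SET LOCATION 'hdfs://namenode:8020/warehouse/demo_tbl';
        String newLocation = "hdfs://namenode:8020/warehouse/demo_tbl";
        try {
            // Contacting the filesystem is enough to validate the host/port pair.
            FileSystem.get(new URI(newLocation), new Configuration()).getFileStatus(new Path(newLocation));
        } catch (FileNotFoundException e) {
            // Connectivity succeeded; the path simply does not exist yet.
        }
        // Same constructor as in analyzeAlterTableLocation; a null partition
        // spec means the table itself is relocated.
        AlterTableDesc desc = new AlterTableDesc("demo_tbl", newLocation, null);
    }
}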

Example 18 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache, in class DDLSemanticAnalyzer, method analyzeAlterTableSerde.

private void analyzeAlterTableSerde(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    String serdeName = unescapeSQLString(ast.getChild(0).getText());
    AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
    if (ast.getChildCount() > 1) {
        HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(1)).getChild(0));
        alterTblDesc.setProps(mapProp);
    }
    alterTblDesc.setOldName(tableName);
    alterTblDesc.setSerdeName(serdeName);
    alterTblDesc.setPartSpec(partSpec);
    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
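
This path backs ALTER TABLE ... SET SERDE '...' [WITH SERDEPROPERTIES (...)]. A minimal sketch of the same setter sequence; the table name and property value are invented for illustration (LazySimpleSerDe itself is a real Hive SerDe):

import java.util.HashMap;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;

public class SetSerdeSketch {
    public static void main(String[] args) {
        // Corresponds to:
        //   ALTER TABLE demo_tbl SET SERDE 'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
        //   WITH SERDEPROPERTIES ('field.delim' = ',');
        AlterTableDesc desc = new AlterTableDesc(AlterTableTypes.ADDSERDE);
        HashMap<String, String> props = new HashMap<String, String>();
        props.put("field.delim", ",");
        // Same setter sequence as in analyzeAlterTableSerde.
        desc.setProps(props);
        desc.setOldName("demo_tbl");
        desc.setSerdeName("org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe");
        desc.setPartSpec(null); // table-level alter
    }
}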

Example 19 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache, in class DDLSemanticAnalyzer, method handleAlterTableSkewedBy.

/**
 * Processes "ALTER TABLE <name> SKEWED BY ... ON ... STORED AS DIRECTORIES".
 *
 * @param ast the root AST node of the skewed-by clause
 * @param tableName the name of the table being altered
 * @param tab the table metadata, used to validate the skew specification
 * @throws SemanticException if the skew specification is invalid
 */
private void handleAlterTableSkewedBy(ASTNode ast, String tableName, Table tab) throws SemanticException {
    List<String> skewedColNames = new ArrayList<String>();
    List<List<String>> skewedValues = new ArrayList<List<String>>();
    /* skewed column names. */
    ASTNode skewedNode = (ASTNode) ast.getChild(0);
    skewedColNames = analyzeSkewedTablDDLColNames(skewedColNames, skewedNode);
    /* skewed values. */
    analyzeDDLSkewedValues(skewedValues, skewedNode);
    // stored as directories
    boolean storedAsDirs = analyzeStoredAdDirs(skewedNode);
    AlterTableDesc alterTblDesc = new AlterTableDesc(tableName, false, skewedColNames, skewedValues);
    alterTblDesc.setStoredAsSubDirectories(storedAsDirs);
    /**
     * Validate information about skewed table
     */
    alterTblDesc.setTable(tab);
    alterTblDesc.validate();
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork), ArrayList (java.util.ArrayList), List (java.util.List), ImmutableList (com.google.common.collect.ImmutableList), LinkedList (java.util.LinkedList)
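
The statement here is ALTER TABLE ... SKEWED BY (...) ON (...) STORED AS DIRECTORIES. A minimal sketch of the same construction; the column and values are invented, and setTable/validate are omitted because they need live table metadata:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;

public class SkewedBySketch {
    public static void main(String[] args) {
        // Corresponds to: ALTER TABLE demo_tbl SKEWED BY (key) ON ('hot1', 'hot2') STORED AS DIRECTORIES;
        List<String> skewedColNames = Arrays.asList("key");
        List<List<String>> skewedValues = new ArrayList<List<String>>();
        skewedValues.add(Arrays.asList("hot1"));
        skewedValues.add(Arrays.asList("hot2"));
        // false mirrors the analyzer's call above for the SKEWED BY case.
        AlterTableDesc desc = new AlterTableDesc("demo_tbl", false, skewedColNames, skewedValues);
        // STORED AS DIRECTORIES: each skewed value gets its own subdirectory.
        desc.setStoredAsSubDirectories(true);
    }
}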

Example 20 with AlterTableDesc

Use of org.apache.hadoop.hive.ql.plan.AlterTableDesc in project hive by apache, in class DDLSemanticAnalyzer, method analyzeAlterTableSerdeProps.

private void analyzeAlterTableSerdeProps(ASTNode ast, String tableName, HashMap<String, String> partSpec) throws SemanticException {
    HashMap<String, String> mapProp = getProps((ASTNode) (ast.getChild(0)).getChild(0));
    AlterTableDesc alterTblDesc = new AlterTableDesc(AlterTableTypes.ADDSERDEPROPS);
    alterTblDesc.setProps(mapProp);
    alterTblDesc.setOldName(tableName);
    alterTblDesc.setPartSpec(partSpec);
    addInputsOutputsAlterTable(tableName, partSpec, alterTblDesc);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterTblDesc)));
}
Also used: AlterTableDesc (org.apache.hadoop.hive.ql.plan.AlterTableDesc), DDLWork (org.apache.hadoop.hive.ql.plan.DDLWork)
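
This final variant backs ALTER TABLE ... SET SERDEPROPERTIES (...). A minimal sketch; the table name and property key/value are invented for illustration:

import java.util.HashMap;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc;
import org.apache.hadoop.hive.ql.plan.AlterTableDesc.AlterTableTypes;

public class SetSerdePropsSketch {
    public static void main(String[] args) {
        // Corresponds to: ALTER TABLE demo_tbl SET SERDEPROPERTIES ('serialization.encoding' = 'UTF-8');
        HashMap<String, String> props = new HashMap<String, String>();
        props.put("serialization.encoding", "UTF-8");
        // Same setter sequence as in analyzeAlterTableSerdeProps; unlike
        // ADDSERDE, no SerDe class name is set, only the properties change.
        AlterTableDesc desc = new AlterTableDesc(AlterTableTypes.ADDSERDEPROPS);
        desc.setProps(props);
        desc.setOldName("demo_tbl");
        desc.setPartSpec(null); // or a partition spec for a per-partition alter
    }
}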
