Example 26 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class CreateDataConnectorAnalyzer method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    boolean ifNotExists = false;
    String comment = null;
    String url = null;
    String type = null;
    Map<String, String> props = null;
    String connectorName = unescapeIdentifier(root.getChild(0).getText());
    for (int i = 1; i < root.getChildCount(); i++) {
        ASTNode childNode = (ASTNode) root.getChild(i);
        switch(childNode.getToken().getType()) {
            case HiveParser.TOK_IFNOTEXISTS:
                ifNotExists = true;
                break;
            case HiveParser.TOK_DATACONNECTORCOMMENT:
                comment = unescapeSQLString(childNode.getChild(0).getText());
                break;
            case HiveParser.TOK_DATACONNECTORPROPERTIES:
                props = getProps((ASTNode) childNode.getChild(0));
                break;
            case HiveParser.TOK_DATACONNECTORURL:
                url = unescapeSQLString(childNode.getChild(0).getText());
                // outputs.add(toWriteEntity(url));
                break;
            case HiveParser.TOK_DATACONNECTORTYPE:
                type = unescapeSQLString(childNode.getChild(0).getText());
                break;
            default:
                throw new SemanticException("Unrecognized token in CREATE CONNECTOR statement");
        }
    }
    CreateDataConnectorDesc desc = null;
    DataConnector connector = new DataConnector(connectorName, type, url);
    if (comment != null)
        connector.setDescription(comment);
    if (props != null)
        connector.setParameters(props);
    desc = new CreateDataConnectorDesc(connectorName, type, url, ifNotExists, comment, props);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_NO_LOCK));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
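For context, a minimal sketch of the statement shape this analyzer compiles, inferred from the tokens handled above (TOK_IFNOTEXISTS, TOK_DATACONNECTORTYPE, TOK_DATACONNECTORURL, TOK_DATACONNECTORCOMMENT, TOK_DATACONNECTORPROPERTIES). The connector name, type, URL, and property below are invented for illustration; the exact clause syntax may vary by Hive version.

// Hypothetical CREATE CONNECTOR statement; all literals are made up.
String createConnectorSql =
    "CREATE CONNECTOR IF NOT EXISTS mysql_conn "
    + "TYPE 'mysql' "
    + "URL 'jdbc:mysql://example-host:3306/hivedb' "
    + "COMMENT 'example connector' "
    + "WITH DCPROPERTIES ('user' = 'hive')";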

Example 27 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class MsckAnalyzer method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    if (root.getChildCount() == 0) {
        throw new SemanticException("MSCK command must have arguments");
    }
    ctx.setResFile(ctx.getLocalTmpPath());
    boolean repair = root.getChild(0).getType() == HiveParser.KW_REPAIR;
    int offset = repair ? 1 : 0;
    String tableName = getUnescapedName((ASTNode) root.getChild(0 + offset));
    boolean addPartitions = true;
    boolean dropPartitions = false;
    if (root.getChildCount() > 1 + offset) {
        addPartitions = isMsckAddPartition(root.getChild(1 + offset).getType());
        dropPartitions = isMsckDropPartition(root.getChild(1 + offset).getType());
    }
    Table table = getTable(tableName);
    Map<Integer, List<ExprNodeGenericFuncDesc>> partitionSpecs = ParseUtils.getFullPartitionSpecs(root, table, conf, false);
    byte[] filterExp = null;
    if (partitionSpecs != null && !partitionSpecs.isEmpty()) {
        // expression proxy class needs to be PartitionExpressionForMetastore since we intend to use the
        // filterPartitionsByExpr of PartitionExpressionForMetastore for partition pruning down the line.
        // Bail out early if expressionProxyClass is not configured properly.
        String expressionProxyClass = conf.get(MetastoreConf.ConfVars.EXPRESSION_PROXY_CLASS.getVarname());
        if (!PartitionExpressionForMetastore.class.getCanonicalName().equals(expressionProxyClass)) {
            throw new SemanticException("Invalid expression proxy class. The config metastore.expression.proxy needs " + "to be set to org.apache.hadoop.hive.ql.optimizer.ppr.PartitionExpressionForMetastore");
        }
        // fetch the first value of partitionSpecs map since it will always have one key, value pair
        filterExp = SerializationUtilities.serializeExpressionToKryo((ExprNodeGenericFuncDesc) ((List) partitionSpecs.values().toArray()[0]).get(0));
    }
    if (repair && AcidUtils.isTransactionalTable(table)) {
        outputs.add(new WriteEntity(table, WriteType.DDL_EXCLUSIVE));
    } else {
        outputs.add(new WriteEntity(table, WriteEntity.WriteType.DDL_SHARED));
    }
    MsckDesc desc = new MsckDesc(tableName, filterExp, ctx.getResFile(), repair, addPartitions, dropPartitions);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : Table(org.apache.hadoop.hive.ql.metadata.Table) ExprNodeGenericFuncDesc(org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) List(java.util.List) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
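The filter-expression branch above bails out unless the metastore is configured with the expected expression proxy. A minimal sketch of that configuration follows; the property key and class name are taken from the SemanticException message above, while the use of HiveConf here is an assumption for illustration.

import org.apache.hadoop.hive.conf.HiveConf;

// Assumed setup (a sketch, not the analyzer's own code): point metastore.expression.proxy
// at PartitionExpressionForMetastore before issuing MSCK with a partition filter.
HiveConf conf = new HiveConf();
conf.set("metastore.expression.proxy",
    "org.apache.hadoop.hive.ql.optimizer.ppr.PartitionExpressionForMetastore");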

Example 28 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class AbstractAlterDataConnectorAnalyzer method addAlterDataConnectorDesc.

protected void addAlterDataConnectorDesc(AbstractAlterDataConnectorDesc alterDesc) throws SemanticException {
    DataConnector connector = getDataConnector(alterDesc.getConnectorName());
    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_NO_LOCK));
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), alterDesc)));
}
Also used : DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
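A hedged sketch of the kind of ALTER CONNECTOR statements whose concrete analyzers would delegate to this helper; the connector name, URL, and property are invented, and the exact SET clauses supported may differ across Hive versions.

// Hypothetical statements routed through addAlterDataConnectorDesc by concrete
// AbstractAlterDataConnectorAnalyzer subclasses; all identifiers are made up.
String alterUrlSql = "ALTER CONNECTOR mysql_conn SET URL 'jdbc:mysql://new-host:3306/hivedb'";
String alterPropsSql = "ALTER CONNECTOR mysql_conn SET DCPROPERTIES ('user' = 'hive2')";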

Example 29 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class DropDataConnectorAnalyzer method analyzeInternal.

@Override
public void analyzeInternal(ASTNode root) throws SemanticException {
    String connectorName = unescapeIdentifier(root.getChild(0).getText());
    boolean ifExists = root.getFirstChildWithType(HiveParser.TOK_IFEXISTS) != null;
    DataConnector connector = getDataConnector(connectorName, !ifExists);
    if (connector == null) {
        return;
    }
    inputs.add(new ReadEntity(connector));
    outputs.add(new WriteEntity(connector, WriteEntity.WriteType.DDL_EXCLUSIVE));
    DropDataConnectorDesc desc = new DropDataConnectorDesc(connectorName, ifExists);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) DataConnector(org.apache.hadoop.hive.metastore.api.DataConnector) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
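For completeness, a sketch of the statement this analyzer handles; note that with IF EXISTS and a missing connector, analyzeInternal above simply returns without creating a task. The connector name is invented.

// Hypothetical DROP CONNECTOR statement; the name is made up for illustration.
String dropConnectorSql = "DROP CONNECTOR IF EXISTS mysql_conn";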

Example 30 with DDLWork

use of org.apache.hadoop.hive.ql.ddl.DDLWork in project hive by apache.

the class AlterTableDropConstraintAnalyzer method analyzeCommand.

@Override
protected void analyzeCommand(TableName tableName, Map<String, String> partitionSpec, ASTNode command) throws SemanticException {
    String constraintName = unescapeIdentifier(command.getChild(0).getText());
    AlterTableDropConstraintDesc desc = new AlterTableDropConstraintDesc(tableName, null, constraintName);
    Table table = getTable(tableName);
    WriteEntity.WriteType writeType = null;
    if (AcidUtils.isTransactionalTable(table)) {
        setAcidDdlDesc(desc);
        writeType = WriteType.DDL_EXCLUSIVE;
    } else {
        writeType = WriteEntity.determineAlterTableWriteType(AlterTableType.DROP_CONSTRAINT);
    }
    inputs.add(new ReadEntity(table));
    WriteEntity alterTableOutput = new WriteEntity(table, writeType);
    outputs.add(alterTableOutput);
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
}
Also used : ReadEntity(org.apache.hadoop.hive.ql.hooks.ReadEntity) WriteType(org.apache.hadoop.hive.ql.hooks.WriteEntity.WriteType) Table(org.apache.hadoop.hive.ql.metadata.Table) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) WriteEntity(org.apache.hadoop.hive.ql.hooks.WriteEntity)
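A sketch of the statement this analyzer compiles; the table and constraint names are invented. As the code above shows, a transactional table takes a DDL_EXCLUSIVE write lock, while other tables get the lock type from determineAlterTableWriteType.

// Hypothetical ALTER TABLE ... DROP CONSTRAINT statement; identifiers are made up.
String dropConstraintSql = "ALTER TABLE sales DROP CONSTRAINT pk_sales";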

Aggregations

DDLWork (org.apache.hadoop.hive.ql.ddl.DDLWork) 153
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException) 61
Table (org.apache.hadoop.hive.ql.metadata.Table) 34
ReadEntity (org.apache.hadoop.hive.ql.hooks.ReadEntity) 31
ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode) 24
TableName (org.apache.hadoop.hive.common.TableName) 23
Test (org.junit.Test) 23
WriteEntity (org.apache.hadoop.hive.ql.hooks.WriteEntity) 22
PrincipalDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) 21
ArrayList (java.util.ArrayList) 18
Path (org.apache.hadoop.fs.Path) 15
HashMap (java.util.HashMap) 14
Database (org.apache.hadoop.hive.metastore.api.Database) 12
Task (org.apache.hadoop.hive.ql.exec.Task) 12
FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema) 11
Tree (org.antlr.runtime.tree.Tree) 10
HashSet (java.util.HashSet) 9
Context (org.apache.hadoop.hive.ql.Context) 9
PrivilegeDesc (org.apache.hadoop.hive.ql.ddl.privilege.PrivilegeDesc) 9
ShowRoleGrantDesc (org.apache.hadoop.hive.ql.ddl.privilege.show.rolegrant.ShowRoleGrantDesc) 8