Search in sources :

Example 1 with StorageFormat

Usage example of org.apache.hadoop.hive.ql.parse.StorageFormat in the Apache Hive project.

The preAnalyze method of the CreateTableHook class.

/**
 * Validates a CREATE TABLE AST before the main semantic analysis runs.
 *
 * <p>Walks the statement's children and enforces HCatalog restrictions:
 * CTAS is rejected, partition columns must be of type {@code string}, and a
 * STORED AS clause (input + output format) is required unless the statement
 * is CREATE TABLE LIKE or names a storage handler. Also records the table
 * name in {@code tableName} for later hook phases.
 *
 * @param context hook context supplying the Hive DB handle and configuration
 * @param ast     root of the CREATE TABLE parse tree
 * @return the (unmodified) AST when analysis may proceed
 * @throws SemanticException if the Hive DB handle cannot be obtained, the
 *         statement is a CTAS, a partition column is not a string, or the
 *         STORED AS specification is incomplete
 */
@Override
public ASTNode preAnalyze(HiveSemanticAnalyzerHookContext context, ASTNode ast) throws SemanticException {
    Hive db;
    try {
        db = context.getHive();
    } catch (HiveException e) {
        throw new SemanticException("Couldn't get Hive DB instance in semantic analysis phase.", e);
    }
    // Analyze and create tbl properties object
    int numCh = ast.getChildCount();
    // Child 0 is always the table name; remaining children are clauses.
    tableName = BaseSemanticAnalyzer.getUnescapedName((ASTNode) ast.getChild(0));
    boolean likeTable = false;
    StorageFormat format = new StorageFormat(context.getConf());
    for (int num = 1; num < numCh; num++) {
        ASTNode child = (ASTNode) ast.getChild(num);
        if (format.fillStorageFormat(child)) {
            // A custom storage handler manages its own formats, so the
            // STORED AS completeness check below does not apply — bail out.
            String storageHandler = format.getStorageHandler();
            if (storageHandler != null && !storageHandler.isEmpty()) {
                return ast;
            }
            continue;
        }
        switch(child.getToken().getType()) {
            case // CTAS
            HiveParser.TOK_QUERY:
                throw new SemanticException("Operation not supported. Create table as " + "Select is not a valid operation.");
            case HiveParser.TOK_ALTERTABLE_BUCKETS:
                break;
            case HiveParser.TOK_LIKETABLE:
                likeTable = true;
                break;
            case HiveParser.TOK_IFNOTEXISTS:
                try {
                    // IF NOT EXISTS and the table is already there: no-op,
                    // skip the remaining validation entirely.
                    List<String> tables = db.getTablesByPattern(tableName);
                    if (tables != null && !tables.isEmpty()) {
                        // exists
                        return ast;
                    }
                } catch (HiveException e) {
                    throw new SemanticException(e);
                }
                break;
            case HiveParser.TOK_TABLEPARTCOLS: {
                // Braces scope partCols to this case; HCatalog only supports
                // string-typed partition columns.
                List<FieldSchema> partCols = BaseSemanticAnalyzer.getColumns(child, false);
                for (FieldSchema fs : partCols) {
                    if (!fs.getType().equalsIgnoreCase("string")) {
                        throw new SemanticException("Operation not supported. HCatalog only " + "supports partition columns of type string. " + "For column: " + fs.getName() + " Found type: " + fs.getType());
                    }
                }
                break;
            }
        }
    }
    // CREATE TABLE LIKE inherits its formats from the source table; any other
    // statement must have supplied both input and output formats by now.
    if (!likeTable && (format.getInputFormat() == null || format.getOutputFormat() == null)) {
        throw new SemanticException("STORED AS specification is either incomplete or incorrect.");
    }
    return ast;
}
Also used : Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) FieldSchema(org.apache.hadoop.hive.metastore.api.FieldSchema) ASTNode(org.apache.hadoop.hive.ql.parse.ASTNode) StorageFormat(org.apache.hadoop.hive.ql.parse.StorageFormat) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Aggregations

FieldSchema (org.apache.hadoop.hive.metastore.api.FieldSchema)1 Hive (org.apache.hadoop.hive.ql.metadata.Hive)1 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)1 ASTNode (org.apache.hadoop.hive.ql.parse.ASTNode)1 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)1 StorageFormat (org.apache.hadoop.hive.ql.parse.StorageFormat)1