
Example 6 with MetaData

Use of org.apache.hadoop.hive.ql.parse.repl.load.MetaData in project hive by apache.

From the class CreateDatabaseHandler, the method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    MetaData metaData;
    try {
        // Resolve the dump location's filesystem and read the _metadata file written there.
        FileSystem fs = FileSystem.get(new Path(context.location).toUri(), context.hiveConf);
        metaData = EximUtil.readMetaData(fs, new Path(context.location, EximUtil.METADATA_NAME));
    } catch (IOException e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
    }
    Database db = metaData.getDatabase();
    // A database name passed in the load context overrides the dumped name.
    String destinationDBName = context.dbName == null ? db.getName() : context.dbName;
    // Build the CREATE DATABASE task from the dumped definition.
    CreateDatabaseDesc createDatabaseDesc = new CreateDatabaseDesc(destinationDBName, db.getDescription(), null, null, true, db.getParameters());
    Task<DDLWork> createDBTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), createDatabaseDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
    // Carry the dumped database properties over with a dependent ALTER DATABASE task.
    if (!db.getParameters().isEmpty()) {
        AlterDatabaseSetPropertiesDesc alterDbDesc = new AlterDatabaseSetPropertiesDesc(destinationDBName, db.getParameters(), context.eventOnlyReplicationSpec());
        Task<DDLWork> alterDbProperties = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbDesc, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        createDBTask.addDependentTask(alterDbProperties);
    }
    // Restore the original owner with a dependent ALTER DATABASE ... SET OWNER task.
    if (StringUtils.isNotEmpty(db.getOwnerName())) {
        AlterDatabaseSetOwnerDesc alterDbOwner = new AlterDatabaseSetOwnerDesc(destinationDBName, new PrincipalDesc(db.getOwnerName(), db.getOwnerType()), context.eventOnlyReplicationSpec());
        Task<DDLWork> alterDbTask = TaskFactory.get(new DDLWork(new HashSet<>(), new HashSet<>(), alterDbOwner, true, context.getDumpDirectory(), context.getMetricCollector()), context.hiveConf);
        createDBTask.addDependentTask(alterDbTask);
    }
    // updatedMetadata is a field of the handler; it records the replicated event id and target db.
    updatedMetadata.set(context.dmd.getEventTo().toString(), destinationDBName, null, null);
    return Collections.singletonList(createDBTask);
}
Also used : Path(org.apache.hadoop.fs.Path) IOException(java.io.IOException) PrincipalDesc(org.apache.hadoop.hive.ql.ddl.privilege.PrincipalDesc) DDLWork(org.apache.hadoop.hive.ql.ddl.DDLWork) CreateDatabaseDesc(org.apache.hadoop.hive.ql.ddl.database.create.CreateDatabaseDesc) MetaData(org.apache.hadoop.hive.ql.parse.repl.load.MetaData) FileSystem(org.apache.hadoop.fs.FileSystem) Database(org.apache.hadoop.hive.metastore.api.Database) AlterDatabaseSetPropertiesDesc(org.apache.hadoop.hive.ql.ddl.database.alter.poperties.AlterDatabaseSetPropertiesDesc) AlterDatabaseSetOwnerDesc(org.apache.hadoop.hive.ql.ddl.database.alter.owner.AlterDatabaseSetOwnerDesc) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) HashSet(java.util.HashSet)
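The read pattern the handler relies on is small enough to show in isolation. The sketch below is not part of the Hive source above: dumpLocation and hiveConf are hypothetical placeholders for values a caller would supply, and only APIs visible in the example are used.

// Hedged sketch: read a database dump's _metadata file and inspect the result.
// "dumpLocation" and "hiveConf" are placeholders, not Hive APIs.
Path dumpPath = new Path(dumpLocation);
FileSystem fs = FileSystem.get(dumpPath.toUri(), hiveConf);
MetaData metaData = EximUtil.readMetaData(fs, new Path(dumpPath, EximUtil.METADATA_NAME));
Database db = metaData.getDatabase();  // null unless the dump describes a database
String name = db.getName();            // the database name recorded at dump time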

Example 7 with MetaData

Use of org.apache.hadoop.hive.ql.parse.repl.load.MetaData in project hive by apache.

From the class TableHandler, the method handle:

@Override
public List<Task<?>> handle(Context context) throws SemanticException {
    try {
        List<Task<?>> importTasks = new ArrayList<>();
        boolean isExternal = false, isLocationSet = false;
        String parsedLocation = null;
        DumpType eventType = context.dmd.getDumpType();
        Tuple tuple = extract(context);
        // Read the table-level _metadata from the dump location.
        MetaData rv = EximUtil.getMetaDataFromLocation(context.location, context.hiveConf);
        if (tuple.isExternalTable) {
            isLocationSet = true;
            isExternal = true;
            Table table = new Table(rv.getTable());
            // Map the dumped external-table location onto the target cluster's external-table base.
            parsedLocation = ReplExternalTables.externalTableLocation(context.hiveConf, table.getSd().getLocation());
        }
        context.nestedContext.setConf(context.hiveConf);
        EximUtil.SemanticAnalyzerWrapperContext x = new EximUtil.SemanticAnalyzerWrapperContext(context.hiveConf, context.db, readEntitySet, writeEntitySet, importTasks, context.log, context.nestedContext);
        x.setEventType(eventType);
        // REPL LOAD is not partition level. It is always DB or table level. So, passing null for partition specs.
        if (TableType.VIRTUAL_VIEW.name().equals(rv.getTable().getTableType())) {
            importTasks.add(ReplLoadTask.createViewTask(rv, context.dbName, context.hiveConf, context.getDumpDirectory(), context.getMetricCollector()));
        } else {
            ImportSemanticAnalyzer.prepareImport(false, isLocationSet, isExternal, false, (context.precursor != null), parsedLocation, null, context.dbName, null, context.location, x, updatedMetadata, context.getTxnMgr(), tuple.writeId, rv, context.getDumpDirectory(), context.getMetricCollector());
        }
        // If an open-transaction task was created, every import task must run after it.
        Task<?> openTxnTask = x.getOpenTxnTask();
        if (openTxnTask != null && !importTasks.isEmpty()) {
            for (Task<?> t : importTasks) {
                openTxnTask.addDependentTask(t);
            }
            importTasks.add(openTxnTask);
        }
        return importTasks;
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new SemanticException(e);
    }
}
Also used : Task(org.apache.hadoop.hive.ql.exec.Task) ReplLoadTask(org.apache.hadoop.hive.ql.exec.repl.ReplLoadTask) Table(org.apache.hadoop.hive.ql.metadata.Table) ArrayList(java.util.ArrayList) EximUtil(org.apache.hadoop.hive.ql.parse.EximUtil) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) DumpType(org.apache.hadoop.hive.ql.parse.repl.DumpType) MetaData(org.apache.hadoop.hive.ql.parse.repl.load.MetaData)
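TableHandler uses the one-call variant, EximUtil.getMetaDataFromLocation, instead of opening the FileSystem itself. A minimal sketch of that variant and of the view test, again with dumpLocation and hiveConf as placeholders:

// Hedged sketch: table-level metadata read plus the VIRTUAL_VIEW branch test.
MetaData rv = EximUtil.getMetaDataFromLocation(dumpLocation, hiveConf);
if (TableType.VIRTUAL_VIEW.name().equals(rv.getTable().getTableType())) {
    // a view is recreated from its stored definition rather than imported as data
}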

Example 8 with MetaData

Use of org.apache.hadoop.hive.ql.parse.repl.load.MetaData in project hive by apache.

From the class ImportSemanticAnalyzer, the method analyzeInternal:

@Override
public void analyzeInternal(ASTNode ast) throws SemanticException {
    try {
        Tree fromTree = ast.getChild(0);
        boolean isLocationSet = false;
        boolean isExternalSet = false;
        boolean isPartSpecSet = false;
        String parsedLocation = null;
        String parsedTableName = null;
        String parsedDbName = null;
        LinkedHashMap<String, String> parsedPartSpec = new LinkedHashMap<String, String>();
        // waitOnPrecursor determines whether or not non-existence of
        // a dependent object is an error. For regular imports, it is.
        // for now, the only thing this affects is whether or not the
        // db exists.
        boolean waitOnPrecursor = false;
        // Walk the optional clauses of the IMPORT statement: EXTERNAL, LOCATION, and the target table.
        for (int i = 1; i < ast.getChildCount(); ++i) {
            ASTNode child = (ASTNode) ast.getChild(i);
            switch(child.getToken().getType()) {
                case HiveParser.KW_EXTERNAL:
                    isExternalSet = true;
                    break;
                case HiveParser.TOK_TABLELOCATION:
                    isLocationSet = true;
                    parsedLocation = EximUtil.relativeToAbsolutePath(conf, unescapeSQLString(child.getChild(0).getText()));
                    break;
                case HiveParser.TOK_TAB:
                    ASTNode tableNameNode = (ASTNode) child.getChild(0);
                    Map.Entry<String, String> dbTablePair = getDbTableNamePair(tableNameNode);
                    parsedDbName = dbTablePair.getKey();
                    parsedTableName = dbTablePair.getValue();
                    // get partition metadata if partition specified
                    if (child.getChildCount() == 2) {
                        @SuppressWarnings("unused") ASTNode partspec = (ASTNode) child.getChild(1);
                        isPartSpecSet = true;
                        parsePartitionSpec(child, parsedPartSpec);
                    }
                    break;
            }
        }
        // No database qualifier on the target table: fall back to the session's current database.
        if (StringUtils.isEmpty(parsedDbName)) {
            parsedDbName = SessionState.get().getCurrentDatabase();
        }
        // parsing statement is now done, on to logic.
        EximUtil.SemanticAnalyzerWrapperContext x = new EximUtil.SemanticAnalyzerWrapperContext(conf, db, inputs, outputs, rootTasks, LOG, ctx);
        // fromTree is the FROM clause's source path; read the _metadata stored there.
        MetaData rv = EximUtil.getMetaDataFromLocation(fromTree.getText(), x.getConf());
        tableExists = prepareImport(true, isLocationSet, isExternalSet, isPartSpecSet, waitOnPrecursor, parsedLocation, parsedTableName, parsedDbName, parsedPartSpec, fromTree.getText(), x, null, getTxnMgr(), 0, rv);
    } catch (SemanticException e) {
        throw e;
    } catch (Exception e) {
        throw new SemanticException(ErrorMsg.IMPORT_SEMANTIC_ERROR.getMsg(), e);
    }
}
Also used : MetaException(org.apache.hadoop.hive.metastore.api.MetaException) URISyntaxException(java.net.URISyntaxException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) IOException(java.io.IOException) InvalidTableException(org.apache.hadoop.hive.ql.metadata.InvalidTableException) LinkedHashMap(java.util.LinkedHashMap) MetaData(org.apache.hadoop.hive.ql.parse.repl.load.MetaData) Tree(org.antlr.runtime.tree.Tree) Map(java.util.Map) TreeMap(java.util.TreeMap)
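For orientation, a statement of the shape this analyzer parses; the table and path names are illustrative only. The EXTERNAL keyword drives the KW_EXTERNAL case, the trailing LOCATION clause the TOK_TABLELOCATION case, and the table reference the TOK_TAB case:

IMPORT EXTERNAL TABLE page_view FROM '/user/warehouse/export/page_view' LOCATION '/data/page_view';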

Example 9 with MetaData

Use of org.apache.hadoop.hive.ql.parse.repl.load.MetaData in project hive by apache.

From the class LoadFunction, the method isFunctionAlreadyLoaded:

private boolean isFunctionAlreadyLoaded(Path funcDumpRoot) throws HiveException, IOException {
    // Read the function's _metadata file from the dump root.
    Path metadataPath = new Path(funcDumpRoot, EximUtil.METADATA_NAME);
    FileSystem fs = FileSystem.get(metadataPath.toUri(), context.hiveConf);
    MetaData metadata = EximUtil.readMetaData(fs, metadataPath);
    Function function;
    try {
        // dbNameToLoadIn, a field of the enclosing class, overrides the dumped database name when set.
        String dbName = StringUtils.isBlank(dbNameToLoadIn) ? metadata.function.getDbName() : dbNameToLoadIn;
        function = context.hiveDb.getFunction(dbName, metadata.function.getFunctionName());
    } catch (HiveException e) {
        // A NoSuchObjectException cause means the function is not in the metastore yet.
        if (e.getCause() instanceof NoSuchObjectException) {
            return false;
        }
        throw e;
    }
    return (function != null);
}
Also used : Path(org.apache.hadoop.fs.Path) Function(org.apache.hadoop.hive.metastore.api.Function) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) MetaData(org.apache.hadoop.hive.ql.parse.repl.load.MetaData) FileSystem(org.apache.hadoop.fs.FileSystem) NoSuchObjectException(org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
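The same probe reduced to its essentials. This is a hedged sketch rather than Hive source: fs, funcDumpRoot, and hiveDb stand in for the context fields the method reads, and metadata.function is the public field used above.

// Hedged sketch: is the dumped function already present in the metastore?
MetaData md = EximUtil.readMetaData(fs, new Path(funcDumpRoot, EximUtil.METADATA_NAME));
Function existing = hiveDb.getFunction(md.function.getDbName(), md.function.getFunctionName());
boolean alreadyLoaded = (existing != null);  // HiveException handling omitted; see the method above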

Aggregations

MetaData (org.apache.hadoop.hive.ql.parse.repl.load.MetaData): 9
Path (org.apache.hadoop.fs.Path): 5
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 5
FileSystem (org.apache.hadoop.fs.FileSystem): 4
IOException (java.io.IOException): 3
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3
URISyntaxException (java.net.URISyntaxException): 2
ArrayList (java.util.ArrayList): 2
Database (org.apache.hadoop.hive.metastore.api.Database): 2
MetaException (org.apache.hadoop.hive.metastore.api.MetaException): 2
Task (org.apache.hadoop.hive.ql.exec.Task): 2
InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException): 2
Table (org.apache.hadoop.hive.ql.metadata.Table): 2
URI (java.net.URI): 1
HashSet (java.util.HashSet): 1
LinkedHashMap (java.util.LinkedHashMap): 1
Map (java.util.Map): 1
TreeMap (java.util.TreeMap): 1
Tree (org.antlr.runtime.tree.Tree): 1
Warehouse (org.apache.hadoop.hive.metastore.Warehouse): 1