Search in sources :

Example 21 with DumpMetaData

Usage of org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData in the Apache Hive project.

The handle method of the DropPartitionHandler class:

@Override
public void handle(Context withinContext) throws Exception {
    LOG.info("Processing#{} DROP_PARTITION message : {}", fromEventId(), eventMessageAsJSON);
    // Partition drops are not replicated for metadata-only dumps, so skip the event entirely.
    boolean metadataOnlyDump = withinContext.hiveConf.getBoolVar(HiveConf.ConfVars.REPL_DUMP_METADATA_ONLY);
    if (metadataOnlyDump) {
        return;
    }
    // Persist the raw event message as this event's dump-metadata payload.
    final DumpMetaData dumpMetaData = withinContext.createDmd(this);
    dumpMetaData.setPayload(eventMessageAsJSON);
    dumpMetaData.write();
}
Also used : DumpMetaData(org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)

Example 22 with DumpMetaData

Usage of org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData in the Apache Hive project.

The handle method of the InsertHandler class:

@Override
public void handle(Context withinContext) throws Exception {
    // Metadata-only dumps carry no data files, so insert events are skipped entirely.
    if (withinContext.hiveConf.getBoolVar(HiveConf.ConfVars.REPL_DUMP_METADATA_ONLY)) {
        return;
    }
    org.apache.hadoop.hive.ql.metadata.Table qlMdTable = tableObject(eventMessage);
    // External-table inserts are marked as a no-op in the replication spec.
    if (TableType.EXTERNAL_TABLE.equals(qlMdTable.getTableType())) {
        withinContext.replicationSpec.setNoop(true);
    }
    // Honor table-level replication filtering (policy scope, bootstrap list, config).
    if (!Utils.shouldReplicate(withinContext.replicationSpec, qlMdTable, true,
            withinContext.getTablesForBootstrap(), withinContext.oldReplScope, withinContext.hiveConf)) {
        return;
    }
    // In case of ACID tables, insert event should not have fired.
    assert (!AcidUtils.isTransactionalTable(qlMdTable));
    /*
     * An insert event is generated per partition written, even though one INSERT statement may
     * touch several partitions (or tables). The partition list is therefore null or a singleton.
     */
    List<Partition> partitions = null;
    if (qlMdTable.isPartitioned() && (eventMessage.getPtnObj() != null)) {
        partitions = Collections.singletonList(partitionObject(qlMdTable, eventMessage));
    }
    Partition targetPartition = (partitions == null || partitions.isEmpty()) ? null : partitions.get(0);
    // Mark the replace type based on INSERT-INTO vs INSERT-OVERWRITE semantics of the event.
    withinContext.replicationSpec.setIsReplace(eventMessage.isReplace());
    // Export the table (and partition) metadata for this event.
    Path metaDataPath = new Path(withinContext.eventRoot, EximUtil.METADATA_NAME);
    EximUtil.createExportDump(metaDataPath.getFileSystem(withinContext.hiveConf), metaDataPath,
            qlMdTable, partitions, withinContext.replicationSpec, withinContext.hiveConf);
    Iterable<String> files = eventMessage.getFiles();
    boolean copyAtLoad = withinContext.hiveConf.getBoolVar(HiveConf.ConfVars.REPL_RUN_DATA_COPY_TASKS_ON_TARGET);
    if (files != null) {
        if (copyAtLoad) {
            // Lazy copy: record encoded filename/checksum entries under _files for the load side.
            Path dataPath = (targetPartition == null)
                    ? new Path(withinContext.eventRoot, EximUtil.DATA_PATH_NAME)
                    : new Path(withinContext.eventRoot,
                            EximUtil.DATA_PATH_NAME + File.separator + targetPartition.getName());
            writeEncodedDumpFiles(withinContext, files, dataPath);
        } else {
            // Eager copy: emit a file entry per data file now.
            for (String file : files) {
                writeFileEntry(qlMdTable, targetPartition, file, withinContext);
            }
        }
    }
    LOG.info("Processing#{} INSERT message : {}", fromEventId(), eventMessageAsJSON);
    // Persist the raw event message as this event's dump-metadata payload.
    final DumpMetaData dumpMetaData = withinContext.createDmd(this);
    dumpMetaData.setPayload(eventMessageAsJSON);
    dumpMetaData.write();
}
Also used : Path(org.apache.hadoop.fs.Path) Partition(org.apache.hadoop.hive.ql.metadata.Partition) DumpMetaData(org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)

Example 23 with DumpMetaData

Usage of org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData in the Apache Hive project.

The handle method of the DeletePartColStatHandler class:

@Override
public void handle(Context withinContext) throws Exception {
    LOG.info("Processing#{} DeletePartitionColumnStatMessage message : {}", fromEventId(), eventMessageAsJSON);
    // Persist the raw event message as this event's dump-metadata payload.
    final DumpMetaData dumpMetaData = withinContext.createDmd(this);
    dumpMetaData.setPayload(eventMessageAsJSON);
    dumpMetaData.write();
}
Also used : DumpMetaData(org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)

Example 24 with DumpMetaData

Usage of org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData in the Apache Hive project.

The handle method of the DropConstraintHandler class:

@Override
public void handle(Context withinContext) throws Exception {
    LOG.info("Processing#{} DROP_CONSTRAINT_MESSAGE message : {}", fromEventId(), eventMessageAsJSON);
    // Persist the raw event message as this event's dump-metadata payload.
    final DumpMetaData dumpMetaData = withinContext.createDmd(this);
    dumpMetaData.setPayload(eventMessageAsJSON);
    dumpMetaData.write();
}
Also used : DumpMetaData(org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)

Example 25 with DumpMetaData

Usage of org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData in the Apache Hive project.

The handle method of the DropTableHandler class:

@Override
public void handle(Context withinContext) throws Exception {
    LOG.info("Processing#{} DROP_TABLE message : {}", fromEventId(), eventMessageAsJSON);
    final String tableName = event.getTableName();
    // If the table was still pending bootstrap, dropping it means there is nothing left to
    // replicate: it will not exist at the target, and all other events for it are ignored.
    if (withinContext.removeFromListOfTablesForBootstrap(tableName)) {
        LOG.info("Table " + tableName + " is removed from list of tables to be bootstrapped.");
        return;
    }
    // Persist the raw event message as this event's dump-metadata payload.
    final DumpMetaData dumpMetaData = withinContext.createDmd(this);
    dumpMetaData.setPayload(eventMessageAsJSON);
    dumpMetaData.write();
}
Also used : DumpMetaData(org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)

Aggregations

DumpMetaData (org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)39 Path (org.apache.hadoop.fs.Path)17 FileSystem (org.apache.hadoop.fs.FileSystem)6 Test (org.junit.Test)6 ArrayList (java.util.ArrayList)5 Table (org.apache.hadoop.hive.ql.metadata.Table)5 Database (org.apache.hadoop.hive.metastore.api.Database)4 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)4 IOException (java.io.IOException)3 ReplScope (org.apache.hadoop.hive.common.repl.ReplScope)3 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)3 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)3 Partition (org.apache.hadoop.hive.ql.metadata.Partition)3 HashMap (java.util.HashMap)2 List (java.util.List)2 Task (org.apache.hadoop.hive.ql.exec.Task)2 InvalidTableException (org.apache.hadoop.hive.ql.metadata.InvalidTableException)2 FailoverMetaData (org.apache.hadoop.hive.ql.parse.repl.load.FailoverMetaData)2 FileNotFoundException (java.io.FileNotFoundException)1 URI (java.net.URI)1