
Example 1 with AddDefaultConstraintHandler

Use of org.apache.hadoop.hive.ql.parse.repl.load.message.AddDefaultConstraintHandler in project hive by apache.

From the class LoadConstraint, method tasks(). The method reads a constraint dump file as JSON and builds load tasks for each constraint type (primary key, foreign key, unique, not-null, default, and check); the default-constraint branch is where AddDefaultConstraintHandler is used.

public TaskTracker tasks() throws IOException, SemanticException {
    URI fromURI = EximUtil.getValidatedURI(context.hiveConf, stripQuotes(event.rootDir().toUri().toString()));
    Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(), fromURI.getPath());
    try {
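        // The constraint dump at fromPath is a JSON document with one serialized payload per
        // constraint type: primary keys (pks), foreign keys (fks), unique (uks), not-null (nns),
        // default (dks) and check (cks) constraints.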
        FileSystem fs = FileSystem.get(fromPath.toUri(), context.hiveConf);
        JSONObject json = new JSONObject(EximUtil.readAsString(fs, fromPath));
        String pksString = json.getString("pks");
        String fksString = json.getString("fks");
        String uksString = json.getString("uks");
        String nnsString = json.getString("nns");
        String dksString = json.getString("dks");
        String cksString = json.getString("cks");
        List<Task<?>> tasks = new ArrayList<Task<?>>();
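        // For each constraint type, the pattern is the same: skip empty payloads and constraints
        // that are already loaded, then wrap the payload in a DumpMetaData of the matching event
        // type and let the corresponding MessageHandler produce the load tasks.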
        if (StringUtils.isNotEmpty(StringUtils.trim(pksString)) && !isPrimaryKeysAlreadyLoaded(pksString)) {
            AddPrimaryKeyHandler pkHandler = new AddPrimaryKeyHandler();
            DumpMetaData pkDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_PRIMARYKEY, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
            pkDumpMetaData.setPayload(pksString);
            tasks.addAll(pkHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, pkDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
        }
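        // Unique constraints.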
        if (StringUtils.isNotEmpty(StringUtils.trim(uksString)) && !isUniqueConstraintsAlreadyLoaded(uksString)) {
            AddUniqueConstraintHandler ukHandler = new AddUniqueConstraintHandler();
            DumpMetaData ukDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_UNIQUECONSTRAINT, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
            ukDumpMetaData.setPayload(uksString);
            tasks.addAll(ukHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, ukDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
        }
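        // Not-null constraints.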
        if (StringUtils.isNotEmpty(StringUtils.trim(nnsString)) && !isNotNullConstraintsAlreadyLoaded(nnsString)) {
            AddNotNullConstraintHandler nnHandler = new AddNotNullConstraintHandler();
            DumpMetaData nnDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_NOTNULLCONSTRAINT, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
            nnDumpMetaData.setPayload(nnsString);
            tasks.addAll(nnHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, nnDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
        }
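        // Default constraints: the AddDefaultConstraintHandler usage this example highlights.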
        if (StringUtils.isNotEmpty(StringUtils.trim(dksString)) && !isDefaultConstraintsAlreadyLoaded(dksString)) {
            AddDefaultConstraintHandler dkHandler = new AddDefaultConstraintHandler();
            DumpMetaData dkDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_DEFAULTCONSTRAINT, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
            dkDumpMetaData.setPayload(dksString);
            tasks.addAll(dkHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, dkDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
        }
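        // Check constraints.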
        if (StringUtils.isNotEmpty(StringUtils.trim(cksString)) && !isCheckConstraintsAlreadyLoaded(cksString)) {
            AddCheckConstraintHandler ckHandler = new AddCheckConstraintHandler();
            DumpMetaData ckDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_CHECKCONSTRAINT, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
            ckDumpMetaData.setPayload(cksString);
            tasks.addAll(ckHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, ckDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
        }
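        // Foreign keys.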
        if (StringUtils.isNotEmpty(StringUtils.trim(fksString)) && !isForeignKeysAlreadyLoaded(fksString)) {
            AddForeignKeyHandler fkHandler = new AddForeignKeyHandler();
            DumpMetaData fkDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_FOREIGNKEY, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
            fkDumpMetaData.setPayload(fksString);
            tasks.addAll(fkHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, fkDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
        }
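        // Register all generated tasks with the shared TaskTracker and return it.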
        tasks.forEach(tracker::addTask);
        return tracker;
    } catch (Exception e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
    }
}
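The default-constraint branch follows the same shape as the other five. As a minimal sketch (not code from the Hive repository), it could be pulled into a private helper on LoadConstraint: the helper name defaultConstraintTasks is hypothetical, the DumpMetaData and MessageHandler.Context constructor arguments are copied from the call sites above, and the sketch additionally assumes a java.util.Collections import.

private List<Task<?>> defaultConstraintTasks(String dksString, Path fromPath) throws SemanticException {
    // Skip when the dump carries no default-constraint payload, or the constraints already exist on the target.
    if (StringUtils.isEmpty(StringUtils.trim(dksString)) || isDefaultConstraintsAlreadyLoaded(dksString)) {
        return Collections.emptyList();
    }
    AddDefaultConstraintHandler dkHandler = new AddDefaultConstraintHandler();
    // Wrap the "dks" JSON fragment in a DumpMetaData of the matching event type.
    DumpMetaData dkDumpMetaData = new DumpMetaData(fromPath, DumpType.EVENT_ADD_DEFAULTCONSTRAINT, Long.MAX_VALUE, Long.MAX_VALUE, null, context.hiveConf);
    dkDumpMetaData.setPayload(dksString);
    // The handler produces the task(s) that add the default constraints on the target.
    return new ArrayList<>(dkHandler.handle(new MessageHandler.Context(dbNameToLoadIn, fromPath.toString(), null, dkDumpMetaData, context.hiveConf, context.hiveDb, context.nestedContext, LOG, dumpDirectory, metricCollector)));
}

With such a helper, the block above would reduce to tasks.addAll(defaultConstraintTasks(dksString, fromPath)), and the same shape would apply to the other constraint types.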
Also used:
Path (org.apache.hadoop.fs.Path)
Context (org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.util.Context)
Task (org.apache.hadoop.hive.ql.exec.Task)
AddPrimaryKeyHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddPrimaryKeyHandler)
AddForeignKeyHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddForeignKeyHandler)
DumpMetaData (org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData)
ArrayList (java.util.ArrayList)
AddNotNullConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddNotNullConstraintHandler)
AddCheckConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddCheckConstraintHandler)
URI (java.net.URI)
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)
IOException (java.io.IOException)
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)
JSONObject (org.json.JSONObject)
FileSystem (org.apache.hadoop.fs.FileSystem)
AddUniqueConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddUniqueConstraintHandler)
AddDefaultConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddDefaultConstraintHandler)

Aggregations

IOException (java.io.IOException): 1
URI (java.net.URI): 1
ArrayList (java.util.ArrayList): 1
FileSystem (org.apache.hadoop.fs.FileSystem): 1
Path (org.apache.hadoop.fs.Path): 1
NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException): 1
Task (org.apache.hadoop.hive.ql.exec.Task): 1
Context (org.apache.hadoop.hive.ql.exec.repl.bootstrap.load.util.Context): 1
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 1
DumpMetaData (org.apache.hadoop.hive.ql.parse.repl.load.DumpMetaData): 1
AddCheckConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddCheckConstraintHandler): 1
AddDefaultConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddDefaultConstraintHandler): 1
AddForeignKeyHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddForeignKeyHandler): 1
AddNotNullConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddNotNullConstraintHandler): 1
AddPrimaryKeyHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddPrimaryKeyHandler): 1
AddUniqueConstraintHandler (org.apache.hadoop.hive.ql.parse.repl.load.message.AddUniqueConstraintHandler): 1
JSONObject (org.json.JSONObject): 1