Search in sources :

Example 26 with ResourceUri

use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The following example shows the method startLocalizeAllFunctions of the class FunctionLocalizer.

/**
 * Queues background localization work for every permanent function known to the
 * metastore, then queues a single classloader refresh so newly localized jars
 * become visible. Functions without resources are skipped.
 *
 * @throws HiveException if a metastore client cannot be obtained
 */
public void startLocalizeAllFunctions() throws HiveException {
    Hive hive = Hive.get(false);
    try {
        // Do not allow embedded metastore in LLAP unless we are in test.
        hive.getMSC(HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST), true);
    } catch (MetaException me) {
        throw new HiveException(me);
    }
    for (Function fn : hive.getAllFunctions()) {
        List<ResourceUri> uris = fn.getResourceUris();
        if (uris == null || uris.isEmpty()) {
            // Nothing to localize for this function.
            continue;
        }
        String qualifiedName = fn.getDbName() + "." + fn.getFunctionName();
        FnResources tracker = new FnResources();
        resourcesByFn.put(qualifiedName, tracker);
        workQueue.add(new LocalizeFn(qualifiedName, uris, tracker, fn.getClassName(), false));
    }
    workQueue.add(new RefreshClassloader());
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) MetaException(org.apache.hadoop.hive.metastore.api.MetaException)

Example 27 with ResourceUri

use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The following example shows the method getInputHObjs of the class DropFunctionEvent.

/**
 * Builds the input privilege objects for a DROP FUNCTION: one FUNCTION entry for
 * the function being dropped, plus one DFS_URI entry per resource URI attached
 * to it. Also records the command string for this event as a side effect.
 *
 * @return the privilege objects the authorizer should check as inputs
 */
private List<HivePrivilegeObject> getInputHObjs() {
    if (LOG.isDebugEnabled()) {
        LOG.debug("==> DropFunctionEvent.getInputHObjs()");
    }
    PreDropFunctionEvent dropEvent = (PreDropFunctionEvent) preEventContext;
    Function fn = dropEvent.getFunction();
    List<HivePrivilegeObject> result = new ArrayList<>();
    // The function object itself is always an input.
    result.add(new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.FUNCTION, fn.getDbName(), fn.getFunctionName(), null, null, HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, fn.getClassName(), fn.getOwnerName(), fn.getOwnerType()));
    List<ResourceUri> resourceUris = fn.getResourceUris();
    if (resourceUris != null) {
        for (ResourceUri resourceUri : resourceUris) {
            result.add(new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI, null, resourceUri.getUri()));
        }
    }
    COMMAND_STR = buildCommandString(fn);
    if (LOG.isDebugEnabled()) {
        LOG.debug("<== DropFunctionEvent.getInputHObjs(): ret=" + result);
    }
    return result;
}
Also used : Function(org.apache.hadoop.hive.metastore.api.Function) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) ArrayList(java.util.ArrayList) PreDropFunctionEvent(org.apache.hadoop.hive.metastore.events.PreDropFunctionEvent) HivePrivilegeObject(org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject)

Example 28 with ResourceUri

use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The following example shows the method writeTo of the class FunctionSerializer.

/**
 * Serializes this function's metadata as a JSON field on the given writer,
 * rewriting HDFS-backed resource URIs so a replication load can locate (or
 * copy) the function's binaries. Non-HDFS resource URIs are passed through
 * unchanged.
 *
 * @param writer destination JSON writer; the function is written under FIELD_NAME
 * @param additionalPropertiesProvider replication spec supplying the current
 *        replication state id and ownership of registered copy paths
 * @throws SemanticException if Thrift serialization of the function fails
 */
@Override
public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException, MetaException {
    List<ResourceUri> resourceUris = new ArrayList<>();
    if (function.getResourceUris() != null) {
        for (ResourceUri uri : function.getResourceUris()) {
            Path inputPath = new Path(uri.getUri());
            if ("hdfs".equals(inputPath.toUri().getScheme())) {
                // HDFS resources are encoded via the change manager (URI + checksum)
                // so the load side can verify/fetch the exact bits.
                FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
                Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
                String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
                String encodedSrcUri = ReplChangeManager.getInstance(hiveConf).encodeFileUri(qualifiedUri.toString(), checkSum, null);
                if (copyAtLoad) {
                    // Data is copied at load time: store the (possibly nameservice-rewritten)
                    // encoded source URI directly in the dump.
                    if (hiveConf.getBoolVar(HiveConf.ConfVars.REPL_HA_DATAPATH_REPLACE_REMOTE_NAMESERVICE)) {
                        encodedSrcUri = Utils.replaceNameserviceInEncodedURI(encodedSrcUri, hiveConf);
                    }
                    resourceUris.add(new ResourceUri(uri.getResourceType(), encodedSrcUri));
                } else {
                    // Data is copied at dump time: point the dumped function at the
                    // staged binary under functionDataRoot and register the copy task.
                    Path newBinaryPath = new Path(functionDataRoot, qualifiedUri.getName());
                    resourceUris.add(new ResourceUri(uri.getResourceType(), newBinaryPath.toString()));
                    functionBinaryCopyPaths.add(new EximUtil.DataCopyPath(additionalPropertiesProvider, new Path(encodedSrcUri), newBinaryPath));
                }
            } else {
                // Non-HDFS resource (e.g. local or other scheme): keep as-is.
                resourceUris.add(uri);
            }
        }
    }
    // Serialize a copy so the original function object is left untouched.
    Function copyObj = new Function(this.function);
    if (!resourceUris.isEmpty()) {
        // Every input URI maps to exactly one output URI, so sizes must match.
        assert resourceUris.size() == this.function.getResourceUris().size();
        copyObj.setResourceUris(resourceUris);
    }
    try {
        TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
        // This is required, otherwise the correct work object won't be created on repl load.
        writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.REPL_SCOPE.toString(), "all");
        writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), additionalPropertiesProvider.getCurrentReplicationState());
        writer.jsonGenerator.writeStringField(FIELD_NAME, serializer.toString(copyObj));
    } catch (TException e) {
        throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
    }
}
Also used : Path(org.apache.hadoop.fs.Path) TException(org.apache.thrift.TException) ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) ArrayList(java.util.ArrayList) EximUtil(org.apache.hadoop.hive.ql.parse.EximUtil) Function(org.apache.hadoop.hive.metastore.api.Function) TSerializer(org.apache.thrift.TSerializer) TJSONProtocol(org.apache.thrift.protocol.TJSONProtocol) FileSystem(org.apache.hadoop.fs.FileSystem) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)

Example 29 with ResourceUri

use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The following example shows the method downloadPermanentFunctions of the class AsyncTaskCreateUdfFile.

/**
 * Downloads the resources of every permanent UDF registered in the metastore
 * into the local UDF directory, warning about duplicate class names and about
 * functions that have no resources.
 *
 * @return the set of UDF class names that were seen
 */
private Set<String> downloadPermanentFunctions() throws HiveException, URISyntaxException, IOException {
    Map<String, String> classToFunction = new HashMap<String, String>();
    HiveConf hiveConf = new HiveConf();
    // disable expensive operations on the metastore
    hiveConf.setBoolean(MetastoreConf.ConfVars.INIT_METADATA_COUNT_ENABLED.getVarname(), false);
    hiveConf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), false);
    // performance problem: ObjectStore does its own new HiveConf()
    Hive hive = Hive.getWithFastCheck(hiveConf, false);
    ResourceDownloader downloader = new ResourceDownloader(conf, udfDir.toUri().normalize().getPath());
    Set<URI> sourceUris = new HashSet<>();
    for (Function fn : hive.getAllFunctions()) {
        String qualifiedName = fn.getDbName() + "." + fn.getFunctionName();
        // Remember the mapping; warn when two functions share one implementing class.
        String previous = classToFunction.put(fn.getClassName(), qualifiedName);
        if (previous != null) {
            LOG.warn("Duplicate function names found for " + fn.getClassName() + " with " + qualifiedName + " and " + previous);
        }
        List<ResourceUri> resources = fn.getResourceUris();
        if (resources == null || resources.isEmpty()) {
            LOG.warn("Missing resources for " + qualifiedName);
            continue;
        }
        for (ResourceUri resource : resources) {
            sourceUris.add(ResourceDownloader.createURI(resource.getUri()));
        }
    }
    for (URI source : sourceUris) {
        for (URI localized : downloader.downloadExternal(source, null, false)) {
            LOG.warn("Downloaded " + localized + " from " + source);
        }
    }
    return classToFunction.keySet();
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) HashMap(java.util.HashMap) ResourceDownloader(org.apache.hadoop.hive.ql.util.ResourceDownloader) URI(java.net.URI) Function(org.apache.hadoop.hive.metastore.api.Function) Hive(org.apache.hadoop.hive.ql.metadata.Hive) HiveConf(org.apache.hadoop.hive.conf.HiveConf) HashSet(java.util.HashSet)

Example 30 with ResourceUri

use of org.apache.hadoop.hive.metastore.api.ResourceUri in project hive by apache.

The following example shows the method checkLocalFunctionResources of the class CreateFunctionOperation.

/**
 * Rejects function resources that live on the local filesystem when the
 * configured warehouse filesystem is non-local, since such resources would be
 * unreachable from other nodes. No-op when there are no resources or the
 * warehouse itself is local.
 *
 * @throws HiveException if a local-filesystem resource is found, or if the
 *         filesystem schemes cannot be determined
 */
private void checkLocalFunctionResources() throws HiveException {
    // So disallow resources from local filesystem in this case.
    if (!CollectionUtils.isNotEmpty(desc.getResources())) {
        return;
    }
    try {
        String localScheme = FileSystem.getLocal(context.getDb().getConf()).getUri().getScheme();
        String warehouseScheme = FileSystem.get(context.getDb().getConf()).getUri().getScheme();
        if (warehouseScheme.equals(localScheme)) {
            // Configured warehouse FS is local, don't need to bother checking.
            return;
        }
        for (ResourceUri resource : desc.getResources()) {
            String resourceUri = resource.getUri();
            if (ResourceDownloader.isFileUri(resourceUri)) {
                throw new HiveException("Hive warehouse is non-local, but " + resource.getUri() + " specifies file on local " + "filesystem. Resources on non-local warehouse should specify a non-local scheme/path");
            }
        }
    } catch (HiveException e) {
        // Already the right exception type; propagate unchanged.
        throw e;
    } catch (Exception e) {
        LOG.error("Exception caught in checkLocalFunctionResources", e);
        throw new HiveException(e);
    }
}
Also used : ResourceUri(org.apache.hadoop.hive.metastore.api.ResourceUri) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException) IOException(java.io.IOException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) AlreadyExistsException(org.apache.hadoop.hive.metastore.api.AlreadyExistsException) HiveException(org.apache.hadoop.hive.ql.metadata.HiveException)

Aggregations

ResourceUri (org.apache.hadoop.hive.metastore.api.ResourceUri)34 Function (org.apache.hadoop.hive.metastore.api.Function)22 Test (org.junit.Test)13 ArrayList (java.util.ArrayList)6 Database (org.apache.hadoop.hive.metastore.api.Database)5 MetaException (org.apache.hadoop.hive.metastore.api.MetaException)5 NoSuchObjectException (org.apache.hadoop.hive.metastore.api.NoSuchObjectException)5 FunctionBuilder (org.apache.hadoop.hive.metastore.client.builder.FunctionBuilder)5 HiveException (org.apache.hadoop.hive.ql.metadata.HiveException)5 IOException (java.io.IOException)4 URI (java.net.URI)4 HashSet (java.util.HashSet)4 SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException)4 HiveConf (org.apache.hadoop.hive.conf.HiveConf)3 MetastoreCheckinTest (org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest)3 Hive (org.apache.hadoop.hive.ql.metadata.Hive)3 SQLException (java.sql.SQLException)2 HashMap (java.util.HashMap)2 Path (org.apache.hadoop.fs.Path)2 ConfigValSecurityException (org.apache.hadoop.hive.metastore.api.ConfigValSecurityException)2