Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project: class FunctionLocalizer, method startLocalizeAllFunctions.
/**
 * Queues localization work for every permanent function known to the metastore,
 * followed by a classloader refresh once all of them are enqueued.
 *
 * @throws HiveException if the metastore client cannot be obtained
 */
public void startLocalizeAllFunctions() throws HiveException {
  Hive hive = Hive.get(false);
  // Embedded metastore is not allowed in LLAP unless we are running tests; this
  // call fails fast if the configuration would embed one.
  try {
    hive.getMSC(HiveConf.getBoolVar(conf, ConfVars.HIVE_IN_TEST), true);
  } catch (MetaException e) {
    throw new HiveException(e);
  }
  for (Function function : hive.getAllFunctions()) {
    List<ResourceUri> resourceUris = function.getResourceUris();
    if (resourceUris == null || resourceUris.isEmpty()) {
      // A function without resources has nothing to localize.
      continue;
    }
    String qualifiedName = function.getDbName() + "." + function.getFunctionName();
    FnResources tracker = new FnResources();
    resourcesByFn.put(qualifiedName, tracker);
    workQueue.add(new LocalizeFn(qualifiedName, resourceUris, tracker, function.getClassName(), false));
  }
  workQueue.add(new RefreshClassloader());
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project: class DropFunctionEvent, method getInputHObjs.
/**
 * Builds the authorization input objects for a DROP FUNCTION: the function
 * itself plus one DFS_URI object per resource URI attached to it. Also caches
 * the rebuilt command string for later use.
 *
 * @return the privilege objects to authorize against
 */
private List<HivePrivilegeObject> getInputHObjs() {
  if (LOG.isDebugEnabled()) {
    LOG.debug("==> DropFunctionEvent.getInputHObjs()");
  }
  PreDropFunctionEvent dropEvent = (PreDropFunctionEvent) preEventContext;
  Function function = dropEvent.getFunction();
  List<HivePrivilegeObject> privilegeObjects = new ArrayList<>();
  // The function being dropped is always part of the authorization input.
  privilegeObjects.add(new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.FUNCTION, function.getDbName(), function.getFunctionName(), null, null, HivePrivilegeObject.HivePrivObjectActionType.OTHER, null, function.getClassName(), function.getOwnerName(), function.getOwnerType()));
  List<ResourceUri> resourceUris = function.getResourceUris();
  if (resourceUris != null && !resourceUris.isEmpty()) {
    // Each attached resource is authorized as a DFS URI object.
    for (ResourceUri resourceUri : resourceUris) {
      privilegeObjects.add(new HivePrivilegeObject(HivePrivilegeObject.HivePrivilegeObjectType.DFS_URI, null, resourceUri.getUri()));
    }
  }
  COMMAND_STR = buildCommandString(function);
  if (LOG.isDebugEnabled()) {
    LOG.debug("<== DropFunctionEvent.getInputHObjs(): ret=" + privilegeObjects);
  }
  return privilegeObjects;
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project: class FunctionSerializer, method writeTo.
@Override
public void writeTo(JsonWriter writer, ReplicationSpec additionalPropertiesProvider) throws SemanticException, IOException, MetaException {
  // Serializes this function's metadata (as Thrift JSON) into the replication
  // dump. HDFS-backed resource URIs are rewritten first: either to change-manager
  // encoded source URIs (binaries copied lazily at load) or to paths under the
  // dump's function data root (binaries copied eagerly during the dump).
  List<ResourceUri> resourceUris = new ArrayList<>();
  if (function.getResourceUris() != null) {
    for (ResourceUri uri : function.getResourceUris()) {
      Path inputPath = new Path(uri.getUri());
      if ("hdfs".equals(inputPath.toUri().getScheme())) {
        FileSystem fileSystem = inputPath.getFileSystem(hiveConf);
        Path qualifiedUri = PathBuilder.fullyQualifiedHDFSUri(inputPath, fileSystem);
        // The checksum is embedded in the encoded URI so the load side can
        // validate or re-locate the file through the change manager.
        String checkSum = ReplChangeManager.checksumFor(qualifiedUri, fileSystem);
        String encodedSrcUri = ReplChangeManager.getInstance(hiveConf).encodeFileUri(qualifiedUri.toString(), checkSum, null);
        if (copyAtLoad) {
          // Lazy copy: record the source URI; the target cluster pulls the
          // binary at load time. Optionally rewrite the HDFS nameservice for HA
          // setups where the remote nameservice differs.
          if (hiveConf.getBoolVar(HiveConf.ConfVars.REPL_HA_DATAPATH_REPLACE_REMOTE_NAMESERVICE)) {
            encodedSrcUri = Utils.replaceNameserviceInEncodedURI(encodedSrcUri, hiveConf);
          }
          resourceUris.add(new ResourceUri(uri.getResourceType(), encodedSrcUri));
        } else {
          // Eager copy: the binary is copied under the dump's function data root
          // and the serialized function points at that dump-local path.
          Path newBinaryPath = new Path(functionDataRoot, qualifiedUri.getName());
          resourceUris.add(new ResourceUri(uri.getResourceType(), newBinaryPath.toString()));
          functionBinaryCopyPaths.add(new EximUtil.DataCopyPath(additionalPropertiesProvider, new Path(encodedSrcUri), newBinaryPath));
        }
      } else {
        // Non-HDFS scheme: the URI is passed through unchanged.
        resourceUris.add(uri);
      }
    }
  }
  // Serialize a copy so the original Function object is left untouched.
  Function copyObj = new Function(this.function);
  if (!resourceUris.isEmpty()) {
    // Every original resource must have produced exactly one rewritten entry.
    assert resourceUris.size() == this.function.getResourceUris().size();
    copyObj.setResourceUris(resourceUris);
  }
  try {
    TSerializer serializer = new TSerializer(new TJSONProtocol.Factory());
    // This is required otherwise correct work object on repl load wont be created.
    writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.REPL_SCOPE.toString(), "all");
    writer.jsonGenerator.writeStringField(ReplicationSpec.KEY.CURR_STATE_ID_SOURCE.toString(), additionalPropertiesProvider.getCurrentReplicationState());
    writer.jsonGenerator.writeStringField(FIELD_NAME, serializer.toString(copyObj));
  } catch (TException e) {
    throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METASTORE.getMsg(), e);
  }
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project: class AsyncTaskCreateUdfFile, method downloadPermanentFunctions.
/**
 * Downloads the resources of every permanent function registered in the
 * metastore into the local UDF directory and returns the set of UDF class
 * names encountered.
 *
 * @return the class names of all permanent functions
 * @throws HiveException if the metastore cannot be reached
 * @throws URISyntaxException if a resource URI is malformed
 * @throws IOException if downloading a resource fails
 */
private Set<String> downloadPermanentFunctions() throws HiveException, URISyntaxException, IOException {
  // Maps UDF class name -> fully qualified function name, used both for
  // duplicate detection and as the result key set.
  Map<String, String> udfs = new HashMap<>();
  HiveConf hiveConf = new HiveConf();
  // disable expensive operations on the metastore
  hiveConf.setBoolean(MetastoreConf.ConfVars.INIT_METADATA_COUNT_ENABLED.getVarname(), false);
  hiveConf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), false);
  // performance problem: ObjectStore does its own new HiveConf()
  Hive hive = Hive.getWithFastCheck(hiveConf, false);
  ResourceDownloader resourceDownloader = new ResourceDownloader(conf, udfDir.toUri().normalize().getPath());
  List<Function> fns = hive.getAllFunctions();
  // De-duplicate resource URIs so each file is fetched only once even when
  // several functions share the same jar.
  Set<URI> srcUris = new HashSet<>();
  for (Function fn : fns) {
    String fqfn = fn.getDbName() + "." + fn.getFunctionName();
    if (udfs.containsKey(fn.getClassName())) {
      // Two function names map to one class; the later one wins, but warn.
      LOG.warn("Duplicate function names found for " + fn.getClassName() + " with " + fqfn + " and " + udfs.get(fn.getClassName()));
    }
    udfs.put(fn.getClassName(), fqfn);
    List<ResourceUri> resources = fn.getResourceUris();
    if (resources == null || resources.isEmpty()) {
      LOG.warn("Missing resources for " + fqfn);
      continue;
    }
    for (ResourceUri resource : resources) {
      srcUris.add(ResourceDownloader.createURI(resource.getUri()));
    }
  }
  for (URI srcUri : srcUris) {
    List<URI> localUris = resourceDownloader.downloadExternal(srcUri, null, false);
    for (URI dst : localUris) {
      LOG.warn("Downloaded " + dst + " from " + srcUri);
    }
  }
  return udfs.keySet();
}
Example use of org.apache.hadoop.hive.metastore.api.ResourceUri in the Apache Hive project: class CreateFunctionOperation, method checkLocalFunctionResources.
/**
 * Rejects function resources that live on the local filesystem when the
 * configured warehouse filesystem is non-local, since such resources would not
 * be reachable from other nodes.
 *
 * @throws HiveException if a local-filesystem resource is found, or if the
 *     filesystem schemes cannot be determined
 */
private void checkLocalFunctionResources() throws HiveException {
  if (!CollectionUtils.isNotEmpty(desc.getResources())) {
    // No resources declared; nothing to validate.
    return;
  }
  try {
    String localScheme = FileSystem.getLocal(context.getDb().getConf()).getUri().getScheme();
    String warehouseScheme = FileSystem.get(context.getDb().getConf()).getUri().getScheme();
    if (warehouseScheme.equals(localScheme)) {
      // Configured warehouse FS is local, don't need to bother checking.
      return;
    }
    for (ResourceUri resource : desc.getResources()) {
      if (ResourceDownloader.isFileUri(resource.getUri())) {
        throw new HiveException("Hive warehouse is non-local, but " + resource.getUri() + " specifies file on local " + "filesystem. Resources on non-local warehouse should specify a non-local scheme/path");
      }
    }
  } catch (HiveException e) {
    // Re-throw our own validation failure unchanged.
    throw e;
  } catch (Exception e) {
    LOG.error("Exception caught in checkLocalFunctionResources", e);
    throw new HiveException(e);
  }
}
Aggregations