Example 16 with Retryable

Use of org.apache.hadoop.hive.ql.exec.util.Retryable in project hive by apache.

The class ReplLoadWork, method externalTableCopyTasks:

public List<Task<?>> externalTableCopyTasks(TaskTracker tracker, HiveConf conf) throws IOException {
    if (conf.getBoolVar(HiveConf.ConfVars.REPL_DUMP_SKIP_IMMUTABLE_DATA_COPY)) {
        return Collections.emptyList();
    }
    List<Task<?>> tasks = new ArrayList<>();
    Retryable retryable = Retryable.builder().withHiveConf(conf).withRetryOnException(UncheckedIOException.class).build();
    try {
        retryable.executeCallable((Callable<Void>) () -> {
            try {
                int numEntriesToSkip = tasks.size(); // tasks is initialized above and never null; its size counts entries handled by earlier attempts
                while (externalTableDataCopyItr.hasNext() && tracker.canAddMoreTasks()) {
                    if (numEntriesToSkip > 0) {
                        // skip entries added in the previous attempts of this retryable block
                        externalTableDataCopyItr.next();
                        numEntriesToSkip--;
                        continue;
                    }
                    DirCopyWork dirCopyWork = new DirCopyWork(metricCollector, (new Path(dumpDirectory).getParent()).toString());
                    dirCopyWork.loadFromString(externalTableDataCopyItr.next());
                    Task<DirCopyWork> task = TaskFactory.get(dirCopyWork, conf);
                    tasks.add(task);
                    tracker.addTask(task);
                    LOG.debug("Added task for {}", dirCopyWork);
                }
            } catch (UncheckedIOException e) {
                LOG.error("Reading entry for data copy failed for external tables, attempting retry.", e);
                throw e;
            }
            return null;
        });
    } catch (Exception e) {
        throw new IOException(ErrorMsg.REPL_RETRY_EXHAUSTED.format(e.getMessage()));
    }
    LOG.info("Added total {} tasks for external table locations copy.", tasks.size());
    return tasks;
}
Also used : Path(org.apache.hadoop.fs.Path) Task(org.apache.hadoop.hive.ql.exec.Task) Retryable(org.apache.hadoop.hive.ql.exec.util.Retryable) ArrayList(java.util.ArrayList) UncheckedIOException(java.io.UncheckedIOException) IOException(java.io.IOException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException)
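
The subtlety in this example is that the callable can run more than once: the tasks list survives across attempts, so its size tells a retry how many iterator entries were already turned into tasks and must be skipped to avoid duplicates. A minimal standalone sketch of this skip-on-retry idiom, with hypothetical names and a simulated transient failure (no Hive dependencies):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class SkipOnRetryDemo {
    public static void main(String[] args) {
        List<String> entries = Arrays.asList("t1", "t2", "t3", "t4");
        List<String> processed = new ArrayList<>();
        int attempt = 0;
        while (true) {
            attempt++;
            try {
                // Fast-forward past entries handled by earlier attempts,
                // mirroring the numEntriesToSkip bookkeeping above.
                int numEntriesToSkip = processed.size();
                Iterator<String> it = entries.iterator();
                while (it.hasNext()) {
                    String entry = it.next();
                    if (numEntriesToSkip > 0) {
                        numEntriesToSkip--;
                        continue;
                    }
                    if (attempt == 1 && entry.equals("t3")) {
                        throw new IllegalStateException("simulated transient failure");
                    }
                    processed.add(entry);
                }
                break;
            } catch (IllegalStateException e) {
                if (attempt >= 3) {
                    throw e; // retries exhausted
                }
            }
        }
        // Prints [t1, t2, t3, t4]: each entry processed exactly once.
        System.out.println(processed);
    }
}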

Example 17 with Retryable

Use of org.apache.hadoop.hive.ql.exec.util.Retryable in project hive by apache.

The class SnapshotUtils, method isSnapshotAvailable:

/**
 * Checks whether a given snapshot exists.
 * @param dfs DistributedFileSystem.
 * @param path path of snapshot.
 * @param snapshotPrefix snapshot name prefix.
 * @param snapshotName name of snapshot.
 * @param conf Hive configuration.
 * @return true if the snapshot exists.
 * @throws IOException in case of any error.
 */
public static boolean isSnapshotAvailable(DistributedFileSystem dfs, Path path, String snapshotPrefix, String snapshotName, HiveConf conf) throws IOException {
    AtomicBoolean isSnapAvlb = new AtomicBoolean(false);
    Retryable retryable = Retryable.builder().withHiveConf(conf).withRetryOnException(IOException.class).withFailOnException(SnapshotException.class).build();
    try {
        retryable.executeCallable(() -> {
            isSnapAvlb.set(dfs.exists(new Path(path, HdfsConstants.DOT_SNAPSHOT_DIR + "/" + snapshotPrefix + snapshotName)));
            LOG.debug("Snapshot for path {} is {}", path, isSnapAvlb.get() ? "available" : "unavailable");
            return null;
        });
    } catch (Exception e) {
        throw new SnapshotException("Failed to check if snapshot is available on " + path, e);
    }
    return isSnapAvlb.get();
}
Also used : Path(org.apache.hadoop.fs.Path) ReplExternalTables.externalTableDataPath(org.apache.hadoop.hive.ql.exec.repl.ReplExternalTables.externalTableDataPath) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) Retryable(org.apache.hadoop.hive.ql.exec.util.Retryable) SnapshotException(org.apache.hadoop.hdfs.protocol.SnapshotException) IOException(java.io.IOException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) FileNotFoundException(java.io.FileNotFoundException)
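
The builder splits failures into two classes: withRetryOnException(IOException.class) retries transient filesystem errors, while withFailOnException(SnapshotException.class) aborts immediately on errors a retry cannot fix. A hedged usage sketch of the helper; the import path for SnapshotUtils, the sample path, and the snapshot names are assumptions, and dfs/conf are expected to come from the caller:

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.repl.util.SnapshotUtils; // package path assumed

class SnapshotCheck {
    // dfs and conf are assumed to be initialized elsewhere.
    static boolean snapshotReady(DistributedFileSystem dfs, HiveConf conf) throws IOException {
        Path externalTableRoot = new Path("/warehouse/external/sales"); // illustrative
        return SnapshotUtils.isSnapshotAvailable(
            dfs,               // DistributedFileSystem of the source cluster
            externalTableRoot, // snapshottable directory to check
            "replsnap-",       // snapshot name prefix used by replication
            "first",           // snapshot name; full name is prefix + name
            conf);             // HiveConf carrying the retry settings
    }
}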

Example 18 with Retryable

Use of org.apache.hadoop.hive.ql.exec.util.Retryable in project hive by apache.

The class RangerRestClientImpl, method exportRangerPolicies:

public RangerExportPolicyList exportRangerPolicies(String sourceRangerEndpoint, String dbName, String rangerHiveServiceName, HiveConf hiveConf) throws Exception {
    LOG.info("Ranger endpoint for cluster " + sourceRangerEndpoint);
    if (StringUtils.isEmpty(rangerHiveServiceName)) {
        throw new SemanticException(ErrorMsg.REPL_INVALID_CONFIG_FOR_SERVICE.format("Ranger Service Name " + "cannot be empty", ReplUtils.REPL_RANGER_SERVICE));
    }
    String finalUrl = getRangerExportUrl(sourceRangerEndpoint, rangerHiveServiceName, dbName);
    LOG.debug("Url to export policies from source Ranger: {}", finalUrl);
    Retryable retryable = Retryable.builder().withHiveConf(hiveConf).withFailOnException(RuntimeException.class).withRetryOnException(Exception.class).build();
    try {
        return retryable.executeCallable(() -> exportRangerPoliciesPlain(finalUrl, hiveConf));
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new SemanticException(ErrorMsg.REPL_RETRY_EXHAUSTED.format(e.getMessage()), e);
    }
}
Also used : Retryable(org.apache.hadoop.hive.ql.exec.util.Retryable) URISyntaxException(java.net.URISyntaxException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException)
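
The two catch blocks encode the retry policy: RuntimeException is registered via withFailOnException, so it escapes the retry loop at once and is rethrown untouched, while any checked exception reaching the outer catch means every retry failed and is wrapped as REPL_RETRY_EXHAUSTED. A hedged calling sketch; the endpoint, database, and service names are illustrative placeholders, and hiveConf is assumed to be configured by the caller:

// RuntimeExceptions propagate unchanged (fail-fast path); checked failures
// surface as SemanticException only after the configured retries run out.
static RangerExportPolicyList exportForReplication(RangerRestClientImpl client, HiveConf hiveConf) throws Exception {
    return client.exportRangerPolicies(
        "http://source-ranger:6080", // illustrative source Ranger endpoint
        "sales_db",                  // database whose policies are exported
        "cm_hive",                   // Ranger Hive service name; must be non-empty
        hiveConf);
}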

Example 19 with Retryable

Use of org.apache.hadoop.hive.ql.exec.util.Retryable in project hive by apache.

The class RangerRestClientImpl, method importRangerPolicies:

@Override
public RangerExportPolicyList importRangerPolicies(RangerExportPolicyList rangerExportPolicyList, String dbName, String baseUrl, String rangerHiveServiceName, HiveConf hiveConf) throws Exception {
    String sourceClusterServiceName = null;
    String serviceMapJsonFileName = "hive_servicemap.json";
    String rangerPoliciesJsonFileName = "hive_replicationPolicies.json";
    if (!rangerExportPolicyList.getPolicies().isEmpty()) {
        sourceClusterServiceName = rangerExportPolicyList.getPolicies().get(0).getService();
    }
    if (StringUtils.isEmpty(sourceClusterServiceName)) {
        sourceClusterServiceName = rangerHiveServiceName;
    }
    Map<String, String> serviceMap = new LinkedHashMap<String, String>();
    if (!StringUtils.isEmpty(sourceClusterServiceName) && !StringUtils.isEmpty(rangerHiveServiceName)) {
        serviceMap.put(sourceClusterServiceName, rangerHiveServiceName);
    }
    Gson gson = new GsonBuilder().create();
    String jsonServiceMap = gson.toJson(serviceMap);
    String jsonRangerExportPolicyList = gson.toJson(rangerExportPolicyList);
    String finalUrl = getRangerImportUrl(baseUrl, dbName);
    LOG.debug("URL to import policies on target Ranger: {}", finalUrl);
    Retryable retryable = Retryable.builder().withHiveConf(hiveConf).withFailOnException(RuntimeException.class).withRetryOnException(Exception.class).build();
    try {
        return retryable.executeCallable(() -> importRangerPoliciesPlain(jsonRangerExportPolicyList, rangerPoliciesJsonFileName, serviceMapJsonFileName, jsonServiceMap, finalUrl, rangerExportPolicyList, hiveConf));
    } catch (RuntimeException e) {
        throw e;
    } catch (Exception e) {
        throw new SemanticException(ErrorMsg.REPL_RETRY_EXHAUSTED.format(e.getMessage()), e);
    }
}
Also used : GsonBuilder(com.google.gson.GsonBuilder) Retryable(org.apache.hadoop.hive.ql.exec.util.Retryable) Gson(com.google.gson.Gson) URISyntaxException(java.net.URISyntaxException) SemanticException(org.apache.hadoop.hive.ql.parse.SemanticException) IOException(java.io.IOException) FileNotFoundException(java.io.FileNotFoundException) LinkedHashMap(java.util.LinkedHashMap)
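
The serviceMap built above is what lets Ranger rewrite service names during import: each key is the service name embedded in the exported policies, each value the Hive service name on the target cluster. A standalone sketch of the same Gson serialization with made-up service names (only the Gson dependency is assumed):

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import java.util.LinkedHashMap;
import java.util.Map;

public class ServiceMapDemo {
    public static void main(String[] args) {
        // source-cluster service name -> target-cluster service name
        Map<String, String> serviceMap = new LinkedHashMap<>();
        serviceMap.put("cm_hive_source", "cm_hive_target");
        Gson gson = new GsonBuilder().create();
        // Prints {"cm_hive_source":"cm_hive_target"}, the mapping payload
        // sent alongside the policy list on the Ranger import request.
        System.out.println(gson.toJson(serviceMap));
    }
}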

Aggregations

Each entry below lists a class that appears alongside Retryable in these examples, with the number of examples it occurs in:

IOException (java.io.IOException): 19
Retryable (org.apache.hadoop.hive.ql.exec.util.Retryable): 19
SemanticException (org.apache.hadoop.hive.ql.parse.SemanticException): 13
FileNotFoundException (java.io.FileNotFoundException): 11
Path (org.apache.hadoop.fs.Path): 8
FileSystem (org.apache.hadoop.fs.FileSystem): 6
UncheckedIOException (java.io.UncheckedIOException): 5
URISyntaxException (java.net.URISyntaxException): 5
ArrayList (java.util.ArrayList): 4
SnapshotException (org.apache.hadoop.hdfs.protocol.SnapshotException): 4
BufferedReader (java.io.BufferedReader): 3
InputStreamReader (java.io.InputStreamReader): 3
Task (org.apache.hadoop.hive.ql.exec.Task): 3
HiveException (org.apache.hadoop.hive.ql.metadata.HiveException): 3
Gson (com.google.gson.Gson): 2
GsonBuilder (com.google.gson.GsonBuilder): 2
MalformedURLException (java.net.MalformedURLException): 2
NoSuchElementException (java.util.NoSuchElementException): 2
AtomicBoolean (java.util.concurrent.atomic.AtomicBoolean): 2
FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream): 2