Example use of org.apache.hadoop.hive.ql.exec.util.Retryable in the Apache Hive project: class FileList, method writeWithRetry.
/**
 * Appends a single entry to the backing file, retrying transient failures.
 * Serialized via {@code synchronized} so concurrent callers cannot interleave writes.
 * On a write failure the writer is closed and retry mode is enabled before the
 * exception is rethrown to the Retryable, which re-invokes the callable.
 * If all retries are exhausted, further writes are aborted and an IOException
 * carrying the last failure as its cause is thrown.
 *
 * @param entry the line to append (a newline is added by getEntryWithNewline)
 * @throws IOException when the retry budget is exhausted
 */
private synchronized void writeWithRetry(String entry) throws IOException {
  Retryable retryable = buildRetryable();
  try {
    retryable.executeCallable((Callable<Void>) () -> {
      // A previous exhausted retry marked the operation aborted: skip silently.
      if (this.abortOperation) {
        LOG.debug("Aborting write operation for entry {} to file {}.", entry, backingFile);
        return null;
      }
      try {
        // Lazily (re-)create the writer; it is nulled out by close() on failure.
        if (backingFileWriter == null) {
          backingFileWriter = initWriter();
        }
        backingFileWriter.writeBytes(getEntryWithNewline(entry));
        // hflush so the entry is durable on the remote side before we report success.
        backingFileWriter.hflush();
        LOG.info("Writing entry {} to file list backed by {}", entry, backingFile);
      } catch (IOException e) {
        LOG.error("Writing entry {} to file list {} failed, attempting retry.", entry, backingFile, e);
        // Switch to retry mode and close the writer so the next attempt reopens it.
        this.retryMode = true;
        close();
        throw e;
      }
      return null;
    });
  } catch (Exception e) {
    this.abortOperation = true;
    // Chain the original exception as the cause so its stack trace is not lost
    // (previously only e.getMessage() survived).
    throw new IOException(ErrorMsg.REPL_RETRY_EXHAUSTED.format(e.getMessage()), e);
  }
}
Example use of org.apache.hadoop.hive.ql.exec.util.Retryable in the Apache Hive project: class RangerRestClientImpl, method saveRangerPoliciesToFile.
/**
 * Serializes the given Ranger policy list to JSON and writes it to a file under
 * the staging directory, retrying the write on IOException.
 *
 * @return path of the written JSON file
 * @throws SemanticException when the retry budget is exhausted
 */
@Override
public Path saveRangerPoliciesToFile(RangerExportPolicyList rangerExportPolicyList, Path stagingDirPath, String fileName, HiveConf conf) throws SemanticException {
  // Serialize once, outside the retry loop; only the file write is retried.
  String policyListJson = new GsonBuilder().create().toJson(rangerExportPolicyList);
  Retryable retryable = Retryable.builder()
      .withHiveConf(conf)
      .withRetryOnException(IOException.class)
      .build();
  try {
    return retryable.executeCallable(
        () -> writeExportedRangerPoliciesToJsonFile(policyListJson, fileName, stagingDirPath, conf));
  } catch (Exception e) {
    throw new SemanticException(ErrorMsg.REPL_RETRY_EXHAUSTED.format(e.getMessage()), e);
  }
}
Example use of org.apache.hadoop.hive.ql.exec.util.Retryable in the Apache Hive project: class RangerRestClientImpl, method deleteRangerPolicy.
/**
 * Deletes the named policy on the target Ranger service, retrying transient
 * failures. Authorization/authentication failures are raised as
 * RuntimeException, which the Retryable is configured to fail fast on, and are
 * rethrown to the caller unchanged; any other exhausted failure is wrapped in
 * a SemanticException.
 */
@Override
public void deleteRangerPolicy(String policyName, String baseUrl, String rangerHiveServiceName, HiveConf hiveConf) throws Exception {
  String deleteUrl = getRangerDeleteUrl(baseUrl, policyName, rangerHiveServiceName);
  LOG.debug("URL to delete policy on target Ranger: {}", deleteUrl);
  Retryable retryable = Retryable.builder()
      .withHiveConf(hiveConf)
      .withFailOnException(RuntimeException.class)
      .withRetryOnException(Exception.class)
      .build();
  try {
    retryable.executeCallable(() -> {
      WebResource.Builder resourceBuilder = getRangerResourceBuilder(deleteUrl, hiveConf);
      ClientResponse response = resourceBuilder.delete(ClientResponse.class);
      // NOTE(review): a null response is treated as success here — confirm intended.
      if (response != null) {
        int status = response.getStatus();
        if (status == HttpServletResponse.SC_NO_CONTENT) {
          LOG.debug("Ranger policy: {} deleted successfully", policyName);
        } else if (status == HttpServletResponse.SC_NOT_FOUND) {
          // Already absent: treated as success.
          LOG.debug("Ranger policy: {} not found.", policyName);
        } else if (status == HttpServletResponse.SC_FORBIDDEN) {
          // RuntimeException → no retry (withFailOnException).
          throw new RuntimeException(ErrorMsg.RANGER_AUTHORIZATION_FAILED.getMsg());
        } else if (status == HttpServletResponse.SC_UNAUTHORIZED) {
          throw new RuntimeException(ErrorMsg.RANGER_AUTHENTICATION_FAILED.getMsg());
        } else {
          // Checked exception → retried until the budget is exhausted.
          throw new SemanticException("Ranger policy deletion failed, Please refer target Ranger admin logs.");
        }
      }
      return null;
    });
  } catch (RuntimeException e) {
    // Propagate fail-fast auth errors unchanged.
    throw e;
  } catch (Exception e) {
    throw new SemanticException(ErrorMsg.REPL_RETRY_EXHAUSTED.format(e.getMessage()), e);
  }
}
Example use of org.apache.hadoop.hive.ql.exec.util.Retryable in the Apache Hive project: class Utils, method writeFile.
/**
 * Copies the contents of {@code is} to {@code exportFilePath} on {@code fs},
 * retrying the whole write on IOException.
 *
 * NOTE(review): a retry re-reads from {@code is}, which may already be partially
 * consumed by the failed attempt, so a retried write can be truncated unless the
 * caller supplies a resettable/fresh stream — confirm against callers.
 *
 * @return number of bytes written (final position of the output stream)
 * @throws SemanticException when the retry budget is exhausted
 */
public static long writeFile(FileSystem fs, Path exportFilePath, InputStream is, HiveConf conf) throws SemanticException {
  Retryable retryable = Retryable.builder().withHiveConf(conf).withRetryOnException(IOException.class).build();
  try {
    return retryable.executeCallable(() -> {
      // try-with-resources closes the stream on every exit path without masking
      // an exception thrown from the copy loop (the old manual finally-close could).
      try (FSDataOutputStream fos = fs.create(exportFilePath)) {
        byte[] buffer = new byte[DEF_BUF_SIZE];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
          fos.write(buffer, 0, bytesRead);
        }
        return fos.getPos();
      }
    });
  } catch (Exception e) {
    throw new SemanticException(e);
  }
}
Example use of org.apache.hadoop.hive.ql.exec.util.Retryable in the Apache Hive project: class FileOperations, method exportFilesAsList.
/**
 * Writes the list of data files found under every path in {@code dataPathList}
 * to the _files listing under {@code exportRootDataDir}, retrying on IOException.
 * On a retriable failure it resets the cached FileSystem objects and deletes any
 * partially written _files so the next attempt starts clean.
 *
 * @throws SemanticException when a file is missing or the retry budget is exhausted
 */
void exportFilesAsList() throws SemanticException {
// Nothing to export: avoid creating an empty _files listing.
if (dataPathList.isEmpty()) {
return;
}
Retryable retryable = Retryable.builder().withHiveConf(hiveConf).withRetryOnException(IOException.class).build();
try {
retryable.executeCallable((Callable<Void>) () -> {
// try-with-resources closes the writer whether the listing succeeds or fails.
try (BufferedWriter writer = writer()) {
for (Path dataPath : dataPathList) {
writeFilesList(listFilesInDir(dataPath), writer, AcidUtils.getAcidSubDir(dataPath));
}
} catch (IOException e) {
// A missing source file is not recoverable by retrying: surface it immediately.
// NOTE(review): the rewrapped FileNotFoundException drops the original cause
// and stack trace — confirm this is intended.
if (e instanceof FileNotFoundException) {
logger.error("exporting data files in dir : " + dataPathList + " to " + exportRootDataDir + " failed");
throw new FileNotFoundException(FILE_NOT_FOUND.format(e.getMessage()));
}
// in case of io error, reset the file system object
FileSystem.closeAllForUGI(Utils.getUGI());
dataFileSystem = dataPathList.get(0).getFileSystem(hiveConf);
exportFileSystem = exportRootDataDir.getFileSystem(hiveConf);
// Delete any partially written listing so the retry does not append to it.
Path exportPath = new Path(exportRootDataDir, EximUtil.FILES_NAME);
if (exportFileSystem.exists(exportPath)) {
exportFileSystem.delete(exportPath, true);
}
// Rethrow so the Retryable schedules another attempt.
throw e;
}
return null;
});
} catch (Exception e) {
throw new SemanticException(e);
}
}
Aggregations