Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
The class HoodieHFileReader, method getRecordIterator:
public ClosableIterator<R> getRecordIterator(List<String> keys, Schema schema) throws IOException {
  this.schema = schema;
  reader.loadFileInfo();
  Iterator<String> iterator = keys.iterator();
  return new ClosableIterator<R>() {
    private R next;

    @Override
    public void close() {
    }

    @Override
    public boolean hasNext() {
      try {
        // Lazily advance through the keys, skipping keys with no matching record.
        while (iterator.hasNext()) {
          Option<R> value = getRecordByKey(iterator.next(), schema);
          if (value.isPresent()) {
            next = value.get();
            return true;
          }
        }
        return false;
      } catch (IOException e) {
        // Iterator#hasNext cannot declare a checked IOException, so rethrow it unchecked.
        throw new HoodieIOException("unable to read next record from hfile ", e);
      }
    }

    @Override
    public R next() {
      return next;
    }
  };
}
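The iterator above follows a common lazy-lookup pattern: hasNext() resolves keys one at a time, drops keys without a value, and converts the checked IOException into the unchecked HoodieIOException so the Iterator contract can be satisfied. Below is a minimal self-contained sketch of the same pattern, using a plain Map in place of the HFile reader; the LazyLookupIterator class and its names are illustrative, not Hudi API.

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;

// Illustrative only: a lazy iterator that resolves keys against a lookup and skips misses,
// mirroring the structure of the getRecordIterator example above.
class LazyLookupIterator<R> implements Iterator<R> {
  private final Iterator<String> keys;
  private final Map<String, R> lookup;
  private R next;

  LazyLookupIterator(List<String> keys, Map<String, R> lookup) {
    this.keys = keys.iterator();
    this.lookup = lookup;
  }

  @Override
  public boolean hasNext() {
    while (keys.hasNext()) {
      R value = lookup.get(keys.next()); // stands in for getRecordByKey(...)
      if (value != null) {
        next = value;
        return true;
      }
    }
    return false;
  }

  @Override
  public R next() {
    if (next == null) {
      throw new NoSuchElementException();
    }
    return next;
  }
}

A caller drives it the usual way, while (it.hasNext()) { process(it.next()); }. In the Hudi version the lookup is an HFile read, which is why the IOException must be wrapped in HoodieIOException.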
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
The class HashID, method getMD5Hash:
private static byte[] getMD5Hash(final byte[] message) throws HoodieIOException {
  try {
    MessageDigest messageDigest = MessageDigest.getInstance(MD5_ALGORITHM_NAME);
    messageDigest.update(message);
    return messageDigest.digest();
  } catch (NoSuchAlgorithmException e) {
    // MD5 is required here; a missing algorithm surfaces as an unchecked HoodieIOException.
    throw new HoodieIOException("Failed to create MD5 Hash: " + e);
  }
}
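The same wrap-and-rethrow idea applies here: MessageDigest.getInstance can throw the checked NoSuchAlgorithmException, and HashID converts it into the unchecked HoodieIOException. The following is a standalone sketch of just the hashing step; the Md5Demo class, the hex formatting, and the use of IllegalStateException (instead of a Hudi exception, to keep the example dependency-free) are illustrative, not part of Hudi.

import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

public class Md5Demo {
  // Computes the MD5 digest of a UTF-8 string and returns it as lowercase hex.
  static String md5Hex(String input) {
    try {
      MessageDigest md = MessageDigest.getInstance("MD5");
      byte[] digest = md.digest(input.getBytes(StandardCharsets.UTF_8));
      StringBuilder sb = new StringBuilder(digest.length * 2);
      for (byte b : digest) {
        sb.append(String.format("%02x", b));
      }
      return sb.toString();
    } catch (NoSuchAlgorithmException e) {
      // Every standard JVM ships MD5, so this branch is effectively unreachable.
      throw new IllegalStateException("MD5 not available", e);
    }
  }

  public static void main(String[] args) {
    System.out.println(md5Hex("hoodie")); // prints a 32-character hex digest
  }
}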
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
The class HoodieCopyOnWriteTableInputFormat, method makeExternalFileSplit:
private BootstrapBaseFileSplit makeExternalFileSplit(PathWithBootstrapFileStatus file, FileSplit split) {
  try {
    LOG.info("Making external data split for " + file);
    FileStatus externalFileStatus = file.getBootstrapFileStatus();
    // Build a split covering the entire external (bootstrap) base file.
    FileSplit externalFileSplit = makeSplit(externalFileStatus.getPath(), 0, externalFileStatus.getLen(), new String[0], new String[0]);
    return new BootstrapBaseFileSplit(split, externalFileSplit);
  } catch (IOException e) {
    throw new HoodieIOException(e.getMessage(), e);
  }
}
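For context, the split construction reduces to building a Hadoop FileSplit that spans the whole bootstrap base file, from offset 0 through the file's length. A hedged sketch of that step alone, using the standard org.apache.hadoop.mapred API; the SplitUtil class and fullFileSplit helper are illustrative, not Hudi code.

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.mapred.FileSplit;

// Illustrative helper: wrap an entire file in a single split, as the bootstrap path does.
class SplitUtil {
  static FileSplit fullFileSplit(FileStatus status) {
    // start = 0, length = whole file, no preferred host hints
    return new FileSplit(status.getPath(), 0, status.getLen(), new String[0]);
  }
}

In the Hudi method this full-file split is then paired with the skeleton file's split inside a BootstrapBaseFileSplit.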
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
The class TestHoodieRepairTool, method cleanUpDanglingDataFilesInFS:
private void cleanUpDanglingDataFilesInFS() {
  FileSystem fs = metaClient.getFs();
  DANGLING_DATA_FILE_LIST.forEach(relativeFilePath -> {
    Path path = new Path(basePath, relativeFilePath);
    try {
      if (fs.exists(path)) {
        fs.delete(path, false);
      }
    } catch (IOException e) {
      // The forEach lambda cannot throw a checked IOException, so wrap it in HoodieIOException.
      throw new HoodieIOException("Unable to delete file: " + path);
    }
  });
}
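The wrapping here is forced by the lambda: forEach takes a Consumer, whose accept method cannot declare the checked IOException raised by the filesystem calls, so the test rethrows it as the unchecked HoodieIOException. The same constraint shows up with plain java.nio; the runnable sketch below uses made-up paths and JDK types only (UncheckedIOException standing in for HoodieIOException).

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class CleanupDemo {
  public static void main(String[] args) {
    Path baseDir = Path.of("/tmp/hoodie-demo"); // hypothetical base path
    List<String> danglingFiles = List.of("p1/f1.parquet", "p2/f2.parquet"); // hypothetical entries

    danglingFiles.forEach(relative -> {
      Path path = baseDir.resolve(relative);
      try {
        Files.deleteIfExists(path);
      } catch (IOException e) {
        // Consumer#accept cannot throw IOException, so wrap it in an unchecked exception.
        throw new UncheckedIOException("Unable to delete file: " + path, e);
      }
    });
  }
}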
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
The class AbstractBaseTestSource, method initDataGen:
public static void initDataGen(TypedProperties props, int partition) {
  try {
    boolean useRocksForTestDataGenKeys = props.getBoolean(SourceConfigs.USE_ROCKSDB_FOR_TEST_DATAGEN_KEYS,
        SourceConfigs.DEFAULT_USE_ROCKSDB_FOR_TEST_DATAGEN_KEYS);
    String baseStoreDir = props.getString(SourceConfigs.ROCKSDB_BASE_DIR_FOR_TEST_DATAGEN_KEYS,
        File.createTempFile("test_data_gen", ".keys").getParent()) + "/" + partition;
    LOG.info("useRocksForTestDataGenKeys=" + useRocksForTestDataGenKeys + ", BaseStoreDir=" + baseStoreDir);
    // Back the generator's key store with RocksDB on disk or a plain in-memory HashMap.
    dataGeneratorMap.put(partition,
        new HoodieTestDataGenerator(HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS,
            useRocksForTestDataGenKeys ? new RocksDBBasedMap<>(baseStoreDir) : new HashMap<>()));
  } catch (IOException e) {
    throw new HoodieIOException(e.getMessage(), e);
  }
}
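The method is mostly configuration plumbing: read two settings with defaults, derive a per-partition directory, and choose between a RocksDB-backed map and an in-memory HashMap for the generator's key store. A stripped-down sketch of the read-with-default pattern using plain java.util.Properties; the property keys and temp-dir layout are illustrative, not Hudi configuration names.

import java.io.File;
import java.io.IOException;
import java.util.Properties;

public class ConfigDemo {
  public static void main(String[] args) throws IOException {
    Properties props = new Properties(); // would normally be loaded from a config file

    // Read a boolean flag with a default, mirroring props.getBoolean(key, default).
    boolean useRocks = Boolean.parseBoolean(props.getProperty("use.rocksdb.for.keys", "false"));

    // Derive a base directory with a temp-dir fallback, then append a partition suffix.
    int partition = 0;
    String baseStoreDir = props.getProperty("rocksdb.base.dir",
        File.createTempFile("test_data_gen", ".keys").getParent()) + "/" + partition;

    System.out.println("useRocks=" + useRocks + ", baseStoreDir=" + baseStoreDir);
  }
}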