Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
Example: the decompress method of the HoodieAvroDataBlock class.
private static String decompress(byte[] bytes) {
  // Inflate the zlib-compressed bytes and decode the result as UTF-8 text.
  // try-with-resources closes the stream so the Inflater's native buffers are released.
  try (InputStream in = new InflaterInputStream(new ByteArrayInputStream(bytes))) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] buffer = new byte[8192];
    int len;
    while ((len = in.read(buffer)) > 0) {
      baos.write(buffer, 0, len);
    }
    return new String(baos.toByteArray(), StandardCharsets.UTF_8);
  } catch (IOException e) {
    throw new HoodieIOException("IOException while decompressing text", e);
  }
}
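For context, a minimal sketch of the matching write side: DeflaterOutputStream from java.util.zip emits the zlib stream that InflaterInputStream above consumes. The method name compress is an illustrative assumption, not a quote of Hudi's code.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.zip.DeflaterOutputStream;

// Hypothetical counterpart to decompress(): DeflaterOutputStream writes the
// zlib stream that InflaterInputStream above knows how to read back.
private static byte[] compress(String text) {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try (DeflaterOutputStream dos = new DeflaterOutputStream(baos)) {
    dos.write(text.getBytes(StandardCharsets.UTF_8));
  } catch (IOException e) {
    throw new HoodieIOException("IOException while compressing text", e);
  }
  return baos.toByteArray();
}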
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
Example: the getKeysToDelete method of the HoodieDeleteBlock class.
public HoodieKey[] getKeysToDelete() {
  try {
    if (keysToDelete == null) {
      if (!getContent().isPresent() && readBlockLazily) {
        // Content was not eagerly materialized; read it from disk.
        inflate();
      }
      SizeAwareDataInputStream dis = new SizeAwareDataInputStream(
          new DataInputStream(new ByteArrayInputStream(getContent().get())));
      int version = dis.readInt();     // block format version (read but unused here)
      int dataLength = dis.readInt();  // length of the serialized keys payload
      byte[] data = new byte[dataLength];
      dis.readFully(data);
      this.keysToDelete = SerializationUtils.<HoodieKey[]>deserialize(data);
      // Release the raw block content now that the keys are cached.
      deflate();
    }
    return keysToDelete;
  } catch (IOException io) {
    throw new HoodieIOException("Unable to generate keys to delete from block content", io);
  }
}
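For reference, a sketch of the byte layout this method parses: a format-version int, the payload length, then the serialized keys. The helper name serializeKeys is illustrative, and SerializationUtils.serialize is assumed to be the mirror of the deserialize call above.

// Illustrative write side of the layout getKeysToDelete() expects.
private static byte[] serializeKeys(HoodieKey[] keysToDelete, int formatVersion) throws IOException {
  byte[] payload = SerializationUtils.serialize(keysToDelete);
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  DataOutputStream dos = new DataOutputStream(baos);
  dos.writeInt(formatVersion);   // read back as dis.readInt() -> version
  dos.writeInt(payload.length);  // read back as dis.readInt() -> dataLength
  dos.write(payload);            // read back by dis.readFully(data)
  dos.flush();
  return baos.toByteArray();
}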
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
Example: the loadFromProperties method of the TableOptionProperties class.
/**
* Read table options map from the given table base path.
*/
public static Map<String, String> loadFromProperties(String basePath, Configuration hadoopConf) {
  Path propertiesFilePath = getPropertiesFilePath(basePath);
  Map<String, String> options = new HashMap<>();
  Properties props = new Properties();
  FileSystem fs = FSUtils.getFs(basePath, hadoopConf);
  // Load the properties file under the table base path and copy it into a map.
  try (FSDataInputStream inputStream = fs.open(propertiesFilePath)) {
    props.load(inputStream);
    for (final String name : props.stringPropertyNames()) {
      options.put(name, props.getProperty(name));
    }
  } catch (IOException e) {
    throw new HoodieIOException(String.format("Could not load table option properties from %s", propertiesFilePath), e);
  }
  LOG.info(String.format("Loaded table option properties from %s successfully.", propertiesFilePath));
  return options;
}
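A brief usage sketch: load the persisted options and read one back with a default. The base path and the option key here are illustrative examples, not values mandated by Hudi.

// Hypothetical caller of loadFromProperties().
Configuration hadoopConf = new Configuration();
Map<String, String> options =
    TableOptionProperties.loadFromProperties("/tmp/warehouse/my_table", hadoopConf);
String tableType = options.getOrDefault("table.type", "COPY_ON_WRITE");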
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
Example: the getTargetSchema method of the SchemaRegistryProvider class.
@Override
public Schema getTargetSchema() {
  String registryUrl = config.getString(Config.SRC_SCHEMA_REGISTRY_URL_PROP);
  // Fall back to the source registry URL when no target registry is configured.
  String targetRegistryUrl = config.getString(Config.TARGET_SCHEMA_REGISTRY_URL_PROP, registryUrl);
  try {
    return getSchema(targetRegistryUrl);
  } catch (IOException ioe) {
    // Report the URL that was actually queried, i.e. the target registry URL.
    throw new HoodieIOException("Error reading target schema from registry: " + targetRegistryUrl, ioe);
  }
}
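For orientation, a rough sketch of what a getSchema(String) helper against a Confluent-style registry endpoint could look like; this is an assumption for illustration, and Hudi's actual implementation may differ in its details.

import java.io.InputStream;
import java.net.URL;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.avro.Schema;

// Hypothetical helper: fetch the registry response and parse its "schema"
// field as an Avro schema.
private Schema getSchema(String registryUrl) throws IOException {
  try (InputStream in = new URL(registryUrl).openStream()) {
    JsonNode node = new ObjectMapper().readTree(in);
    return new Schema.Parser().parse(node.get("schema").asText());
  }
}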
Use of org.apache.hudi.exception.HoodieIOException in project hudi by apache.
Example: the generateInputBatch method of the TestOrcBootstrap class.
private static JavaRDD<HoodieRecord> generateInputBatch(JavaSparkContext jsc,
    List<Pair<String, List<HoodieFileStatus>>> partitionPaths, Schema writerSchema) {
  // Flatten (partition, file statuses) into (partition, file path) pairs.
  List<Pair<String, Path>> fullFilePathsWithPartition = partitionPaths.stream()
      .flatMap(p -> p.getValue().stream()
          .map(x -> Pair.of(p.getKey(), FileStatusUtils.toPath(x.getPath()))))
      .collect(Collectors.toList());
  return jsc.parallelize(fullFilePathsWithPartition.stream().flatMap(p -> {
    try {
      Configuration conf = jsc.hadoopConfiguration();
      AvroReadSupport.setAvroReadSchema(conf, writerSchema);
      // Open the ORC file and derive an Avro schema from its ORC schema.
      Reader orcReader = OrcFile.createReader(p.getValue(), new OrcFile.ReaderOptions(jsc.hadoopConfiguration()));
      RecordReader recordReader = orcReader.rows();
      TypeDescription orcSchema = orcReader.getSchema();
      Schema avroSchema = AvroOrcUtils.createAvroSchemaWithDefaultValue(orcSchema, "test_orc_record", null, true);
      Iterator<GenericRecord> recIterator = new OrcReaderIterator(recordReader, avroSchema, orcSchema);
      // Wrap each Avro record into a HoodieRecord keyed by _row_key and partition path.
      return StreamSupport.stream(Spliterators.spliteratorUnknownSize(recIterator, 0), false).map(gr -> {
        try {
          String key = gr.get("_row_key").toString();
          String pPath = p.getKey();
          return new HoodieAvroRecord<>(new HoodieKey(key, pPath),
              new RawTripTestPayload(gr.toString(), key, pPath, HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA));
        } catch (IOException e) {
          throw new HoodieIOException(e.getMessage(), e);
        }
      });
    } catch (IOException ioe) {
      throw new HoodieIOException(ioe.getMessage(), ioe);
    }
  }).collect(Collectors.toList()));
}
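The test helper above leans on ORC's core reader, wrapped in an OrcReaderIterator to surface Avro GenericRecords. For reference, a self-contained sketch of that core reader in isolation, counting rows batch by batch; the file path is illustrative.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;

public class OrcRowCountExample {
  public static void main(String[] args) throws Exception {
    // Illustrative path; in the test above, this reader is wrapped in an
    // OrcReaderIterator instead of being consumed as raw row batches.
    Path orcPath = new Path("/tmp/sample.orc");
    Reader reader = OrcFile.createReader(orcPath, new OrcFile.ReaderOptions(new Configuration()));
    RecordReader rows = reader.rows();
    VectorizedRowBatch batch = reader.getSchema().createRowBatch();
    long count = 0;
    while (rows.nextBatch(batch)) {
      count += batch.size;
    }
    rows.close();
    System.out.println("row count: " + count);
  }
}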