Use of org.apache.hudi.exception.HoodieIOException in the Apache Hudi project.
Class HoodieTestDataGenerator, method generateUpdatesForAllRecords.
/**
 * Generates one update record for every existing key tracked under {@code TRIP_EXAMPLE_SCHEMA}.
 *
 * @param instantTime commit instant time stamped on each generated update record
 * @return list containing one update record per existing key
 * @throws HoodieIOException if generating an update record fails with an underlying I/O error
 */
public List<HoodieRecord> generateUpdatesForAllRecords(String instantTime) {
  Map<Integer, KeyPartition> existingKeys = existingKeysBySchema.get(TRIP_EXAMPLE_SCHEMA);
  // Exactly one update is produced per existing key; pre-size to avoid resizing.
  List<HoodieRecord> updates = new ArrayList<>(existingKeys.size());
  existingKeys.values().forEach(kp -> {
    try {
      updates.add(generateUpdateRecord(kp.key, instantTime));
    } catch (IOException ioe) {
      // Wrap the checked IOException so it can escape the lambda.
      throw new HoodieIOException(ioe.getMessage(), ioe);
    }
  });
  return updates;
}
Use of org.apache.hudi.exception.HoodieIOException in the Apache Hudi project.
Class HoodieTestDataGenerator, method generateInsertsStream.
/**
* Generates new inserts, uniformly across the partition paths above. It also updates the list of existing keys.
*/
/**
 * Generates new inserts, uniformly across the partition paths above. It also updates the list of
 * existing keys so later update generation can find the new records.
 *
 * @param instantTime           commit instant time stamped on each generated record
 * @param n                     number of insert records to generate
 * @param isFlattened           whether the generated payload uses the flattened schema variant
 * @param schemaStr             schema string used to generate the record payloads
 * @param containsAllPartitions when true, the first {@code partitionPaths.length} records are
 *                              pinned to each known partition so every partition receives data
 * @param partitionPathSupplier supplies a partition path for each record
 * @param recordKeySupplier     supplies a record key for each record
 * @return a lazy stream of {@code n} insert records
 * @throws HoodieIOException if payload generation fails with an underlying I/O error
 */
public Stream<HoodieRecord> generateInsertsStream(String instantTime, Integer n, boolean isFlattened, String schemaStr, boolean containsAllPartitions, Supplier<String> partitionPathSupplier, Supplier<String> recordKeySupplier) {
  int currSize = getNumExistingKeys(schemaStr);
  return IntStream.range(0, n).boxed().map(i -> {
    // Always invoke the supplier first (it may be stateful), then pin the first few
    // records to each known partition when full partition coverage is requested.
    String partitionPath = partitionPathSupplier.get();
    if (containsAllPartitions && i < partitionPaths.length) {
      partitionPath = partitionPaths[i];
    }
    HoodieKey key = new HoodieKey(recordKeySupplier.get(), partitionPath);
    KeyPartition kp = new KeyPartition();
    kp.key = key;
    kp.partitionPath = partitionPath;
    // Record the new key so subsequent update/delete generation can see it.
    populateKeysBySchema(schemaStr, currSize + i, kp);
    incrementNumExistingKeysBySchema(schemaStr);
    try {
      // Diamond operator instead of the raw HoodieAvroRecord type.
      return new HoodieAvroRecord<>(key, generateRandomValueAsPerSchema(schemaStr, key, instantTime, isFlattened));
    } catch (IOException e) {
      // Wrap the checked IOException so it can escape the lambda.
      throw new HoodieIOException(e.getMessage(), e);
    }
  });
}
Use of org.apache.hudi.exception.HoodieIOException in the Apache Hudi project.
Class HoodieTestTable, method getMetadataForInstant.
/**
 * Looks up the completed commit with the given instant time on the (reloaded) active timeline
 * and returns its commit metadata, or {@code Option.empty()} when no such instant exists.
 */
private Option<HoodieCommitMetadata> getMetadataForInstant(String instantTime) {
  metaClient = HoodieTableMetaClient.reload(metaClient);
  Option<HoodieInstant> hoodieInstant = metaClient.getActiveTimeline()
      .getCommitsTimeline()
      .filterCompletedInstants()
      .filter(i -> i.getTimestamp().equals(instantTime))
      .firstInstant();
  // Guard clause: only the metadata read below can actually throw IOException.
  if (!hoodieInstant.isPresent()) {
    return Option.empty();
  }
  try {
    return getCommitMeta(hoodieInstant.get());
  } catch (IOException io) {
    throw new HoodieIOException("Unable to read metadata for instant " + hoodieInstant.get(), io);
  }
}
Use of org.apache.hudi.exception.HoodieIOException in the Apache Hudi project.
Class SchemaTestUtil, method toRecords.
/**
 * Reads a slice of the sample data file, decoding each JSON line with the writer schema and
 * projecting it onto the reader schema.
 */
private static <T extends IndexedRecord> List<T> toRecords(Schema writerSchema, Schema readerSchema, int from, int limit) throws IOException, URISyntaxException {
  GenericDatumReader<T> datumReader = new GenericDatumReader<>(writerSchema, readerSchema);
  Path dataPath = initializeSampleDataPath();
  // try-with-resources closes the line stream even when decoding fails.
  try (Stream<String> jsonLines = Files.lines(dataPath)) {
    return jsonLines
        .skip(from)
        .limit(limit)
        .map(json -> {
          try {
            return datumReader.read(null, DecoderFactory.get().jsonDecoder(writerSchema, json));
          } catch (IOException e) {
            // Wrap the checked IOException so it can escape the lambda.
            throw new HoodieIOException("Could not read data from " + RESOURCE_SAMPLE_DATA, e);
          }
        })
        .collect(Collectors.toList());
  } catch (IOException e) {
    throw new HoodieIOException("Could not read data from " + RESOURCE_SAMPLE_DATA, e);
  }
}
Use of org.apache.hudi.exception.HoodieIOException in the Apache Hudi project.
Class HoodieCommonTestHarness, method initPath.
/**
 * Initializes {@code basePath} to a freshly created "dataset" directory under the test's
 * temporary directory.
 */
protected void initPath() {
  try {
    // Files.createDirectories returns the created path, so assign in one expression.
    this.basePath = java.nio.file.Files.createDirectories(tempDir.resolve("dataset")).toString();
  } catch (IOException ioe) {
    throw new HoodieIOException(ioe.getMessage(), ioe);
  }
}
Aggregations