Use of org.apache.hudi.io.HoodieCreateHandle in project hudi by apache.
From the class TestCopyOnWriteActionExecutor, method testMakeNewPath:
@Test
public void testMakeNewPath() {
  String fileName = UUID.randomUUID().toString();
  String partitionPath = "2016/05/04";
  String instantTime = makeNewCommitTime();
  HoodieWriteConfig config = makeHoodieClientConfig();
  metaClient = HoodieTableMetaClient.reload(metaClient);
  HoodieTable table = HoodieSparkTable.create(config, context, metaClient);

  Pair<Path, String> newPathWithWriteToken = jsc.parallelize(Arrays.asList(1)).map(x -> {
    HoodieRecord record = mock(HoodieRecord.class);
    when(record.getPartitionPath()).thenReturn(partitionPath);
    // Build the write token from the Spark task's partition id, stage id and attempt id.
    String writeToken = FSUtils.makeWriteToken(TaskContext.getPartitionId(), TaskContext.get().stageId(),
        TaskContext.get().taskAttemptId());
    HoodieCreateHandle io = new HoodieCreateHandle(config, instantTime, table, partitionPath, fileName, supplier);
    return Pair.of(io.makeNewPath(record.getPartitionPath()), writeToken);
  }).collect().get(0);

  // The new path should be <basePath>/<partitionPath>/<data file name built from the instant time, write token and file id>.
  assertEquals(newPathWithWriteToken.getKey().toString(), Paths.get(this.basePath, partitionPath,
      FSUtils.makeDataFileName(instantTime, newPathWithWriteToken.getRight(), fileName)).toString());
}
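For reference, here is a minimal sketch of how the expected path in the assertion above is assembled, using only the helpers that already appear in the test (FSUtils.makeWriteToken, FSUtils.makeDataFileName, Paths.get); the concrete values are hypothetical placeholders rather than the test's fixtures.

// Hedged sketch: building the expected base-file path checked by the assertion above.
// All values are placeholders; in the test they come from the fixture and the Spark task context.
String basePath = "/tmp/hoodie-table";               // stands in for this.basePath
String partitionPath = "2016/05/04";
String instantTime = "100";                          // stands in for makeNewCommitTime()
String writeToken = FSUtils.makeWriteToken(0, 0, 0); // partition id, stage id, attempt id
String fileName = UUID.randomUUID().toString();      // used as the file id
String expectedPath = Paths.get(basePath, partitionPath,
    FSUtils.makeDataFileName(instantTime, writeToken, fileName)).toString();

In other words, the path returned by HoodieCreateHandle.makeNewPath is expected to be the table base path, the record's partition path, and a data file name derived from the instant time, the write token and the file id.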
Use of org.apache.hudi.io.HoodieCreateHandle in project hudi by apache.
From the class TestUpdateSchemaEvolution, method prepareFirstRecordCommit:
private WriteStatus prepareFirstRecordCommit(List<String> recordsStrs) throws IOException {
  // Create a bunch of records with an old version of the schema
  final HoodieWriteConfig config = makeHoodieClientConfig("/exampleSchema.avsc");
  final HoodieSparkTable table = HoodieSparkTable.create(config, context);
  final List<WriteStatus> statuses = jsc.parallelize(Arrays.asList(1)).map(x -> {
    List<HoodieRecord> insertRecords = new ArrayList<>();
    for (String recordStr : recordsStrs) {
      RawTripTestPayload rowChange = new RawTripTestPayload(recordStr);
      insertRecords.add(new HoodieAvroRecord(new HoodieKey(rowChange.getRowKey(), rowChange.getPartitionPath()), rowChange));
    }
    Map<String, HoodieRecord> insertRecordMap = insertRecords.stream()
        .collect(Collectors.toMap(r -> r.getRecordKey(), Function.identity()));
    // Write all records through a single create handle and return the resulting WriteStatus.
    HoodieCreateHandle<?, ?, ?, ?> createHandle =
        new HoodieCreateHandle(config, "100", table, insertRecords.get(0).getPartitionPath(), "f1-0", insertRecordMap, supplier);
    createHandle.write();
    return createHandle.close().get(0);
  }).collect();
  // Mark instant "100" as committed by creating the commit file on the timeline.
  final Path commitFile = new Path(config.getBasePath() + "/.hoodie/" + HoodieTimeline.makeCommitFileName("100"));
  FSUtils.getFs(basePath, HoodieTestUtils.getDefaultHadoopConf()).create(commitFile);
  return statuses.get(0);
}
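Stripped of the Spark scaffolding, the create-handle usage above reduces to the following pattern; this is a hedged sketch that reuses only the calls shown in the snippet, and assumes config, table, partitionPath, recordMap and supplier come from the same test fixtures.

// Hedged sketch of the HoodieCreateHandle write pattern used in prepareFirstRecordCommit.
// "100" is the instant time and "f1-0" the file id, as in the snippet above.
HoodieCreateHandle<?, ?, ?, ?> handle =
    new HoodieCreateHandle(config, "100", table, partitionPath, "f1-0", recordMap, supplier);
handle.write();                             // writes every record held in recordMap
WriteStatus status = handle.close().get(0); // close() finalizes the file and returns the write statuses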
Use of org.apache.hudi.io.HoodieCreateHandle in project hudi by apache.
From the class TestJavaCopyOnWriteActionExecutor, method testMakeNewPath:
@Test
public void testMakeNewPath() {
  String fileName = UUID.randomUUID().toString();
  String partitionPath = "2016/05/04";
  String instantTime = makeNewCommitTime();
  HoodieWriteConfig config = makeHoodieClientConfig();
  metaClient = HoodieTableMetaClient.reload(metaClient);
  HoodieTable table = HoodieJavaTable.create(config, context, metaClient);

  Pair<Path, String> newPathWithWriteToken = Arrays.asList(1).stream().map(x -> {
    HoodieRecord record = mock(HoodieRecord.class);
    when(record.getPartitionPath()).thenReturn(partitionPath);
    // The Java engine builds the write token from the engine context's task context supplier
    // instead of Spark's TaskContext.
    String writeToken = FSUtils.makeWriteToken(context.getTaskContextSupplier().getPartitionIdSupplier().get(),
        context.getTaskContextSupplier().getStageIdSupplier().get(),
        context.getTaskContextSupplier().getAttemptIdSupplier().get());
    HoodieCreateHandle io = new HoodieCreateHandle(config, instantTime, table, partitionPath, fileName,
        context.getTaskContextSupplier());
    return Pair.of(io.makeNewPath(record.getPartitionPath()), writeToken);
  }).collect(Collectors.toList()).get(0);

  assertEquals(newPathWithWriteToken.getKey().toString(), Paths.get(this.basePath, partitionPath,
      FSUtils.makeDataFileName(instantTime, newPathWithWriteToken.getRight(), fileName)).toString());
}