Use of org.apache.hudi.client.BaseHoodieWriteClient in project hudi by apache.
The class TestRollbacksCommand, method init().
@BeforeEach
public void init() throws Exception {
  String tableName = tableName();
  String tablePath = tablePath(tableName);
  new TableCommand().createTable(
      tablePath, tableName, HoodieTableType.MERGE_ON_READ.name(), "",
      TimelineLayoutVersion.VERSION_1, "org.apache.hudi.common.model.HoodieAvroPayload");
  HoodieTableMetaClient metaClient = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
  // Create some commit files and base files
  Map<String, String> partitionAndFileId = new HashMap<String, String>() {
    {
      put(DEFAULT_FIRST_PARTITION_PATH, "file-1");
      put(DEFAULT_SECOND_PARTITION_PATH, "file-2");
      put(DEFAULT_THIRD_PARTITION_PATH, "file-3");
    }
  };
  HoodieTestTable.of(metaClient)
      .withPartitionMetaFiles(DEFAULT_PARTITION_PATHS)
      .addCommit("100").withBaseFilesInPartitions(partitionAndFileId)
      .addCommit("101").withBaseFilesInPartitions(partitionAndFileId)
      .addInflightCommit("102").withBaseFilesInPartitions(partitionAndFileId);
  // Generate two rollbacks
  HoodieWriteConfig config = HoodieWriteConfig.newBuilder()
      .withPath(tablePath)
      .withRollbackUsingMarkers(false)
      .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.INMEMORY).build())
      .build();
  try (BaseHoodieWriteClient client = new SparkRDDWriteClient(context(), config)) {
    // Roll back the inflight commit "102", then the completed commit "101"
    client.rollback("102");
    client.rollback("101");
  }
}
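
As a hedged follow-up (not part of the original TestRollbacksCommand code), the lines below sketch how the two rollbacks produced in init() could be asserted against the table's timeline; they assume the standard Hudi timeline API (getActiveTimeline().getRollbackTimeline()) and a JUnit 5 test class.

// Sketch only: check that the two rollbacks performed above appear as
// completed rollback instants on the reloaded timeline.
// Assumes imports of org.apache.hudi.common.table.timeline.HoodieTimeline
// and a static import of org.junit.jupiter.api.Assertions.assertEquals.
HoodieTableMetaClient reloaded = HoodieTableMetaClient.reload(HoodieCLI.getTableMetaClient());
HoodieTimeline rollbackTimeline = reloaded.getActiveTimeline().getRollbackTimeline().filterCompletedInstants();
assertEquals(2, rollbackTimeline.countInstants());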