Use of org.apache.hudi.client.common.HoodieJavaEngineContext in project rocketmq-externals by apache.
The class Updater, method schemaEvolution:
private void schemaEvolution(Schema newSchema, Schema oldSchema) {
    // Nothing to do if the schema is unchanged.
    if (null != oldSchema && oldSchema.toString().equals(newSchema.toString())) {
        return;
    }
    log.info("Schema changed. New schema is " + newSchema.toString());
    // Rebuild the write config using the schema held by the connector config.
    this.cfg = HoodieWriteConfig.newBuilder()
            .withPath(hudiConnectConfig.getTablePath())
            .withSchema(this.hudiConnectConfig.schema.toString())
            .withEngineType(EngineType.JAVA)
            .withParallelism(hudiConnectConfig.getInsertShuffleParallelism(), hudiConnectConfig.getUpsertShuffleParallelism())
            .withDeleteParallelism(hudiConnectConfig.getDeleteParallelism())
            .forTable(hudiConnectConfig.getTableName())
            .withIndexConfig(HoodieIndexConfig.newBuilder().withIndexType(HoodieIndex.IndexType.INMEMORY).build())
            .withCompactionConfig(HoodieCompactionConfig.newBuilder().archiveCommitsWith(20, 30).build())
            .build();
    // Close the old client and recreate it so it picks up the new config.
    this.hudiWriteClient.close();
    Configuration hadoopConf = new Configuration();
    hadoopConf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, false);
    hadoopConf.set(AvroReadSupport.AVRO_DATA_SUPPLIER, GenericDataSupplier.class.getName());
    this.hudiWriteClient = new HoodieJavaWriteClient<HoodieAvroPayload>(new HoodieJavaEngineContext(hadoopConf), cfg);
}
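
Once schemaEvolution has rebuilt the config and recreated the client, the connector can continue writing with it. Below is a minimal sketch (not from the connector source) of driving a HoodieJavaWriteClient created from a HoodieJavaEngineContext; it assumes the caller already holds a batch of HoodieRecord<HoodieAvroPayload>, and the class and helper names (HudiWriteSketch, writeBatch) are hypothetical.

import java.util.List;

import org.apache.hudi.client.HoodieJavaWriteClient;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.model.HoodieAvroPayload;
import org.apache.hudi.common.model.HoodieRecord;

public class HudiWriteSketch {

    // Hypothetical helper: write one batch with the (possibly re-created) client.
    static void writeBatch(HoodieJavaWriteClient<HoodieAvroPayload> client,
                           List<HoodieRecord<HoodieAvroPayload>> records) {
        // Ask the client for a new instant time on the Hudi timeline.
        String instantTime = client.startCommit();
        // Upsert the batch under that instant and inspect the returned write statuses.
        List<WriteStatus> statuses = client.upsert(records, instantTime);
        for (WriteStatus status : statuses) {
            if (status.hasErrors()) {
                throw new RuntimeException("Write failed for instant " + instantTime
                        + ": " + status.getErrors());
            }
        }
    }
}

Because schemaEvolution swaps in a fresh client rather than mutating the old one, callers such as this sketch only need to hold the current client reference; the old client is closed before the replacement is constructed.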