Use of org.apache.hudi.timeline.service.TimelineService in project hudi by apache.
From the class TestRemoteHoodieTableFileSystemView, method getFileSystemView:
protected SyncableFileSystemView getFileSystemView(HoodieTimeline timeline) {
  FileSystemViewStorageConfig sConf = FileSystemViewStorageConfig.newBuilder()
      .withStorageType(FileSystemViewStorageType.SPILLABLE_DISK).build();
  HoodieMetadataConfig metadataConfig = HoodieMetadataConfig.newBuilder().build();
  HoodieCommonConfig commonConfig = HoodieCommonConfig.newBuilder().build();
  HoodieLocalEngineContext localEngineContext = new HoodieLocalEngineContext(metaClient.getHadoopConf());
  try {
    server = new TimelineService(localEngineContext, new Configuration(),
        TimelineService.Config.builder().serverPort(0).build(),
        FileSystem.get(new Configuration()),
        FileSystemViewManager.createViewManager(localEngineContext, metadataConfig, sConf, commonConfig));
    server.startService();
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
  LOG.info("Connecting to Timeline Server :" + server.getServerPort());
  view = new RemoteHoodieTableFileSystemView("localhost", server.getServerPort(), metaClient);
  return view;
}
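The matching teardown is not part of this snippet. A minimal sketch of what such a test would typically do afterwards, assuming TimelineService#close is available in this Hudi version (the @AfterEach method name is illustrative):

@AfterEach
public void tearDownTimelineServer() {
  // Assumption: TimelineService#close stops the embedded server started in getFileSystemView().
  if (server != null) {
    server.close();
  }
}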
Use of org.apache.hudi.timeline.service.TimelineService in project hudi by apache.
From the class EmbeddedTimelineService, method startServer:
public void startServer() throws IOException {
  TimelineService.Config.Builder timelineServiceConfBuilder = TimelineService.Config.builder()
      .serverPort(writeConfig.getEmbeddedTimelineServerPort())
      .numThreads(writeConfig.getEmbeddedTimelineServerThreads())
      .compress(writeConfig.getEmbeddedTimelineServerCompressOutput())
      .async(writeConfig.getEmbeddedTimelineServerUseAsync())
      .refreshTimelineBasedOnLatestCommit(writeConfig.isRefreshTimelineServerBasedOnLatestCommit());
  // Enable marker requests only if timeline-server-based markers are used.
  if (writeConfig.getMarkersType() == MarkerType.TIMELINE_SERVER_BASED) {
    timelineServiceConfBuilder.enableMarkerRequests(true)
        .markerBatchNumThreads(writeConfig.getMarkersTimelineServerBasedBatchNumThreads())
        .markerBatchIntervalMs(writeConfig.getMarkersTimelineServerBasedBatchIntervalMs())
        .markerParallelism(writeConfig.getMarkersDeleteParallelism());
  }
  server = new TimelineService(context, hadoopConf.newCopy(), timelineServiceConfBuilder.build(),
      FSUtils.getFs(basePath, hadoopConf.newCopy()), viewManager);
  serverPort = server.startService();
  LOG.info("Started embedded timeline server at " + hostAddr + ":" + serverPort);
}
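For reference, the same TimelineService.Config.Builder can be driven with literal values; a minimal sketch using only the builder methods shown above (the numbers are illustrative, not Hudi defaults):

// Illustrative values only; production code derives these from HoodieWriteConfig as in startServer().
TimelineService.Config timelineConfig = TimelineService.Config.builder()
    .serverPort(0)                 // 0 lets the server bind any free port
    .numThreads(16)
    .compress(true)
    .async(false)
    .refreshTimelineBasedOnLatestCommit(true)
    .enableMarkerRequests(true)    // needed when markers are TIMELINE_SERVER_BASED
    .markerBatchNumThreads(20)
    .markerBatchIntervalMs(50)
    .markerParallelism(100)
    .build();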
Use of org.apache.hudi.timeline.service.TimelineService in project hudi by apache.
From the class HoodieClientTestUtils, method initTimelineService:
/**
* Initializes timeline service based on the write config.
*
* @param context {@link HoodieEngineContext} instance to use.
* @param basePath Base path of the table.
* @param timelineServicePort Port number to use for timeline service.
* @return started {@link TimelineService} instance.
*/
public static TimelineService initTimelineService(HoodieEngineContext context, String basePath, int timelineServicePort) {
  try {
    HoodieWriteConfig config = HoodieWriteConfig.newBuilder().withPath(basePath)
        .withFileSystemViewConfig(
            FileSystemViewStorageConfig.newBuilder().withRemoteServerPort(timelineServicePort).build())
        .build();
    TimelineService timelineService = new TimelineService(context, new Configuration(),
        TimelineService.Config.builder()
            .enableMarkerRequests(true)
            .serverPort(config.getViewStorageConfig().getRemoteViewServerPort())
            .build(),
        FileSystem.get(new Configuration()),
        FileSystemViewManager.createViewManager(
            context, config.getMetadataConfig(), config.getViewStorageConfig(), config.getCommonConfig()));
    timelineService.startService();
    LOG.info("Timeline service server port: " + timelineServicePort);
    return timelineService;
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
}
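A hedged usage sketch of this helper from a test, assuming a HoodieLocalEngineContext is acceptable as the engine context and that TimelineService#close is the right way to stop the server (the base path and port below are placeholders):

// Hypothetical test usage; the path and port are placeholders, not Hudi defaults.
HoodieEngineContext engineContext = new HoodieLocalEngineContext(new Configuration());
TimelineService service = HoodieClientTestUtils.initTimelineService(engineContext, "/tmp/hoodie_test_table", 26754);
try {
  int port = service.getServerPort();
  // ... exercise the remote file system view against this port ...
} finally {
  service.close(); // assumption: close() shuts down the started server
}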
Use of org.apache.hudi.timeline.service.TimelineService in project hudi by apache.
From the class TestTimelineServerBasedWriteMarkers, method setup:
@BeforeEach
public void setup() throws IOException {
  initPath();
  initMetaClient();
  this.jsc = new JavaSparkContext(HoodieClientTestUtils.getSparkConfForTest(TestTimelineServerBasedWriteMarkers.class.getName()));
  this.context = new HoodieSparkEngineContext(jsc);
  this.fs = FSUtils.getFs(metaClient.getBasePath(), metaClient.getHadoopConf());
  this.markerFolderPath = new Path(metaClient.getMarkerFolderPath("000"));
  FileSystemViewStorageConfig storageConf = FileSystemViewStorageConfig.newBuilder()
      .withStorageType(FileSystemViewStorageType.SPILLABLE_DISK).build();
  HoodieMetadataConfig metadataConfig = HoodieMetadataConfig.newBuilder().build();
  HoodieLocalEngineContext localEngineContext = new HoodieLocalEngineContext(metaClient.getHadoopConf());
  try {
    timelineService = new TimelineService(localEngineContext, new Configuration(),
        TimelineService.Config.builder().serverPort(0).enableMarkerRequests(true).build(),
        FileSystem.get(new Configuration()),
        FileSystemViewManager.createViewManager(localEngineContext, metadataConfig, storageConf,
            HoodieCommonConfig.newBuilder().build()));
    timelineService.startService();
  } catch (Exception ex) {
    throw new RuntimeException(ex);
  }
  this.writeMarkers = new TimelineServerBasedWriteMarkers(metaClient.getBasePath(), markerFolderPath.toString(),
      "000", "localhost", timelineService.getServerPort(), 300);
}
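The corresponding cleanup is not shown here; a minimal sketch of what an @AfterEach could do for this setup, assuming TimelineService#close exists and the local Spark context should be stopped (the method name is illustrative):

@AfterEach
public void cleanup() {
  if (timelineService != null) {
    timelineService.close(); // stop the marker-handling timeline server
  }
  if (jsc != null) {
    jsc.stop();              // release the local Spark context created in setup()
  }
}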