Usage of org.apache.druid.indexer.updater.MetadataStorageUpdaterJobSpec in the druid-io/druid project.
From class CliInternalHadoopIndexer, method run().
@Override
public void run()
{
  try {
    // Wire up the runtime first; the injector supplies metadata-storage services below.
    Injector injector = makeInjector();
    config = getHadoopDruidIndexerConfig();

    // The metadata storage type comes from the ingestion spec's ioConfig, so it must
    // override whatever the runtime properties say before any storage module is used.
    MetadataStorageUpdaterJobSpec updaterSpec = config.getSchema().getIOConfig().getMetadataUpdateSpec();
    Preconditions.checkNotNull(updaterSpec.getType(), "type in metadataUpdateSpec must not be null");
    injector.getInstance(Properties.class).setProperty("druid.metadata.storage.type", updaterSpec.getType());

    // Resolve any datasource-based path spec into a concrete segment list using the
    // metadata store before the Hadoop jobs are built.
    HadoopIngestionSpec.updateSegmentListIfDatasourcePathSpecIsUsed(
        config.getSchema(),
        HadoopDruidIndexerConfig.JSON_MAPPER,
        new MetadataStoreBasedUsedSegmentsRetriever(injector.getInstance(IndexerMetadataStorageCoordinator.class))
    );

    // Run configuration determination first, then the indexing job itself.
    HadoopDruidIndexerJob indexingJob =
        new HadoopDruidIndexerJob(config, injector.getInstance(MetadataStorageUpdaterJobHandler.class));
    List<Jobby> jobList = new ArrayList<>();
    jobList.add(new HadoopDruidDetermineConfigurationJob(config));
    jobList.add(indexingJob);
    boolean succeeded = JobHelper.runJobs(jobList);

    // Finalize published segment files, then clean up intermediate output if the run succeeded.
    JobHelper.renameIndexFilesForSegments(config.getSchema(), indexingJob.getPublishedSegmentAndIndexZipFilePaths());
    JobHelper.maybeDeleteIntermediatePath(succeeded, config.getSchema());
  }
  catch (Exception e) {
    // CLI boundary: surface any failure as an unchecked exception with its cause preserved.
    throw new RuntimeException(e);
  }
}
Usage of org.apache.druid.indexer.updater.MetadataStorageUpdaterJobSpec in the druid-io/druid project.
From class HadoopIngestionSpecTest, method testDbUpdaterJobSpec().
@Test
public void testDbUpdaterJobSpec()
{
  // Round-trip a spec containing a "db" metadataUpdateSpec through JSON and verify
  // that the connector configuration is deserialized field-for-field.
  final HadoopIngestionSpec schema = jsonReadWriteRead(
      "{\n" + " \"ioConfig\": {\n" + " \"type\": \"hadoop\",\n" + " \"metadataUpdateSpec\": {\n" + " \"type\": \"db\",\n" + " \"connectURI\": \"jdbc:mysql://localhost/druid\",\n" + " \"user\": \"rofl\",\n" + " \"password\": \"p4ssw0rd\",\n" + " \"segmentTable\": \"segments\"\n" + " }\n" + " }\n" + "}",
      HadoopIngestionSpec.class
  );

  final MetadataStorageUpdaterJobSpec updaterSpec = schema.getIOConfig().getMetadataUpdateSpec();
  final MetadataStorageConnectorConfig connConfig = updaterSpec.get();

  Assert.assertEquals("segments", updaterSpec.getSegmentTable());
  Assert.assertEquals("jdbc:mysql://localhost/druid", connConfig.getConnectURI());
  Assert.assertEquals("rofl", connConfig.getUser());
  Assert.assertEquals("p4ssw0rd", connConfig.getPassword());
}
Usage of org.apache.druid.indexer.updater.MetadataStorageUpdaterJobSpec in the druid-io/druid project.
From class CliInternalHadoopIndexer, method getModules().
@Override
protected List<? extends Module> getModules()
{
  return ImmutableList.of(
      binder -> {
        // Identity constants for this CLI tool; it serves no HTTP traffic,
        // so the plaintext port is 0 and TLS is disabled (-1).
        binder.bindConstant().annotatedWith(Names.named("serviceName")).to("druid/internal-hadoop-indexer");
        binder.bindConstant().annotatedWith(Names.named("servicePort")).to(0);
        binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(-1);

        // The metadata storage connection settings are sourced from the ingestion
        // spec's ioConfig rather than from runtime properties.
        MetadataStorageUpdaterJobSpec updaterSpec =
            getHadoopDruidIndexerConfig().getSchema().getIOConfig().getMetadataUpdateSpec();
        binder.bind(new TypeLiteral<Supplier<MetadataStorageConnectorConfig>>() {}).toInstance(updaterSpec);
        binder.bind(MetadataStorageTablesConfig.class).toInstance(updaterSpec.getMetadataStorageTablesConfig());
        binder.bind(IndexerMetadataStorageCoordinator.class)
              .to(IndexerSQLMetadataStorageCoordinator.class)
              .in(LazySingleton.class);
      }
  );
}
Aggregations