Use of io.druid.indexer.updater.MetadataStorageUpdaterJobSpec in the project druid by druid-io.
From the class HadoopIngestionSpecTest, method testDbUpdaterJobSpec:
@Test
public void testDbUpdaterJobSpec() throws Exception
{
  // Round-trip a hadoop ioConfig containing a "db" metadataUpdateSpec through
  // JSON (de)serialization and verify the spec fields survive intact.
  final HadoopIngestionSpec ingestionSpec = jsonReadWriteRead(
      "{\n"
      + " \"ioConfig\": {\n"
      + " \"type\": \"hadoop\",\n"
      + " \"metadataUpdateSpec\": {\n"
      + " \"type\": \"db\",\n"
      + " \"connectURI\": \"jdbc:mysql://localhost/druid\",\n"
      + " \"user\": \"rofl\",\n"
      + " \"password\": \"p4ssw0rd\",\n"
      + " \"segmentTable\": \"segments\"\n"
      + " }\n"
      + " }\n"
      + "}",
      HadoopIngestionSpec.class
  );

  final MetadataStorageUpdaterJobSpec updaterSpec = ingestionSpec.getIOConfig().getMetadataUpdateSpec();
  final MetadataStorageConnectorConfig connector = updaterSpec.get();

  // Segment table lives on the updater spec; connection details on the connector config.
  Assert.assertEquals("segments", updaterSpec.getSegmentTable());
  Assert.assertEquals("jdbc:mysql://localhost/druid", connector.getConnectURI());
  Assert.assertEquals("rofl", connector.getUser());
  Assert.assertEquals("p4ssw0rd", connector.getPassword());
}
Use of io.druid.indexer.updater.MetadataStorageUpdaterJobSpec in the project druid by druid-io.
From the class CliInternalHadoopIndexer, method run:
@Override
public void run()
{
  // Builds the indexer config, forces the metadata-storage binding to match the
  // type declared in the ingestion spec's metadataUpdateSpec, resolves the used
  // segment list for datasource path specs, then runs the determine-configuration
  // and indexer Hadoop jobs in order.
  try {
    Injector injector = makeInjector();
    config = getHadoopDruidIndexerConfig();

    MetadataStorageUpdaterJobSpec metadataSpec = config.getSchema().getIOConfig().getMetadataUpdateSpec();
    // override metadata storage type based on HadoopIOConfig
    Preconditions.checkNotNull(metadataSpec.getType(), "type in metadataUpdateSpec must not be null");
    injector.getInstance(Properties.class).setProperty("druid.metadata.storage.type", metadataSpec.getType());

    // Rewrite the schema so a datasource-based path spec sees the currently-used
    // segments from the metadata store before jobs are constructed.
    config = HadoopDruidIndexerConfig.fromSpec(
        HadoopIngestionSpec.updateSegmentListIfDatasourcePathSpecIsUsed(
            config.getSchema(),
            HadoopDruidIndexerConfig.JSON_MAPPER,
            new MetadataStoreBasedUsedSegmentLister(injector.getInstance(IndexerMetadataStorageCoordinator.class))
        )
    );

    List<Jobby> jobs = Lists.newArrayList();
    jobs.add(new HadoopDruidDetermineConfigurationJob(config));
    jobs.add(new HadoopDruidIndexerJob(config, injector.getInstance(MetadataStorageUpdaterJobHandler.class)));
    JobHelper.runJobs(jobs, config);
  }
  catch (Exception e) {
    // Equivalent of the deprecated Throwables.propagate(e): rethrow
    // RuntimeException/Error unchanged, wrap checked exceptions, always
    // preserving the original cause.
    Throwables.propagateIfPossible(e);
    throw new RuntimeException(e);
  }
}
Aggregations