Usage of io.druid.metadata.MetadataStorageConnectorConfig in the project druid (by druid-io),
in class JdbcDataFetcherTest, method testSerDesr:
@Test
public void testSerDesr() throws IOException {
    // Build a fetcher with a default connector config and fixed table/column names.
    final JdbcDataFetcher fetcher =
        new JdbcDataFetcher(new MetadataStorageConnectorConfig(), "table", "keyColumn", "ValueColumn", 100);
    final DefaultObjectMapper objectMapper = new DefaultObjectMapper();

    // Round-trip through JSON: serialize, then read back via the DataFetcher interface.
    final String serialized = objectMapper.writeValueAsString(fetcher);
    final DataFetcher deserialized = objectMapper.reader(DataFetcher.class).readValue(serialized);

    // Equality after the round trip proves the Jackson bindings are symmetric.
    Assert.assertEquals(fetcher, deserialized);
}
Usage of io.druid.metadata.MetadataStorageConnectorConfig in the project druid (by druid-io),
in class TaskActionTestKit, method before:
@Override
public void before() {
// Test-kit setup: wire an in-memory task storage stack backed by an
// embedded Derby database. Construction order matters — each component
// below depends on the ones created before it.
// In-memory task storage with a 24-hour retention window.
taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(new Period("PT24H")));
// Lockbox coordinates task locks on top of the storage just created.
taskLockbox = new TaskLockbox(taskStorage);
// Embedded Derby connector using a default connector config; table names
// come from the externally supplied metadataStorageTablesConfig.
testDerbyConnector = new TestDerbyConnector(Suppliers.ofInstance(new MetadataStorageConnectorConfig()), Suppliers.ofInstance(metadataStorageTablesConfig));
// SQL-backed segment/metadata coordinator over the Derby connector.
metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(new TestUtils().getTestObjectMapper(), metadataStorageTablesConfig, testDerbyConnector);
// Toolbox bundles the pieces task actions need; the SupervisorManager is
// a mock because no test here exercises supervisor behavior.
taskActionToolbox = new TaskActionToolbox(taskLockbox, metadataStorageCoordinator, new NoopServiceEmitter(), EasyMock.createMock(SupervisorManager.class));
// Create every table the coordinator and task actions may touch, so tests
// can run against a fully formed (but empty) schema.
testDerbyConnector.createDataSourceTable();
testDerbyConnector.createPendingSegmentsTable();
testDerbyConnector.createSegmentTable();
testDerbyConnector.createRulesTable();
testDerbyConnector.createConfigTable();
testDerbyConnector.createTaskTables();
testDerbyConnector.createAuditTable();
}
Usage of io.druid.metadata.MetadataStorageConnectorConfig in the project druid (by druid-io),
in class HadoopIngestionSpecTest, method testDbUpdaterJobSpec:
@Test
public void testDbUpdaterJobSpec() throws Exception {
    // Hadoop ioConfig whose metadataUpdateSpec should surface as a connector config.
    final String json = "{\n" + " \"ioConfig\": {\n" + " \"type\": \"hadoop\",\n" + " \"metadataUpdateSpec\": {\n" + " \"type\": \"db\",\n" + " \"connectURI\": \"jdbc:mysql://localhost/druid\",\n" + " \"user\": \"rofl\",\n" + " \"password\": \"p4ssw0rd\",\n" + " \"segmentTable\": \"segments\"\n" + " }\n" + " }\n" + "}";

    // Round-trip the spec through JSON, then pull out the metadata update spec.
    final HadoopIngestionSpec ingestionSpec = jsonReadWriteRead(json, HadoopIngestionSpec.class);
    final MetadataStorageUpdaterJobSpec updaterSpec = ingestionSpec.getIOConfig().getMetadataUpdateSpec();
    final MetadataStorageConnectorConfig connectorConfig = updaterSpec.get();

    // Every field from the JSON must survive deserialization unchanged.
    Assert.assertEquals("segments", updaterSpec.getSegmentTable());
    Assert.assertEquals("jdbc:mysql://localhost/druid", connectorConfig.getConnectURI());
    Assert.assertEquals("rofl", connectorConfig.getUser());
    Assert.assertEquals("p4ssw0rd", connectorConfig.getPassword());
}
Usage of io.druid.metadata.MetadataStorageConnectorConfig in the project hive (by apache),
in class DruidStorageHandler, method getConnector:
/**
 * Lazily builds (and caches in {@code connector}) the SQL metadata connector for the
 * Druid metadata store, choosing the implementation from the configured DB type.
 * Supported types: mysql, postgresql, derby; anything else is an error.
 */
private SQLMetadataConnector getConnector() {
    // Reuse the previously constructed connector, if any.
    if (connector != null) {
        return connector;
    }

    // Read the connection settings from the Hive configuration.
    final String dbType = HiveConf.getVar(getConf(), HiveConf.ConfVars.DRUID_METADATA_DB_TYPE);
    final String username = HiveConf.getVar(getConf(), HiveConf.ConfVars.DRUID_METADATA_DB_USERNAME);
    final String password = HiveConf.getVar(getConf(), HiveConf.ConfVars.DRUID_METADATA_DB_PASSWORD);
    final String uri = HiveConf.getVar(getConf(), HiveConf.ConfVars.DRUID_METADATA_DB_URI);

    // Adapt the Hive-sourced settings to Druid's connector-config contract;
    // empty credentials are normalized to null as Druid expects.
    final Supplier<MetadataStorageConnectorConfig> connectorConfigSupplier = Suppliers.<MetadataStorageConnectorConfig>ofInstance(new MetadataStorageConnectorConfig() {

        @Override
        public String getConnectURI() {
            return uri;
        }

        @Override
        public String getUser() {
            return Strings.emptyToNull(username);
        }

        @Override
        public String getPassword() {
            return Strings.emptyToNull(password);
        }
    });

    // Pick the connector implementation for the configured database type.
    // Note: the tables config is fetched per branch, so an unknown type
    // never triggers that lookup.
    switch (dbType) {
        case "mysql":
            connector = new MySQLConnector(connectorConfigSupplier, Suppliers.ofInstance(getDruidMetadataStorageTablesConfig()));
            break;
        case "postgresql":
            connector = new PostgreSQLConnector(connectorConfigSupplier, Suppliers.ofInstance(getDruidMetadataStorageTablesConfig()));
            break;
        case "derby":
            connector = new DerbyConnector(new DerbyMetadataStorage(connectorConfigSupplier.get()), connectorConfigSupplier, Suppliers.ofInstance(getDruidMetadataStorageTablesConfig()));
            break;
        default:
            throw new IllegalStateException(String.format("Unknown metadata storage type [%s]", dbType));
    }
    return connector;
}
Aggregations