Use of org.apache.druid.metadata.SQLMetadataConnector in project druid by druid-io.
The class ExportMetadata, method run:
@Override
public void run()
{
  InjectableValues.Std injectableValues = new InjectableValues.Std();
  injectableValues.addValue(ObjectMapper.class, JSON_MAPPER);
  injectableValues.addValue(PruneSpecsHolder.class, PruneSpecsHolder.DEFAULT);
  JSON_MAPPER.setInjectableValues(injectableValues);

  // The output locations are mutually exclusive: at most one of s3Bucket,
  // hadoopStorageDirectory, and newLocalPath may be set. The two checks
  // together cover every conflicting pair.
  if (hadoopStorageDirectory != null && newLocalPath != null) {
    throw new IllegalArgumentException(
        "Only one of s3Bucket, hadoopStorageDirectory, and newLocalPath can be set."
    );
  }
  if (s3Bucket != null && (hadoopStorageDirectory != null || newLocalPath != null)) {
    throw new IllegalArgumentException(
        "Only one of s3Bucket, hadoopStorageDirectory, and newLocalPath can be set."
    );
  }
  if (s3Bucket != null && s3baseKey == null) {
    throw new IllegalArgumentException("s3baseKey must be set if s3Bucket is set.");
  }

  final Injector injector = makeInjector();
  SQLMetadataConnector dbConnector = injector.getInstance(SQLMetadataConnector.class);
  MetadataStorageTablesConfig metadataStorageTablesConfig =
      injector.getInstance(MetadataStorageTablesConfig.class);

  // We export a raw CSV first, and then apply some conversions for easier imports:
  // - boolean strings are rewritten as 1 and 0
  // - hexadecimal BLOB columns are rewritten with rewriteHexPayloadAsEscapedJson()
  log.info("Exporting datasource table: " + metadataStorageTablesConfig.getDataSourceTable());
  exportTable(dbConnector, metadataStorageTablesConfig.getDataSourceTable(), true);
  rewriteDatasourceExport(metadataStorageTablesConfig.getDataSourceTable());

  log.info("Exporting segments table: " + metadataStorageTablesConfig.getSegmentsTable());
  exportTable(dbConnector, metadataStorageTablesConfig.getSegmentsTable(), true);
  rewriteSegmentsExport(metadataStorageTablesConfig.getSegmentsTable());

  log.info("Exporting rules table: " + metadataStorageTablesConfig.getRulesTable());
  exportTable(dbConnector, metadataStorageTablesConfig.getRulesTable(), true);
  rewriteRulesExport(metadataStorageTablesConfig.getRulesTable());

  log.info("Exporting config table: " + metadataStorageTablesConfig.getConfigTable());
  exportTable(dbConnector, metadataStorageTablesConfig.getConfigTable(), true);
  rewriteConfigExport(metadataStorageTablesConfig.getConfigTable());

  log.info("Exporting supervisor table: " + metadataStorageTablesConfig.getSupervisorTable());
  exportTable(dbConnector, metadataStorageTablesConfig.getSupervisorTable(), true);
  rewriteSupervisorExport(metadataStorageTablesConfig.getSupervisorTable());
}
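The rewrite helpers called above are defined elsewhere in ExportMetadata. As a rough illustration of the hex-BLOB conversion the comment describes, decoding a hex-encoded payload column into CSV-embeddable JSON text could look like the sketch below; the class and helper names, the CSV quote-doubling rule, and the sample value are assumptions for illustration, not Druid's actual rewriteHexPayloadAsEscapedJson() implementation.

import java.nio.charset.StandardCharsets;

public final class HexPayloadSketch
{
  // Hypothetical helper: decodes a hex-encoded BLOB (e.g. a segment payload)
  // to its UTF-8 JSON text, then escapes it for embedding in a CSV field by
  // doubling inner double quotes and wrapping the value in quotes.
  static String decodeHexToEscapedJson(String hex)
  {
    byte[] bytes = new byte[hex.length() / 2];
    for (int i = 0; i < bytes.length; i++) {
      bytes[i] = (byte) Integer.parseInt(hex.substring(2 * i, 2 * i + 2), 16);
    }
    String json = new String(bytes, StandardCharsets.UTF_8);
    return "\"" + json.replace("\"", "\"\"") + "\"";
  }

  public static void main(String[] args)
  {
    // Hex encoding of {"id":1}; prints "{""id"":1}"
    System.out.println(decodeHexToEscapedJson("7B226964223A317D"));
  }
}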
Use of org.apache.druid.metadata.SQLMetadataConnector in project druid by druid-io.
The class IngestionTestBase, method setUpIngestionTestBase:
@Before
public void setUpIngestionTestBase() throws IOException
{
  EmittingLogger.registerEmitter(new NoopServiceEmitter());
  temporaryFolder.create();

  final SQLMetadataConnector connector = derbyConnectorRule.getConnector();
  connector.createTaskTables();
  connector.createSegmentTable();

  taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
  storageCoordinator = new IndexerSQLMetadataStorageCoordinator(
      objectMapper,
      derbyConnectorRule.metadataTablesConfigSupplier().get(),
      derbyConnectorRule.getConnector()
  );
  segmentsMetadataManager = new SqlSegmentsMetadataManager(
      objectMapper,
      SegmentsMetadataManagerConfig::new,
      derbyConnectorRule.metadataTablesConfigSupplier(),
      derbyConnectorRule.getConnector()
  );
  lockbox = new TaskLockbox(taskStorage, storageCoordinator);
  segmentCacheManagerFactory = new SegmentCacheManagerFactory(getObjectMapper());
}
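The setup above relies on temporaryFolder and derbyConnectorRule being declared as JUnit rules on the test class. A minimal sketch of that wiring, assuming TestDerbyConnector.DerbyConnectorRule as the embedded-Derby rule used in Druid's test code (Druid's actual IngestionTestBase may declare these fields differently):

import org.apache.druid.metadata.TestDerbyConnector;
import org.junit.Rule;
import org.junit.rules.TemporaryFolder;

public abstract class IngestionTestBaseSketch
{
  // Scratch directory, created explicitly in setUpIngestionTestBase().
  @Rule
  public final TemporaryFolder temporaryFolder = new TemporaryFolder();

  // Provides an embedded Derby metadata store per test; getConnector()
  // returns the SQLMetadataConnector used to create the task/segment tables.
  @Rule
  public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule =
      new TestDerbyConnector.DerbyConnectorRule();
}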