Example use of org.apache.druid.metadata.TestDerbyConnector in project druid (druid-io): the before() method of class TaskActionTestKit.
@Override
public void before() {
  // In-memory task storage retaining completed tasks for 24 hours.
  taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(new Period("PT24H")));
  testDerbyConnector = new TestDerbyConnector(
      Suppliers.ofInstance(new MetadataStorageConnectorConfig()),
      Suppliers.ofInstance(metadataStorageTablesConfig)
  );
  final ObjectMapper mapper = new TestUtils().getTestObjectMapper();
  // Cap SQL retries at 2 so retry/failure paths finish quickly under test.
  metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(
      mapper,
      metadataStorageTablesConfig,
      testDerbyConnector
  ) {
    @Override
    public int getSqlMetadataMaxRetry() {
      return 2;
    }
  };
  taskLockbox = new TaskLockbox(taskStorage, metadataStorageCoordinator);
  segmentsMetadataManager = new SqlSegmentsMetadataManager(
      mapper,
      Suppliers.ofInstance(new SegmentsMetadataManagerConfig()),
      Suppliers.ofInstance(metadataStorageTablesConfig),
      testDerbyConnector
  );
  taskActionToolbox = new TaskActionToolbox(
      taskLockbox,
      taskStorage,
      metadataStorageCoordinator,
      new NoopServiceEmitter(),
      EasyMock.createMock(SupervisorManager.class)
  );
  // Create every Derby table the task-action stack touches.
  testDerbyConnector.createDataSourceTable();
  testDerbyConnector.createPendingSegmentsTable();
  testDerbyConnector.createSegmentTable();
  testDerbyConnector.createRulesTable();
  testDerbyConnector.createConfigTable();
  testDerbyConnector.createTaskTables();
  testDerbyConnector.createAuditTable();
}
Example use of org.apache.druid.metadata.TestDerbyConnector in project druid (druid-io): the setUp() method of class AppenderatorDriverRealtimeIndexTaskTest.
@Before
public void setUp() throws IOException {
  EmittingLogger.registerEmitter(EMITTER);
  EMITTER.start();
  taskExec = MoreExecutors.listeningDecorator(Execs.singleThreaded("realtime-index-task-test-%d"));
  now = DateTimes.nowUtc();
  // Create the Derby tables the realtime index task reads and writes.
  TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector();
  derbyConnector.createDataSourceTable();
  derbyConnector.createTaskTables();
  derbyConnector.createSegmentTable();
  derbyConnector.createPendingSegmentsTable();
  baseDir = tempFolder.newFolder();
  // FIX: the prefix previously read "KafkaIndexTaskTestReports" — a copy-paste
  // from KafkaIndexTaskTest — and the suffix "json" lacked the leading dot, so
  // report files carried a misleading name with no real extension.
  reportsFile = File.createTempFile(
      "AppenderatorDriverRealtimeIndexTaskTestReports-" + System.currentTimeMillis(),
      ".json"
  );
  makeToolboxFactory(baseDir);
}
Example use of org.apache.druid.metadata.TestDerbyConnector in project druid (druid-io): the setUpTaskStorage() method of class TaskLifecycleTest.
/**
 * Builds the {@link TaskStorage} implementation selected by {@code taskStorageType}
 * (heap-backed or Derby metadata-backed), wires the shared query adapter
 * {@code tsqa}, and returns the storage.
 *
 * @return the freshly constructed task storage
 * @throws RE if {@code taskStorageType} is not a recognized value
 */
private TaskStorage setUpTaskStorage() {
  Preconditions.checkNotNull(mapper);
  Preconditions.checkNotNull(derbyConnectorRule);
  TaskStorage taskStorage;
  switch(taskStorageType) {
    case HEAP_TASK_STORAGE:
    {
      // FIX: dropped the pointless anonymous subclass "new TaskStorageConfig(null) { }";
      // a plain instance is equivalent and matches the METADATA branch below.
      taskStorage = new HeapMemoryTaskStorage(new TaskStorageConfig(null));
      break;
    }
    case METADATA_TASK_STORAGE:
    {
      TestDerbyConnector testDerbyConnector = derbyConnectorRule.getConnector();
      // Register the mock input types the lifecycle tests deserialize.
      mapper.registerSubtypes(
          new NamedType(MockFirehoseFactory.class, "mockFirehoseFactory"),
          new NamedType(MockInputSource.class, "mockInputSource"),
          new NamedType(NoopInputFormat.class, "noopInputFormat")
      );
      testDerbyConnector.createTaskTables();
      testDerbyConnector.createSegmentTable();
      taskStorage = new MetadataTaskStorage(
          testDerbyConnector,
          new TaskStorageConfig(null),
          new DerbyMetadataStorageActionHandlerFactory(
              testDerbyConnector,
              derbyConnectorRule.metadataTablesConfigSupplier().get(),
              mapper
          )
      );
      break;
    }
    default:
    {
      throw new RE("Unknown task storage type [%s]", taskStorageType);
    }
  }
  tsqa = new TaskStorageQueryAdapter(taskStorage, taskLockbox);
  return taskStorage;
}
Example use of org.apache.druid.metadata.TestDerbyConnector in project druid (druid-io): the setup() method of class TaskLockboxTest.
@Before
public void setup() {
  objectMapper = TestHelper.makeJsonMapper();
  objectMapper.registerSubtypes(NumberedShardSpec.class, HashBasedNumberedShardSpec.class);
  // A replayed no-op emitter satisfies EmittingLogger's global registration.
  ServiceEmitter mockEmitter = EasyMock.createMock(ServiceEmitter.class);
  EmittingLogger.registerEmitter(mockEmitter);
  EasyMock.replay(mockEmitter);
  // Derby-backed storage: create the tables the lockbox tests exercise.
  final TestDerbyConnector connector = derby.getConnector();
  connector.createTaskTables();
  connector.createPendingSegmentsTable();
  connector.createSegmentTable();
  final MetadataStorageTablesConfig tables = derby.metadataTablesConfigSupplier().get();
  taskStorage = new MetadataTaskStorage(
      connector,
      new TaskStorageConfig(null),
      new DerbyMetadataStorageActionHandlerFactory(connector, tables, objectMapper)
  );
  metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(objectMapper, tables, connector);
  lockbox = new TaskLockbox(taskStorage, metadataStorageCoordinator);
}
Example use of org.apache.druid.metadata.TestDerbyConnector in project druid (druid-io): the setUp() method of class DatasourceOptimizerTest.
@Before
public void setUp() throws Exception {
  TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector();
  derbyConnector.createDataSourceTable();
  derbyConnector.createSegmentTable();
  MaterializedViewConfig viewConfig = new MaterializedViewConfig();
  jsonMapper = TestHelper.makeJsonMapper();
  jsonMapper.registerSubtypes(new NamedType(DerivativeDataSourceMetadata.class, "view"));
  derivativesManager = new DerivativeDataSourceManager(
      viewConfig,
      derbyConnectorRule.metadataTablesConfigSupplier(),
      jsonMapper,
      derbyConnector
  );
  // FIX: removed the dead "metadataStorageCoordinator = EasyMock.createMock(...)"
  // assignment — it was unconditionally overwritten by the real coordinator below
  // and never read in between.
  metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(
      jsonMapper,
      derbyConnectorRule.metadataTablesConfigSupplier().get(),
      derbyConnector
  );
  setupServerAndCurator();
  curator.start();
  curator.blockUntilConnected();
  zkPathsConfig = new ZkPathsConfig();
  setupViews();
  druidServer = new DruidServer("localhost:1234", "localhost:1234", null, 10000000L, ServerType.HISTORICAL, "default_tier", 0);
  // FIX: reuse the zkPathsConfig field instead of constructing a second,
  // identical ZkPathsConfig inline.
  setupZNodeForServer(druidServer, zkPathsConfig, jsonMapper);
  optimizer = new DataSourceOptimizer(brokerServerView);
}
Aggregations