Use of org.apache.druid.data.input.impl.StringDimensionSchema in project druid by druid-io.
From class MaterializedViewSupervisorTest, method setUp:
@Before
public void setUp() {
TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector();
derbyConnector.createDataSourceTable();
derbyConnector.createSegmentTable();
taskStorage = EasyMock.createMock(TaskStorage.class);
taskMaster = EasyMock.createMock(TaskMaster.class);
indexerMetadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(
    objectMapper,
    derbyConnectorRule.metadataTablesConfigSupplier().get(),
    derbyConnector
);
metadataSupervisorManager = EasyMock.createMock(MetadataSupervisorManager.class);
sqlSegmentsMetadataManager = EasyMock.createMock(SqlSegmentsMetadataManager.class);
taskQueue = EasyMock.createMock(TaskQueue.class);
taskQueue.start();
objectMapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
spec = new MaterializedViewSupervisorSpec(
    "base",
    new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim"))),
    new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")},
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null, null, null, null, null,
    false,
    objectMapper,
    taskMaster, taskStorage, metadataSupervisorManager, sqlSegmentsMetadataManager, indexerMetadataStorageCoordinator,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    EasyMock.createMock(ChatHandlerProvider.class),
    new SupervisorStateManagerConfig()
);
derivativeDatasourceName = spec.getDataSourceName();
supervisor = (MaterializedViewSupervisor) spec.createSupervisor();
}
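For reference, a minimal sketch (not part of the Druid test) of how the DimensionsSpec built above from a single StringDimensionSchema can be inspected; the assertions are illustrative only and assume the same imports as the test class:

// Sketch only: the DimensionsSpec used by the supervisor spec declares one string column named "dim".
DimensionsSpec dimensionsSpec = new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim")));
Assert.assertEquals(Collections.singletonList("dim"), dimensionsSpec.getDimensionNames());
Assert.assertTrue(dimensionsSpec.getDimensions().get(0) instanceof StringDimensionSchema);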
Use of org.apache.druid.data.input.impl.StringDimensionSchema in project druid by druid-io.
From class MaterializedViewSupervisorTest, method testSuspendedDoesntRun:
@Test
public void testSuspendedDoesntRun() {
MaterializedViewSupervisorSpec suspended = new MaterializedViewSupervisorSpec(
    "base",
    new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim"))),
    new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")},
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null, null, null, null, null,
    true,
    objectMapper,
    taskMaster, taskStorage, metadataSupervisorManager, sqlSegmentsMetadataManager, indexerMetadataStorageCoordinator,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    EasyMock.createMock(ChatHandlerProvider.class),
    new SupervisorStateManagerConfig()
);
MaterializedViewSupervisor supervisor = (MaterializedViewSupervisor) suspended.createSupervisor();
// mock IndexerSQLMetadataStorageCoordinator to ensure that retrieveDataSourceMetadata is never called;
// if the supervisor is truly suspended it should not be, since that is the first operation of the 'run' method when not suspended
IndexerSQLMetadataStorageCoordinator mock = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class);
EasyMock.expect(mock.retrieveDataSourceMetadata(suspended.getDataSourceName())).andAnswer(() -> {
Assert.fail();
return null;
}).anyTimes();
EasyMock.replay(mock);
supervisor.run();
}
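An alternative way to assert that a suspended supervisor never reaches the metadata store is to record no expectations at all and rely on EasyMock failing on any unexpected call; a minimal sketch, assuming the mock were wired into the spec in place of indexerMetadataStorageCoordinator (the variable name coordinator is illustrative):

// Sketch only: a strict mock with no recorded expectations throws on the first unexpected call once replayed.
IndexerSQLMetadataStorageCoordinator coordinator = EasyMock.createStrictMock(IndexerSQLMetadataStorageCoordinator.class);
EasyMock.replay(coordinator);
// ... build the suspended spec with this coordinator and call supervisor.run() ...
EasyMock.verify(coordinator); // confirms no recorded calls are missing; any unexpected call would already have failed above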
Use of org.apache.druid.data.input.impl.StringDimensionSchema in project druid by druid-io.
From class MaterializedViewSupervisorSpecTest, method testMaterializedViewSupervisorSpecCreated:
@Test
public void testMaterializedViewSupervisorSpecCreated() {
Exception ex = null;
try {
MaterializedViewSupervisorSpec spec = new MaterializedViewSupervisorSpec(
    "wikiticker",
    new DimensionsSpec(Lists.newArrayList(
        new StringDimensionSchema("isUnpatrolled"),
        new StringDimensionSchema("metroCode"),
        new StringDimensionSchema("namespace"),
        new StringDimensionSchema("page"),
        new StringDimensionSchema("regionIsoCode"),
        new StringDimensionSchema("regionName"),
        new StringDimensionSchema("user")
    )),
    new AggregatorFactory[]{new CountAggregatorFactory("count"), new LongSumAggregatorFactory("added", "added")},
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null, null, null, null, null,
    false,
    objectMapper,
    null, null, null, null, null,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    new NoopChatHandlerProvider(),
    new SupervisorStateManagerConfig()
);
Supervisor supervisor = spec.createSupervisor();
Assert.assertTrue(supervisor instanceof MaterializedViewSupervisor);
SupervisorTaskAutoScaler autoscaler = spec.createAutoscaler(supervisor);
Assert.assertNull(autoscaler);
try {
supervisor.computeLagStats();
} catch (Exception e) {
Assert.assertTrue(e instanceof UnsupportedOperationException);
}
try {
int count = supervisor.getActiveTaskGroupsCount();
} catch (Exception e) {
Assert.assertTrue(e instanceof UnsupportedOperationException);
}
Callable<Integer> noop = new Callable<Integer>() {
@Override
public Integer call() {
return -1;
}
};
} catch (Exception e) {
ex = e;
}
Assert.assertNull(ex);
}
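The two try/catch blocks above only check the exception type when an exception happens to be thrown. JUnit 4.13+ provides Assert.assertThrows, which additionally fails when no exception is thrown; a minimal sketch of the equivalent assertions, assuming the same supervisor instance:

// Sketch only: equivalent, stricter checks using Assert.assertThrows (JUnit 4.13+).
Assert.assertThrows(UnsupportedOperationException.class, supervisor::computeLagStats);
Assert.assertThrows(UnsupportedOperationException.class, supervisor::getActiveTaskGroupsCount);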
Use of org.apache.druid.data.input.impl.StringDimensionSchema in project druid by druid-io.
From class MaterializedViewSupervisorSpecTest, method testNullBaseDataSource:
@Test
public void testNullBaseDataSource() {
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage("baseDataSource cannot be null or empty. Please provide a baseDataSource.");
// noinspection ResultOfObjectAllocationIgnored (this constructor call is expected to throw)
new MaterializedViewSupervisorSpec(
    null,
    new DimensionsSpec(Lists.newArrayList(
        new StringDimensionSchema("isUnpatrolled"),
        new StringDimensionSchema("metroCode"),
        new StringDimensionSchema("namespace"),
        new StringDimensionSchema("page"),
        new StringDimensionSchema("regionIsoCode"),
        new StringDimensionSchema("regionName"),
        new StringDimensionSchema("user")
    )),
    new AggregatorFactory[]{new CountAggregatorFactory("count"), new LongSumAggregatorFactory("added", "added")},
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null, null, null, null, null,
    false,
    objectMapper,
    null, null, null, null, null,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    new NoopChatHandlerProvider(),
    new SupervisorStateManagerConfig()
);
}
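The expectedException field used here is a JUnit 4 rule; it is presumably declared on the test class along these lines (a standard declaration, shown for completeness):

// Presumed declaration backing the expectedException usage above (JUnit 4 ExpectedException rule).
@Rule
public ExpectedException expectedException = ExpectedException.none();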
Use of org.apache.druid.data.input.impl.StringDimensionSchema in project druid by druid-io.
From class GroupByLimitPushDownMultiNodeMergeTest, method setup:
@Before
public void setup() throws Exception {
tmpDir = FileUtils.createTempDir();
InputRow row;
List<String> dimNames = Arrays.asList("dimA", "metA");
Map<String, Object> event;
final IncrementalIndex indexA = makeIncIndex(false);
incrementalIndices.add(indexA);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 2395L);
row = new MapBasedInputRow(1505260888888L, dimNames, event);
indexA.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 8L);
row = new MapBasedInputRow(1505260800000L, dimNames, event);
indexA.add(row);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 5028L);
row = new MapBasedInputRow(1505264400000L, dimNames, event);
indexA.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 7L);
row = new MapBasedInputRow(1505264400400L, dimNames, event);
indexA.add(row);
final File fileA = INDEX_MERGER_V9.persist(indexA, new File(tmpDir, "A"), new IndexSpec(), null);
QueryableIndex qindexA = INDEX_IO.loadIndex(fileA);
final IncrementalIndex indexB = makeIncIndex(false);
incrementalIndices.add(indexB);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 4718L);
row = new MapBasedInputRow(1505260800000L, dimNames, event);
indexB.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 18L);
row = new MapBasedInputRow(1505260800000L, dimNames, event);
indexB.add(row);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 2698L);
row = new MapBasedInputRow(1505264400000L, dimNames, event);
indexB.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 3L);
row = new MapBasedInputRow(1505264400000L, dimNames, event);
indexB.add(row);
final File fileB = INDEX_MERGER_V9.persist(indexB, new File(tmpDir, "B"), new IndexSpec(), null);
QueryableIndex qindexB = INDEX_IO.loadIndex(fileB);
final IncrementalIndex indexC = makeIncIndex(false);
incrementalIndices.add(indexC);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 2395L);
row = new MapBasedInputRow(1505260800000L, dimNames, event);
indexC.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 8L);
row = new MapBasedInputRow(1605260800000L, dimNames, event);
indexC.add(row);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 5028L);
row = new MapBasedInputRow(1705264400000L, dimNames, event);
indexC.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 7L);
row = new MapBasedInputRow(1805264400000L, dimNames, event);
indexC.add(row);
final File fileC = INDEX_MERGER_V9.persist(indexC, new File(tmpDir, "C"), new IndexSpec(), null);
QueryableIndex qindexC = INDEX_IO.loadIndex(fileC);
final IncrementalIndex indexD = makeIncIndex(false);
incrementalIndices.add(indexD);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 4718L);
row = new MapBasedInputRow(1505260800000L, dimNames, event);
indexD.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 18L);
row = new MapBasedInputRow(1605260800000L, dimNames, event);
indexD.add(row);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("metA", 2698L);
row = new MapBasedInputRow(1705264400000L, dimNames, event);
indexD.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("metA", 3L);
row = new MapBasedInputRow(1805264400000L, dimNames, event);
indexD.add(row);
final File fileD = INDEX_MERGER_V9.persist(indexD, new File(tmpDir, "D"), new IndexSpec(), null);
QueryableIndex qindexD = INDEX_IO.loadIndex(fileD);
List<String> dimNames2 = Arrays.asList("dimA", "dimB", "metA");
List<DimensionSchema> dimensions = Arrays.asList(new StringDimensionSchema("dimA"), new StringDimensionSchema("dimB"), new LongDimensionSchema("metA"));
final IncrementalIndex indexE = makeIncIndex(false, dimensions);
incrementalIndices.add(indexE);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("dimB", "raw");
event.put("metA", 5L);
row = new MapBasedInputRow(1505260800000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("dimB", "ripe");
event.put("metA", 9L);
row = new MapBasedInputRow(1605260800000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "pomegranate");
event.put("dimB", "raw");
event.put("metA", 3L);
row = new MapBasedInputRow(1705264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "mango");
event.put("dimB", "ripe");
event.put("metA", 7L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "grape");
event.put("dimB", "raw");
event.put("metA", 5L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "apple");
event.put("dimB", "ripe");
event.put("metA", 3L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "apple");
event.put("dimB", "raw");
event.put("metA", 1L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "apple");
event.put("dimB", "ripe");
event.put("metA", 4L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "apple");
event.put("dimB", "raw");
event.put("metA", 1L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "banana");
event.put("dimB", "ripe");
event.put("metA", 4L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "orange");
event.put("dimB", "raw");
event.put("metA", 9L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "peach");
event.put("dimB", "ripe");
event.put("metA", 7L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "orange");
event.put("dimB", "raw");
event.put("metA", 2L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
event = new HashMap<>();
event.put("dimA", "strawberry");
event.put("dimB", "ripe");
event.put("metA", 10L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexE.add(row);
final File fileE = INDEX_MERGER_V9.persist(indexE, new File(tmpDir, "E"), new IndexSpec(), null);
QueryableIndex qindexE = INDEX_IO.loadIndex(fileE);
final IncrementalIndex indexF = makeIncIndex(false, dimensions);
incrementalIndices.add(indexF);
event = new HashMap<>();
event.put("dimA", "kiwi");
event.put("dimB", "raw");
event.put("metA", 7L);
row = new MapBasedInputRow(1505260800000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "watermelon");
event.put("dimB", "ripe");
event.put("metA", 14L);
row = new MapBasedInputRow(1605260800000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "kiwi");
event.put("dimB", "raw");
event.put("metA", 8L);
row = new MapBasedInputRow(1705264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "kiwi");
event.put("dimB", "ripe");
event.put("metA", 8L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "lemon");
event.put("dimB", "raw");
event.put("metA", 3L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "cherry");
event.put("dimB", "ripe");
event.put("metA", 2L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "cherry");
event.put("dimB", "raw");
event.put("metA", 7L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "avocado");
event.put("dimB", "ripe");
event.put("metA", 12L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "cherry");
event.put("dimB", "raw");
event.put("metA", 3L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "plum");
event.put("dimB", "ripe");
event.put("metA", 5L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "plum");
event.put("dimB", "raw");
event.put("metA", 3L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
event = new HashMap<>();
event.put("dimA", "lime");
event.put("dimB", "ripe");
event.put("metA", 7L);
row = new MapBasedInputRow(1805264400000L, dimNames2, event);
indexF.add(row);
final File fileF = INDEX_MERGER_V9.persist(indexF, new File(tmpDir, "F"), new IndexSpec(), null);
QueryableIndex qindexF = INDEX_IO.loadIndex(fileF);
groupByIndices = Arrays.asList(qindexA, qindexB, qindexC, qindexD, qindexE, qindexF);
resourceCloser = Closer.create();
setupGroupByFactory();
}
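The setup above repeats the same event/row/add sequence for every ingested row. A small helper along the following lines (not part of the original test; the name addRow and the varargs layout are illustrative) would express each row in a single call, reusing the imports already present in the test class:

// Sketch only: builds a MapBasedInputRow from alternating key/value pairs and adds it to the given index.
private static void addRow(IncrementalIndex index, List<String> dimNames, long timestamp, Object... keyValues) throws Exception
{
  Map<String, Object> event = new HashMap<>();
  for (int i = 0; i < keyValues.length; i += 2) {
    event.put((String) keyValues[i], keyValues[i + 1]);
  }
  index.add(new MapBasedInputRow(timestamp, dimNames, event));
}

With such a helper, the first row of indexA above becomes addRow(indexA, dimNames, 1505260888888L, "dimA", "pomegranate", "metA", 2395L).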