Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class MaterializedViewSupervisorTest, method testSuspendedDoesntRun.
@Test
public void testSuspendedDoesntRun() {
MaterializedViewSupervisorSpec suspended = new MaterializedViewSupervisorSpec(
    "base",
    new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim"))),
    new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")},
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null,
    null,
    null,
    null,
    null,
    true, // suspended
    objectMapper,
    taskMaster,
    taskStorage,
    metadataSupervisorManager,
    sqlSegmentsMetadataManager,
    indexerMetadataStorageCoordinator,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    EasyMock.createMock(ChatHandlerProvider.class),
    new SupervisorStateManagerConfig()
);
MaterializedViewSupervisor supervisor = (MaterializedViewSupervisor) suspended.createSupervisor();
// mock IndexerSQLMetadataStorageCoordinator to ensure that retrieveDataSourceMetadata is never called;
// it is the first operation of 'run', so it executes unless the supervisor is truly suspended
IndexerSQLMetadataStorageCoordinator mock = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class);
EasyMock.expect(mock.retrieveDataSourceMetadata(suspended.getDataSourceName())).andAnswer(() -> {
  // any invocation here means run() was not actually suspended
  Assert.fail();
  return null;
}).anyTimes();
EasyMock.replay(mock);
supervisor.run();
}
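The never-called guard above leans on a basic EasyMock property: a regular (non-nice) mock throws an AssertionError on any invocation that was not recorded before replay(). A minimal standalone sketch of that pattern, using a hypothetical Fetcher interface that is not part of Druid:

import org.easymock.EasyMock;

interface Fetcher {
  String fetch(String key);
}

public class NeverCalledExample {
  public static void main(String[] args) {
    Fetcher mock = EasyMock.createMock(Fetcher.class);
    EasyMock.replay(mock); // no expectations recorded
    // calling mock.fetch("x") here would fail with "unexpected method call"
    EasyMock.verify(mock); // passes, since nothing was invoked
  }
}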
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class MaterializedViewSupervisorSpecTest, method testMaterializedViewSupervisorSpecCreated.
@Test
public void testMaterializedViewSupervisorSpecCreated() {
Exception ex = null;
try {
MaterializedViewSupervisorSpec spec = new MaterializedViewSupervisorSpec(
    "wikiticker",
    new DimensionsSpec(Lists.newArrayList(
        new StringDimensionSchema("isUnpatrolled"),
        new StringDimensionSchema("metroCode"),
        new StringDimensionSchema("namespace"),
        new StringDimensionSchema("page"),
        new StringDimensionSchema("regionIsoCode"),
        new StringDimensionSchema("regionName"),
        new StringDimensionSchema("user")
    )),
    new AggregatorFactory[]{
        new CountAggregatorFactory("count"),
        new LongSumAggregatorFactory("added", "added")
    },
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null,
    null,
    null,
    null,
    null,
    false, // suspended
    objectMapper,
    null,
    null,
    null,
    null,
    null,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    new NoopChatHandlerProvider(),
    new SupervisorStateManagerConfig()
);
Supervisor supervisor = spec.createSupervisor();
Assert.assertTrue(supervisor instanceof MaterializedViewSupervisor);
SupervisorTaskAutoScaler autoscaler = spec.createAutoscaler(supervisor);
Assert.assertNull(autoscaler);
try {
  supervisor.computeLagStats();
} catch (Exception e) {
  // only checks the type when an exception is thrown; passes silently otherwise
  Assert.assertTrue(e instanceof UnsupportedOperationException);
}
try {
  int count = supervisor.getActiveTaskGroupsCount();
} catch (Exception e) {
  Assert.assertTrue(e instanceof UnsupportedOperationException);
}
// no-op callable; defined but never invoked in this test
Callable<Integer> noop = new Callable<Integer>() {
  @Override
  public Integer call() {
    return -1;
  }
};
} catch (Exception e) {
ex = e;
}
Assert.assertNull(ex);
}
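On JUnit 4.13 or newer, the two try/catch checks above have a stricter equivalent: Assert.assertThrows fails the test when no exception is thrown at all, whereas the try/catch form passes silently in that case. A sketch of the replacement, assuming it sits inside the same try block as the original checks:

Assert.assertThrows(UnsupportedOperationException.class, supervisor::computeLagStats);
Assert.assertThrows(UnsupportedOperationException.class, supervisor::getActiveTaskGroupsCount);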
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class MaterializedViewSupervisorSpecTest, method testNullBaseDataSource.
@Test
public void testNullBaseDataSource() {
expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
expectedException.expectMessage("baseDataSource cannot be null or empty. Please provide a baseDataSource.");
// noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception)
new MaterializedViewSupervisorSpec(
    null, // baseDataSource: null triggers the expected IllegalArgumentException
    new DimensionsSpec(Lists.newArrayList(
        new StringDimensionSchema("isUnpatrolled"),
        new StringDimensionSchema("metroCode"),
        new StringDimensionSchema("namespace"),
        new StringDimensionSchema("page"),
        new StringDimensionSchema("regionIsoCode"),
        new StringDimensionSchema("regionName"),
        new StringDimensionSchema("user")
    )),
    new AggregatorFactory[]{
        new CountAggregatorFactory("count"),
        new LongSumAggregatorFactory("added", "added")
    },
    HadoopTuningConfig.makeDefaultTuningConfig(),
    null,
    null,
    null,
    null,
    null,
    false, // suspended
    objectMapper,
    null,
    null,
    null,
    null,
    null,
    new MaterializedViewTaskConfig(),
    EasyMock.createMock(AuthorizerMapper.class),
    new NoopChatHandlerProvider(),
    new SupervisorStateManagerConfig()
);
}
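The expectedException field used above is a standard JUnit 4 rule whose declaration falls outside this excerpt; with the usual pattern, it would look like this (a sketch, not the verbatim Druid source):

@Rule
public ExpectedException expectedException = ExpectedException.none();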
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class TimestampsParquetReaderTest, method testParseInt96Timestamp.
@Test
public void testParseInt96Timestamp() throws IOException {
// the source Parquet file comes from the Apache Spark SQL repository's tests, where it is known
// as impala_timestamp.parq; it has a single column, "ts", which is an INT96 timestamp
final String file = "example/timestamps/int96_timestamp.parquet";
InputRowSchema schema = new InputRowSchema(
    new TimestampSpec("ts", "auto", null),
    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of())),
    ColumnsFilter.all()
);
InputEntityReader reader = createReader(file, schema, JSONPathSpec.DEFAULT);
List<InputRow> rows = readAllRows(reader);
Assert.assertEquals("2001-01-01T01:01:01.000Z", rows.get(0).getTimestamp().toString());
reader = createReader(file, schema, JSONPathSpec.DEFAULT);
List<InputRowListPlusRawValues> sampled = sampleAllRows(reader);
final String expectedJson = "{\n" + " \"ts\" : 978310861000\n" + "}";
Assert.assertEquals(expectedJson, DEFAULT_JSON_WRITER.writeValueAsString(sampled.get(0).getRawValues()));
}
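The two assertions agree with each other: 978310861000 milliseconds after the Unix epoch is 2001-01-01T01:01:01Z. A quick standalone check with java.time (not part of the Druid tests):

import java.time.Instant;

public class EpochSanityCheck {
  public static void main(String[] args) {
    // prints 2001-01-01T01:01:01Z, matching both the parsed row and the raw sample above
    System.out.println(Instant.ofEpochMilli(978_310_861_000L));
  }
}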
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class TimestampsParquetReaderTest, method testTimeMillisInInt64.
@Test
public void testTimeMillisInInt64() throws IOException {
final String file = "example/timestamps/timemillis-in-i64.parquet";
InputRowSchema schema = new InputRowSchema(
    new TimestampSpec("time", "auto", null),
    new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of())),
    ColumnsFilter.all()
);
InputEntityReader reader = createReader(file, schema, JSONPathSpec.DEFAULT);
List<InputRow> rows = readAllRows(reader);
Assert.assertEquals("1970-01-01T00:00:00.010Z", rows.get(0).getTimestamp().toString());
reader = createReader(file, schema, JSONPathSpec.DEFAULT);
List<InputRowListPlusRawValues> sampled = sampleAllRows(reader);
final String expectedJson = "{\n" + " \"time\" : 10\n" + "}";
Assert.assertEquals(expectedJson, DEFAULT_JSON_WRITER.writeValueAsString(sampled.get(0).getRawValues()));
}
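Here the raw sampled value is the integer 10, while the parsed row reports 1970-01-01T00:00:00.010Z: the "auto" format treats numeric inputs as epoch milliseconds and ISO-8601 strings as dates. A minimal sketch of that resolution, assuming TimestampSpec's extractTimestamp(Map) helper is available as in current Druid versions:

import com.google.common.collect.ImmutableMap;
import org.apache.druid.data.input.impl.TimestampSpec;

public class AutoFormatSketch {
  public static void main(String[] args) {
    TimestampSpec spec = new TimestampSpec("time", "auto", null);
    // numeric values are treated as epoch milliseconds -> 1970-01-01T00:00:00.010Z
    System.out.println(spec.extractTimestamp(ImmutableMap.of("time", 10L)));
    // ISO-8601 strings are parsed directly
    System.out.println(spec.extractTimestamp(ImmutableMap.of("time", "2001-01-01T01:01:01.000Z")));
  }
}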