Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
In the class OssInputSourceTest, the method testReader:
@Test
public void testReader() throws IOException {
  EasyMock.reset(OSSCLIENT);
  // List two "prefixes": a real prefix that returns one object, and a full
  // object URI that lists only itself.
  expectListObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_URIS.get(0)), CONTENT);
  expectListObjects(EXPECTED_URIS.get(1), ImmutableList.of(EXPECTED_URIS.get(1)), CONTENT);
  expectGetObject(EXPECTED_URIS.get(0));
  expectGetObject(EXPECTED_URIS.get(1));
  EasyMock.replay(OSSCLIENT);

  OssInputSource inputSource = new OssInputSource(
      OSSCLIENT,
      INPUT_DATA_CONFIG,
      null,                                                      // uris
      ImmutableList.of(PREFIXES.get(0), EXPECTED_URIS.get(1)),   // prefixes
      null,                                                      // objects
      null
  );
  InputRowSchema someSchema = new InputRowSchema(
      new TimestampSpec("time", "auto", null),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2"))),
      ColumnsFilter.all()
  );
  InputSourceReader reader = inputSource.reader(
      someSchema,
      new CsvInputFormat(ImmutableList.of("time", "dim1", "dim2"), "|", false, null, 0),
      temporaryFolder.newFolder()
  );

  CloseableIterator<InputRow> iterator = reader.read();
  while (iterator.hasNext()) {
    InputRow nextRow = iterator.next();
    Assert.assertEquals(NOW, nextRow.getTimestamp());
    Assert.assertEquals("hello", nextRow.getDimension("dim1").get(0));
    Assert.assertEquals("world", nextRow.getDimension("dim2").get(0));
  }
  EasyMock.verify(OSSCLIENT);
}
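The helpers expectListObjects and expectGetObject used above are defined elsewhere in OssInputSourceTest. A minimal sketch of what such EasyMock stubs could look like, assuming the Aliyun OSS SDK model classes (ObjectListing, OSSObjectSummary, OSSObject, ListObjectsRequest, GetObjectRequest); the real helpers may match requests more precisely:

private static void expectListObjects(URI prefix, List<URI> uris, byte[] content) {
  // Build a fake listing whose summaries point at the given object URIs.
  final ObjectListing listing = new ObjectListing();
  for (URI uri : uris) {
    final OSSObjectSummary summary = new OSSObjectSummary();
    summary.setBucketName(uri.getAuthority());
    summary.setKey(uri.getPath().substring(1)); // drop the leading '/'
    summary.setSize(content.length);
    listing.getObjectSummaries().add(summary);
  }
  EasyMock.expect(OSSCLIENT.listObjects(EasyMock.anyObject(ListObjectsRequest.class)))
          .andReturn(listing)
          .once();
}

private static void expectGetObject(URI uri) {
  // Serve the shared CONTENT bytes for the requested object.
  final OSSObject object = new OSSObject();
  object.setBucketName(uri.getAuthority());
  object.setKey(uri.getPath().substring(1));
  object.setObjectContent(new ByteArrayInputStream(CONTENT));
  EasyMock.expect(OSSCLIENT.getObject(EasyMock.anyObject(GetObjectRequest.class)))
          .andReturn(object)
          .once();
}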
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
In the class MaterializedViewSupervisorSpecTest, the method testEmptyBaseDataSource:
@Test
public void testEmptyBaseDataSource() {
  expectedException.expect(CoreMatchers.instanceOf(IllegalArgumentException.class));
  expectedException.expectMessage("baseDataSource cannot be null or empty. Please provide a baseDataSource.");
  // noinspection ResultOfObjectAllocationIgnored (the constructor call itself throws the expected exception)
  new MaterializedViewSupervisorSpec(
      "",
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("isUnpatrolled"), new StringDimensionSchema("metroCode"),
          new StringDimensionSchema("namespace"), new StringDimensionSchema("page"),
          new StringDimensionSchema("regionIsoCode"), new StringDimensionSchema("regionName"),
          new StringDimensionSchema("user")
      )),
      new AggregatorFactory[]{new CountAggregatorFactory("count"), new LongSumAggregatorFactory("added", "added")},
      HadoopTuningConfig.makeDefaultTuningConfig(),
      null, null, null, null, null,
      false,
      objectMapper,
      null, null, null, null, null,
      new MaterializedViewTaskConfig(),
      EasyMock.createMock(AuthorizerMapper.class),
      new NoopChatHandlerProvider(),
      new SupervisorStateManagerConfig()
  );
}
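On JUnit 4.13 and later, the same expectation can be expressed with Assert.assertThrows instead of the ExpectedException rule. In this sketch, buildSpec is a hypothetical local helper that wraps the constructor call above with a configurable baseDataSource:

// buildSpec(...) is a hypothetical wrapper around the constructor call shown above.
final IllegalArgumentException e = Assert.assertThrows(
    IllegalArgumentException.class,
    () -> buildSpec("")
);
Assert.assertEquals(
    "baseDataSource cannot be null or empty. Please provide a baseDataSource.",
    e.getMessage()
);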
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
In the class MaterializedViewSupervisorSpecTest, the method testSupervisorSerialization:
@Test
public void testSupervisorSerialization() throws IOException {
  String supervisorStr = "{\n"
      + "  \"type\" : \"derivativeDataSource\",\n"
      + "  \"baseDataSource\": \"wikiticker\",\n"
      + "  \"dimensionsSpec\": {\n"
      + "    \"dimensions\" : [\n"
      + "      \"isUnpatrolled\",\n"
      + "      \"metroCode\",\n"
      + "      \"namespace\",\n"
      + "      \"page\",\n"
      + "      \"regionIsoCode\",\n"
      + "      \"regionName\",\n"
      + "      \"user\"\n"
      + "    ]\n"
      + "  },\n"
      + "  \"metricsSpec\" : [\n"
      + "    {\n"
      + "      \"name\" : \"count\",\n"
      + "      \"type\" : \"count\"\n"
      + "    },\n"
      + "    {\n"
      + "      \"name\" : \"added\",\n"
      + "      \"type\" : \"longSum\",\n"
      + "      \"fieldName\" : \"added\"\n"
      + "    }\n"
      + "  ],\n"
      + "  \"tuningConfig\": {\n"
      + "    \"type\" : \"hadoop\"\n"
      + "  }\n"
      + "}";
  MaterializedViewSupervisorSpec expected = new MaterializedViewSupervisorSpec(
      "wikiticker",
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("isUnpatrolled"), new StringDimensionSchema("metroCode"),
          new StringDimensionSchema("namespace"), new StringDimensionSchema("page"),
          new StringDimensionSchema("regionIsoCode"), new StringDimensionSchema("regionName"),
          new StringDimensionSchema("user")
      )),
      new AggregatorFactory[]{new CountAggregatorFactory("count"), new LongSumAggregatorFactory("added", "added")},
      HadoopTuningConfig.makeDefaultTuningConfig(),
      null, null, null, null, null,
      false,
      objectMapper,
      null, null, null, null, null,
      new MaterializedViewTaskConfig(),
      EasyMock.createMock(AuthorizerMapper.class),
      new NoopChatHandlerProvider(),
      new SupervisorStateManagerConfig()
  );
  MaterializedViewSupervisorSpec spec = objectMapper.readValue(supervisorStr, MaterializedViewSupervisorSpec.class);
  Assert.assertEquals(expected.getBaseDataSource(), spec.getBaseDataSource());
  Assert.assertEquals(expected.getId(), spec.getId());
  Assert.assertEquals(expected.getDataSourceName(), spec.getDataSourceName());
  Assert.assertEquals(expected.getDimensions(), spec.getDimensions());
  Assert.assertEquals(expected.getMetrics(), spec.getMetrics());
}
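The assertions compare individual fields rather than whole objects, which suggests MaterializedViewSupervisorSpec may not override equals(). A natural extension, sketched here rather than taken from the original test, is a serialize-and-reparse round trip through the same objectMapper:

// Re-serialize the parsed spec and read it back; the fields should survive the round trip.
final String reserialized = objectMapper.writeValueAsString(spec);
final MaterializedViewSupervisorSpec roundTripped =
    objectMapper.readValue(reserialized, MaterializedViewSupervisorSpec.class);
Assert.assertEquals(spec.getBaseDataSource(), roundTripped.getBaseDataSource());
Assert.assertEquals(spec.getDimensions(), roundTripped.getDimensions());
Assert.assertEquals(spec.getMetrics(), roundTripped.getMetrics());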
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
In the class AvroOCFReaderTest, the method createReader:
private InputEntityReader createReader(ObjectMapper mapper, Map<String, Object> readerSchema) throws Exception {
  final GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
  final File someAvroFile = AvroHadoopInputRowParserTest.createAvroFile(someAvroDatum);
  final TimestampSpec timestampSpec = new TimestampSpec("timestamp", "auto", null);
  final DimensionsSpec dimensionsSpec =
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("eventType")));
  final AvroOCFInputFormat inputFormat = new AvroOCFInputFormat(mapper, null, readerSchema, null, null);
  final InputRowSchema schema = new InputRowSchema(timestampSpec, dimensionsSpec, ColumnsFilter.all());
  final FileEntity entity = new FileEntity(someAvroFile);
  return inputFormat.createReader(schema, entity, temporaryFolder.newFolder());
}
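Passing a null readerSchema makes the format fall back to the writer schema embedded in the OCF file. A sketch of how a test might consume the returned reader, assuming a mapper with the Avro extension modules registered; closing the iterator via try-with-resources releases the underlying file handle:

try (CloseableIterator<InputRow> iterator = createReader(mapper, null).read()) {
  while (iterator.hasNext()) {
    final InputRow row = iterator.next();
    // buildSomeAvroDatum() sets eventType, so each row should expose exactly one value for it.
    Assert.assertEquals(1, row.getDimension("eventType").size());
  }
}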
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
In the class GoogleCloudStorageInputSourceTest, the method testCompressedReader:
@Test
public void testCompressedReader() throws IOException {
  EasyMock.reset(STORAGE);
  EasyMock.reset(INPUT_DATA_CONFIG);
  addExpectedPrefixObjects(PREFIXES.get(0), ImmutableList.of(EXPECTED_COMPRESSED_URIS.get(0)));
  addExpectedGetCompressedObjectMock(EXPECTED_COMPRESSED_URIS.get(0));
  addExpectedPrefixObjects(PREFIXES.get(1), ImmutableList.of(EXPECTED_COMPRESSED_URIS.get(1)));
  addExpectedGetCompressedObjectMock(EXPECTED_COMPRESSED_URIS.get(1));
  EasyMock.expect(INPUT_DATA_CONFIG.getMaxListingLength()).andReturn(MAX_LISTING_LENGTH);
  EasyMock.replay(STORAGE);
  EasyMock.replay(INPUT_DATA_CONFIG);

  GoogleCloudStorageInputSource inputSource =
      new GoogleCloudStorageInputSource(STORAGE, INPUT_DATA_CONFIG, null, PREFIXES, null);
  InputRowSchema someSchema = new InputRowSchema(
      new TimestampSpec("time", "auto", null),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2"))),
      ColumnsFilter.all()
  );
  InputSourceReader reader = inputSource.reader(
      someSchema,
      new CsvInputFormat(ImmutableList.of("time", "dim1", "dim2"), "|", false, null, 0),
      null
  );

  CloseableIterator<InputRow> iterator = reader.read();
  while (iterator.hasNext()) {
    InputRow nextRow = iterator.next();
    Assert.assertEquals(NOW, nextRow.getTimestamp());
    Assert.assertEquals("hello", nextRow.getDimension("dim1").get(0));
    Assert.assertEquals("world", nextRow.getDimension("dim2").get(0));
  }
}
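addExpectedPrefixObjects and addExpectedGetCompressedObjectMock are likewise defined elsewhere in the test class. The essential point is that the second helper must serve gzipped bytes, because EXPECTED_COMPRESSED_URIS point at ".gz" objects and the reader decompresses based on that extension. A hypothetical reconstruction; the mocked GoogleStorage.get(...) signature is an assumption and may differ between Druid versions:

private static void addExpectedGetCompressedObjectMock(URI uri) throws IOException {
  // Gzip the shared CONTENT bytes so the ".gz" suffix on the URI exercises
  // the reader's transparent decompression path.
  final ByteArrayOutputStream gzipped = new ByteArrayOutputStream();
  try (GZIPOutputStream out = new GZIPOutputStream(gzipped)) {
    out.write(CONTENT);
  }
  // Assumed mock signature: bucket, object path, byte offset to start reading from.
  EasyMock.expect(STORAGE.get(EasyMock.eq(uri.getAuthority()), EasyMock.anyString(), EasyMock.eq(0L)))
          .andReturn(new ByteArrayInputStream(gzipped.toByteArray()));
}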