Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class ProtobufInputFormatTest, method setUp.
@Before
public void setUp() {
  timestampSpec = new TimestampSpec("timestamp", "iso", null);
  dimensionsSpec = new DimensionsSpec(Lists.newArrayList(
      new StringDimensionSchema("event"),
      new StringDimensionSchema("id"),
      new StringDimensionSchema("someOtherId"),
      new StringDimensionSchema("isValid")
  ));
  flattenSpec = new JSONPathSpec(true, Lists.newArrayList(
      new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
      new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
      new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
  ));
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
  // Register the protobuf extension's Jackson modules so the specs round-trip through JSON.
  for (Module jacksonModule : new ProtobufExtensionsModule().getJacksonModules()) {
    jsonMapper.registerModule(jacksonModule);
  }
  jsonMapper.setInjectableValues(
      new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper())
  );
}
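For context, the tests built on this fixture wire the specs into a ProtobufInputFormat and pull rows through a reader. A minimal sketch, assuming the two-argument ProtobufInputFormat(flattenSpec, decoder) constructor and a hypothetical protoEventBytes payload holding one encoded ProtoTestEvent (neither appears in the snippet above):

ProtobufInputFormat inputFormat = new ProtobufInputFormat(flattenSpec, decoder);
InputRowSchema rowSchema = new InputRowSchema(timestampSpec, dimensionsSpec, null);
// protoEventBytes is a hypothetical byte[] containing one serialized ProtoTestEvent.
InputEntityReader reader = inputFormat.createReader(rowSchema, new ByteEntity(protoEventBytes), null);
try (CloseableIterator<InputRow> rows = reader.read()) {
  InputRow row = rows.next();
  // "eventType", "foobar" and "bar0" are populated by the flattenSpec defined above.
}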
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class ProtobufInputRowParserTest, method testDisableJavaScript.
@Test
public void testDisableJavaScript() {
  final JavaScriptParseSpec parseSpec = new JavaScriptParseSpec(
      new TimestampSpec("timestamp", "auto", null),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2"))),
      "func",
      new JavaScriptConfig(false)
  );
  final ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, decoder, null, null);
  expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
  expectedException.expectMessage("JavaScript is disabled");
  //noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception)
  parser.parseBatch(ByteBuffer.allocate(1)).get(0);
}
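The expectedException field used above is JUnit 4's ExpectedException rule; its declaration is not part of the snippet, but would look like this:

// JUnit 4 rule assumed by the test above (declaration not shown in the snippet).
@Rule
public ExpectedException expectedException = ExpectedException.none();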
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class ProtobufInputRowParserTest, method setUp.
@Before
public void setUp() {
parseSpec = new JSONParseSpec(new TimestampSpec("timestamp", "iso", null), new DimensionsSpec(Lists.newArrayList(new StringDimensionSchema("event"), new StringDimensionSchema("id"), new StringDimensionSchema("someOtherId"), new StringDimensionSchema("isValid"))), new JSONPathSpec(true, Lists.newArrayList(new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"), new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"), new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar"))), null, null);
flatParseSpec = new JSONParseSpec(new TimestampSpec("timestamp", "iso", null), new DimensionsSpec(Lists.newArrayList(new StringDimensionSchema("event"), new StringDimensionSchema("id"), new StringDimensionSchema("someOtherId"), new StringDimensionSchema("isValid"))), null, null, null);
flatParseSpecWithComplexTimestamp = new JSONParseSpec(new TimestampSpec("otherTimestamp", "iso", null), new DimensionsSpec(Lists.newArrayList(new StringDimensionSchema("event"), new StringDimensionSchema("id"), new StringDimensionSchema("someOtherId"), new StringDimensionSchema("isValid"))), null, null, null);
decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
}
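These specs then drive a parser over serialized protobuf bytes, using the same constructor and parseBatch call that appear in testDisableJavaScript above. A minimal sketch (protoEventBytes is a hypothetical encoded ProtoTestEvent):

ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, decoder, null, null);
// protoEventBytes: hypothetical byte[] holding one serialized ProtoTestEvent.
InputRow row = parser.parseBatch(ByteBuffer.wrap(protoEventBytes)).get(0);
// The timestamp is parsed from the "timestamp" field per the ISO TimestampSpec;
// "foobar" and "bar0" are filled in by the JSONPathSpec in parseSpec.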
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class ParquetReaderResourceLeakTest, method testFetchOnReadCleanupAfterExhaustingIterator.
@Test
public void testFetchOnReadCleanupAfterExhaustingIterator() throws IOException {
  InputRowSchema schema = new InputRowSchema(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("page", "language", "user", "unpatrolled"))),
      ColumnsFilter.all()
  );
  FetchingFileEntity entity = new FetchingFileEntity(new File("example/wiki/wiki.parquet"));
  ParquetInputFormat parquet = new ParquetInputFormat(JSONPathSpec.DEFAULT, false, new Configuration());
  File tempDir = temporaryFolder.newFolder();
  InputEntityReader reader = parquet.createReader(schema, entity, tempDir);
  // Nothing has been fetched into the temporary directory yet.
  Assert.assertEquals(0, Objects.requireNonNull(tempDir.list()).length);
  try (CloseableIterator<InputRow> iterator = reader.read()) {
    // Opening the iterator fetches the parquet file into the temporary directory.
    Assert.assertTrue(Objects.requireNonNull(tempDir.list()).length > 0);
    while (iterator.hasNext()) {
      iterator.next();
    }
  }
  // Closing the iterator deletes the fetched copy again.
  Assert.assertEquals(0, Objects.requireNonNull(tempDir.list()).length);
}
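FetchingFileEntity is a helper defined in the test class rather than a Druid API type. A plausible minimal sketch, assuming its only job is to force the fetch-and-copy path (so the cleanup asserted above has something to delete) instead of reading the file in place; the actual helper in the Druid repo may differ:

// Sketch of a test helper: always copy the file into the temporary directory
// so the test can observe the fetched copy and its removal on close().
static class FetchingFileEntity extends FileEntity {
  FetchingFileEntity(File file) {
    super(file);
  }

  @Override
  public CleanableFile fetch(File temporaryDirectory, byte[] fetchBuffer) throws IOException {
    File tempFile = File.createTempFile("druid-input-entity", ".tmp", temporaryDirectory);
    try (InputStream in = open()) {
      Files.copy(in, tempFile.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
    return new CleanableFile() {
      @Override
      public File file() {
        return tempFile;
      }

      @Override
      public void close() throws IOException {
        Files.deleteIfExists(tempFile.toPath());
      }
    };
  }
}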
Use of org.apache.druid.data.input.impl.TimestampSpec in project druid by druid-io.
The class ProtobufReaderTest, method setUp.
@Before
public void setUp() {
  TimestampSpec timestampSpec = new TimestampSpec("timestamp", "iso", null);
  DimensionsSpec dimensionsSpec = new DimensionsSpec(Lists.newArrayList(
      new StringDimensionSchema("event"),
      new StringDimensionSchema("id"),
      new StringDimensionSchema("someOtherId"),
      new StringDimensionSchema("isValid")
  ));
  flattenSpec = new JSONPathSpec(true, Lists.newArrayList(
      new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
      new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
      new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
  ));
  inputRowSchema = new InputRowSchema(timestampSpec, dimensionsSpec, null);
  // Same schema, but the timestamp is read from the "otherTimestamp" field.
  inputRowSchemaWithComplexTimestamp = new InputRowSchema(new TimestampSpec("otherTimestamp", "iso", null), dimensionsSpec, null);
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
}
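Tests on this fixture construct a ProtobufReader directly from these pieces. A minimal sketch, assuming a ProtobufReader(inputRowSchema, source, decoder, flattenSpec) constructor and a hypothetical protoEventBytes payload (both assumptions, not shown above):

// Sketch only: the constructor shape and the payload are assumptions.
ProtobufReader reader = new ProtobufReader(
    inputRowSchema,
    new ByteEntity(protoEventBytes),  // hypothetical serialized ProtoTestEvent
    decoder,
    flattenSpec
);
try (CloseableIterator<InputRow> rows = reader.read()) {
  InputRow row = rows.next();
  // The timestamp comes from the "timestamp" field per the ISO TimestampSpec above.
}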