Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class ProtobufParserBenchmark, method setup:
@Setup
public void setup() {
  nestedParseSpec = new JSONParseSpec(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )),
      new JSONPathSpec(true, Lists.newArrayList(
          new JSONPathFieldSpec(JSONPathFieldType.ROOT, "eventType", "eventType"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "foobar", "$.foo.bar"),
          new JSONPathFieldSpec(JSONPathFieldType.PATH, "bar0", "$.bar[0].bar")
      )),
      null,
      null
  );
  flatParseSpec = new JSONParseSpec(
      new TimestampSpec("timestamp", "iso", null),
      new DimensionsSpec(Lists.newArrayList(
          new StringDimensionSchema("event"),
          new StringDimensionSchema("id"),
          new StringDimensionSchema("someOtherId"),
          new StringDimensionSchema("isValid")
      )),
      null,
      null,
      null
  );
  decoder = new FileBasedProtobufBytesDecoder("prototest.desc", "ProtoTestEvent");
  protoFilePath = "ProtoFile";
  protoInputs = getProtoInputs(protoFilePath);
  nestedParser = new ProtobufInputRowParser(nestedParseSpec, decoder, null, null);
  flatParser = new ProtobufInputRowParser(flatParseSpec, decoder, null, null);
}
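For comparison, the four string dimensions above can also be declared through DimensionsSpec.getDefaultSchemas, which wraps plain column names in StringDimensionSchema. A minimal sketch; the name list is copied from the spec above:

  // Equivalent to listing new StringDimensionSchema(...) for each name explicitly.
  DimensionsSpec dimensions = new DimensionsSpec(
      DimensionsSpec.getDefaultSchemas(Arrays.asList("event", "id", "someOtherId", "isValid"))
  );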
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class AvroStreamInputFormatTest, method before:
@Before
public void before() {
  timestampSpec = new TimestampSpec("nested", "millis", null);
  dimensionsSpec = new DimensionsSpec(DimensionsSpec.getDefaultSchemas(DIMENSIONS));
  flattenSpec = new JSONPathSpec(
      true,
      ImmutableList.of(new JSONPathFieldSpec(JSONPathFieldType.PATH, "nested", "someRecord.subLong"))
  );
  for (Module jacksonModule : new AvroExtensionsModule().getJacksonModules()) {
    jsonMapper.registerModule(jacksonModule);
  }
  jsonMapper.setInjectableValues(
      new InjectableValues.Std().addValue(ObjectMapper.class, new DefaultObjectMapper())
  );
}
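Because the setup configures jsonMapper with the Avro extension's Jackson modules, one simple way to exercise it is a serialization round trip of the DimensionsSpec. A minimal sketch, assuming org.junit.Assert is imported and the test method may throw Exception:

  String json = jsonMapper.writeValueAsString(dimensionsSpec);
  DimensionsSpec roundTripped = jsonMapper.readValue(json, DimensionsSpec.class);
  // DimensionsSpec implements equals(), so the round trip should be lossless.
  Assert.assertEquals(dimensionsSpec, roundTripped);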
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class ThriftInputRowParserTest, method testDisableJavaScript:
@Test
public void testDisableJavaScript() {
  final JavaScriptParseSpec parseSpec = new JavaScriptParseSpec(
      new TimestampSpec("timestamp", "auto", null),
      new DimensionsSpec(DimensionsSpec.getDefaultSchemas(ImmutableList.of("dim1", "dim2"))),
      "func",
      new JavaScriptConfig(false)
  );
  ThriftInputRowParser parser = new ThriftInputRowParser(
      parseSpec,
      "example/book.jar",
      "org.apache.druid.data.input.thrift.Book"
  );
  expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
  expectedException.expectMessage("JavaScript is disabled");
  // noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception)
  parser.parseBatch(ByteBuffer.allocate(1)).get(0);
}
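The expectedException field used above is the standard JUnit 4 ExpectedException rule; a sketch of the declaration the test relies on (imports org.junit.Rule and org.junit.rules.ExpectedException assumed):

  @Rule
  public ExpectedException expectedException = ExpectedException.none();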
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class MapVirtualColumnTestBase, method generateIndex:
static IncrementalIndex generateIndex() throws IOException {
  final CharSource input = CharSource.wrap(
      "2011-01-12T00:00:00.000Z\ta\tkey1,key2,key3\tvalue1,value2,value3\n"
      + "2011-01-12T00:00:00.000Z\tb\tkey4,key5,key6\tvalue4\n"
      + "2011-01-12T00:00:00.000Z\tc\tkey1,key5\tvalue1,value5,value9\n"
  );
  final StringInputRowParser parser = new StringInputRowParser(
      new DelimitedParseSpec(
          new TimestampSpec("ts", "auto", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim", "keys", "values"))),
          "\t",
          ",",
          Arrays.asList("ts", "dim", "keys", "values"),
          false,
          0
      ),
      "utf8"
  );
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(DateTimes.of("2011-01-12T00:00:00.000Z").getMillis())
      .build();
  return TestIndex.loadIncrementalIndex(
      () -> new OnheapIncrementalIndex.Builder().setIndexSchema(schema).setMaxRowCount(10000).build(),
      input,
      parser
  );
}
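The DelimitedParseSpec above splits columns on the tab delimiter and multi-value fields on the comma list delimiter, so each parsed row carries list-valued "keys" and "values" dimensions. A minimal sketch of parsing one line with the same parser; the literal input line is illustrative:

  InputRow row = parser.parse("2011-01-12T00:00:00.000Z\ta\tkey1,key2\tvalue1,value2");
  // row.getDimension("keys") is expected to be ["key1", "key2"]
  // row.getDimension("values") is expected to be ["value1", "value2"]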
Use of org.apache.druid.data.input.impl.DimensionsSpec in project druid by druid-io.
The class MaterializedViewSupervisorTest, method setUp:
@Before
public void setUp() {
  TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector();
  derbyConnector.createDataSourceTable();
  derbyConnector.createSegmentTable();
  taskStorage = EasyMock.createMock(TaskStorage.class);
  taskMaster = EasyMock.createMock(TaskMaster.class);
  indexerMetadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(
      objectMapper,
      derbyConnectorRule.metadataTablesConfigSupplier().get(),
      derbyConnector
  );
  metadataSupervisorManager = EasyMock.createMock(MetadataSupervisorManager.class);
  sqlSegmentsMetadataManager = EasyMock.createMock(SqlSegmentsMetadataManager.class);
  taskQueue = EasyMock.createMock(TaskQueue.class);
  taskQueue.start();
  objectMapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
  spec = new MaterializedViewSupervisorSpec(
      "base",
      new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim"))),
      new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")},
      HadoopTuningConfig.makeDefaultTuningConfig(),
      null, null, null, null, null,
      false,
      objectMapper,
      taskMaster,
      taskStorage,
      metadataSupervisorManager,
      sqlSegmentsMetadataManager,
      indexerMetadataStorageCoordinator,
      new MaterializedViewTaskConfig(),
      EasyMock.createMock(AuthorizerMapper.class),
      EasyMock.createMock(ChatHandlerProvider.class),
      new SupervisorStateManagerConfig()
  );
  derivativeDatasourceName = spec.getDataSourceName();
  supervisor = (MaterializedViewSupervisor) spec.createSupervisor();
}
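EasyMock mocks start in record mode, so the taskQueue.start() call above only records an expectation; a test body would typically switch the mocks to replay mode before exercising the supervisor. A hedged sketch of what such a test might add after this setUp:

  EasyMock.replay(taskMaster, taskStorage, metadataSupervisorManager, sqlSegmentsMetadataManager, taskQueue);
  // ... exercise the supervisor under test ...
  EasyMock.verify(taskQueue);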