Use of io.druid.guice.FirehoseModule in project druid by druid-io.
From class TaskSerdeTest, method testIndexTaskwithResourceSerde:
@Test
public void testIndexTaskwithResourceSerde() throws Exception {
    final IndexTask task = new IndexTask(
        null,
        new TaskResource("rofl", 2),
        new IndexTask.IndexIngestionSpec(
            new DataSchema(
                "foo",
                null,
                new AggregatorFactory[]{new DoubleSumAggregatorFactory("met", "met")},
                new UniformGranularitySpec(Granularities.DAY, null, ImmutableList.of(new Interval("2010-01-01/P2D"))),
                jsonMapper
            ),
            new IndexTask.IndexIOConfig(new LocalFirehoseFactory(new File("lol"), "rofl", null), true, null),
            new IndexTask.IndexTuningConfig(10000, 10, null, null, indexSpec, 3, true, true, true)
        ),
        null,
        jsonMapper
    );
    for (final Module jacksonModule : new FirehoseModule().getJacksonModules()) {
        jsonMapper.registerModule(jacksonModule);
    }
    final String json = jsonMapper.writeValueAsString(task);
    // Just want to run the clock a bit to make sure the task id doesn't change
    Thread.sleep(100);
    final IndexTask task2 = (IndexTask) jsonMapper.readValue(json, Task.class);
    Assert.assertEquals("foo", task.getDataSource());
    Assert.assertEquals(task.getId(), task2.getId());
    Assert.assertEquals(2, task.getTaskResource().getRequiredCapacity());
    Assert.assertEquals("rofl", task.getTaskResource().getAvailabilityGroup());
    Assert.assertEquals(task.getTaskResource().getRequiredCapacity(), task2.getTaskResource().getRequiredCapacity());
    Assert.assertEquals(task.getTaskResource().getAvailabilityGroup(), task2.getTaskResource().getAvailabilityGroup());
    Assert.assertEquals(task.getGroupId(), task2.getGroupId());
    Assert.assertEquals(task.getDataSource(), task2.getDataSource());
    Assert.assertTrue(task.getIngestionSchema().getIOConfig().getFirehoseFactory() instanceof LocalFirehoseFactory);
    Assert.assertTrue(task2.getIngestionSchema().getIOConfig().getFirehoseFactory() instanceof LocalFirehoseFactory);
}
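
The registerModule loop above is the crux of the example: FirehoseModule contributes the Jackson subtype registrations that let the mapper resolve the polymorphic firehose field back to LocalFirehoseFactory. A minimal sketch of the same pattern in isolation, assuming Druid's DefaultObjectMapper and the same imports as the test above plus io.druid.data.input.FirehoseFactory:

ObjectMapper mapper = new DefaultObjectMapper();
// Register FirehoseModule's Jackson modules so the firehose subtype names resolve.
for (Module jacksonModule : new FirehoseModule().getJacksonModules()) {
    mapper.registerModule(jacksonModule);
}
FirehoseFactory factory = new LocalFirehoseFactory(new File("lol"), "rofl", null);
String json = mapper.writeValueAsString(factory);
// Round-trip through the polymorphic base type; this fails without the modules above.
Assert.assertTrue(mapper.readValue(json, FirehoseFactory.class) instanceof LocalFirehoseFactory);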
Use of io.druid.guice.FirehoseModule in project druid by druid-io.
From class DruidJsonValidator, method run:
@Override
public void run() {
    File file = new File(jsonFile);
    if (!file.exists()) {
        // Keep going: for the "parse" type, jsonFile may still resolve as a
        // classpath resource or be treated as an inline JSON string below.
        System.out.printf("File[%s] does not exist.%n", file);
    }
    final Injector injector = makeInjector();
    final ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
    registerModules(
        jsonMapper,
        Iterables.concat(
            Initialization.getFromExtensions(injector.getInstance(ExtensionsConfig.class), DruidModule.class),
            Arrays.asList(
                new FirehoseModule(),
                new IndexingHadoopModule(),
                new IndexingServiceFirehoseModule(),
                new LocalDataStorageDruidModule(),
                new ParsersModule()
            )
        )
    );
    final ClassLoader loader;
    if (Thread.currentThread().getContextClassLoader() != null) {
        loader = Thread.currentThread().getContextClassLoader();
    } else {
        loader = DruidJsonValidator.class.getClassLoader();
    }
    if (toLogger) {
        logWriter = new NullWriter() {
            private final Logger logger = new Logger(DruidJsonValidator.class);

            @Override
            public void write(char[] cbuf, int off, int len) {
                logger.info(new String(cbuf, off, len));
            }
        };
    }
    try {
        if (type.equalsIgnoreCase("query")) {
            jsonMapper.readValue(file, Query.class);
        } else if (type.equalsIgnoreCase("hadoopConfig")) {
            jsonMapper.readValue(file, HadoopDruidIndexerConfig.class);
        } else if (type.equalsIgnoreCase("task")) {
            jsonMapper.readValue(file, Task.class);
        } else if (type.equalsIgnoreCase("parse")) {
            final StringInputRowParser parser;
            if (file.isFile()) {
                logWriter.write("loading parse spec from file '" + file + "'");
                parser = jsonMapper.readValue(file, StringInputRowParser.class);
            } else if (loader.getResource(jsonFile) != null) {
                logWriter.write("loading parse spec from resource '" + jsonFile + "'");
                parser = jsonMapper.readValue(loader.getResource(jsonFile), StringInputRowParser.class);
            } else {
                logWriter.write("cannot find proper spec from 'file'.. regarding it as a json spec");
                parser = jsonMapper.readValue(jsonFile, StringInputRowParser.class);
            }
            if (resource != null) {
                final CharSource source;
                if (new File(resource).isFile()) {
                    logWriter.write("loading data from file '" + resource + "'");
                    source = Resources.asByteSource(new File(resource).toURL()).asCharSource(Charset.forName(parser.getEncoding()));
                } else if (loader.getResource(resource) != null) {
                    logWriter.write("loading data from resource '" + resource + "'");
                    source = Resources.asByteSource(loader.getResource(resource)).asCharSource(Charset.forName(parser.getEncoding()));
                } else {
                    logWriter.write("cannot find proper data from 'resource'.. regarding it as data string");
                    source = CharSource.wrap(resource);
                }
                readData(parser, source);
            }
        } else {
            throw new UOE("Unknown type[%s]", type);
        }
    } catch (Exception e) {
        System.out.println("INVALID JSON!");
        throw Throwables.propagate(e);
    }
}
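
The body of the registerModules helper is not shown in this excerpt. A plausible sketch, inferred from its call site above rather than taken from the Druid source, simply walks each DruidModule and registers its Jackson modules, mirroring the per-module loops in the tests:

// Hypothetical helper body, sketched from the call site above.
static void registerModules(ObjectMapper jsonMapper, Iterable<DruidModule> modules) {
    for (DruidModule druidModule : modules) {
        for (Module jacksonModule : druidModule.getJacksonModules()) {
            jsonMapper.registerModule(jacksonModule);
        }
    }
}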
Use of io.druid.guice.FirehoseModule in project druid by druid-io.
From class DruidJsonValidatorTest, method testTaskValidator:
@Test
public void testTaskValidator() throws Exception {
    final ObjectMapper jsonMapper = new DefaultObjectMapper();
    for (final Module jacksonModule : new FirehoseModule().getJacksonModules()) {
        jsonMapper.registerModule(jacksonModule);
    }
    final RealtimeIndexTask task = new RealtimeIndexTask(
        null,
        new TaskResource("rofl", 2),
        new FireDepartment(
            new DataSchema(
                "foo",
                null,
                new AggregatorFactory[0],
                new UniformGranularitySpec(Granularities.HOUR, Granularities.NONE, null),
                jsonMapper
            ),
            new RealtimeIOConfig(
                new LocalFirehoseFactory(new File("lol"), "rofl", null),
                new PlumberSchool() {
                    @Override
                    public Plumber findPlumber(DataSchema schema, RealtimeTuningConfig config, FireDepartmentMetrics metrics) {
                        return null;
                    }
                },
                null
            ),
            new RealtimeTuningConfig(1, new Period("PT10M"), null, null, null, null, 1, NoneShardSpec.instance(), new IndexSpec(), null, 0, 0, true, null)
        ),
        null
    );
    File tmp = temporaryFolder.newFile("test_task.json");
    jsonMapper.writeValue(tmp, task);
    parseCommand("validator", "-f", tmp.getAbsolutePath(), "-t", "task").run();
}
Use of io.druid.guice.FirehoseModule in project druid by druid-io.
From class Initialization, method makeInjectorWithModules:
public static Injector makeInjectorWithModules(final Injector baseInjector, Iterable<? extends Module> modules) {
    final ModuleList defaultModules = new ModuleList(baseInjector);
    defaultModules.addModules(
        // New modules should be added after Log4jShutterDownerModule
        new Log4jShutterDownerModule(),
        new DruidAuthModule(),
        new LifecycleModule(),
        EmitterModule.class,
        HttpClientModule.global(),
        new HttpClientModule("druid.broker.http", Client.class),
        new CuratorModule(),
        new AnnouncerModule(),
        new DruidProcessingModule(),
        new AWSModule(),
        new MetricsModule(),
        new ServerModule(),
        new StorageNodeModule(),
        new JettyServerModule(),
        new QueryableModule(),
        new QueryRunnerFactoryModule(),
        new DiscoveryModule(),
        new ServerViewModule(),
        new MetadataConfigModule(),
        new DerbyMetadataStorageDruidModule(),
        new JacksonConfigManagerModule(),
        new IndexingServiceDiscoveryModule(),
        new CoordinatorDiscoveryModule(),
        new LocalDataStorageDruidModule(),
        new FirehoseModule(),
        new ParsersModule(),
        new JavaScriptModule(),
        new StartupLoggingModule()
    );
    ModuleList actualModules = new ModuleList(baseInjector);
    actualModules.addModule(DruidSecondaryModule.class);
    for (Object module : modules) {
        actualModules.addModule(module);
    }
    Module intermediateModules = Modules.override(defaultModules.getModules()).with(actualModules.getModules());
    ModuleList extensionModules = new ModuleList(baseInjector);
    final ExtensionsConfig config = baseInjector.getInstance(ExtensionsConfig.class);
    for (DruidModule module : Initialization.getFromExtensions(config, DruidModule.class)) {
        extensionModules.addModule(module);
    }
    return Guice.createInjector(Modules.override(intermediateModules).with(extensionModules.getModules()));
}
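
A minimal usage sketch, assuming GuiceInjectors.makeStartupInjector() supplies the bootstrap injector (as it does for Druid's CLI classes); the empty module list here is only a placeholder for caller-supplied bindings:

final Injector startupInjector = GuiceInjectors.makeStartupInjector();
final Injector injector = Initialization.makeInjectorWithModules(
    startupInjector,
    ImmutableList.<Module>of()  // com.google.inject.Module; real callers pass their server modules here
);
// Defaults such as FirehoseModule and ParsersModule are now bound and can be
// overridden by the modules passed in, as in DruidJsonValidator above.
final ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);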