Search in sources:

Example 1 with IndexingServiceInputSourceModule

Use of org.apache.druid.guice.IndexingServiceInputSourceModule in project druid by druid-io.

From the class DruidJsonValidator, method run.

@Override
public void run() {
    File file = new File(jsonFile);
    if (!file.exists()) {
        LOG.info("File[%s] does not exist.%n", file);
    }
    final Injector injector = makeInjector();
    final ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
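    // Register Jackson modules from loaded extensions plus the built-in indexing-related modules,
    // so that specs referencing their types can be deserialized.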
    registerModules(
        jsonMapper,
        Iterables.concat(
            Initialization.getFromExtensions(injector.getInstance(ExtensionsConfig.class), DruidModule.class),
            Arrays.asList(
                new FirehoseModule(),
                new IndexingHadoopModule(),
                new IndexingServiceFirehoseModule(),
                new IndexingServiceInputSourceModule(),
                new LocalDataStorageDruidModule()
            )
        )
    );
    final ClassLoader loader;
    if (Thread.currentThread().getContextClassLoader() != null) {
        loader = Thread.currentThread().getContextClassLoader();
    } else {
        loader = DruidJsonValidator.class.getClassLoader();
    }
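    // When requested, route the validator's messages to the Druid logger instead of the default writer.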
    if (toLogger) {
        logWriter = new NullWriter() {

            private final Logger logger = new Logger(DruidJsonValidator.class);

            @Override
            public void write(char[] cbuf, int off, int len) {
                logger.info(new String(cbuf, off, len));
            }
        };
    }
    try {
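        // Deserialize the input as the requested type; any failure is reported as invalid JSON.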
        if ("query".equalsIgnoreCase(type)) {
            jsonMapper.readValue(file, Query.class);
        } else if ("hadoopConfig".equalsIgnoreCase(type)) {
            jsonMapper.readValue(file, HadoopDruidIndexerConfig.class);
        } else if ("task".equalsIgnoreCase(type)) {
            jsonMapper.readValue(file, Task.class);
        } else if ("parse".equalsIgnoreCase(type)) {
            final StringInputRowParser parser;
            if (file.isFile()) {
                logWriter.write("loading parse spec from file '" + file + "'");
                parser = jsonMapper.readValue(file, StringInputRowParser.class);
            } else if (loader.getResource(jsonFile) != null) {
                logWriter.write("loading parse spec from resource '" + jsonFile + "'");
                parser = jsonMapper.readValue(loader.getResource(jsonFile), StringInputRowParser.class);
            } else {
                logWriter.write("cannot find proper spec from 'file'.. regarding it as a json spec");
                parser = jsonMapper.readValue(jsonFile, StringInputRowParser.class);
            }
            parser.initializeParser();
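            // If sample data was supplied via 'resource', run it through the freshly built parser.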
            if (resource != null) {
                final CharSource source;
                if (new File(resource).isFile()) {
                    logWriter.write("loading data from file '" + resource + "'");
                    source = Resources.asByteSource(new File(resource).toURI().toURL()).asCharSource(Charset.forName(parser.getEncoding()));
                } else if (loader.getResource(resource) != null) {
                    logWriter.write("loading data from resource '" + resource + "'");
                    source = Resources.asByteSource(loader.getResource(resource)).asCharSource(Charset.forName(parser.getEncoding()));
                } else {
                    logWriter.write("cannot find proper data from 'resource'.. regarding it as data string");
                    source = CharSource.wrap(resource);
                }
                readData(parser, source);
            }
        } else {
            throw new UOE("Unknown type[%s]", type);
        }
    } catch (Exception e) {
        LOG.error(e, "INVALID JSON!");
        Throwables.propagateIfPossible(e);
        throw new RuntimeException(e);
    }
}
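
The registerModules helper invoked above is not part of the excerpt. The sketch below shows one way such a helper could be written, assuming only the public DruidModule#getJacksonModules() contract; the actual implementation in DruidJsonValidator may differ.

// Minimal sketch (not from the excerpt): register each DruidModule's Jackson modules
// on the mapper so that polymorphic spec types (input sources, firehoses, tasks, ...)
// can be deserialized.
private static ObjectMapper registerModules(ObjectMapper objectMapper, Iterable<DruidModule> druidModules) {
    for (DruidModule druidModule : druidModules) {
        for (com.fasterxml.jackson.databind.Module jacksonModule : druidModule.getJacksonModules()) {
            objectMapper.registerModule(jacksonModule);
        }
    }
    return objectMapper;
}
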
Also used:
CharSource (com.google.common.io.CharSource)
IndexingServiceFirehoseModule (org.apache.druid.guice.IndexingServiceFirehoseModule)
IndexingHadoopModule (org.apache.druid.indexer.IndexingHadoopModule)
LocalDataStorageDruidModule (org.apache.druid.guice.LocalDataStorageDruidModule)
UOE (org.apache.druid.java.util.common.UOE)
Logger (org.apache.druid.java.util.common.logger.Logger)
HadoopDruidIndexerConfig (org.apache.druid.indexer.HadoopDruidIndexerConfig)
NullWriter (org.apache.commons.io.output.NullWriter)
IOException (java.io.IOException)
FirehoseModule (org.apache.druid.guice.FirehoseModule)
IndexingServiceInputSourceModule (org.apache.druid.guice.IndexingServiceInputSourceModule)
Injector (com.google.inject.Injector)
StringInputRowParser (org.apache.druid.data.input.impl.StringInputRowParser)
ExtensionsConfig (org.apache.druid.guice.ExtensionsConfig)
File (java.io.File)
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper)

Aggregations

ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
CharSource (com.google.common.io.CharSource): 1
Injector (com.google.inject.Injector): 1
File (java.io.File): 1
IOException (java.io.IOException): 1
NullWriter (org.apache.commons.io.output.NullWriter): 1
StringInputRowParser (org.apache.druid.data.input.impl.StringInputRowParser): 1
ExtensionsConfig (org.apache.druid.guice.ExtensionsConfig): 1
FirehoseModule (org.apache.druid.guice.FirehoseModule): 1
IndexingServiceFirehoseModule (org.apache.druid.guice.IndexingServiceFirehoseModule): 1
IndexingServiceInputSourceModule (org.apache.druid.guice.IndexingServiceInputSourceModule): 1
LocalDataStorageDruidModule (org.apache.druid.guice.LocalDataStorageDruidModule): 1
HadoopDruidIndexerConfig (org.apache.druid.indexer.HadoopDruidIndexerConfig): 1
IndexingHadoopModule (org.apache.druid.indexer.IndexingHadoopModule): 1
UOE (org.apache.druid.java.util.common.UOE): 1
Logger (org.apache.druid.java.util.common.logger.Logger): 1
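
Since this page is about IndexingServiceInputSourceModule, a short standalone sketch may help: the module can also be applied to a plain ObjectMapper outside the injector, again assuming only the DruidModule#getJacksonModules() contract; the concrete subtype names it registers depend on the Druid version and are not shown here.

// Minimal sketch: register only this module's Jackson modules on a fresh mapper.
ObjectMapper mapper = new ObjectMapper();
for (com.fasterxml.jackson.databind.Module jacksonModule : new IndexingServiceInputSourceModule().getJacksonModules()) {
    mapper.registerModule(jacksonModule);
}
// The mapper can now resolve the input source subtypes registered by this module,
// e.g. when reading an ingestion spec's "inputSource" section.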