
Example 1 with UOE

Use of io.druid.java.util.common.UOE in project druid by druid-io.

The class DruidJsonValidator, method run().

@Override
public void run() {
    File file = new File(jsonFile);
    if (!file.exists()) {
        System.out.printf("File[%s] does not exist.%n", file);
    }
    final Injector injector = makeInjector();
    final ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
    registerModules(
        jsonMapper,
        Iterables.concat(
            Initialization.getFromExtensions(injector.getInstance(ExtensionsConfig.class), DruidModule.class),
            Arrays.asList(
                new FirehoseModule(),
                new IndexingHadoopModule(),
                new IndexingServiceFirehoseModule(),
                new LocalDataStorageDruidModule(),
                new ParsersModule()
            )
        )
    );
    final ClassLoader loader;
    if (Thread.currentThread().getContextClassLoader() != null) {
        loader = Thread.currentThread().getContextClassLoader();
    } else {
        loader = DruidJsonValidator.class.getClassLoader();
    }
    if (toLogger) {
        logWriter = new NullWriter() {

            private final Logger logger = new Logger(DruidJsonValidator.class);

            @Override
            public void write(char[] cbuf, int off, int len) {
                logger.info(new String(cbuf, off, len));
            }
        };
    }
    try {
        if (type.equalsIgnoreCase("query")) {
            jsonMapper.readValue(file, Query.class);
        } else if (type.equalsIgnoreCase("hadoopConfig")) {
            jsonMapper.readValue(file, HadoopDruidIndexerConfig.class);
        } else if (type.equalsIgnoreCase("task")) {
            jsonMapper.readValue(file, Task.class);
        } else if (type.equalsIgnoreCase("parse")) {
            final StringInputRowParser parser;
            if (file.isFile()) {
                logWriter.write("loading parse spec from file '" + file + "'");
                parser = jsonMapper.readValue(file, StringInputRowParser.class);
            } else if (loader.getResource(jsonFile) != null) {
                logWriter.write("loading parse spec from resource '" + jsonFile + "'");
                parser = jsonMapper.readValue(loader.getResource(jsonFile), StringInputRowParser.class);
            } else {
                logWriter.write("cannot find proper spec from 'file'.. regarding it as a json spec");
                parser = jsonMapper.readValue(jsonFile, StringInputRowParser.class);
            }
            if (resource != null) {
                final CharSource source;
                if (new File(resource).isFile()) {
                    logWriter.write("loading data from file '" + resource + "'");
                    source = Resources.asByteSource(new File(resource).toURL()).asCharSource(Charset.forName(parser.getEncoding()));
                } else if (loader.getResource(resource) != null) {
                    logWriter.write("loading data from resource '" + resource + "'");
                    source = Resources.asByteSource(loader.getResource(resource)).asCharSource(Charset.forName(parser.getEncoding()));
                } else {
                    logWriter.write("cannot find proper data from 'resource'.. regarding it as data string");
                    source = CharSource.wrap(resource);
                }
                readData(parser, source);
            }
        } else {
            throw new UOE("Unknown type[%s]", type);
        }
    } catch (Exception e) {
        System.out.println("INVALID JSON!");
        throw Throwables.propagate(e);
    }
}
Also used: CharSource(com.google.common.io.CharSource), IndexingServiceFirehoseModule(io.druid.guice.IndexingServiceFirehoseModule), IndexingHadoopModule(io.druid.indexer.IndexingHadoopModule), LocalDataStorageDruidModule(io.druid.guice.LocalDataStorageDruidModule), UOE(io.druid.java.util.common.UOE), Logger(io.druid.java.util.common.logger.Logger), HadoopDruidIndexerConfig(io.druid.indexer.HadoopDruidIndexerConfig), NullWriter(org.apache.commons.io.output.NullWriter), IOException(java.io.IOException), FirehoseModule(io.druid.guice.FirehoseModule), Injector(com.google.inject.Injector), StringInputRowParser(io.druid.data.input.impl.StringInputRowParser), ParsersModule(io.druid.guice.ParsersModule), ExtensionsConfig(io.druid.guice.ExtensionsConfig), File(java.io.File), ObjectMapper(com.fasterxml.jackson.databind.ObjectMapper)
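All five examples on this page construct UOE with a format string plus arguments, so the class itself is the common thread. A minimal sketch of what it presumably looks like, inferred from these call sites rather than copied from the Druid source; the real class may delegate to Druid's own StringUtils formatting helper instead of String.format:

package io.druid.java.util.common;

// Sketch only: reconstructed from usages such as UOE("Unknown type[%s]", type).
// It is thrown unchecked, so UnsupportedOperationException is the natural parent.
public class UOE extends UnsupportedOperationException {
    public UOE(String formatText, Object... arguments) {
        super(String.format(formatText, arguments));
    }
}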

Example 2 with UOE

Use of io.druid.java.util.common.UOE in project druid by druid-io.

The class IndexIOTest, method constructionFeeder().

@Parameterized.Parameters
public static Iterable<Object[]> constructionFeeder() {
    final Map<String, Object> map = ImmutableMap.<String, Object>of();
    final Map<String, Object> map00 = ImmutableMap.<String, Object>of("dim0", ImmutableList.<String>of("dim00", "dim01"));
    final Map<String, Object> map10 = ImmutableMap.<String, Object>of("dim1", "dim10");
    final Map<String, Object> map0null = new HashMap<>();
    map0null.put("dim0", null);
    final Map<String, Object> map1null = new HashMap<>();
    map1null.put("dim1", null);
    final Map<String, Object> mapAll = ImmutableMap.<String, Object>of("dim0", ImmutableList.<String>of("dim00", "dim01"), "dim1", "dim10");
    final List<Map<String, Object>> maps = ImmutableList.of(map, map00, map10, map0null, map1null, mapAll);
    return Iterables.<Object[]>concat(// First iterable tests permutations of the maps which are expected to be equal
    Iterables.<Object[]>concat(new Iterable<Iterable<Object[]>>() {

        @Override
        public Iterator<Iterable<Object[]>> iterator() {
            return new Iterator<Iterable<Object[]>>() {

                long nextBitset = 1L;

                @Override
                public boolean hasNext() {
                    return nextBitset < (1L << maps.size());
                }

                @Override
                public Iterable<Object[]> next() {
                    final BitSet bitset = BitSet.valueOf(new long[] { nextBitset++ });
                    final List<Map<String, Object>> myMaps = filterByBitset(maps, bitset);
                    return Collections2.transform(Collections2.permutations(myMaps), new Function<List<Map<String, Object>>, Object[]>() {

                        @Nullable
                        @Override
                        public Object[] apply(List<Map<String, Object>> input) {
                            return new Object[] { input, input, null };
                        }
                    });
                }

                @Override
                public void remove() {
                    throw new UOE("Remove not suported");
                }
            };
        }
    }), // Second iterable tests combinations of the maps which may or may not be equal
    Iterables.<Object[]>concat(new Iterable<Iterable<Object[]>>() {

        @Override
        public Iterator<Iterable<Object[]>> iterator() {
            return new Iterator<Iterable<Object[]>>() {

                long nextMap1Bits = 1L;

                @Override
                public boolean hasNext() {
                    return nextMap1Bits < (1L << maps.size());
                }

                @Override
                public Iterable<Object[]> next() {
                    final BitSet bitset1 = BitSet.valueOf(new long[] { nextMap1Bits++ });
                    final List<Map<String, Object>> maplist1 = filterByBitset(maps, bitset1);
                    return new Iterable<Object[]>() {

                        @Override
                        public Iterator<Object[]> iterator() {
                            return new Iterator<Object[]>() {

                                long nextMap2Bits = 1L;

                                @Override
                                public boolean hasNext() {
                                    return nextMap2Bits < (1L << maps.size());
                                }

                                @Override
                                public Object[] next() {
                                    final List<Map<String, Object>> maplist2 = filterByBitset(maps, BitSet.valueOf(new long[] { nextMap2Bits++ }));
                                    return new Object[] { maplist1, maplist2, filterNullValues(maplist1).equals(filterNullValues(maplist2)) ? null : SegmentValidationException.class };
                                }

                                @Override
                                public void remove() {
                                    throw new UOE("remove not supported");
                                }
                            };
                        }
                    };
                }

                @Override
                public void remove() {
                    throw new UOE("Remove not supported");
                }
            };
        }
    }));
}
Also used: HashMap(java.util.HashMap), BitSet(java.util.BitSet), UOE(io.druid.java.util.common.UOE), Iterator(java.util.Iterator), ArrayList(java.util.ArrayList), ImmutableList(com.google.common.collect.ImmutableList), List(java.util.List), Map(java.util.Map), ImmutableMap(com.google.common.collect.ImmutableMap), Nullable(javax.annotation.Nullable)
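The generator above leans on two private helpers that this page does not show, filterByBitset and filterNullValues. A hedged sketch of what filterByBitset presumably does, picking out the maps whose index bit is set; the name and behavior are inferred from the call sites, and the method would live in the same test class, using the imports already listed above:

// Hypothetical reconstruction of the helper used above; the actual IndexIOTest code is not shown here.
private static List<Map<String, Object>> filterByBitset(List<Map<String, Object>> maps, BitSet bitset) {
    final List<Map<String, Object>> selected = new ArrayList<>();
    for (int i = 0; i < maps.size(); i++) {
        if (bitset.get(i)) {
            // keep the i-th candidate map when bit i is set in the subset mask
            selected.add(maps.get(i));
        }
    }
    return selected;
}

Because nextBitset runs from 1 to 2^maps.size() - 1, every non-empty subset of the six candidate maps is produced exactly once.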

Example 3 with UOE

Use of io.druid.java.util.common.UOE in project druid by druid-io.

The class S3DataSegmentPuller, method buildFileObject().

public static FileObject buildFileObject(final URI uri, final RestS3Service s3Client) throws ServiceException {
    final S3Coords coords = new S3Coords(checkURI(uri));
    final StorageObject s3Obj = s3Client.getObjectDetails(coords.bucket, coords.path);
    final String path = uri.getPath();
    return new FileObject() {

        final Object inputStreamOpener = new Object();

        volatile boolean streamAcquired = false;

        volatile StorageObject storageObject = s3Obj;

        @Override
        public URI toUri() {
            return uri;
        }

        @Override
        public String getName() {
            final String ext = Files.getFileExtension(path);
            return Files.getNameWithoutExtension(path) + (Strings.isNullOrEmpty(ext) ? "" : ("." + ext));
        }

        @Override
        public InputStream openInputStream() throws IOException {
            try {
                synchronized (inputStreamOpener) {
                    if (streamAcquired) {
                        return storageObject.getDataInputStream();
                    }
                    // lazily promote to full GET
                    storageObject = s3Client.getObject(s3Obj.getBucketName(), s3Obj.getKey());
                    final InputStream stream = storageObject.getDataInputStream();
                    streamAcquired = true;
                    return stream;
                }
            } catch (ServiceException e) {
                throw new IOException(StringUtils.safeFormat("Could not load S3 URI [%s]", uri), e);
            }
        }

        @Override
        public OutputStream openOutputStream() throws IOException {
            throw new UOE("Cannot stream S3 output");
        }

        @Override
        public Reader openReader(boolean ignoreEncodingErrors) throws IOException {
            throw new UOE("Cannot open reader");
        }

        @Override
        public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
            throw new UOE("Cannot open character sequence");
        }

        @Override
        public Writer openWriter() throws IOException {
            throw new UOE("Cannot open writer");
        }

        @Override
        public long getLastModified() {
            return s3Obj.getLastModifiedDate().getTime();
        }

        @Override
        public boolean delete() {
            throw new UOE("Cannot delete S3 items anonymously. jetS3t doesn't support authenticated deletes easily.");
        }
    };
}
Also used: StorageObject(org.jets3t.service.model.StorageObject), ServiceException(org.jets3t.service.ServiceException), S3ServiceException(org.jets3t.service.S3ServiceException), InputStream(java.io.InputStream), FileObject(javax.tools.FileObject), UOE(io.druid.java.util.common.UOE), IOException(java.io.IOException)
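A hypothetical caller for the object built above, not taken from the Druid source: only openInputStream() is usable, and it triggers the lazy promotion from getObjectDetails to a full getObject, while every write-oriented method throws UOE. The wrapper class, method name, and URI below are illustrative, and S3DataSegmentPuller is assumed to live in io.druid.storage.s3.

import com.google.common.io.ByteStreams;
import org.jets3t.service.ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;

import javax.tools.FileObject;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

// Hypothetical helper, not part of the Druid source shown above.
public class S3PullSketch {
    public static void pullToLocal(RestS3Service s3Client, URI s3Uri, File destination) throws ServiceException, IOException {
        final FileObject remote = io.druid.storage.s3.S3DataSegmentPuller.buildFileObject(s3Uri, s3Client);
        try (InputStream in = remote.openInputStream();
             OutputStream out = new FileOutputStream(destination)) {
            // copies the S3 object to disk; the full GET only happens inside openInputStream()
            ByteStreams.copy(in, out);
        }
    }
}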

Example 4 with UOE

Use of io.druid.java.util.common.UOE in project druid by druid-io.

The class LocalDataSegmentPuller, method buildFileObject().

public static FileObject buildFileObject(final URI uri) {
    final Path path = Paths.get(uri);
    final File file = path.toFile();
    return new FileObject() {

        @Override
        public URI toUri() {
            return uri;
        }

        @Override
        public String getName() {
            return path.getFileName().toString();
        }

        @Override
        public InputStream openInputStream() throws IOException {
            return new FileInputStream(file);
        }

        @Override
        public OutputStream openOutputStream() throws IOException {
            return new FileOutputStream(file);
        }

        @Override
        public Reader openReader(boolean ignoreEncodingErrors) throws IOException {
            return new FileReader(file);
        }

        @Override
        public CharSequence getCharContent(boolean ignoreEncodingErrors) throws IOException {
            throw new UOE("CharSequence not supported");
        }

        @Override
        public Writer openWriter() throws IOException {
            return new FileWriter(file);
        }

        @Override
        public long getLastModified() {
            return file.lastModified();
        }

        @Override
        public boolean delete() {
            return file.delete();
        }
    };
}
Also used: Path(java.nio.file.Path), FileOutputStream(java.io.FileOutputStream), FileWriter(java.io.FileWriter), FileReader(java.io.FileReader), UOE(io.druid.java.util.common.UOE), FileObject(javax.tools.FileObject), File(java.io.File), FileInputStream(java.io.FileInputStream)
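By contrast, the local FileObject supports reads and writes; only getCharContent() falls back to UOE. A hypothetical round-trip, not from the Druid source, with an illustrative file path and wrapper class, and LocalDataSegmentPuller assumed to live in io.druid.segment.loading:

import javax.tools.FileObject;
import java.io.IOException;
import java.io.Reader;
import java.io.Writer;
import java.net.URI;

// Hypothetical caller, not part of the Druid source shown above.
public class LocalFileObjectSketch {
    public static void main(String[] args) throws IOException {
        final FileObject local = io.druid.segment.loading.LocalDataSegmentPuller.buildFileObject(URI.create("file:///tmp/uoe-example.txt"));
        try (Writer writer = local.openWriter()) {
            writer.write("hello druid\n");
        }
        try (Reader reader = local.openReader(true)) {
            int c;
            while ((c = reader.read()) != -1) {
                System.out.print((char) c);
            }
        }
        // local.getCharContent(true) is the one unsupported call here and would throw UOE
    }
}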

Example 5 with UOE

Use of io.druid.java.util.common.UOE in project druid by druid-io.

The class URIExtractionNamespaceCacheFactoryTest, method getParameters().

@Parameterized.Parameters(name = "{0}")
public static Iterable<Object[]> getParameters() throws NoSuchMethodException {
    final List<Object[]> compressionParams = ImmutableList.of(new Object[] { ".dat", new Function<File, OutputStream>() {

        @Nullable
        @Override
        public OutputStream apply(@Nullable File outFile) {
            try {
                return new FileOutputStream(outFile);
            } catch (IOException ex) {
                throw Throwables.propagate(ex);
            }
        }
    } }, new Object[] { ".gz", new Function<File, OutputStream>() {

        @Nullable
        @Override
        public OutputStream apply(@Nullable File outFile) {
            try {
                final FileOutputStream fos = new FileOutputStream(outFile);
                return new GZIPOutputStream(fos) {

                    @Override
                    public void close() throws IOException {
                        try {
                            super.close();
                        } finally {
                            fos.close();
                        }
                    }
                };
            } catch (IOException ex) {
                throw Throwables.propagate(ex);
            }
        }
    } });
    final List<Function<Lifecycle, NamespaceExtractionCacheManager>> cacheManagerCreators = ImmutableList.of(new Function<Lifecycle, NamespaceExtractionCacheManager>() {

        @Override
        public NamespaceExtractionCacheManager apply(Lifecycle lifecycle) {
            return new OnHeapNamespaceExtractionCacheManager(lifecycle, new NoopServiceEmitter());
        }
    }, new Function<Lifecycle, NamespaceExtractionCacheManager>() {

        @Override
        public NamespaceExtractionCacheManager apply(Lifecycle lifecycle) {
            return new OffHeapNamespaceExtractionCacheManager(lifecycle, new NoopServiceEmitter());
        }
    });
    return new Iterable<Object[]>() {

        @Override
        public Iterator<Object[]> iterator() {
            return new Iterator<Object[]>() {

                Iterator<Object[]> compressionIt = compressionParams.iterator();

                Iterator<Function<Lifecycle, NamespaceExtractionCacheManager>> cacheManagerCreatorsIt = cacheManagerCreators.iterator();

                Object[] compressions = compressionIt.next();

                @Override
                public boolean hasNext() {
                    return compressionIt.hasNext() || cacheManagerCreatorsIt.hasNext();
                }

                @Override
                public Object[] next() {
                    if (cacheManagerCreatorsIt.hasNext()) {
                        Function<Lifecycle, NamespaceExtractionCacheManager> cacheManagerCreator = cacheManagerCreatorsIt.next();
                        return new Object[] { compressions[0], compressions[1], cacheManagerCreator };
                    } else {
                        cacheManagerCreatorsIt = cacheManagerCreators.iterator();
                        compressions = compressionIt.next();
                        return next();
                    }
                }

                @Override
                public void remove() {
                    throw new UOE("Cannot remove");
                }
            };
        }
    };
}
Also used: OnHeapNamespaceExtractionCacheManager(io.druid.server.lookup.namespace.cache.OnHeapNamespaceExtractionCacheManager), GZIPOutputStream(java.util.zip.GZIPOutputStream), OutputStream(java.io.OutputStream), FileOutputStream(java.io.FileOutputStream), Lifecycle(io.druid.java.util.common.lifecycle.Lifecycle), NoopServiceEmitter(io.druid.server.metrics.NoopServiceEmitter), UOE(io.druid.java.util.common.UOE), IOException(java.io.IOException), OffHeapNamespaceExtractionCacheManager(io.druid.server.lookup.namespace.cache.OffHeapNamespaceExtractionCacheManager), Function(com.google.common.base.Function), NamespaceExtractionCacheManager(io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager), Iterator(java.util.Iterator), File(java.io.File), Nullable(javax.annotation.Nullable)
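The hand-rolled iterator above simply enumerates the cross product of compression variants and cache-manager factories, resetting the inner iterator each time the outer one advances. A plain nested-loop sketch that would yield the same parameter rows eagerly; this is a restatement for clarity, not code from the test, and the wrapper class name is illustrative:

import com.google.common.base.Function;
import io.druid.java.util.common.lifecycle.Lifecycle;
import io.druid.server.lookup.namespace.cache.NamespaceExtractionCacheManager;

import java.util.ArrayList;
import java.util.List;

// Equivalent eager construction of the same parameter rows (sketch, not from the Druid source).
public class CrossProductSketch {
    public static List<Object[]> crossProduct(
        List<Object[]> compressionParams,
        List<Function<Lifecycle, NamespaceExtractionCacheManager>> cacheManagerCreators
    ) {
        final List<Object[]> rows = new ArrayList<>();
        for (Object[] compression : compressionParams) {
            for (Function<Lifecycle, NamespaceExtractionCacheManager> creator : cacheManagerCreators) {
                // same row shape as the original next(): { suffix, outputStreamFactory, cacheManagerCreator }
                rows.add(new Object[]{compression[0], compression[1], creator});
            }
        }
        return rows;
    }
}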

Aggregations

UOE (io.druid.java.util.common.UOE): 5
File (java.io.File): 3
IOException (java.io.IOException): 3
FileOutputStream (java.io.FileOutputStream): 2
Iterator (java.util.Iterator): 2
Nullable (javax.annotation.Nullable): 2
FileObject (javax.tools.FileObject): 2
ObjectMapper (com.fasterxml.jackson.databind.ObjectMapper): 1
Function (com.google.common.base.Function): 1
ImmutableList (com.google.common.collect.ImmutableList): 1
ImmutableMap (com.google.common.collect.ImmutableMap): 1
CharSource (com.google.common.io.CharSource): 1
Injector (com.google.inject.Injector): 1
StringInputRowParser (io.druid.data.input.impl.StringInputRowParser): 1
ExtensionsConfig (io.druid.guice.ExtensionsConfig): 1
FirehoseModule (io.druid.guice.FirehoseModule): 1
IndexingServiceFirehoseModule (io.druid.guice.IndexingServiceFirehoseModule): 1
LocalDataStorageDruidModule (io.druid.guice.LocalDataStorageDruidModule): 1
ParsersModule (io.druid.guice.ParsersModule): 1
HadoopDruidIndexerConfig (io.druid.indexer.HadoopDruidIndexerConfig): 1