Use of com.google.common.io.CharSource in project druid by druid-io.
In the class MapVirtualColumnTest, the method constructorFeeder:
@Parameterized.Parameters
public static Iterable<Object[]> constructorFeeder() throws IOException {
  final Supplier<SelectQueryConfig> selectConfigSupplier = Suppliers.ofInstance(new SelectQueryConfig(true));
  SelectQueryRunnerFactory factory = new SelectQueryRunnerFactory(
      new SelectQueryQueryToolChest(
          new DefaultObjectMapper(),
          QueryRunnerTestHelper.NoopIntervalChunkingQueryRunnerDecorator(),
          selectConfigSupplier),
      new SelectQueryEngine(selectConfigSupplier),
      QueryRunnerTestHelper.NOOP_QUERYWATCHER);
  final IncrementalIndexSchema schema = new IncrementalIndexSchema.Builder()
      .withMinTimestamp(new DateTime("2011-01-12T00:00:00.000Z").getMillis())
      .withQueryGranularity(Granularities.NONE)
      .build();
  final IncrementalIndex index = new OnheapIncrementalIndex(schema, true, 10000);
  final StringInputRowParser parser = new StringInputRowParser(
      new DelimitedParseSpec(
          new TimestampSpec("ts", "iso", null),
          new DimensionsSpec(DimensionsSpec.getDefaultSchemas(Arrays.asList("dim", "keys", "values")), null, null),
          "\t",
          ",",
          Arrays.asList("ts", "dim", "keys", "values")),
      "utf8");
  // Three tab-delimited rows; "keys" and "values" are comma-separated multi-value columns.
  CharSource input = CharSource.wrap(
      "2011-01-12T00:00:00.000Z\ta\tkey1,key2,key3\tvalue1,value2,value3\n"
      + "2011-01-12T00:00:00.000Z\tb\tkey4,key5,key6\tvalue4\n"
      + "2011-01-12T00:00:00.000Z\tc\tkey1,key5\tvalue1,value5,value9\n");
  IncrementalIndex index1 = TestIndex.loadIncrementalIndex(index, input, parser);
  QueryableIndex index2 = TestIndex.persistRealtimeAndLoadMMapped(index1);
  return transformToConstructionFeeder(Arrays.asList(
      makeQueryRunner(factory, "index1", new IncrementalIndexSegment(index1, "index1"), "incremental"),
      makeQueryRunner(factory, "index2", new QueryableIndexSegment("index2", index2), "queryable")));
}
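For readers unfamiliar with the class: CharSource.wrap adapts an in-memory CharSequence into a Guava CharSource, a reusable description of character data that can be opened and read repeatedly. That is what lets tests like the one above feed literal strings to loaders that otherwise expect file-backed input. A minimal self-contained sketch; the class and variable names here are illustrative, not from the Druid code:

import com.google.common.io.CharSource;
import java.io.IOException;
import java.util.List;

public class CharSourceWrapDemo {
  public static void main(String[] args) throws IOException {
    // wrap() holds the string; each read opens a fresh stream over it.
    CharSource source = CharSource.wrap("ts\tdim\nrow1\ta\nrow2\tb\n");
    List<String> lines = source.readLines(); // line terminators are stripped
    System.out.println(lines.size()); // prints 3
  }
}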
Use of com.google.common.io.CharSource in project druid by druid-io.
In the class MultiSegmentScanQueryTest, the method setup:
@BeforeClass
public static void setup() throws IOException {
  CharSource v_0112 = CharSource.wrap(StringUtils.join(V_0112, "\n"));
  CharSource v_0113 = CharSource.wrap(StringUtils.join(V_0113, "\n"));
  IncrementalIndex index0 = TestIndex.loadIncrementalIndex(newIndex("2011-01-12T00:00:00.000Z"), v_0112);
  IncrementalIndex index1 = TestIndex.loadIncrementalIndex(newIndex("2011-01-13T00:00:00.000Z"), v_0113);
  segment0 = new IncrementalIndexSegment(index0, makeIdentifier(index0, "v1"));
  segment1 = new IncrementalIndexSegment(index1, makeIdentifier(index1, "v1"));
}
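Here each CharSource is built by joining a String[] of raw rows with newlines. If the goal were simply to treat several sources as one stream, Guava can also concatenate them lazily; a brief hypothetical sketch reusing the variables above:

// Reads v_0112 to exhaustion, then v_0113, without building a combined string.
CharSource both = CharSource.concat(v_0112, v_0113);
List<String> allRows = both.readLines();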
Use of com.google.common.io.CharSource in project druid by druid-io.
In the class DruidJsonValidator, the method run:
@Override
public void run() {
  File file = new File(jsonFile);
  if (!file.exists()) {
    System.out.printf("File[%s] does not exist.%n", file);
  }
  final Injector injector = makeInjector();
  final ObjectMapper jsonMapper = injector.getInstance(ObjectMapper.class);
  registerModules(
      jsonMapper,
      Iterables.concat(
          Initialization.getFromExtensions(injector.getInstance(ExtensionsConfig.class), DruidModule.class),
          Arrays.asList(
              new FirehoseModule(),
              new IndexingHadoopModule(),
              new IndexingServiceFirehoseModule(),
              new LocalDataStorageDruidModule(),
              new ParsersModule())));
  final ClassLoader loader;
  if (Thread.currentThread().getContextClassLoader() != null) {
    loader = Thread.currentThread().getContextClassLoader();
  } else {
    loader = DruidJsonValidator.class.getClassLoader();
  }
  if (toLogger) {
    // Redirect validator output to the logger instead of discarding it.
    logWriter = new NullWriter() {
      private final Logger logger = new Logger(DruidJsonValidator.class);

      @Override
      public void write(char[] cbuf, int off, int len) {
        logger.info(new String(cbuf, off, len));
      }
    };
  }
  try {
    if (type.equalsIgnoreCase("query")) {
      jsonMapper.readValue(file, Query.class);
    } else if (type.equalsIgnoreCase("hadoopConfig")) {
      jsonMapper.readValue(file, HadoopDruidIndexerConfig.class);
    } else if (type.equalsIgnoreCase("task")) {
      jsonMapper.readValue(file, Task.class);
    } else if (type.equalsIgnoreCase("parse")) {
      final StringInputRowParser parser;
      if (file.isFile()) {
        logWriter.write("loading parse spec from file '" + file + "'");
        parser = jsonMapper.readValue(file, StringInputRowParser.class);
      } else if (loader.getResource(jsonFile) != null) {
        logWriter.write("loading parse spec from resource '" + jsonFile + "'");
        parser = jsonMapper.readValue(loader.getResource(jsonFile), StringInputRowParser.class);
      } else {
        logWriter.write("cannot find proper spec from 'file'.. regarding it as a json spec");
        parser = jsonMapper.readValue(jsonFile, StringInputRowParser.class);
      }
      if (resource != null) {
        // Resolve the sample data the same way: file path, then classpath resource, then literal string.
        final CharSource source;
        if (new File(resource).isFile()) {
          logWriter.write("loading data from file '" + resource + "'");
          source = Resources.asByteSource(new File(resource).toURL())
                            .asCharSource(Charset.forName(parser.getEncoding()));
        } else if (loader.getResource(resource) != null) {
          logWriter.write("loading data from resource '" + resource + "'");
          source = Resources.asByteSource(loader.getResource(resource))
                            .asCharSource(Charset.forName(parser.getEncoding()));
        } else {
          logWriter.write("cannot find proper data from 'resource'.. regarding it as data string");
          source = CharSource.wrap(resource);
        }
        readData(parser, source);
      }
    } else {
      throw new UOE("Unknown type[%s]", type);
    }
  } catch (Exception e) {
    System.out.println("INVALID JSON!");
    throw Throwables.propagate(e);
  }
}
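readData is not shown in this snippet; presumably it streams the resolved CharSource through the parser one row at a time. A hypothetical sketch of that pattern, assuming StringInputRowParser.parse(String) is available; the method body is an assumption, not the actual Druid implementation:

static void readData(StringInputRowParser parser, CharSource source) throws IOException {
  // openBufferedStream() yields a BufferedReader regardless of what backs the source.
  try (BufferedReader reader = source.openBufferedStream()) {
    String line;
    while ((line = reader.readLine()) != null) {
      System.out.println(parser.parse(line)); // parse each row into an InputRow and print it
    }
  }
}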
Use of com.google.common.io.CharSource in project druid by druid-io.
In the class SearchQueryRunnerWithCaseTest, the method constructorFeeder:
@Parameterized.Parameters
public static Iterable<Object[]> constructorFeeder() throws IOException {
  // Exercise each search strategy against the same data.
  final SearchQueryConfig[] configs = new SearchQueryConfig[3];
  configs[0] = new SearchQueryConfig();
  configs[0].setSearchStrategy(UseIndexesStrategy.NAME);
  configs[1] = new SearchQueryConfig();
  configs[1].setSearchStrategy(CursorOnlyStrategy.NAME);
  configs[2] = new SearchQueryConfig();
  configs[2].setSearchStrategy(AutoStrategy.NAME);
  // Rows deliberately mix upper and lower case to test case (in)sensitivity.
  CharSource input = CharSource.wrap(
      "2011-01-12T00:00:00.000Z\tspot\tAutoMotive\t1000\t10000.0\t100000\tPREFERRED\tapreferred\t100.000000\n"
      + "2011-01-12T00:00:00.000Z\tSPot\tbusiness\t1100\t11000.0\t110000\tpreferred\tbPreferred\t100.000000\n"
      + "2011-01-12T00:00:00.000Z\tspot\tentertainment\t1200\t12000.0\t120000\tPREFERRed\tepreferred\t100.000000\n"
      + "2011-01-13T00:00:00.000Z\tspot\tautomotive\t1000\t10000.0\t100000\tpreferred\tapreferred\t94.874713");
  IncrementalIndex index1 = TestIndex.makeRealtimeIndex(input);
  IncrementalIndex index2 = TestIndex.makeRealtimeIndex(input);
  QueryableIndex index3 = TestIndex.persistRealtimeAndLoadMMapped(index1);
  QueryableIndex index4 = TestIndex.persistRealtimeAndLoadMMapped(index2);
  final List<QueryRunner<Result<SearchResultValue>>> runners = Lists.newArrayList();
  for (SearchQueryConfig config : configs) {
    runners.addAll(Arrays.asList(
        makeQueryRunner(makeRunnerFactory(config), "index1", new IncrementalIndexSegment(index1, "index1"), "index1"),
        makeQueryRunner(makeRunnerFactory(config), "index2", new IncrementalIndexSegment(index2, "index2"), "index2"),
        makeQueryRunner(makeRunnerFactory(config), "index3", new QueryableIndexSegment("index3", index3), "index3"),
        makeQueryRunner(makeRunnerFactory(config), "index4", new QueryableIndexSegment("index4", index4), "index4")));
  }
  return transformToConstructionFeeder(runners);
}
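Note that the same input CharSource is consumed twice, once for index1 and once for index2. This is safe because a CharSource describes how to open character data rather than holding an open reader, so every read starts from the beginning. A quick illustration with made-up data:

CharSource src = CharSource.wrap("a\nb");
src.readLines(); // [a, b]
src.readLines(); // [a, b] again; wrap()-ed sources are freely re-readable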
Use of com.google.common.io.CharSource in project MinecraftForge by MinecraftForge.
In the class FMLDeobfuscatingRemapper, the method setup:
public void setup(File mcDir, LaunchClassLoader classLoader, String deobfFileName) {
  this.classLoader = classLoader;
  try {
    List<String> srgList;
    final String gradleStartProp = System.getProperty("net.minecraftforge.gradle.GradleStart.srg.srg-mcp");
    if (Strings.isNullOrEmpty(gradleStartProp)) {
      // Not launched via GradleStart: read the LZMA-compressed mappings bundled as a classpath resource.
      InputStream classData = getClass().getResourceAsStream(deobfFileName);
      LZMAInputSupplier zis = new LZMAInputSupplier(classData);
      CharSource srgSource = zis.asCharSource(Charsets.UTF_8);
      srgList = srgSource.readLines();
      FMLRelaunchLog.fine("Loading deobfuscation resource %s with %d records", deobfFileName, srgList.size());
    } else {
      // In a development environment GradleStart points at an uncompressed SRG file on disk.
      srgList = Files.readLines(new File(gradleStartProp), Charsets.UTF_8);
      FMLRelaunchLog.fine("Loading deobfuscation resource %s with %d records", gradleStartProp, srgList.size());
    }
    rawMethodMaps = Maps.newHashMap();
    rawFieldMaps = Maps.newHashMap();
    Builder<String, String> builder = ImmutableBiMap.builder();
    // SRG records are colon- and space-delimited, e.g. "CL: obfName deobfName".
    Splitter splitter = Splitter.on(CharMatcher.anyOf(": ")).omitEmptyStrings().trimResults();
    for (String line : srgList) {
      String[] parts = Iterables.toArray(splitter.split(line), String.class);
      String typ = parts[0];
      if ("CL".equals(typ)) {
        parseClass(builder, parts); // class mapping
      } else if ("MD".equals(typ)) {
        parseMethod(parts); // method mapping
      } else if ("FD".equals(typ)) {
        parseField(parts); // field mapping
      }
    }
    classNameBiMap = builder.build();
  } catch (IOException ioe) {
    FMLRelaunchLog.log(Level.ERROR, ioe, "An error occurred loading the deobfuscation map data");
  }
  methodNameMaps = Maps.newHashMapWithExpectedSize(rawMethodMaps.size());
  fieldNameMaps = Maps.newHashMapWithExpectedSize(rawFieldMaps.size());
}
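LZMAInputSupplier is Forge's own adapter around the compressed mapping stream; the underlying Guava idiom is to expose bytes as a ByteSource and view them as characters via asCharSource. A hypothetical equivalent for an uncompressed classpath resource (the class and resource path are made up for illustration):

import com.google.common.base.Charsets;
import com.google.common.io.Resources;
import java.io.IOException;
import java.net.URL;
import java.util.List;

public class SrgResourceDemo {
  public static List<String> readSrg(URL resource) throws IOException {
    // View the raw bytes as UTF-8 characters and read them line by line.
    return Resources.asByteSource(resource).asCharSource(Charsets.UTF_8).readLines();
  }
}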