Example use of org.apache.druid.data.input.impl.prefetch.OpenObject in the druid-io/druid project:
the connect method of the PrefetchSqlFirehoseFactory class.
/**
 * Creates a {@link SqlFirehose} over the configured SQL queries.
 *
 * <p>Query results are fetched in the background by a dedicated single-threaded executor and
 * surfaced to the firehose as {@link JsonIterator}s, one per fetched object. When caching or
 * prefetching is enabled, intermediate results are spilled to {@code temporaryDirectory}, which
 * must then exist and be a directory.
 *
 * @param firehoseParser     parser applied to each row produced by the queries
 * @param temporaryDirectory scratch directory for spilled/cached results; may be null only when
 *                           both caching and prefetching are disabled
 * @return a firehose whose close handler shuts down the background fetch executor
 */
@Override
public Firehose connect(InputRowParser<Map<String, Object>> firehoseParser, @Nullable File temporaryDirectory)
{
  if (objects == null) {
    objects = ImmutableList.copyOf(Preconditions.checkNotNull(initObjects(), "objects"));
  }

  // Caching/prefetching writes intermediate results to disk, so a usable scratch directory is required.
  if (cacheManager.isEnabled() || fetchConfig.getMaxFetchCapacityBytes() > 0) {
    Preconditions.checkNotNull(temporaryDirectory, "temporaryDirectory");
    Preconditions.checkArgument(
        temporaryDirectory.exists(),
        "temporaryDirectory[%s] does not exist",
        temporaryDirectory
    );
    Preconditions.checkArgument(
        temporaryDirectory.isDirectory(),
        "temporaryDirectory[%s] is not a directory",
        temporaryDirectory
    );
  }

  LOG.info("Create a new firehose for [%d] queries", objects.size());

  // fetchExecutor is responsible for background data fetching
  final ExecutorService fetchExecutor = Execs.singleThreaded("firehose_fetch_%d");
  final Fetcher<T> fetcher = new SqlFetcher<>(
      cacheManager,
      objects,
      fetchExecutor,
      temporaryDirectory,
      fetchConfig,
      new ObjectOpenFunction<T>()
      {
        @Override
        public InputStream open(T object, File outFile) throws IOException
        {
          return openObjectStream(object, outFile);
        }

        @Override
        public InputStream open(T object) throws IOException
        {
          // No destination supplied by the caller: spill the results into a fresh temp file.
          final File outFile = File.createTempFile("sqlresults_", null, temporaryDirectory);
          return openObjectStream(object, outFile);
        }
      }
  );

  return new SqlFirehose(
      new Iterator<JsonIterator<Map<String, Object>>>()
      {
        @Override
        public boolean hasNext()
        {
          return fetcher.hasNext();
        }

        @Override
        public JsonIterator<Map<String, Object>> next()
        {
          if (!hasNext()) {
            throw new NoSuchElementException();
          }
          try {
            final TypeReference<Map<String, Object>> type = new TypeReference<Map<String, Object>>()
            {
            };
            final OpenObject<T> openObject = fetcher.next();
            final InputStream stream = openObject.getObjectStream();
            // The resource closer releases the underlying stream/file once the iterator is closed.
            return new JsonIterator<>(type, stream, openObject.getResourceCloser(), objectMapper);
          }
          catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      },
      firehoseParser,
      () -> {
        fetchExecutor.shutdownNow();
        try {
          // Bounded wait so close() cannot hang forever on a stuck fetch.
          Preconditions.checkState(
              fetchExecutor.awaitTermination(fetchConfig.getFetchTimeout(), TimeUnit.MILLISECONDS)
          );
        }
        catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          // Preserve the cause instead of dropping it (the original threw ISE without `e`).
          throw new ISE(e, "Failed to shutdown fetch executor during close");
        }
      }
  );
}
Aggregations