Use of com.bakdata.conquery.models.config.ConqueryConfig in project conquery by bakdata.
The class StandaloneSupport, method preprocessTmp:
/**
 * Runs the preprocessor over the given description files, using {@code tmpDir} as both
 * input and output directory.
 *
 * @param tmpDir       directory read from ("in") and written to ("out")
 * @param descriptions import description files handed to the preprocessor ("desc")
 * @throws Exception if the preprocessor run fails
 */
public void preprocessTmp(File tmpDir, List<File> descriptions) throws Exception {
	final Environment environment = testConquery.getDropwizard().getEnvironment();
	final net.sourceforge.argparse4j.inf.Namespace arguments =
			new net.sourceforge.argparse4j.inf.Namespace(Map.of("in", tmpDir, "out", tmpDir, "desc", descriptions));
	// PreprocessorCommand#run is not visible from here; the anonymous subclass widens the
	// override to public, and we invoke it directly on the anonymous-class expression.
	new PreprocessorCommand(MoreExecutors.newDirectExecutorService()) {
		@Override
		public void run(Environment env, net.sourceforge.argparse4j.inf.Namespace ns, ConqueryConfig cfg) throws Exception {
			super.run(env, ns, cfg);
		}
	}.run(environment, arguments, config);
}
Use of com.bakdata.conquery.models.config.ConqueryConfig in project conquery by bakdata.
The class StandaloneCommand, method startStandalone:
/**
 * Boots a complete standalone deployment: one ManagerNode, then the configured number of
 * ShardNodes (started concurrently on a dedicated pool), and finally the REST server.
 *
 * <p>Fix: the result of {@code awaitTermination} was previously ignored — on a timeout the
 * method would proceed and then block indefinitely in {@code Future#get()}. A timeout now
 * aborts the process instead of continuing with half-initialized shards.
 *
 * @param environment the Dropwizard environment
 * @param namespace   parsed command-line arguments
 * @param config      the base configuration; storage directories are derived per node
 * @throws Exception if any node fails to start
 */
protected void startStandalone(Environment environment, Namespace namespace, ConqueryConfig config) throws Exception {
	// start ManagerNode
	ConqueryMDC.setLocation("ManagerNode");
	log.debug("Starting ManagerNode");
	final ConqueryConfig managerConfig = configWithStorageSubDirectory(config, "manager");
	conquery.setManager(manager);
	conquery.run(managerConfig, environment);
	// create thread pool to start multiple ShardNodes at the same time
	final ExecutorService starterPool = Executors.newFixedThreadPool(
			config.getStandalone().getNumberOfShardNodes(),
			new ThreadFactoryBuilder()
					.setNameFormat("ShardNode Storage Loader %d")
					.setUncaughtExceptionHandler((t, e) -> {
						ConqueryMDC.setLocation(t.getName());
						log.error(t.getName() + " failed to init storage of ShardNode", e);
					})
					.build());
	final List<Future<ShardNode>> tasks = new ArrayList<>();
	for (int i = 0; i < config.getStandalone().getNumberOfShardNodes(); i++) {
		final int id = i;
		tasks.add(starterPool.submit(() -> {
			final ShardNode sc = new ShardNode(ShardNode.DEFAULT_NAME + id);
			// NOTE(review): shardNodes is mutated from multiple starter threads —
			// confirm it is a thread-safe collection.
			shardNodes.add(sc);
			ConqueryMDC.setLocation(sc.getName());
			// Each shard gets its own storage sub-directory so shards don't collide on disk.
			sc.run(environment, namespace, configWithStorageSubDirectory(config, "shard-node" + id));
			return sc;
		}));
	}
	ConqueryMDC.setLocation("ManagerNode");
	log.debug("Waiting for ShardNodes to start");
	starterPool.shutdown();
	if (!starterPool.awaitTermination(1, TimeUnit.HOURS)) {
		// Previously ignored: without this check a timeout fell through to Future#get()
		// below, which would block forever on the unfinished tasks.
		log.error("Timed out waiting for ShardNodes to start");
		starterPool.shutdownNow();
		System.exit(-1);
	}
	// catch exceptions on tasks
	boolean failed = false;
	for (Future<ShardNode> f : tasks) {
		try {
			f.get();
		} catch (ExecutionException e) {
			log.error("during ShardNodes creation", e);
			failed = true;
		}
	}
	if (failed) {
		System.exit(-1);
	}
	// starts the Jersey Server
	log.debug("Starting REST Server");
	ConqueryMDC.setLocation(null);
	super.run(environment, namespace, config);
}

/**
 * Returns a copy of {@code config} whose Xodus storage directory is resolved into the given
 * sub-directory, or {@code config} unchanged when a different storage factory is configured.
 * Extracted from the previously duplicated manager/shard cloning code.
 */
private static ConqueryConfig configWithStorageSubDirectory(ConqueryConfig config, String subDirectory) {
	if (!(config.getStorage() instanceof XodusStoreFactory)) {
		return config;
	}
	final XodusStoreFactory storage = (XodusStoreFactory) config.getStorage();
	final Path directory = storage.getDirectory().resolve(subDirectory);
	return config.withStorage(storage.withDirectory(directory));
}
Use of com.bakdata.conquery.models.config.ConqueryConfig in project conquery by bakdata.
The class ResultArrowProcessor, method getArrowResult:
/**
 * Streams the results of a single-table execution as an Arrow download response.
 *
 * <p>Authorizes read/download on the dataset, read on the execution, and download on every
 * dataset the query referenced, before any result data is rendered.
 *
 * @param writerProducer builds the Arrow writer for the response output stream
 * @param subject        the requesting subject, used for all authorization checks
 * @param exec           the execution whose results are downloaded; must yield a single table
 * @param dataset        the dataset the execution belongs to
 * @return a streaming attachment response, or 422 when the execution is not a single table
 */
public static <E extends ManagedExecution<?> & SingleTableResult> Response getArrowResult(Function<OutputStream, Function<VectorSchemaRoot, ArrowWriter>> writerProducer, Subject subject, E exec, Dataset dataset, DatasetRegistry datasetRegistry, boolean pretty, String fileExtension, MediaType mediaType, ConqueryConfig config) {
	final Namespace namespace = datasetRegistry.get(dataset.getId());
	ConqueryMDC.setLocation(subject.getName());
	log.info("Downloading results for {} on dataset {}", exec, dataset);
	subject.authorize(dataset, Ability.READ);
	subject.authorize(dataset, Ability.DOWNLOAD);
	subject.authorize(exec, Ability.READ);
	// Check if subject is permitted to download on all datasets that were referenced by the query
	authorizeDownloadDatasets(subject, exec);
	final boolean singleTable =
			exec instanceof ManagedQuery
			|| (exec instanceof ManagedForm && ((ManagedForm) exec).getSubQueries().size() == 1);
	if (!singleTable) {
		return Response.status(HttpStatus.SC_UNPROCESSABLE_ENTITY, "Execution result is not a single Table").build();
	}
	// Get the locale extracted by the LocaleFilter
	final IdPrinter idPrinter = config.getFrontend().getQueryUpload().getIdPrinter(subject, exec, namespace);
	final Locale locale = I18n.LOCALE.get();
	final PrintSettings printSettings = new PrintSettings(pretty, locale, datasetRegistry, config, idPrinter::createId);
	// Collect ResultInfos for id columns and result columns
	final List<ResultInfo> idResultInfos = config.getFrontend().getQueryUpload().getIdResultInfos();
	final List<ResultInfo> execResultInfos = exec.getResultInfos();
	final StreamingOutput streamingOutput = output ->
			renderToStream(writerProducer.apply(output), printSettings, config.getArrow().getBatchSize(), idResultInfos, execResultInfos, exec.streamResults());
	return makeResponseWithFileName(streamingOutput, exec.getLabelWithoutAutoLabelSuffix(), fileExtension, mediaType, ResultUtil.ContentDispositionOption.ATTACHMENT);
}
Use of com.bakdata.conquery.models.config.ConqueryConfig in project conquery by bakdata.
The class StringEncodingTest, method testHexStreamStringType:
@Test
public void testHexStreamStringType() {
	final StringParser parser = new StringParser(new ConqueryConfig());
	// Feed 100 uppercase hex strings (UUIDs with dashes stripped) into the parser.
	Stream.generate(() -> UUID.randomUUID().toString().replace("-", "")).map(String::toUpperCase).mapToInt(v -> {
		try {
			return parser.parse(v);
		} catch (ParsingException e) {
			// We know that StringTypeVarInt is able to parse our strings.
			return 0;
		}
	}).limit(100).forEach(parser::addLine);
	// Fix: assert the type BEFORE casting. The original cast-then-isInstanceOf check could
	// never fail as an assertion — a wrong type would throw ClassCastException at the cast.
	final Object bestType = parser.findBestType();
	assertThat(bestType).isInstanceOf(StringTypeEncoded.class);
	final StringTypeEncoded subType = (StringTypeEncoded) bestType;
	assertThat(subType.getEncoding()).isEqualByComparingTo(StringTypeEncoded.Encoding.Base16UpperCase);
}
Use of com.bakdata.conquery.models.config.ConqueryConfig in project conquery by bakdata.
The class DateRangeParserTest, method onlyClosed:
@Test
public void onlyClosed() {
	// Feed only closed ranges (and a single-day range) into a fresh parser.
	final DateRangeParser dateRangeParser = new DateRangeParser(new ConqueryConfig());
	dateRangeParser.addLine(CDateRange.of(10, 11));
	dateRangeParser.addLine(CDateRange.exactly(10));
	// The parser should pick the plain date-range representation, backed by a compact store.
	final ColumnStore decided = dateRangeParser.decideType();
	assertThat(decided).isInstanceOf(DateRangeTypeDateRange.class);
	final IntegerDateStore minStore = (IntegerDateStore) ((DateRangeTypeDateRange) decided).getMinStore();
	assertThat(minStore.getStore()).isInstanceOfAny(ByteArrayStore.class, RebasingStore.class);
}
Aggregations