Use of com.bakdata.conquery.models.worker.DatasetRegistry in project conquery by bakdata.
The class FormTest, method executeTest:
@Override
public void executeTest(StandaloneSupport support) throws Exception {
    DatasetRegistry namespaces = support.getNamespace().getNamespaces();

    assertThat(support.getValidator().validate(form))
            .describedAs("Form Validation Errors")
            .isEmpty();

    ManagedExecution<?> managedForm =
            support.getNamespace().getExecutionManager().runQuery(namespaces, form, support.getTestUser(), support.getDataset(), support.getConfig());

    managedForm.awaitDone(10, TimeUnit.MINUTES);

    if (managedForm.getState() != ExecutionState.DONE) {
        if (managedForm.getState() == ExecutionState.FAILED) {
            fail(getLabel() + " Query failed");
        } else {
            fail(getLabel() + " not finished after 10 min");
        }
    }

    log.info("{} QUERIES EXECUTED", getLabel());

    checkResults(support, (ManagedForm) managedForm, support.getTestUser());
}
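The wait-then-check pattern above (block until the execution settles, then branch on its terminal state to produce a distinct failure message) can be isolated into a small test helper. Below is a minimal sketch of that flow, assuming JUnit 5 on the classpath; ExecutionHandle, State, and assertFinished are hypothetical stand-ins, not conquery API.

// Minimal sketch of the wait-then-check pattern above, assuming JUnit 5.
// ExecutionHandle, State and assertFinished are hypothetical stand-ins for the conquery types.
import java.util.concurrent.TimeUnit;

import static org.junit.jupiter.api.Assertions.fail;

public class AwaitExecutionSketch {

    enum State { RUNNING, DONE, FAILED }

    interface ExecutionHandle {
        void awaitDone(long timeout, TimeUnit unit);

        State getState();
    }

    static void assertFinished(ExecutionHandle execution, String label) {
        // Block until the execution settles or the timeout elapses.
        execution.awaitDone(10, TimeUnit.MINUTES);

        // Distinguish a failed run from one that simply did not finish in time.
        if (execution.getState() == State.FAILED) {
            fail(label + " query failed");
        } else if (execution.getState() != State.DONE) {
            fail(label + " not finished after 10 min");
        }
    }
}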
Use of com.bakdata.conquery.models.worker.DatasetRegistry in project conquery by bakdata.
The class FormConfigTest, method setupTestClass:
@BeforeAll
public void setupTestClass() throws Exception {
    datasetId = dataset.getId();
    datasetId1 = dataset1.getId();

    // Mock DatasetRegistry for translation
    namespacesMock = Mockito.mock(DatasetRegistry.class);

    doAnswer(invocation -> {
        throw new UnsupportedOperationException("Not yet implemented");
    }).when(namespacesMock).getOptional(any());

    doAnswer(invocation -> {
        final DatasetId id = invocation.getArgument(0);
        Namespace namespaceMock = Mockito.mock(Namespace.class);

        if (id.equals(datasetId)) {
            when(namespaceMock.getDataset()).thenReturn(dataset);
        } else if (id.equals(datasetId1)) {
            when(namespaceMock.getDataset()).thenReturn(dataset1);
        } else {
            throw new IllegalStateException("Unknown dataset id.");
        }
        return namespaceMock;
    }).when(namespacesMock).get(any(DatasetId.class));

    when(namespacesMock.getAllDatasets()).thenReturn(List.of(dataset, dataset1));
    when(namespacesMock.injectIntoNew(any(ObjectMapper.class))).thenCallRealMethod();
    when(namespacesMock.inject(any(MutableInjectableValues.class))).thenCallRealMethod();

    storage = new NonPersistentStoreFactory().createMetaStorage();

    ((MutableInjectableValues) FormConfigProcessor.getMAPPER().getInjectableValues()).add(IdResolveContext.class, namespacesMock);
    processor = new FormConfigProcessor(validator, storage, namespacesMock);
    controller = new AuthorizationController(storage, new DevelopmentAuthorizationConfig());
    controller.start();
}
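The doAnswer/when stubbing used here, answering a lookup dynamically based on the argument and throwing for anything unexpected, is plain Mockito and can be reduced to a self-contained sketch. DatasetLookup, resolve, and mockLookup below are hypothetical names introduced for illustration, not conquery or Mockito API.

// Minimal Mockito sketch of argument-dependent stubbing, as used in setupTestClass above.
// DatasetLookup, resolve and mockLookup are hypothetical stand-ins.
import java.util.Map;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

public class RegistryMockSketch {

    interface DatasetLookup {
        String resolve(String id);
    }

    static DatasetLookup mockLookup(Map<String, String> knownDatasets) {
        DatasetLookup lookup = mock(DatasetLookup.class);

        doAnswer(invocation -> {
            // The answer depends on the id that was actually passed in.
            String id = invocation.getArgument(0);
            String dataset = knownDatasets.get(id);
            if (dataset == null) {
                throw new IllegalStateException("Unknown dataset id: " + id);
            }
            return dataset;
        }).when(lookup).resolve(any(String.class));

        return lookup;
    }
}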
Use of com.bakdata.conquery.models.worker.DatasetRegistry in project conquery by bakdata.
The class QueryProcessor, method postQuery:
/**
 * Creates a query for all datasets, then submits it for execution on the
 * intended dataset.
 */
public ManagedExecution<?> postQuery(Dataset dataset, QueryDescription query, Subject subject) {
    log.info("Query posted on Dataset[{}] by User[{}].", dataset.getId(), subject.getId());

    // This map works as long as we have query visitors that are not configured in any way.
    // So adding a visitor twice would replace the previous one, but both would have yielded the same result.
    // In the future, a better data structure might be desirable that also accounts for similar QueryVisitors with different configurations.
    ClassToInstanceMap<QueryVisitor> visitors = MutableClassToInstanceMap.create();
    query.addVisitors(visitors);

    // Initialize checks that need to traverse the query tree
    visitors.putInstance(QueryUtils.OnlyReusingChecker.class, new QueryUtils.OnlyReusingChecker());
    visitors.putInstance(NamespacedIdentifiableCollector.class, new NamespacedIdentifiableCollector());

    final String primaryGroupName = AuthorizationHelper.getPrimaryGroup(subject, storage).map(Group::getName).orElse("none");
    visitors.putInstance(ExecutionMetrics.QueryMetricsReporter.class, new ExecutionMetrics.QueryMetricsReporter(primaryGroupName));

    // Chain all Consumers
    Consumer<Visitable> consumerChain = QueryUtils.getNoOpEntryPoint();
    for (QueryVisitor visitor : visitors.values()) {
        consumerChain = consumerChain.andThen(visitor);
    }

    // Apply consumers to the query tree
    query.visit(consumerChain);

    query.authorize(subject, dataset, visitors);
    // After all authorization checks, we can use the actual subject to invoke the query and do not need to pass the Userish down through the methods.

    ExecutionMetrics.reportNamespacedIds(visitors.getInstance(NamespacedIdentifiableCollector.class).getIdentifiables(), primaryGroupName);
    ExecutionMetrics.reportQueryClassUsage(query.getClass(), primaryGroupName);

    final Namespace namespace = datasetRegistry.get(dataset.getId());
    final ExecutionManager executionManager = namespace.getExecutionManager();

    // If this is only a re-executing query, try to execute the underlying query instead.
    {
        final Optional<ManagedExecutionId> executionId = visitors.getInstance(QueryUtils.OnlyReusingChecker.class).getOnlyReused();
        final Optional<ManagedExecution<?>> execution = executionId.map(id -> tryReuse(query, id, datasetRegistry, config, executionManager, subject.getUser()));

        if (execution.isPresent()) {
            return execution.get();
        }
    }

    // Execute the query
    return executionManager.runQuery(datasetRegistry, query, subject.getUser(), dataset, config);
}
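The consumer chain built in the middle of this method is plain Consumer.andThen from the JDK: start with a no-op and append each visitor, so a single traversal of the query tree feeds all of them. A minimal standalone sketch, using String in place of Visitable and System.out in place of real visitors, could look like this:

import java.util.List;
import java.util.function.Consumer;

public class VisitorChainSketch {

    public static void main(String[] args) {
        // Two stand-in "visitors"; in postQuery these are QueryVisitor instances.
        List<Consumer<String>> visitors = List.of(
                node -> System.out.println("checker saw " + node),
                node -> System.out.println("collector saw " + node));

        // Start with a no-op and append every visitor, like the consumerChain above.
        Consumer<String> chain = node -> {};
        for (Consumer<String> visitor : visitors) {
            chain = chain.andThen(visitor);
        }

        // A single "traversal" now feeds every visitor.
        chain.accept("CQConcept");
    }
}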
Use of com.bakdata.conquery.models.worker.DatasetRegistry in project conquery by bakdata.
The class ApiV1, method registerResources:
@Override
public void registerResources(ManagerNode manager) {
    DatasetRegistry datasets = manager.getDatasetRegistry();
    JerseyEnvironment environment = manager.getEnvironment().jersey();

    environment.setUrlPattern("/api");

    // inject required services
    environment.register(new AbstractBinder() {
        @Override
        protected void configure() {
            bind(manager.getConfig()).to(ConqueryConfig.class);
            bind(manager.getDatasetRegistry()).to(DatasetRegistry.class);
            bind(manager.getStorage()).to(MetaStorage.class);
            bind(new ConceptsProcessor(manager.getDatasetRegistry())).to(ConceptsProcessor.class);
            bind(new MeProcessor(manager.getStorage(), datasets)).to(MeProcessor.class);
            bind(new QueryProcessor(datasets, manager.getStorage(), manager.getConfig())).to(QueryProcessor.class);
            bind(new FormConfigProcessor(manager.getValidator(), manager.getStorage(), datasets)).to(FormConfigProcessor.class);
        }
    });

    environment.register(CORSPreflightRequestFilter.class);
    environment.register(CORSResponseFilter.class);
    environment.register(new ActiveUsersFilter(manager.getStorage(), Duration.ofMinutes(manager.getConfig().getMetricsConfig().getUserActiveDuration().toMinutes())));

    /*
     * Register the authentication filter which protects all resources registered in this servlet.
     * We use the same instance of the filter for the api servlet and the admin servlet to have a single
     * point for authentication.
     */
    environment.register(manager.getAuthController().getAuthenticationFilter());

    environment.register(QueryResource.class);
    environment.register(IdParamConverter.Provider.INSTANCE);
    environment.register(new ConfigResource(manager.getConfig()));
    environment.register(FormConfigResource.class);
    environment.register(DatasetsResource.class);
    environment.register(ConceptResource.class);
    environment.register(DatasetResource.class);
    environment.register(FilterResource.class);
    environment.register(MeResource.class);

    for (ResultRendererProvider resultProvider : manager.getConfig().getResultProviders()) {
        resultProvider.registerResultResource(environment, manager);
    }

    environment.register(new IdRefPathParamConverterProvider(manager.getDatasetRegistry(), manager.getDatasetRegistry().getMetaRegistry()));
}
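The anonymous AbstractBinder is standard Jersey dependency injection: bind concrete instances to the types under which resources later receive them via @Inject. A minimal sketch of the same binding pattern against a plain ResourceConfig (rather than Dropwizard's JerseyEnvironment), with GreetingService as a hypothetical service, might look like this:

import org.glassfish.jersey.internal.inject.AbstractBinder;
import org.glassfish.jersey.server.ResourceConfig;

public class BinderSketch {

    // Hypothetical service that resources would receive via @Inject.
    public static class GreetingService {
        public String greet(String name) {
            return "Hello " + name;
        }
    }

    public static ResourceConfig configure() {
        ResourceConfig config = new ResourceConfig();

        config.register(new AbstractBinder() {
            @Override
            protected void configure() {
                // Bind a concrete instance to the type under which it is injected,
                // mirroring the bind(...).to(...) calls in registerResources above.
                bind(new GreetingService()).to(GreetingService.class);
            }
        });

        return config;
    }
}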
Use of com.bakdata.conquery.models.worker.DatasetRegistry in project conquery by bakdata.
The class FullExportForm, method createSubQueries:
@Override
public Map<String, List<ManagedQuery>> createSubQueries(DatasetRegistry datasets, User user, Dataset submittedDataset) {
    // Forms are sent as an array of standard queries containing AND/OR of CQConcepts.
    // We ignore everything else and just convert the CQConcepts into CQUnfilteredTables for the export.
    final List<CQUnfilteredTable> unfilteredTables =
            tables.stream()
                  .flatMap(Visitable::stream)
                  .filter(CQConcept.class::isInstance)
                  .map(CQConcept.class::cast)
                  .flatMap(concept -> concept.getTables().stream())
                  .map(table -> new CQUnfilteredTable(table.getConnector(), table.getDateColumn()))
                  .collect(Collectors.toList());

    final TableExportQuery exportQuery = new TableExportQuery(queryGroup.getQuery());
    exportQuery.setDateRange(getDateRange());
    exportQuery.setTables(unfilteredTables);

    final ManagedQuery managedQuery = new ManagedQuery(exportQuery, user, submittedDataset);
    return Map.of(ConqueryConstants.SINGLE_RESULT_TABLE_NAME, List.of(managedQuery));
}
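The table collection is an ordinary stream pipeline: traverse the submitted nodes, keep only the concept nodes, and flatten their tables into the single list handed to the export query. A self-contained sketch of that shape, ignoring the tree walk (Visitable::stream) for brevity and using QueryNode, Concept, and Negation as hypothetical stand-ins for the conquery types, could look like this:

import java.util.List;
import java.util.stream.Collectors;

public class ExportTableSketch {

    interface QueryNode {}

    // Hypothetical concept node that knows which tables it touches.
    static class Concept implements QueryNode {
        final List<String> tables;
        Concept(List<String> tables) { this.tables = tables; }
    }

    static class Negation implements QueryNode {
        final QueryNode child;
        Negation(QueryNode child) { this.child = child; }
    }

    // Mirrors the pipeline above: keep only concept nodes and flatten
    // their tables into one list for the export.
    static List<String> collectTables(List<QueryNode> nodes) {
        return nodes.stream()
                    .filter(Concept.class::isInstance)
                    .map(Concept.class::cast)
                    .flatMap(concept -> concept.tables.stream())
                    .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        List<QueryNode> nodes = List.of(
                new Concept(List.of("icd", "atc")),
                new Negation(new Concept(List.of("ops"))));
        System.out.println(collectTables(nodes)); // prints [icd, atc]
    }
}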