use of com.bakdata.conquery.models.datasets.SecondaryIdDescription in project conquery by bakdata.
the class AdminDatasetProcessor method deleteSecondaryId.
/**
 * Delete SecondaryId if it does not have any dependents.
 */
public synchronized void deleteSecondaryId(@NonNull SecondaryIdDescription secondaryId) {
    final Namespace namespace = datasetRegistry.get(secondaryId.getDataset().getId());

    // Before we commit this deletion, we check if this SecondaryId still has dependent Columns.
    final List<Column> dependents = namespace.getStorage().getTables().stream()
                                             .map(Table::getColumns)
                                             .flatMap(Arrays::stream)
                                             .filter(column -> secondaryId.equals(column.getSecondaryId()))
                                             .collect(Collectors.toList());

    if (!dependents.isEmpty()) {
        final Set<TableId> tables = dependents.stream()
                                              .map(Column::getTable)
                                              .map(Identifiable::getId)
                                              .collect(Collectors.toSet());

        log.error("SecondaryId[{}] still present on {}", secondaryId, tables);
        throw new ForbiddenException(String.format("SecondaryId still has dependencies. %s", tables));
    }

    log.info("Deleting SecondaryId[{}]", secondaryId);

    namespace.getStorage().removeSecondaryId(secondaryId.getId());
    namespace.sendToAll(new RemoveSecondaryId(secondaryId));
}
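The interesting part of this method is the dependency check: it flattens every Column of every Table in the Namespace and keeps those whose secondaryId matches the description being deleted. Below is a minimal sketch of that guard extracted into a standalone helper. It uses only the accessors visible above; the class and method names are hypothetical, and the import locations for Column, Table and Namespace are assumptions inferred from the snippet.

// Sketch of the dependency guard in isolation (helper name is hypothetical).
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

// Assumed conquery imports (packages inferred from the snippet above):
import com.bakdata.conquery.models.datasets.Column;
import com.bakdata.conquery.models.datasets.SecondaryIdDescription;
import com.bakdata.conquery.models.datasets.Table;
import com.bakdata.conquery.models.worker.Namespace;

public final class SecondaryIdDependencyCheck {

    private SecondaryIdDependencyCheck() {}

    /**
     * Returns all Columns in the Namespace that still reference the given SecondaryIdDescription.
     * An empty result means the SecondaryId can be deleted safely.
     */
    public static List<Column> findDependentColumns(Namespace namespace, SecondaryIdDescription secondaryId) {
        return namespace.getStorage().getTables().stream()
                        .map(Table::getColumns)                                        // Column[] per Table
                        .flatMap(Arrays::stream)                                       // flatten to a single stream of Columns
                        .filter(column -> secondaryId.equals(column.getSecondaryId())) // keep only columns bound to this SecondaryId
                        .collect(Collectors.toList());
    }
}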
use of com.bakdata.conquery.models.datasets.SecondaryIdDescription in project conquery by bakdata.
the class SecondaryIdEndpointTest method execute.
@Override
public void execute(StandaloneSupport conquery) throws Exception {
    final SecondaryIdDescription description = new SecondaryIdDescription();
    description.setDescription("description-DESCRIPTION");
    description.setName("description-NAME");
    description.setLabel("description-LABEL");

    final SecondaryIdDescriptionId id = new SecondaryIdDescriptionId(conquery.getDataset().getId(), description.getName());

    final Response post = uploadDescription(conquery, description);

    log.info("{}", post);
    assertThat(post)
            .describedAs("Response = `%s`", post)
            .returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());

    {
        final Set<FESecondaryId> secondaryIds = fetchSecondaryIdDescriptions(conquery);
        log.info("{}", secondaryIds);

        description.setDataset(conquery.getDataset());
        assertThat(secondaryIds).extracting(FESecondaryId::getId).containsExactly(description.getId().toString());

        // Upload a Table referencing the SecondaryId
        {
            // Build the data manually so the content is minimal (i.e. no dataset prefixes etc.)
            ObjectNode tableNode = Jackson.MAPPER.createObjectNode();
            tableNode.put("name", "table");

            ObjectNode columnNode = Jackson.MAPPER.createObjectNode();
            columnNode.put("name", "column");
            columnNode.put("type", MajorTypeId.INTEGER.name());
            columnNode.put("secondaryId", description.getId().toStringWithoutDataset());

            tableNode.put("columns", columnNode);

            final Response response = uploadTable(conquery, tableNode);
            assertThat(response.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.SUCCESSFUL);
        }
    }

    {
        final URI uri = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), DatasetsUIResource.class, "getDataset")
                                       .buildFromMap(Map.of("dataset", conquery.getDataset().getName()));

        final Response actual = conquery.getClient().target(uri).request().get();

        assertThat(actual).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());
    }

    {
        // The first delete fails because the table still depends on the SecondaryId
        assertThat(deleteDescription(conquery, id)).returns(Response.Status.Family.CLIENT_ERROR, response -> response.getStatusInfo().getFamily());

        deleteTable(conquery, new TableId(conquery.getDataset().getId(), "table"));

        // Now that the table is deleted, the delete should succeed
        assertThat(deleteDescription(conquery, id)).returns(Response.Status.Family.SUCCESSFUL, response -> response.getStatusInfo().getFamily());

        final Set<FESecondaryId> secondaryIds = fetchSecondaryIdDescriptions(conquery);
        log.info("{}", secondaryIds);
        assertThat(secondaryIds).isEmpty();
    }
}
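For reference, the manually built table payload above amounts to a small JSON document. The following sketch rebuilds it with a plain ObjectMapper standing in for Jackson.MAPPER and prints the result; the literal values for type and secondaryId are what the test would produce, assuming toStringWithoutDataset() yields just the description's name.

// Sketch: the JSON the test's tableNode serializes to. A plain ObjectMapper stands in for Jackson.MAPPER.
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;

public class TablePayloadSketch {

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();

        ObjectNode columnNode = mapper.createObjectNode();
        columnNode.put("name", "column");
        columnNode.put("type", "INTEGER");                 // MajorTypeId.INTEGER.name()
        columnNode.put("secondaryId", "description-NAME"); // assumed: the id without its dataset prefix

        ObjectNode tableNode = mapper.createObjectNode();
        tableNode.put("name", "table");
        tableNode.set("columns", columnNode);

        // Prints (field order follows insertion order):
        // {"name":"table","columns":{"name":"column","type":"INTEGER","secondaryId":"description-NAME"}}
        System.out.println(mapper.writeValueAsString(tableNode));
    }
}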
use of com.bakdata.conquery.models.datasets.SecondaryIdDescription in project conquery by bakdata.
the class QueryProcessor method tryReuse.
/**
 * Determine whether the submitted query merely reuses another query; if so, restart that execution instead of creating a new one.
 */
private ManagedExecution<?> tryReuse(QueryDescription query, ManagedExecutionId executionId, DatasetRegistry datasetRegistry, ConqueryConfig config, ExecutionManager executionManager, User user) {
    ManagedExecution<?> execution = datasetRegistry.getMetaRegistry().resolve(executionId);

    if (execution == null) {
        return null;
    }

    // Direct reuse only works if the queries are of the same type (as reuse reconstructs the Query for differing types)
    if (!query.getClass().equals(execution.getSubmitted().getClass())) {
        return null;
    }

    // If the selected SecondaryId differs from the prior one, we cannot reuse the execution.
    if (query instanceof SecondaryIdQuery) {
        final SecondaryIdDescription selectedSecondaryId = ((SecondaryIdQuery) query).getSecondaryId();
        final SecondaryIdDescription reusedSecondaryId = ((SecondaryIdQuery) execution.getSubmitted()).getSecondaryId();

        if (!selectedSecondaryId.equals(reusedSecondaryId)) {
            return null;
        }
    }

    // If the user is not the owner of the execution, we create a new Execution owned by them, so they can cancel it.
    if (!user.isOwner(execution)) {
        final ManagedExecution<?> newExecution = executionManager.createExecution(datasetRegistry, execution.getSubmitted(), user, execution.getDataset());
        newExecution.setLabel(execution.getLabel());
        newExecution.setTags(execution.getTags().clone());
        storage.updateExecution(newExecution);

        execution = newExecution;
    }

    ExecutionState state = execution.getState();
    if (state.equals(ExecutionState.RUNNING)) {
        log.trace("The Execution[{}] was already started and its state is: {}", execution.getId(), state);
        return execution;
    }

    log.trace("Re-executing Query {}", execution);
    executionManager.execute(datasetRegistry, execution, config);

    return execution;
}
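The SecondaryId guard is the piece that involves SecondaryIdDescription: reuse is only allowed when the submitted SecondaryIdQuery selects the same SecondaryId as the prior execution. Below is a condensed sketch of that check as a helper; the method name is hypothetical, and, as in tryReuse, it assumes the earlier same-class check has already established that both queries are SecondaryIdQueries.

// Sketch: the SecondaryId comparison from tryReuse, extracted into a helper (name is hypothetical).
// Callers must have already verified that `submitted` and `prior` are of the same class,
// exactly as tryReuse does before reaching this check.
private static boolean sameSecondaryId(QueryDescription submitted, QueryDescription prior) {
    if (!(submitted instanceof SecondaryIdQuery)) {
        // The guard only applies to SecondaryIdQueries; other query types pass through.
        return true;
    }

    final SecondaryIdDescription selected = ((SecondaryIdQuery) submitted).getSecondaryId();
    final SecondaryIdDescription reused = ((SecondaryIdQuery) prior).getSecondaryId();

    return selected.equals(reused);
}

With such a helper, tryReuse would return null whenever it yields false, forcing a fresh execution to be created instead of reusing the prior one.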
use of com.bakdata.conquery.models.datasets.SecondaryIdDescription in project conquery by bakdata.
the class LoadingUtil method importSecondaryIds.
public static Map<String, SecondaryIdDescription> importSecondaryIds(StandaloneSupport support, List<RequiredSecondaryId> secondaryIds) {
    Map<String, SecondaryIdDescription> out = new HashMap<>();

    for (RequiredSecondaryId required : secondaryIds) {
        final SecondaryIdDescription description = required.toSecondaryId();

        support.getDatasetsProcessor().addSecondaryId(support.getNamespace(), description);

        out.put(description.getName(), description);
    }

    return out;
}
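A typical use of this helper during test setup looks roughly like the following; support and requiredSecondaryIds come from the surrounding test fixture, and the lookup key is illustrative.

// Sketch: using importSecondaryIds in a test setup (variable names and the lookup key are illustrative).
Map<String, SecondaryIdDescription> byName = LoadingUtil.importSecondaryIds(support, requiredSecondaryIds);

// The map is keyed by SecondaryIdDescription#getName, so a description can be looked up
// by name when wiring it into the columns of the test's tables.
SecondaryIdDescription description = byName.get("some-secondary-id");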