use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.
the class TableDeletionTest method execute.
@Override
public void execute(String name, TestConquery testConquery) throws Exception {
final StandaloneSupport conquery = testConquery.getSupport(name);
final MetaStorage storage = conquery.getMetaStorage();
final String testJson = In.resource("/tests/query/DELETE_IMPORT_TESTS/SIMPLE_TREECONCEPT_Query.test.json").withUTF8().readAll();
final Dataset dataset = conquery.getDataset();
final Namespace namespace = conquery.getNamespace();
final TableId tableId = TableId.Parser.INSTANCE.parse(dataset.getName(), "test_table2");
final QueryTest test = (QueryTest) JsonIntegrationTest.readJson(dataset, testJson);
// Manually import data, so we can do our own work.
{
ValidatorHelper.failOnError(log, conquery.getValidator().validate(test));
importSecondaryIds(conquery, test.getContent().getSecondaryIds());
conquery.waitUntilWorkDone();
LoadingUtil.importTables(conquery, test.getContent().getTables());
conquery.waitUntilWorkDone();
LoadingUtil.importConcepts(conquery, test.getRawConcepts());
conquery.waitUntilWorkDone();
LoadingUtil.importTableContents(conquery, test.getContent().getTables());
conquery.waitUntilWorkDone();
}
final Query query = IntegrationUtils.parseQuery(conquery, test.getRawQuery());
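// Remember the initial number of imports; the counts after deletion and after re-import are checked against it.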
final int nImports = namespace.getStorage().getAllImports().size();
// State before deletion.
{
log.info("Checking state before deletion");
// The table must still be registered in the central registry.
assertThat(namespace.getStorage().getCentralRegistry().getOptional(tableId)).isNotEmpty();
for (ShardNode node : conquery.getShardNodes()) {
for (Worker value : node.getWorkers().getWorkers().values()) {
if (!value.getInfo().getDataset().equals(dataset.getId())) {
continue;
}
final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).isNotEmpty();
assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
}
}
log.info("Executing query before deletion");
IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
}
// Delete the table.
{
log.info("Issuing deletion of import {}", tableId);
// Delete the import via API.
// But, we do not allow deletion of tables with associated connectors, so this should throw!
final URI deleteTable = HierarchyHelper.hierarchicalPath(conquery.defaultAdminURIBuilder(), AdminTablesResource.class, "remove").buildFromMap(Map.of(ResourceConstants.DATASET, conquery.getDataset().getName(), ResourceConstants.TABLE, tableId.toString()));
final Response failed = conquery.getClient().target(deleteTable).request().delete();
assertThat(failed.getStatusInfo().getFamily()).isEqualTo(Response.Status.Family.CLIENT_ERROR);
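// Delete the concept first: it holds the connectors onto the table, after which the table itself can be deleted.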
conquery.getDatasetsProcessor().deleteConcept(conquery.getNamespace().getStorage().getAllConcepts().iterator().next());
Thread.sleep(100);
conquery.waitUntilWorkDone();
final Response success = conquery.getClient().target(deleteTable).request().delete();
assertThat(success.getStatusInfo().getStatusCode()).isEqualTo(Response.Status.OK.getStatusCode());
Thread.sleep(100);
conquery.waitUntilWorkDone();
}
// State after deletion.
{
log.info("Checking state after deletion");
// We have deleted an import, so there should be one less!
assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports - 1);
// The deleted import should not be found.
assertThat(namespace.getStorage().getAllImports()).filteredOn(imp -> imp.getId().getTable().equals(tableId)).isEmpty();
for (ShardNode node : conquery.getShardNodes()) {
for (Worker value : node.getWorkers().getWorkers().values()) {
if (!value.getInfo().getDataset().equals(dataset.getId())) {
continue;
}
final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
// No bucket should be found referencing the import.
assertThat(workerStorage.getAllBuckets()).describedAs("Buckets for Worker %s", value.getInfo().getId()).filteredOn(bucket -> bucket.getImp().getTable().getId().equals(tableId)).isEmpty();
// No CBlock associated with the import may exist.
assertThat(workerStorage.getAllCBlocks()).describedAs("CBlocks for Worker %s", value.getInfo().getId()).filteredOn(cBlock -> cBlock.getBucket().getImp().getTable().getId().equals(tableId)).isEmpty();
}
}
log.info("Executing query after deletion");
// Issue the query again and assert that it now fails, since the table and its data are gone.
IntegrationUtils.assertQueryResult(conquery, query, 0L, ExecutionState.FAILED, conquery.getTestUser(), 400);
}
conquery.waitUntilWorkDone();
// Load the same data into the same table, restricted to the previously deleted table.
{
// only import the deleted import/table
LoadingUtil.importTables(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
conquery.waitUntilWorkDone();
LoadingUtil.importTableContents(conquery, test.getContent().getTables().stream().filter(table -> table.getName().equalsIgnoreCase(tableId.getTable())).collect(Collectors.toList()));
conquery.waitUntilWorkDone();
LoadingUtil.importConcepts(conquery, test.getRawConcepts());
conquery.waitUntilWorkDone();
assertThat(namespace.getStorage().getTable(tableId)).describedAs("Table after re-import.").isNotNull();
for (ShardNode node : conquery.getShardNodes()) {
for (Worker value : node.getWorkers().getWorkers().values()) {
if (!value.getInfo().getDataset().equals(dataset.getId())) {
continue;
}
assertThat(value.getStorage().getCentralRegistry().resolve(tableId)).describedAs("Table in worker storage.").isNotNull();
}
}
}
// Test state after reimport.
{
log.info("Checking state after re-import");
assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(nImports);
for (ShardNode node : conquery.getShardNodes()) {
for (Worker value : node.getWorkers().getWorkers().values()) {
if (!value.getInfo().getDataset().equals(dataset.getId())) {
continue;
}
final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
}
}
log.info("Executing query after re-import");
// Issue a query and assert that it has the same content as the first time around.
IntegrationUtils.assertQueryResult(conquery, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
}
// Finally, restart conquery and assert again that the data is correct.
{
testConquery.shutdown();
// restart
testConquery.beforeAll();
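// Re-open the dataset on the restarted instance.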
StandaloneSupport conquery2 = testConquery.openDataset(dataset.getId());
log.info("Checking state after re-start");
{
assertThat(namespace.getStorage().getAllImports().size()).isEqualTo(2);
for (ShardNode node : conquery2.getShardNodes()) {
for (Worker value : node.getWorkers().getWorkers().values()) {
if (!value.getInfo().getDataset().equals(dataset.getId())) {
continue;
}
final ModificationShieldedWorkerStorage workerStorage = value.getStorage();
assertThat(workerStorage.getAllBuckets().stream().filter(bucket -> bucket.getImp().getTable().getId().equals(tableId))).describedAs("Buckets for Worker %s", value.getInfo().getId()).isNotEmpty();
}
}
log.info("Executing query after re-import");
// Issue a query and assert that it has the same content as the first time around.
IntegrationUtils.assertQueryResult(conquery2, query, 2L, ExecutionState.DONE, conquery.getTestUser(), 201);
}
}
}
use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.
the class FEValueTest method sortedValidityDates.
@Test
public void sortedValidityDates() {
Dataset dataset = new Dataset();
dataset.setName("testDataset");
Table table = new Table();
table.setDataset(dataset);
table.setName("testTable");
Column column = new Column();
column.setName("testColumn");
column.setTable(table);
ConceptTreeConnector connector = new ConceptTreeConnector();
connector.setName("testConnector");
TreeConcept concept = new TreeConcept();
concept.setDataset(dataset);
concept.setName("testConcept");
ValidityDate val0 = new ValidityDate();
val0.setName("val0");
val0.setConnector(connector);
ValidityDate val1 = new ValidityDate();
val1.setName("val1");
val1.setConnector(connector);
ValidityDate val2 = new ValidityDate();
val2.setName("val2");
val2.setConnector(connector);
List<ValidityDate> validityDates = List.of(val0, val1, val2);
connector.setColumn(column);
connector.setConcept(concept);
connector.setValidityDates(validityDates);
FETable feTable = FrontEndConceptBuilder.createTable(connector);
assertThat(feTable.getDateColumn().getOptions()).containsExactly(new FEValue("val0", val0.getId().toString()), new FEValue("val1", val1.getId().toString()), new FEValue("val2", val2.getId().toString()));
}
use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.
the class IdMapTest method generalTest.
@Test
public void generalTest() throws JsonParseException, JsonMappingException, JsonProcessingException, IOException, JSONException {
Dataset d1 = new Dataset();
d1.setName("d1");
Dataset d2 = new Dataset();
d2.setName("d2");
IdMap<DatasetId, Dataset> idMap = new IdMap<DatasetId, Dataset>();
idMap.add(d1);
idMap.add(d2);
ContainingDataset containedDataset = new ContainingDataset(idMap);
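// Serialize the wrapper to JSON and read it back; the round trip must preserve all IdMap entries.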
JsonNode json = Jackson.MAPPER.valueToTree(containedDataset);
/*assertThat(json.isArray()).isTrue();
assertThat(json.get(0)).isEqualTo(Jackson.MAPPER.valueToTree(d1));*/
ContainingDataset constructed = Jackson.MAPPER.treeToValue(json, ContainingDataset.class);
assertThat(constructed.idMap.entrySet()).isEqualTo(containedDataset.idMap.entrySet());
}
use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.
the class PermissionCleanupTaskTest method createManagedQuery.
private ManagedQuery createManagedQuery() {
final CQAnd root = new CQAnd();
root.setChildren(new ArrayList<>());
ConceptQuery query = new ConceptQuery(root);
final ManagedQuery managedQuery = new ManagedQuery(query, null, new Dataset("test"));
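// Back-date the creation time by one day and register the execution in the test storage.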
managedQuery.setCreationTime(LocalDateTime.now().minusDays(1));
STORAGE.addExecution(managedQuery);
return managedQuery;
}
use of com.bakdata.conquery.models.datasets.Dataset in project conquery by bakdata.
the class GroovyIndexedTest method init.
@BeforeAll
public static void init() throws IOException, JSONException, ConfigurationException {
ObjectNode node = Jackson.MAPPER.readerFor(ObjectNode.class).readValue(In.resource(GroovyIndexedTest.class, CONCEPT_SOURCE).asStream());
// load concept tree from json
CentralRegistry registry = new CentralRegistry();
Table table = new Table();
table.setName("the_table");
Dataset dataset = new Dataset();
dataset.setName("the_dataset");
registry.register(dataset);
table.setDataset(dataset);
Column column = new Column();
column.setName("the_column");
column.setType(MajorTypeId.STRING);
table.setColumns(new Column[] { column });
column.setTable(table);
registry.register(table);
registry.register(column);
// Prepare Serdes injections
final Validator validator = Validators.newValidator();
final ObjectReader conceptReader = new Injectable() {
@Override
public MutableInjectableValues inject(MutableInjectableValues values) {
return values.add(Validator.class, validator);
}
}.injectInto(registry.injectIntoNew(dataset.injectIntoNew(Jackson.MAPPER))).readerFor(Concept.class);
// load the tree twice to avoid shared references
indexedConcept = conceptReader.readValue(node);
indexedConcept.setDataset(dataset);
indexedConcept.initElements();
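// Build the prefix index for this copy only; the second copy (oldConcept) is left without an index.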
TreeChildPrefixIndex.putIndexInto(indexedConcept);
oldConcept = conceptReader.readValue(node);
oldConcept.setDataset(dataset);
oldConcept.initElements();
}