Use of com.bakdata.conquery.util.progressreporter.ProgressReporter in the conquery project by bakdata — class ImportJob, method sendBuckets:
/**
 * Builds each bucket from the pre-computed per-entity offsets and ships it to the
 * worker responsible for it, recording which worker received which bucket.
 *
 * @return the new bucket assignment: for every worker, the set of buckets sent to it.
 */
private Map<WorkerId, Set<BucketId>> sendBuckets(Map<Integer, Integer> starts, Map<Integer, Integer> lengths, DictionaryMapping primaryMapping, Import imp, Map<Integer, List<Integer>> buckets2LocalEntities, ColumnStore[] storesSorted) throws JsonProcessingException {
	final Map<WorkerId, Set<BucketId>> assignments = new HashMap<>();
	final ProgressReporter progress = getProgressReporter().subJob(buckets2LocalEntities.size());

	for (Map.Entry<Integer, List<Integer>> entry : buckets2LocalEntities.entrySet()) {
		final int bucketId = entry.getKey();

		// Every bucket must already have a responsible worker; fail loudly otherwise.
		final WorkerInformation worker = Objects.requireNonNull(
				namespace.getResponsibleWorkerForBucket(bucketId),
				() -> "No responsible worker for Bucket#" + bucketId
		);

		// Back-pressure: don't flood a worker whose job queue is full.
		awaitFreeJobQueue(worker);

		final Bucket bucket = selectBucket(starts, lengths, storesSorted, primaryMapping, imp, bucketId, entry.getValue());

		assignments.computeIfAbsent(worker.getId(), id -> new HashSet<>())
				   .add(bucket.getId());

		log.trace("Sending Bucket[{}] to {}", bucket.getId(), worker.getId());
		worker.send(ImportBucket.forBucket(bucket));

		progress.report(1);
	}

	progress.done();
	return assignments;
}
Use of com.bakdata.conquery.util.progressreporter.ProgressReporter in the conquery project by bakdata — class ImportJob, method applyDictionaryMappings:
/**
 * Apply new positions into incoming shared dictionaries.
 * <p>
 * For every remapped column, the index store backing its {@link StringStore} is rebuilt:
 * since remapping can shift values into a different numeric range, a fresh, optimally
 * sized {@link IntegerStore} is chosen before the mapping is applied.
 *
 * @param mappings per-column dictionary remappings to apply (keyed by column name).
 * @param values   the column stores of the import; each mapped column is expected to be
 *                 a {@link StringStore} here — NOTE(review): no null/type check, assumes
 *                 mappings' keys are a subset of values' keys.
 */
private void applyDictionaryMappings(Map<String, DictionaryMapping> mappings, Map<String, ColumnStore> values) {
	final ProgressReporter subJob = getProgressReporter().subJob(mappings.size());

	for (Map.Entry<String, DictionaryMapping> entry : mappings.entrySet()) {
		final String columnName = entry.getKey();
		final DictionaryMapping mapping = entry.getValue();

		final StringStore stringStore = (StringStore) values.get(columnName);
		log.debug("Remapping Column[{}] = {} with {}", columnName, stringStore, mapping);

		// We need to find a new type for the index column as it's going to be remapped
		// and might change in size.
		final IntegerParser indexParser = new IntegerParser(config);

		// The remapped ids determine the value range the new index store must cover.
		final IntSummaryStatistics statistics = mapping.target().intStream().summaryStatistics();

		indexParser.setLines(stringStore.getLines());
		indexParser.setMinValue(statistics.getMin());
		indexParser.setMaxValue(statistics.getMax());

		final IntegerStore newType = indexParser.findBestType();
		log.trace("Decided for {}", newType);

		mapping.applyToStore(stringStore, newType);
		stringStore.setIndexStore(newType);

		subJob.report(1);
	}

	// Consistency fix: mark the sub-job finished, as sendBuckets does — otherwise the
	// progress reporter is never completed for this phase.
	subJob.done();
}
Aggregations