use of io.datarouter.model.databean.Databean in project datarouter by hotpads.
the class JsonDatabeanToolTests, method testMultiRoundTrip.
@Test
public void testMultiRoundTrip() {
    SortedBeanKey key0 = new SortedBeanKey("a", "b", 0, "d");
    SortedBeanKey key1 = new SortedBeanKey("a", "b", 1, "dasdf");
    SortedBeanKey key2 = new SortedBeanKey("a", "basdf", 2, "sdsdsd");
    List<SortedBeanKey> keysIn = List.of(key0, key1, key2);
    JsonArray jsonKeys = JsonDatabeanTool.primaryKeysToJson(keysIn, sortedBeanFielder.getKeyFielder());
    List<SortedBeanKey> keysOut = JsonDatabeanTool.primaryKeysFromJson(
            SortedBeanKey.class,
            sortedBeanFielder.getKeyFielder(),
            jsonKeys);
    Assert.assertEquals(keysOut.size(), 3);
    Assert.assertEquals(keysOut.toArray(), keysIn.toArray());

    SortedBean bean0 = new SortedBean(key0, "1", 2L, null, 45.67d);
    SortedBean bean1 = new SortedBean(key1, "ert", -987654L, "cheesetoast", -45.67d);
    List<SortedBean> databeansIn = List.of(bean0, bean1);
    JsonArray jsonDatabeans = JsonDatabeanTool.databeansToJson(databeansIn, sortedBeanFielder);
    List<SortedBean> databeansOut = JsonDatabeanTool.databeansFromJson(
            ReflectionTool.supplier(SortedBean.class),
            sortedBeanFielder,
            jsonDatabeans);
    Assert.assertEquals(databeansOut.size(), 2);
    Assert.assertEquals(databeansOut.toArray(), databeansIn.toArray());
    Assert.assertEquals(
            Scanner.of(databeansOut).map(Databean::getKey).list(),
            keysIn.subList(0, 2));
}
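The same two calls also support a round trip through a JSON string, which is how such a payload would normally travel. The sketch below is illustrative and not taken from the project: it assumes the JsonArray involved is Gson's (so JsonParser.parseString applies) and that a SortedBeanFielder can be built with a no-arg constructor; both are assumptions, and the extra test method is hypothetical.

@Test
public void testStringRoundTrip() {
    // Hypothetical companion test; assumes a no-arg SortedBeanFielder constructor
    // and that JsonArray is Gson's com.google.gson.JsonArray.
    SortedBeanFielder fielder = new SortedBeanFielder();
    SortedBean bean = new SortedBean(new SortedBeanKey("x", "y", 1, "z"), "s", 3L, null, 1.5d);

    // databean -> JSON text
    String wireFormat = JsonDatabeanTool.databeansToJson(List.of(bean), fielder).toString();

    // JSON text -> JsonArray -> databean
    JsonArray parsed = JsonParser.parseString(wireFormat).getAsJsonArray();
    List<SortedBean> restored = JsonDatabeanTool.databeansFromJson(
            ReflectionTool.supplier(SortedBean.class),
            fielder,
            parsed);

    // the primary key survives the round trip
    Assert.assertEquals(restored.get(0).getKey(), bean.getKey());
}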
use of io.datarouter.model.databean.Databean in project datarouter by hotpads.
the class LongRunningTaskVacuumService, method vacuumRelatedTasks.
private void vacuumRelatedTasks(List<LongRunningTask> tasks) {
    // remove really old entries
    Instant tooOldCutoff = Instant.now().minus(settings.maxAge.get().toJavaDuration());
    List<LongRunningTask> tooOld = tasks.stream()
            .filter(task -> task.getKey().getTriggerTime().toInstant().isBefore(tooOldCutoff))
            .collect(Collectors.toList());
    Scanner.of(tooOld)
            .map(Databean::getKey)
            .then(dao::deleteBatched);

    // keep the latest N
    List<LongRunningTask> remaining = new ArrayList<>(tasks);
    remaining.removeAll(tooOld);
    if (remaining.size() <= settings.countToKeep.get()) {
        return;
    }
    Scanner.of(remaining)
            .limit(remaining.size() - settings.countToKeep.get())
            .map(Databean::getKey)
            .then(dao::deleteBatched);
}
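The retention rule is easier to follow with the datarouter-specific types stripped away: delete everything older than maxAge, then trim whatever remains down to the newest countToKeep entries. A minimal plain-Java sketch of that selection, assuming the tasks arrive sorted oldest-first by trigger time; Task, maxAge, and countToKeep are hypothetical stand-ins for the real databeans and settings.

import java.time.Duration;
import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

class TaskRetention {

    // Hypothetical stand-in for LongRunningTask; only the trigger time matters here.
    record Task(Instant triggerTime) {}

    // Returns the tasks the vacuum would delete; input assumed sorted oldest-first.
    static List<Task> selectTasksToDelete(List<Task> tasksOldestFirst, Duration maxAge, int countToKeep) {
        Instant tooOldCutoff = Instant.now().minus(maxAge);

        // 1) anything older than the cutoff is always deleted
        List<Task> toDelete = new ArrayList<>(tasksOldestFirst.stream()
                .filter(task -> task.triggerTime().isBefore(tooOldCutoff))
                .toList());

        // 2) of the survivors, keep only the newest countToKeep
        List<Task> remaining = new ArrayList<>(tasksOldestFirst);
        remaining.removeAll(toDelete);
        if (remaining.size() > countToKeep) {
            toDelete.addAll(remaining.subList(0, remaining.size() - countToKeep));
        }
        return toDelete;
    }
}

Because the oldest entries sit at the front of the list, dropping the first remaining.size() - countToKeep survivors mirrors the Scanner limit(...) call above.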
use of io.datarouter.model.databean.Databean in project datarouter by hotpads.
the class MapCachingMapStorageWriterMixin, method putMulti.
@Override
public void putMulti(Collection<D> databeans, Config config) {
    if (databeans == null || databeans.isEmpty()) {
        return;
    }
    if (BaseMapCachingNode.useCache(config)) {
        target.updateLastAttemptedContact();
        Config effectiveCachingNodeConfig = MapCachingMapStorageReaderNode.getEffectiveCachingNodeConfig(config);
        if (cacheWrites) {
            target.getCachingNode().putMulti(databeans, effectiveCachingNodeConfig);
        } else {
            // TODO check config for ignoring caching
            Scanner.of(databeans)
                    .map(Databean::getKey)
                    .flush(keys -> target.getCachingNode().deleteMulti(keys, effectiveCachingNodeConfig));
        }
        target.updateLastContact();
    }
    try {
        target.getBackingNode().putMulti(databeans, config);
    } catch (Exception ex) {
        if (BaseMapCachingNode.useCache(config)) {
            target.updateLastAttemptedContact();
            Config effectiveCachingNodeConfig = MapCachingMapStorageReaderNode.getEffectiveCachingNodeConfig(config);
            Scanner.of(databeans)
                    .map(Databean::getKey)
                    .flush(keys -> target.getCachingNode().deleteMulti(keys, effectiveCachingNodeConfig));
            target.updateLastContact();
        }
        throw ex;
    }
}
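The mixin applies a simple policy: with cacheWrites enabled the databeans are written through to the cache; otherwise their keys are evicted so the next read repopulates from the backing node; and if the backing write throws, the keys are evicted again so the cache never holds data the backing store may not have accepted. A standalone sketch of that policy with hypothetical Store/Cache interfaces (not the datarouter node API):

import java.util.Collection;
import java.util.List;
import java.util.function.Function;

// Hypothetical interfaces used only to illustrate the caching policy above.
interface Store<K, V> {
    void putMulti(Collection<V> values);
}

interface Cache<K, V> extends Store<K, V> {
    void deleteMulti(Collection<K> keys);
}

class CachingWriter<K, V> {

    private final Store<K, V> backing;
    private final Cache<K, V> cache;
    private final boolean cacheWrites;
    private final Function<V, K> keyExtractor;

    CachingWriter(Store<K, V> backing, Cache<K, V> cache, boolean cacheWrites, Function<V, K> keyExtractor) {
        this.backing = backing;
        this.cache = cache;
        this.cacheWrites = cacheWrites;
        this.keyExtractor = keyExtractor;
    }

    void putMulti(Collection<V> values) {
        if (values == null || values.isEmpty()) {
            return;
        }
        List<K> keys = values.stream().map(keyExtractor).toList();
        if (cacheWrites) {
            cache.putMulti(values); // write-through
        } else {
            cache.deleteMulti(keys); // invalidate so the next read refreshes from the backing store
        }
        try {
            backing.putMulti(values);
        } catch (RuntimeException ex) {
            cache.deleteMulti(keys); // don't let the cache outlive a failed backing write
            throw ex;
        }
    }
}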
use of io.datarouter.model.databean.Databean in project datarouter by hotpads.
the class JobletCopyTableHandler, method defaultHandler.
@Handler(defaultHandler = true)
private <PK extends PrimaryKey<PK>, D extends Databean<PK, D>> Mav defaultHandler(
        @Param(P_sourceNodeName) OptionalString sourceNodeName,
        @Param(P_targetNodeName) OptionalString targetNodeName,
        @Param(P_putBatchSize) OptionalString putBatchSize,
        @Param(P_submitAction) OptionalString submitAction) {
    String errorPutBatchSize = null;
    if (submitAction.isPresent()) {
        try {
            if (putBatchSize.map(StringTool::nullIfEmpty).isPresent()) {
                Integer.valueOf(putBatchSize.get());
            }
        } catch (Exception e) {
            errorPutBatchSize = "Please specify an integer";
        }
    }
    List<String> possibleNodes = tableSamplerService.scanCountableNodes()
            .map(node -> node.getClientId().getName() + "." + node.getFieldInfo().getTableName())
            .append("")
            .sort()
            .list();
    var form = new HtmlForm().withMethod("post");
    form.addSelectField().withDisplay("Source Node Name").withName(P_sourceNodeName).withValues(possibleNodes);
    form.addSelectField().withDisplay("Target Node Name").withName(P_targetNodeName).withValues(possibleNodes);
    form.addTextField()
            .withDisplay("Batch Size")
            .withError(errorPutBatchSize)
            .withName(P_putBatchSize)
            .withPlaceholder(DEFAULT_BATCH_SIZE + "")
            .withValue(putBatchSize.orElse(null));
    form.addButton().withDisplay("Create Joblets").withValue("anything");
    if (submitAction.isEmpty() || form.hasErrors()) {
        return pageFactory.startBuilder(request)
                .withTitle("Copy Table - Joblets")
                .withContent(Html.makeContent(form))
                .buildMav();
    }
    @SuppressWarnings("unchecked")
    PhysicalSortedStorageNode<PK, D, ?> sourceNode = (PhysicalSortedStorageNode<PK, D, ?>) nodes.getNode(sourceNodeName.get());
    String tableName = sourceNode.getFieldInfo().getTableName();
    List<TableSample> samples = tableSamplerService.scanSamplesForNode(sourceNode).list();
    TableSampleKey previousSampleKey = null;
    List<JobletPackage> jobletPackages = new ArrayList<>();
    // +1 for databeans beyond the final sample
    long numJoblets = samples.size() + 1;
    long counter = 1;
    int batchSize = putBatchSize.map(StringTool::nullIfEmpty)
            .map(Integer::valueOf)
            .orElse(DEFAULT_BATCH_SIZE);
    for (TableSample sample : samples) {
        PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
        PK toKeyInclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, sample.getKey());
        jobletPackages.add(createJobletPackage(
                tableName,
                sourceNodeName.get(),
                targetNodeName.get(),
                fromKeyExclusive,
                toKeyInclusive,
                batchSize,
                sample.getNumRows(),
                counter,
                numJoblets));
        ++counter;
        previousSampleKey = sample.getKey();
    }
    // include any rows created since the last sample
    PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
    jobletPackages.add(createJobletPackage(
            tableName,
            sourceNodeName.get(),
            targetNodeName.get(),
            fromKeyExclusive,
            null, // open-ended
            batchSize,
            1, // we have no idea about the true estNumDatabeans
            counter,
            numJoblets));
    ++counter;
    // shuffle as optimization to spread write load. could be optional
    Scanner.of(jobletPackages).shuffle().flush(jobletService::submitJobletPackages);
    changelogRecorderService.recordChangelog(getSessionInfo(), "Joblet", sourceNodeName.get(), targetNodeName.get());
    return pageFactory.message(request, "created " + numJoblets + " joblets");
}
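Structurally, the handler turns N table samples into N+1 contiguous key ranges: each sample key closes one (exclusive-start, inclusive-end] range, and a final open-ended range (null end key) catches rows written after the last sample. A minimal sketch of just that partitioning step, with a hypothetical Range record standing in for the joblet parameters:

import java.util.ArrayList;
import java.util.List;

class SamplePartitioner {

    // Hypothetical range: fromExclusive == null means "start of table",
    // toInclusive == null means "open-ended through the end of the table".
    record Range<K>(K fromExclusive, K toInclusive) {}

    // Turns sorted sample keys into N+1 contiguous ranges covering the whole table.
    static <K> List<Range<K>> toRanges(List<K> sortedSampleKeys) {
        List<Range<K>> ranges = new ArrayList<>();
        K previous = null;
        for (K sampleKey : sortedSampleKeys) {
            ranges.add(new Range<>(previous, sampleKey));
            previous = sampleKey;
        }
        // +1 range for databeans beyond the final sample
        ranges.add(new Range<>(previous, null));
        return ranges;
    }
}

Three sample keys therefore yield four ranges, which matches numJoblets = samples.size() + 1 in the handler above.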
use of io.datarouter.model.databean.Databean in project datarouter by hotpads.
the class JobletTableProcessorHandler, method defaultHandler.
@Handler(defaultHandler = true)
private <PK extends PrimaryKey<PK>, D extends Databean<PK, D>> Mav defaultHandler(
        @Param(P_nodeName) OptionalString nodeName,
        @Param(P_scanBatchSize) OptionalString scanBatchSize,
        @Param(P_processorName) OptionalString processorName,
        @Param(P_submitAction) OptionalString submitAction) {
    String errorScanBatchSize = null;
    if (submitAction.isPresent()) {
        try {
            if (scanBatchSize.map(StringTool::nullIfEmpty).isPresent()) {
                Integer.valueOf(scanBatchSize.get());
            }
        } catch (Exception e) {
            errorScanBatchSize = "Please specify an integer";
        }
    }
    List<String> possibleNodes = tableSamplerService.scanCountableNodes()
            .map(node -> node.getClientId().getName() + "." + node.getFieldInfo().getTableName())
            .append("")
            .sort()
            .list();
    List<String> possibleProcessors = Scanner.of(processorRegistry.getAll())
            .map(Class::getSimpleName)
            .append("")
            .sort()
            .list();
    var form = new HtmlForm().withMethod("post");
    form.addSelectField().withDisplay("Node Name").withName(P_nodeName).withValues(possibleNodes);
    form.addTextField()
            .withDisplay("Scan Batch Size")
            .withError(errorScanBatchSize)
            .withName(P_scanBatchSize)
            .withPlaceholder(DEFAULT_SCAN_BATCH_SIZE + "")
            .withValue(scanBatchSize.orElse(null));
    form.addSelectField().withDisplay("Processor Name").withName(P_processorName).withValues(possibleProcessors);
    form.addButton().withDisplay("Create Joblets").withValue("anything");
    if (submitAction.isEmpty() || form.hasErrors()) {
        return pageFactory.startBuilder(request)
                .withTitle("Table Processor - Joblets")
                .withContent(Html.makeContent(form))
                .buildMav();
    }
    @SuppressWarnings("unchecked")
    PhysicalSortedStorageNode<PK, D, ?> sourceNode = (PhysicalSortedStorageNode<PK, D, ?>) nodes.getNode(nodeName.get());
    String tableName = sourceNode.getFieldInfo().getTableName();
    List<TableSample> samples = tableSamplerService.scanSamplesForNode(sourceNode).list();
    TableSampleKey previousSampleKey = null;
    List<JobletPackage> jobletPackages = new ArrayList<>();
    long totalItemsProcessed = 1;
    long counter = 1;
    int actualScanBatchSize = scanBatchSize.map(StringTool::nullIfEmpty)
            .map(Integer::valueOf)
            .orElse(DEFAULT_SCAN_BATCH_SIZE);
    long numJoblets = 0;
    for (TableSample sample : samples) {
        PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
        PK toKeyInclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, sample.getKey());
        var jobletPackage = createJobletPackage(
                tableName,
                nodeName.get(),
                fromKeyExclusive,
                toKeyInclusive,
                actualScanBatchSize,
                processorName.get(),
                sample.getNumRows(),
                counter,
                numJoblets);
        jobletPackages.add(jobletPackage);
        ++numJoblets;
        counter++;
        totalItemsProcessed++;
        previousSampleKey = sample.getKey();
    }
    // include any rows created since the last sample
    PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
    var jobletPackage = createJobletPackage(
            tableName,
            nodeName.get(),
            fromKeyExclusive,
            null, // open-ended
            actualScanBatchSize,
            processorName.get(),
            1, // we have no idea about the true estNumDatabeans
            counter,
            numJoblets);
    ++numJoblets;
    jobletPackages.add(jobletPackage);
    totalItemsProcessed++;
    // jobletPackages.size() == counter == numJoblets
    counter++;
    // shuffle as optimization to spread write load. could be optional
    Scanner.of(jobletPackages).shuffle().flush(jobletService::submitJobletPackages);
    changelogRecorderService.recordChangelogForTableProcessor(getSessionInfo(), "Joblet", nodeName.get(), processorName.get());
    return pageFactory.message(request, "jobletsCreated=" + numJoblets + " totalSamplesProcessed=" + totalItemsProcessed);
}
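Both joblet handlers validate the optional batch-size field the same way: an error is reported only when the form was actually submitted with a non-numeric value, and a blank value silently falls back to the default. A standalone sketch of that pattern, using java.util.Optional in place of datarouter's OptionalString and String.isBlank as a stand-in for StringTool::nullIfEmpty; the class name and default value are hypothetical.

import java.util.Optional;

class BatchSizeParam {

    static final int DEFAULT_BATCH_SIZE = 100; // hypothetical default, not the project's value

    // Returns a form error message, or null when the field is blank or a valid integer.
    static String validate(Optional<String> raw) {
        if (raw.isEmpty() || raw.get().isBlank()) {
            return null; // blank falls back to the default, so it is not an error
        }
        try {
            Integer.valueOf(raw.get().trim());
            return null;
        } catch (NumberFormatException e) {
            return "Please specify an integer";
        }
    }

    // Resolves the effective batch size once validation has passed.
    static int resolve(Optional<String> raw) {
        return raw.filter(value -> !value.isBlank())
                .map(String::trim)
                .map(Integer::valueOf)
                .orElse(DEFAULT_BATCH_SIZE);
    }
}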