Use of io.datarouter.joblet.model.JobletPackage in project datarouter by hotpads.
Class JobletService, method getJobletPackagesForJobletRequests.
private List<JobletPackage> getJobletPackagesForJobletRequests(Collection<JobletRequest> jobletRequests) {
	List<JobletDataKey> keys = Scanner.of(jobletRequests)
			.map(JobletRequest::getJobletDataKey)
			.list();
	Map<Long, JobletData> dataKeyToJobletData = jobletDataDao.getMulti(keys).stream()
			.collect(Collectors.toMap(jobletData -> jobletData.getKey().getId(), Function.identity()));
	return jobletRequests.stream()
			.map(jobletRequest -> {
				Long dataKey = jobletRequest.getJobletDataId();
				JobletData jobletData = dataKeyToJobletData.get(dataKey);
				return new JobletPackage(jobletRequest, jobletData);
			})
			.collect(Collectors.toList());
}
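The method above avoids a per-request database lookup: it batches the JobletData fetch with getMulti and then joins the results back to their requests through an id-keyed map. A minimal standalone sketch of that join pattern, using hypothetical Request/Data/Pair records rather than the datarouter classes:

import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

record Data(long id, String payload) {}
record Request(long dataId) {}
record Pair(Request request, Data data) {}

class BatchJoinSketch {

	// fetch all referenced Data rows once, index them by id, then attach one to each Request
	static List<Pair> join(List<Request> requests, List<Data> fetchedData) {
		Map<Long, Data> dataById = fetchedData.stream()
				.collect(Collectors.toMap(Data::id, Function.identity()));
		return requests.stream()
				.map(request -> new Pair(request, dataById.get(request.dataId())))
				.collect(Collectors.toList());
	}
}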
Use of io.datarouter.joblet.model.JobletPackage in project datarouter by hotpads.
Class JobletCallable, method dequeueJobletPackage.
private final Optional<JobletPackage> dequeueJobletPackage(PhaseTimer timer) {
	String reservedBy = getReservedByString();
	Optional<JobletRequest> optJobletRequest = jobletService.getJobletRequestForProcessing(
			timer,
			jobletType,
			reservedBy);
	if (optJobletRequest.isEmpty()) {
		timer.add("no JobletRequest found");
		return Optional.empty();
	}
	JobletRequest jobletRequest = optJobletRequest.get();
	timer.add("dequeued " + jobletRequest.getKey());
	JobletPackage jobletPackage = jobletService.getJobletPackageForJobletRequest(jobletRequest);
	// if the JobletData row is missing, count it, clean up the request, and skip this joblet
	if (jobletPackage.getJobletData() == null) {
		datarouterJobletCounters.ignoredDataMissingFromDb(jobletType);
		jobletService.handleMissingJobletData(jobletRequest);
		timer.add("deleted, missing JobletData");
		return Optional.empty();
	}
	timer.add("getJobletData");
	jobletRequest.setShutdownRequested(shutdownRequested);
	return Optional.of(jobletPackage);
}
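dequeueJobletPackage folds three outcomes into one Optional: no runnable request, a request whose JobletData row has vanished, and a usable package. The caller therefore needs only a single empty check per poll. A small self-contained sketch of that shape, with a hypothetical Work type instead of the datarouter classes:

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.Optional;

record Work(String payload) {}

class DequeueSketch {

	// an empty result covers both "queue drained" and "work present but payload missing"
	static Optional<Work> dequeue(Deque<Work> queue) {
		Work work = queue.poll();
		if (work == null || work.payload() == null) {
			return Optional.empty();
		}
		return Optional.of(work);
	}

	public static void main(String[] args) {
		Deque<Work> queue = new ArrayDeque<>();
		queue.add(new Work(null));// simulates a request whose data is missing
		queue.add(new Work("real payload"));
		System.out.println(dequeue(queue));// Optional.empty
		System.out.println(dequeue(queue));// Optional[Work[payload=real payload]]
	}
}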
Use of io.datarouter.joblet.model.JobletPackage in project datarouter by hotpads.
Class WebSocketSessionVacuumJob, method run.
@Override
public void run(TaskTracker tracker) {
	JobletPackage jobletPackage = JobletPackage.create(
			WebSocketSessionVacuumJoblet.JOBLET_TYPE, JobletPriority.DEFAULT, true, null, null, Instant.now());
	jobletService.submitJobletPackages(List.of(jobletPackage));
}
Use of io.datarouter.joblet.model.JobletPackage in project datarouter by hotpads.
Class JobletCopyTableHandler, method defaultHandler.
@Handler(defaultHandler = true)
private <PK extends PrimaryKey<PK>, D extends Databean<PK, D>> Mav defaultHandler(
		@Param(P_sourceNodeName) OptionalString sourceNodeName,
		@Param(P_targetNodeName) OptionalString targetNodeName,
		@Param(P_putBatchSize) OptionalString putBatchSize,
		@Param(P_submitAction) OptionalString submitAction) {
	String errorPutBatchSize = null;
	if (submitAction.isPresent()) {
		try {
			if (putBatchSize.map(StringTool::nullIfEmpty).isPresent()) {
				Integer.valueOf(putBatchSize.get());
			}
		} catch (Exception e) {
			errorPutBatchSize = "Please specify an integer";
		}
	}
	List<String> possibleNodes = tableSamplerService.scanCountableNodes()
			.map(node -> node.getClientId().getName() + "." + node.getFieldInfo().getTableName())
			.append("")
			.sort()
			.list();
	var form = new HtmlForm().withMethod("post");
	form.addSelectField()
			.withDisplay("Source Node Name")
			.withName(P_sourceNodeName)
			.withValues(possibleNodes);
	form.addSelectField()
			.withDisplay("Target Node Name")
			.withName(P_targetNodeName)
			.withValues(possibleNodes);
	form.addTextField()
			.withDisplay("Batch Size")
			.withError(errorPutBatchSize)
			.withName(P_putBatchSize)
			.withPlaceholder(DEFAULT_BATCH_SIZE + "")
			.withValue(putBatchSize.orElse(null));
	form.addButton()
			.withDisplay("Create Joblets")
			.withValue("anything");
	if (submitAction.isEmpty() || form.hasErrors()) {
		return pageFactory.startBuilder(request)
				.withTitle("Copy Table - Joblets")
				.withContent(Html.makeContent(form))
				.buildMav();
	}
	@SuppressWarnings("unchecked")
	PhysicalSortedStorageNode<PK, D, ?> sourceNode = (PhysicalSortedStorageNode<PK, D, ?>)nodes.getNode(sourceNodeName.get());
	String tableName = sourceNode.getFieldInfo().getTableName();
	List<TableSample> samples = tableSamplerService.scanSamplesForNode(sourceNode).list();
	TableSampleKey previousSampleKey = null;
	List<JobletPackage> jobletPackages = new ArrayList<>();
	// +1 for databeans beyond the final sample
	long numJoblets = samples.size() + 1;
	long counter = 1;
	int batchSize = putBatchSize.map(StringTool::nullIfEmpty)
			.map(Integer::valueOf)
			.orElse(DEFAULT_BATCH_SIZE);
	// one joblet per sampled key range: (previous sample key, current sample key]
	for (TableSample sample : samples) {
		PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
		PK toKeyInclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, sample.getKey());
		jobletPackages.add(createJobletPackage(tableName, sourceNodeName.get(), targetNodeName.get(),
				fromKeyExclusive, toKeyInclusive, batchSize, sample.getNumRows(), counter, numJoblets));
		++counter;
		previousSampleKey = sample.getKey();
	}
	// include any rows created since the last sample
	PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
	jobletPackages.add(createJobletPackage(tableName, sourceNodeName.get(), targetNodeName.get(),
			fromKeyExclusive,
			null,// open-ended
			batchSize,
			1,// we have no idea about the true estNumDatabeans
			counter,
			numJoblets));
	++counter;
	// shuffle as an optimization to spread write load. could be optional
	Scanner.of(jobletPackages)
			.shuffle()
			.flush(jobletService::submitJobletPackages);
	changelogRecorderService.recordChangelog(getSessionInfo(), "Joblet", sourceNodeName.get(), targetNodeName.get());
	return pageFactory.message(request, "created " + numJoblets + " joblets");
}
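The handler converts the table's TableSamples into copy ranges: each joblet covers the keys between the previous sample key (exclusive) and the current sample key (inclusive), and a final open-ended joblet picks up rows written after the last sample, which is why numJoblets is samples.size() + 1. A minimal sketch of that range-splitting step, with plain strings standing in for primary keys and a hypothetical Range record rather than the datarouter types:

import java.util.ArrayList;
import java.util.List;

record Range(String fromExclusive, String toInclusive) {}

class SampleRangeSketch {

	// null fromExclusive means "start of table"; null toInclusive means "end of table"
	static List<Range> toRanges(List<String> sampleKeys) {
		List<Range> ranges = new ArrayList<>();
		String previous = null;
		for (String sampleKey : sampleKeys) {
			ranges.add(new Range(previous, sampleKey));
			previous = sampleKey;
		}
		// trailing open-ended range for rows created since the last sample
		ranges.add(new Range(previous, null));
		return ranges;
	}

	public static void main(String[] args) {
		// 3 samples produce 4 ranges, matching numJoblets = samples.size() + 1
		System.out.println(toRanges(List.of("k10", "k20", "k30")));
	}
}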
Use of io.datarouter.joblet.model.JobletPackage in project datarouter by hotpads.
Class JobletTableProcessorHandler, method defaultHandler.
@Handler(defaultHandler = true)
private <PK extends PrimaryKey<PK>, D extends Databean<PK, D>> Mav defaultHandler(
		@Param(P_nodeName) OptionalString nodeName,
		@Param(P_scanBatchSize) OptionalString scanBatchSize,
		@Param(P_processorName) OptionalString processorName,
		@Param(P_submitAction) OptionalString submitAction) {
	String errorScanBatchSize = null;
	if (submitAction.isPresent()) {
		try {
			if (scanBatchSize.map(StringTool::nullIfEmpty).isPresent()) {
				Integer.valueOf(scanBatchSize.get());
			}
		} catch (Exception e) {
			errorScanBatchSize = "Please specify an integer";
		}
	}
	List<String> possibleNodes = tableSamplerService.scanCountableNodes()
			.map(node -> node.getClientId().getName() + "." + node.getFieldInfo().getTableName())
			.append("")
			.sort()
			.list();
	List<String> possibleProcessors = Scanner.of(processorRegistry.getAll())
			.map(Class::getSimpleName)
			.append("")
			.sort()
			.list();
	var form = new HtmlForm().withMethod("post");
	form.addSelectField()
			.withDisplay("Node Name")
			.withName(P_nodeName)
			.withValues(possibleNodes);
	form.addTextField()
			.withDisplay("Scan Batch Size")
			.withError(errorScanBatchSize)
			.withName(P_scanBatchSize)
			.withPlaceholder(DEFAULT_SCAN_BATCH_SIZE + "")
			.withValue(scanBatchSize.orElse(null));
	form.addSelectField()
			.withDisplay("Processor Name")
			.withName(P_processorName)
			.withValues(possibleProcessors);
	form.addButton()
			.withDisplay("Create Joblets")
			.withValue("anything");
	if (submitAction.isEmpty() || form.hasErrors()) {
		return pageFactory.startBuilder(request)
				.withTitle("Table Processor - Joblets")
				.withContent(Html.makeContent(form))
				.buildMav();
	}
	@SuppressWarnings("unchecked")
	PhysicalSortedStorageNode<PK, D, ?> sourceNode = (PhysicalSortedStorageNode<PK, D, ?>)nodes.getNode(nodeName.get());
	String tableName = sourceNode.getFieldInfo().getTableName();
	List<TableSample> samples = tableSamplerService.scanSamplesForNode(sourceNode).list();
	TableSampleKey previousSampleKey = null;
	List<JobletPackage> jobletPackages = new ArrayList<>();
	long totalItemsProcessed = 1;
	long counter = 1;
	int actualScanBatchSize = scanBatchSize.map(StringTool::nullIfEmpty)
			.map(Integer::valueOf)
			.orElse(DEFAULT_SCAN_BATCH_SIZE);
	long numJoblets = 0;
	// one joblet per sampled key range: (previous sample key, current sample key]
	for (TableSample sample : samples) {
		PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
		PK toKeyInclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, sample.getKey());
		var jobletPackage = createJobletPackage(tableName, nodeName.get(), fromKeyExclusive, toKeyInclusive,
				actualScanBatchSize, processorName.get(), sample.getNumRows(), counter, numJoblets);
		jobletPackages.add(jobletPackage);
		++numJoblets;
		counter++;
		totalItemsProcessed++;
		previousSampleKey = sample.getKey();
	}
	// include any rows created since the last sample
	PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
	var jobletPackage = createJobletPackage(tableName, nodeName.get(),
			fromKeyExclusive,
			null,// open-ended
			actualScanBatchSize, processorName.get(),
			1,// we have no idea about the true estNumDatabeans
			counter,
			numJoblets);
	++numJoblets;
	jobletPackages.add(jobletPackage);
	totalItemsProcessed++;
	// jobletPackages.size() == counter == numJoblets
	counter++;
	// shuffle as an optimization to spread write load. could be optional
	Scanner.of(jobletPackages)
			.shuffle()
			.flush(jobletService::submitJobletPackages);
	changelogRecorderService.recordChangelogForTableProcessor(getSessionInfo(), "Joblet", nodeName.get(), processorName.get());
	return pageFactory.message(request, "jobletsCreated=" + numJoblets + " totalSamplesProcessed=" + totalItemsProcessed);
}