Use of io.datarouter.nodewatch.storage.tablesample.TableSampleKey in project datarouter by hotpads: class MigrateTableCountMetadataHandler, method migrate.
private void migrate(String sourceNode, String targetNode) {
    String sourceClientName = sourceNode.split("\\.")[0];
    String sourceTableName = sourceNode.split("\\.")[1];
    String targetClientName = targetNode.split("\\.")[0];
    String targetTableName = targetNode.split("\\.")[1];
    // migrate rows in TableCount
    var tableCountKeyPrefix = new TableCountKey(sourceClientName, sourceTableName, null);
    tableCountDao.scanWithPrefix(tableCountKeyPrefix)
            .map(tableCount -> {
                tableCount.getKey().setClientName(targetClientName);
                tableCount.getKey().setTableName(targetTableName);
                return tableCount;
            })
            .batch(100)
            .forEach(tableCountDao::putMulti);
    // migrate rows in TableSample
    var tableSampleKeyPrefix = new TableSampleKey(sourceClientName, sourceTableName, null, null);
    tableSampleDao.scanWithPrefix(tableSampleKeyPrefix)
            .map(tableSample -> {
                tableSample.getKey().setClientName(targetClientName);
                tableSample.getKey().setTableName(targetTableName);
                return tableSample;
            })
            .batch(100)
            .forEach(tableSampleDao::putMulti);
}
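Both node names are expected in clientName.tableName form, since the method derives client and table by splitting on the dot. A minimal sketch of an invocation, with hypothetical node names:

    // hypothetical node names, following the "clientName.tableName" format parsed above
    migrate("oldClient.TableCount", "newClient.TableCount");

Note that the scans rewrite each databean's key in place and re-put it under the target prefix; the rows under the source prefix are not deleted in this snippet, so a cleanup delete would be a separate step.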
Use of io.datarouter.nodewatch.storage.tablesample.TableSampleKey in project datarouter by hotpads: class TableCountHandler, method deleteRowSamples.
@Handler
private Mav deleteRowSamples(String clientName, String tableName) {
    // delete rows from TableSample
    var tableSampleKeyPrefix = new TableSampleKey(clientName, tableName, null, null);
    tableSampleDao.deleteWithPrefix(tableSampleKeyPrefix);
    var dto = new DatarouterChangelogDtoBuilder(
            "Nodewatch",
            clientName + "." + tableName,
            "deleted row samples",
            getSessionInfo().getNonEmptyUsernameOrElse(""))
            .build();
    changelogRecorder.record(dto);
    return new InContextRedirectMav(request, paths.datarouter.nodewatch.tableCount.toSlashedString());
}
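Passing null for the trailing fields of TableSampleKey makes it act as a prefix covering every sample for the client/table pair. A sketch of the same idea one level broader, under the assumption that trailing nulls simply shorten the prefix:

    // assumption: with tableName also null, the prefix would cover every
    // TableSample stored for the client, across all of its tables
    var clientPrefix = new TableSampleKey(clientName, null, null, null);
    tableSampleDao.deleteWithPrefix(clientPrefix);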
Use of io.datarouter.nodewatch.storage.tablesample.TableSampleKey in project datarouter by hotpads: class TableSpanSamplerJobletCreator, method createJobletPackage.
/*--------------------- create --------------------*/
private JobletPackage createJobletPackage(JobletPriority jobletPriority, TableSample start, TableSample end,
        long samplerId, boolean scanUntilEnd) {
    Objects.requireNonNull(end);
    // we want all joblets created by the parent job to have the same creation time so none have execution priority
    Instant jobletCreationDate = Instant.ofEpochMilli(samplerStartMs);
    int batchSequence = jobletBatchSequence++;
    TableSampleKey startSampleKey = Optional.ofNullable(start)
            .map(TableSample::getKey)
            .orElse(null);
    var params = new TableSpanSamplerJobletParams(scanUntilEnd, samplerStartMs, sampleSize, batchSize,
            startSampleKey, end, nodeNames, samplerId);
    return JobletPackage.createDetailed(TableSpanSamplerJoblet.JOBLET_TYPE, jobletPriority, jobletCreationDate,
            batchSequence, false, params.nodeNames.getClientName(), null, params);
}
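Here start may legitimately be null for the first span of a table, so the start key is extracted through Optional rather than a direct getKey() call, while end is required to be non-null up front. A hypothetical first-span call, assuming a JobletPriority constant and a firstSample variable:

    // hypothetical: the first span has no preceding sample, so start is null
    // and the params carry a null startSampleKey (scan from the beginning of the table)
    JobletPackage firstSpan = createJobletPackage(JobletPriority.DEFAULT, null, firstSample, samplerId, false);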
Use of io.datarouter.nodewatch.storage.tablesample.TableSampleKey in project datarouter by hotpads: class TableSamplerService, method getCurrentTableCountFromSamples.
public TableCount getCurrentTableCountFromSamples(String clientName, String tableName) {
    // not distinguishing sub-entities at the moment
    TableSampleKey clientTablePrefix = new TableSampleKey(clientName, tableName, null, null);
    long totalRows = 0;
    long totalCountTimeMs = 0;
    long numSpans = 0;
    long numSlowSpans = 0;
    for (TableSample sample : tableSampleDao.scanWithPrefix(clientTablePrefix).iterable()) {
        totalRows += sample.getNumRows();
        totalCountTimeMs += sample.getCountTimeMs();
        numSpans++;
        if (sample.getCountTimeMs() > COUNT_TIME_MS_SLOW_SPAN_THRESHOLD) {
            numSlowSpans++;
        }
    }
    logger.info("total of {} rows for {}.{}", totalRows, clientName, tableName);
    return new TableCount(clientName, tableName, System.currentTimeMillis(), totalRows, totalCountTimeMs,
            numSpans, numSlowSpans);
}
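Since the figure is aggregated from previously stored samples rather than a live scan, the call is cheap relative to counting the table itself. A hypothetical call site, assuming the returned TableCount exposes a getNumRows() accessor:

    // hypothetical usage; assumes TableCount has a getNumRows() accessor
    TableCount count = tableSamplerService.getCurrentTableCountFromSamples("myClient", "MyTable");
    logger.info("estimated rows for myClient.MyTable: {}", count.getNumRows());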
Use of io.datarouter.nodewatch.storage.tablesample.TableSampleKey in project datarouter by hotpads: class JobletCopyTableHandler, method defaultHandler.
@Handler(defaultHandler = true)
private <PK extends PrimaryKey<PK>, D extends Databean<PK, D>> Mav defaultHandler(
        @Param(P_sourceNodeName) OptionalString sourceNodeName,
        @Param(P_targetNodeName) OptionalString targetNodeName,
        @Param(P_putBatchSize) OptionalString putBatchSize,
        @Param(P_submitAction) OptionalString submitAction) {
    String errorPutBatchSize = null;
    if (submitAction.isPresent()) {
        try {
            if (putBatchSize.map(StringTool::nullIfEmpty).isPresent()) {
                Integer.valueOf(putBatchSize.get());
            }
        } catch (Exception e) {
            errorPutBatchSize = "Please specify an integer";
        }
    }
    List<String> possibleNodes = tableSamplerService.scanCountableNodes()
            .map(node -> node.getClientId().getName() + "." + node.getFieldInfo().getTableName())
            .append("")
            .sort()
            .list();
    var form = new HtmlForm().withMethod("post");
    form.addSelectField()
            .withDisplay("Source Node Name")
            .withName(P_sourceNodeName)
            .withValues(possibleNodes);
    form.addSelectField()
            .withDisplay("Target Node Name")
            .withName(P_targetNodeName)
            .withValues(possibleNodes);
    form.addTextField()
            .withDisplay("Batch Size")
            .withError(errorPutBatchSize)
            .withName(P_putBatchSize)
            .withPlaceholder(DEFAULT_BATCH_SIZE + "")
            .withValue(putBatchSize.orElse(null));
    form.addButton()
            .withDisplay("Create Joblets")
            .withValue("anything");
    if (submitAction.isEmpty() || form.hasErrors()) {
        return pageFactory.startBuilder(request)
                .withTitle("Copy Table - Joblets")
                .withContent(Html.makeContent(form))
                .buildMav();
    }
    @SuppressWarnings("unchecked")
    PhysicalSortedStorageNode<PK, D, ?> sourceNode =
            (PhysicalSortedStorageNode<PK, D, ?>) nodes.getNode(sourceNodeName.get());
    String tableName = sourceNode.getFieldInfo().getTableName();
    List<TableSample> samples = tableSamplerService.scanSamplesForNode(sourceNode).list();
    TableSampleKey previousSampleKey = null;
    List<JobletPackage> jobletPackages = new ArrayList<>();
    // +1 for databeans beyond the final sample
    long numJoblets = samples.size() + 1;
    long counter = 1;
    int batchSize = putBatchSize.map(StringTool::nullIfEmpty)
            .map(Integer::valueOf)
            .orElse(DEFAULT_BATCH_SIZE);
    for (TableSample sample : samples) {
        PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
        PK toKeyInclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, sample.getKey());
        jobletPackages.add(createJobletPackage(tableName, sourceNodeName.get(), targetNodeName.get(),
                fromKeyExclusive, toKeyInclusive, batchSize, sample.getNumRows(), counter, numJoblets));
        ++counter;
        previousSampleKey = sample.getKey();
    }
    // include any rows created since the last sample
    PK fromKeyExclusive = TableSamplerTool.extractPrimaryKeyFromSampleKey(sourceNode, previousSampleKey);
    jobletPackages.add(createJobletPackage(tableName, sourceNodeName.get(), targetNodeName.get(),
            fromKeyExclusive,
            null, // open-ended
            batchSize,
            1, // we have no idea about the true estNumDatabeans
            counter,
            numJoblets));
    ++counter;
    // shuffle as optimization to spread write load. could be optional
    Scanner.of(jobletPackages).shuffle().flush(jobletService::submitJobletPackages);
    changelogRecorderService.recordChangelog(getSessionInfo(), "Joblet", sourceNodeName.get(), targetNodeName.get());
    return pageFactory.message(request, "created " + numJoblets + " joblets");
}
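The loop plus the trailing call implement a fencepost split: N samples define N+1 contiguous ranges, each exclusive at its start key and inclusive at its end key, with the final range left open-ended to catch rows written after the last sample. A standalone sketch of the same pattern, using hypothetical Long keys in place of real primary keys:

    import java.util.List;

    public class RangeSplitDemo {
        // standalone illustration of the (exclusive, inclusive] fencepost split above;
        // the sample keys 10, 20, 30 are hypothetical
        public static void main(String[] args) {
            List<Long> sampleKeys = List.of(10L, 20L, 30L);
            Long previous = null; // a null start means "beginning of table"
            for (Long key : sampleKeys) {
                System.out.println("range (" + previous + ", " + key + "]");
                previous = key;
            }
            System.out.println("range (" + previous + ", end of table)"); // open-ended tail
        }
    }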