Use of io.datarouter.scanner.ParallelScannerContext in project datarouter by hotpads.
Class LoadTestGetHandler, method get:
@Handler(defaultHandler = true)
private Mav get(
        @Param(P_num) OptionalString num,
        @Param(P_max) OptionalString max,
        @Param(P_numThreads) OptionalString numThreads,
        @Param(P_batchSize) OptionalString batchSize,
        @Param(P_logPeriod) OptionalString logPeriod,
        @Param(P_submitAction) OptionalString submitAction) {
var form = new HtmlForm().withMethod("post");
form.addTextField().withDisplay("Num").withName(P_num).withPlaceholder("100,000").withValue(num.orElse(null));
form.addTextField().withDisplay("Max").withName(P_max).withPlaceholder("10").withValue(max.orElse(null));
form.addTextField().withDisplay("Num Threads").withName(P_numThreads).withPlaceholder("10").withValue(numThreads.orElse(null));
form.addTextField().withDisplay("Batch Size").withName(P_batchSize).withPlaceholder("100").withValue(batchSize.orElse(null));
form.addTextField().withDisplay("Log Period").withName(P_logPeriod).withPlaceholder("1,0000").withValue(logPeriod.orElse(null));
form.addButton().withDisplay("Run Get").withValue("anything");
if (submitAction.isEmpty() || form.hasErrors()) {
return pageFactory.startBuilder(request).withTitle("Load Test - Get").withContent(Html.makeContent(form)).buildMav();
}
PhaseTimer timer = new PhaseTimer("get");
// params
int pNum = num.map(StringTool::nullIfEmpty).map(number -> number.replaceAll(",", "")).map(Integer::valueOf).orElse(DEFAULT_NUM);
int pMax = max.map(StringTool::nullIfEmpty).map(number -> number.replaceAll(",", "")).map(Integer::valueOf).orElse(pNum);
int pNumThreads = numThreads.map(StringTool::nullIfEmpty).map(number -> number.replaceAll(",", "")).map(Integer::valueOf).orElse(DEFAULT_NUM_THREADS);
int pBatchSize = batchSize.map(StringTool::nullIfEmpty).map(number -> number.replaceAll(",", "")).map(Integer::valueOf).orElse(DEFAULT_BATCH_SIZE);
int pLogPeriod = logPeriod.map(StringTool::nullIfEmpty).map(number -> number.replaceAll(",", "")).map(Integer::valueOf).orElse(DEFAULT_LOG_PERIOD);
// tracking
AtomicInteger rowCounter = new AtomicInteger(0);
AtomicLong lastBatchFinished = new AtomicLong(System.nanoTime());
// execute
int numBatches = LoadTestTool.numBatches(pNum, pBatchSize);
ExecutorService executor = Executors.newFixedThreadPool(pNumThreads);
Scanner.of(IntStream.range(0, numBatches).mapToObj(Integer::valueOf))
        .map(batchId -> LoadTestTool.makeRandomIdBatch(pNum, pMax, pBatchSize, batchId))
        .map(ids -> new GetBatchCallable(dao.getReaderNode(), ids, pLogPeriod, rowCounter, lastBatchFinished))
        .parallel(new ParallelScannerContext(executor, pNumThreads, true))
        .forEach(CallableTool::callUnchecked);
ExecutorServiceTool.shutdown(executor, Duration.ofSeconds(5));
timer.add("got " + rowCounter.get());
var message = div(
        h2("Load Test Get Results"),
        div(h3("Results"), dl(
                dt("Total Time"), dd(timer.getElapsedString()),
                dt("Rows per second"), dd(timer.getItemsPerSecond(rowCounter.get()) + ""))),
        div(h3("Params"), dl(
                dt("Num"), dd(pNum + ""),
                dt("Max"), dd(pMax + ""),
                dt("Num Threads"), dd(pNumThreads + ""),
                dt("Batch Size"), dd(pBatchSize + ""),
                dt("Log Period"), dd(pLogPeriod + ""))))
        .withClass("container");
logger.warn("total={}, rps={}, num={}, max={}, numThreads={} batchSize={}, logPeriod={}", timer.getElapsedString(), timer.getItemsPerSecond(rowCounter.get()), pNum, pMax, pNumThreads, pBatchSize, pLogPeriod);
return pageFactory.message(request, message);
}
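Stripped of the handler plumbing, the pattern above is: build a fixed thread pool, stream batch ids through a Scanner, fan the per-batch work out with parallel, then shut the executor down. Below is a minimal sketch of that shape, not datarouter code: the batch-building and fetching lambdas are placeholders, Scanner is assumed to live next to ParallelScannerContext in io.datarouter.scanner, and the third constructor argument is assumed to allow batches to complete out of order, as the snippet above suggests.

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.IntStream;

import io.datarouter.scanner.ParallelScannerContext;
import io.datarouter.scanner.Scanner;

public class ParallelGetSketch {

    public static void main(String[] args) {
        int numThreads = 10;
        int numBatches = 100;
        AtomicInteger rowCounter = new AtomicInteger();
        ExecutorService executor = Executors.newFixedThreadPool(numThreads);
        Scanner.of(IntStream.range(0, numBatches).mapToObj(Integer::valueOf))
                // placeholder for LoadTestTool.makeRandomIdBatch(...)
                .map(batchId -> List.of(batchId * 2, batchId * 2 + 1))
                // fan the batches out across the executor, up to numThreads at a time
                .parallel(new ParallelScannerContext(executor, numThreads, true))
                // placeholder for the GetBatchCallable database read
                .forEach(ids -> rowCounter.addAndGet(ids.size()));
        executor.shutdown();
        System.out.println("got " + rowCounter.get());
    }
}

The handler above additionally bounds shutdown with ExecutorServiceTool.shutdown(executor, Duration.ofSeconds(5)); the sketch falls back to the plain JDK shutdown().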
Use of io.datarouter.scanner.ParallelScannerContext in project datarouter by hotpads.
Class BaseDatarouterServletContextListener, method processListeners:
private void processListeners(OnAction onAction, boolean executeAllListenersSynchronously) {
ThreadFactory factory = new NamedThreadFactory("datarouterListenerExecutor", false);
ExecutorService executor = Executors.newFixedThreadPool(allListeners.size(), factory);
var timer = new PhaseTimer();
long shutdownStartMillis = System.currentTimeMillis();
for (Pair<ExecutionMode, List<DatarouterAppListener>> listenersByShutdownMode : listenersByExecutionMods) {
List<DatarouterAppListener> listeners = listenersByShutdownMode.getRight();
ExecutionMode executionMode = executeAllListenersSynchronously ? ExecutionMode.SYNCHRONOUS : listenersByShutdownMode.getLeft();
logger.warn("{} {}: [{}", onAction.display, executionMode.display, listeners.stream().map(listener -> listener.getClass().getSimpleName()).collect(Collectors.joining(", ")) + "]");
if (executionMode == ExecutionMode.SYNCHRONOUS) {
Scanner.of(listeners).map(executeOnAction(onAction)).forEach(timer::add);
} else if (executionMode == ExecutionMode.PARALLEL) {
long shutdownParallelStartMillis = System.currentTimeMillis();
Scanner.of(listeners)
        .parallel(new ParallelScannerContext(executor, listeners.size(), true))
        .map(executeOnAction(onAction))
        .forEach(timer::add);
logger.info("Parallel {} total={}", onAction.display, System.currentTimeMillis() - shutdownParallelStartMillis);
}
}
logger.warn(String.format("%s [total=%d][%s]",
        onAction,
        System.currentTimeMillis() - shutdownStartMillis,
        timer.getPhaseNamesAndTimes().stream()
                .map(pair -> pair.getLeft() + "=" + pair.getRight())
                .collect(Collectors.joining("]["))));
ExecutorServiceTool.shutdown(executor, Duration.ofSeconds(2));
}
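processListeners chooses per group between running listeners inline (SYNCHRONOUS) and fanning them out one thread per listener (PARALLEL). A condensed, hypothetical sketch of that decision, with Listener standing in for DatarouterAppListener and runAll not being a datarouter method:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import io.datarouter.scanner.ParallelScannerContext;
import io.datarouter.scanner.Scanner;

public class ListenerFanOutSketch {

    // stand-in for DatarouterAppListener
    public interface Listener {
        void onStartUp();
    }

    public static void runAll(List<Listener> listeners, boolean parallel) {
        if (!parallel) {
            // SYNCHRONOUS: each listener runs on the calling thread, in registration order
            listeners.forEach(Listener::onStartUp);
            return;
        }
        // PARALLEL: pool sized to the listener count so every listener can start at once
        ExecutorService executor = Executors.newFixedThreadPool(listeners.size());
        Scanner.of(listeners)
                .parallel(new ParallelScannerContext(executor, listeners.size(), true))
                .forEach(Listener::onStartUp);
        executor.shutdown();
    }
}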
Use of io.datarouter.scanner.ParallelScannerContext in project datarouter by hotpads.
Class BaseSnapshotTests, method testSearches:
@Test
public void testSearches() {
if (!ENABLED_TESTS.contains(TestId.SEARCHES)) {
return;
}
BlockLoader blockLoader = makeBlockLoader(useMemoryCache(), shareMemoryCache());
var reader = new ScanningSnapshotReader(snapshotKey, exec, getNumThreads(), blockLoader, SCAN_NUM_BLOCKS);
int step = 1000;
int limit = 1000;
Scanner.iterate(0, fromId -> fromId += step)
        .advanceWhile(fromId -> fromId < sortedInputs.size() - limit)
        .parallel(new ParallelScannerContext(scanExec, getNumThreads(), true))
        .forEach(fromId -> {
var idReader = new SnapshotIdReader(snapshotKey, blockLoader);
// known first key inclusive
byte[] searchKey = idReader.getRecord(fromId).key;
List<SnapshotLeafRecord> outputsInclusive = reader.scanLeafRecords(searchKey, true).limit(limit).list();
for (int i = 0; i < limit; ++i) {
Input input = sortedInputs.get(fromId + i);
SnapshotLeafRecord output = outputsInclusive.get(i);
Assert.assertEquals(fromId + i, output.id);
Assert.assertEquals(new Bytes(input.entry.key()), new Bytes(output.key));
}
// known first key exclusive
List<SnapshotLeafRecord> outputsExclusive = reader.scanLeafRecords(searchKey, false).limit(limit).list();
for (int i = 0; i < limit; ++i) {
// plus one because exclusive
Input input = sortedInputs.get(fromId + i + 1);
SnapshotLeafRecord output = outputsExclusive.get(i);
Assert.assertEquals(input.id, output.id);
Assert.assertEquals(new Bytes(input.entry.key()), new Bytes(output.key));
}
// fake first key (should act like exclusive)
byte[] nonExistentKey = ByteTool.concat(searchKey, new byte[] { 0, 0, 0, 0, 0, 0, 0, 0, 0 });
List<SnapshotLeafRecord> outputsNonExistentKey = reader.scanLeafRecords(nonExistentKey, true).limit(limit).list();
for (int i = 0; i < limit; ++i) {
// plus one because the first key didn't exist
Input input = sortedInputs.get(fromId + i + 1);
SnapshotLeafRecord output = outputsNonExistentKey.get(i);
Assert.assertEquals(input.id, output.id);
Assert.assertEquals(new Bytes(input.entry.key()), new Bytes(output.key));
}
});
}
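The driving loop of the test is worth isolating: Scanner.iterate produces an unbounded sequence of window offsets, advanceWhile bounds it, and parallel runs one window of assertions per task. A minimal sketch of that pattern; verifyWindow and the sizes are placeholders, not part of the test above:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import io.datarouter.scanner.ParallelScannerContext;
import io.datarouter.scanner.Scanner;

public class WindowedScanSketch {

    public static void main(String[] args) {
        int step = 1_000;
        int limit = 1_000;
        int total = 100_000;
        int numThreads = 4;
        ExecutorService executor = Executors.newFixedThreadPool(numThreads);
        Scanner.iterate(0, fromId -> fromId + step)
                // stop once a full window of `limit` records would no longer fit
                .advanceWhile(fromId -> fromId < total - limit)
                .parallel(new ParallelScannerContext(executor, numThreads, true))
                .forEach(fromId -> verifyWindow(fromId, limit));
        executor.shutdown();
    }

    // placeholder for the per-window assertions in testSearches
    private static void verifyWindow(int fromId, int limit) {
        System.out.println("verified [" + fromId + ", " + (fromId + limit) + ")");
    }
}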
Use of io.datarouter.scanner.ParallelScannerContext in project datarouter by hotpads.
Class BaseSnapshotTests, method testOperationInternal:
private void testOperationInternal(BlockLoader threadSafeBlockLoader, boolean random, boolean multiThreaded, Operation operation) {
List<Input> searchKeys = random ? randomInputs : sortedInputs;
int batchSize = 10_000;
var parallelScannerContext = new ParallelScannerContext(exec, getNumThreads(), true, multiThreaded);
var count = new AtomicLong();
Scanner.of(searchKeys).batch(batchSize).parallel(parallelScannerContext).forEach(batch -> {
var idReader = new SnapshotIdReader(snapshotKey, threadSafeBlockLoader);
var keyReader = new SnapshotKeyReader(snapshotKey, threadSafeBlockLoader);
for (int i = 0; i < batch.size(); ++i) {
Input input = batch.get(i);
long id = input.id;
byte[] key = input.entry.key();
byte[] value = input.entry.value();
if (Operation.GET_LEAF_RECORD == operation) {
SnapshotLeafRecord leafRecord = idReader.leafRecord(id);
if (!Arrays.equals(key, leafRecord.key)) {
String message = String.format("%s, expected=%s, actual=%s", id, utf8(key), utf8(leafRecord.key));
throw new RuntimeException(message);
}
if (!Arrays.equals(value, leafRecord.value)) {
String message = String.format("%s, expected=%s, actual=%s", id, utf8(value), utf8(leafRecord.value));
throw new RuntimeException(message);
}
} else if (Operation.GET_RECORD == operation) {
SnapshotRecord result = idReader.getRecord(id);
if (id != result.id) {
String message = String.format("%s, expected=%s, actual=%s", id, id, result.id);
throw new RuntimeException(message);
}
if (!Arrays.equals(key, result.key)) {
String message = String.format("%s, expected=%s, actual=%s", id, utf8(key), utf8(result.key));
throw new RuntimeException(message);
}
if (!SnapshotEntry.equal(input.entry, result.entry())) {
String message = String.format("%s, expected=%s, actual=%s", i, // TODO print more than column 0
utf8(input.entry.columnValues[0]), utf8(result.columnValues[0]));
throw new RuntimeException(message);
}
} else if (Operation.FIND_ID == operation) {
if (keyReader.findRecordId(key).isEmpty()) {
String message = String.format("%s, %s not found", i, utf8(key));
throw new RuntimeException(message);
}
if (id != keyReader.findRecordId(key).get().longValue()) {
String message = String.format("%s, %s not found", i, utf8(key));
throw new RuntimeException(message);
}
} else if (Operation.FIND_RECORD == operation) {
Optional<SnapshotRecord> output = keyReader.findRecord(key);
if (output.isEmpty()) {
String message = String.format("%s, %s not found", i, utf8(key));
throw new RuntimeException(message);
}
if (!SnapshotEntry.equal(input.entry, output.get().entry())) {
String message = String.format("%s, expected=%s, actual=%s", i, // TODO print more than column 0
utf8(batch.get(i).entry.columnValues[0]), utf8(output.get().columnValues[0]));
throw new RuntimeException(message);
}
}
}
count.addAndGet(batch.size());
logger.warn("{}, {}, {} for {}/{} {}", random ? "random" : "sorted", multiThreaded ? "multi" : "single", operation.toString().toLowerCase(), NumberFormatter.addCommas(count.get()), NumberFormatter.addCommas(searchKeys.size()), utf8(ListTool.getLast(batch).entry.key()));
});
}
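This is the only snippet here that passes four arguments to ParallelScannerContext; the trailing multiThreaded flag appears to switch parallelism on or off without changing the rest of the pipeline, which lets one test body cover both single- and multi-threaded runs. A hypothetical sketch of driving both modes through the same code path (checkBatch and the pool size are placeholders):

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import io.datarouter.scanner.ParallelScannerContext;
import io.datarouter.scanner.Scanner;

public class ToggleableParallelSketch {

    public static void run(List<String> keys, boolean multiThreaded) {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        // the fourth argument appears to enable or disable parallel execution entirely
        var context = new ParallelScannerContext(executor, 4, true, multiThreaded);
        Scanner.of(keys)
                .batch(10_000)
                .parallel(context)
                .forEach(ToggleableParallelSketch::checkBatch);
        executor.shutdown();
    }

    // placeholder for the per-batch verification in testOperationInternal
    private static void checkBatch(List<String> batch) {
        System.out.println("checked " + batch.size() + " keys");
    }
}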
Use of io.datarouter.scanner.ParallelScannerContext in project datarouter by hotpads.
Class DatarouterSnapshotGroupsHandler, method buildGroupList:
private ContainerTag<?> buildGroupList() {
var thead = thead(tr(th("ID"), th("numSnapshots")));
var table = table().withClasses("sortable table table-sm table-striped my-4 border").with(thead);
groups.scanIds()
        .sort()
        .parallel(new ParallelScannerContext(exec, exec.getMaximumPoolSize(), false))
        .map(id -> {
String href = new URIBuilder()
        .setPath(request.getContextPath()
                + snapshotPaths.datarouter.snapshot.group.listSnapshots.toSlashedString())
        .addParameter(P_groupId, id)
        .toString();
var anchor = a(id).withHref(href);
String numSnapshots = groups.getGroup(id).keyReadOps(false).scanSnapshotKeys().count() + "";
return tr(TagCreator.td(anchor), TagCreator.td(numSnapshots));
}).forEach(table::with);
var header = h4("Snapshot Groups");
return div(header, table).withClass("container-fluid my-4").withStyle("padding-left: 0px");
}
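Unlike the earlier snippets, buildGroupList passes false as the third constructor argument, presumably so rows come back in the same sorted order the ids were emitted in even though the per-id work runs concurrently. A small hypothetical sketch of that ordered fan-out; loadRow is a placeholder for the per-id row building:

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import io.datarouter.scanner.ParallelScannerContext;
import io.datarouter.scanner.Scanner;

public class OrderedParallelSketch {

    public static void main(String[] args) {
        ExecutorService executor = Executors.newFixedThreadPool(4);
        Scanner.of(List.of("b", "a", "c"))
                .sort()
                // false: presumably keeps results in input order rather than completion order
                .parallel(new ParallelScannerContext(executor, 4, false))
                .map(OrderedParallelSketch::loadRow)
                .forEach(System.out::println);
        executor.shutdown();
    }

    // placeholder for the per-id row building in buildGroupList
    private static String loadRow(String id) {
        return "row for " + id;
    }
}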