Use of java.util.AbstractMap.SimpleEntry in project pravega by pravega.
From the class ControllerServiceTest, method setup:
@Before
public void setup() throws ExecutionException, InterruptedException {
final ScalingPolicy policy1 = ScalingPolicy.fixed(2);
final ScalingPolicy policy2 = ScalingPolicy.fixed(3);
final StreamConfiguration configuration1 = StreamConfiguration.builder().scope(SCOPE).streamName(stream1).scalingPolicy(policy1).build();
final StreamConfiguration configuration2 = StreamConfiguration.builder().scope(SCOPE).streamName(stream2).scalingPolicy(policy2).build();
// createScope
streamStore.createScope(SCOPE).get();
// region createStream
startTs = System.currentTimeMillis();
OperationContext context = streamStore.createContext(SCOPE, stream1);
streamStore.createStream(SCOPE, stream1, configuration1, startTs, context, executor).get();
streamStore.setState(SCOPE, stream1, State.ACTIVE, context, executor);
OperationContext context2 = streamStore.createContext(SCOPE, stream2);
streamStore.createStream(SCOPE, stream2, configuration2, startTs, context2, executor).get();
streamStore.setState(SCOPE, stream2, State.ACTIVE, context2, executor);
// endregion
// region scaleSegments
SimpleEntry<Double, Double> segment1 = new SimpleEntry<>(0.5, 0.75);
SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.75, 1.0);
List<Integer> sealedSegments = Collections.singletonList(1);
scaleTs = System.currentTimeMillis();
StartScaleResponse startScaleResponse = streamStore.startScale(SCOPE, stream1, sealedSegments, Arrays.asList(segment1, segment2), startTs + 20, false, null, executor).get();
List<Segment> segmentCreated = startScaleResponse.getSegmentsCreated();
streamStore.setState(SCOPE, stream1, State.SCALING, null, executor).get();
streamStore.scaleNewSegmentsCreated(SCOPE, stream1, sealedSegments, segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
streamStore.scaleSegmentsSealed(SCOPE, stream1, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.0, 0.5);
SimpleEntry<Double, Double> segment4 = new SimpleEntry<>(0.5, 0.75);
SimpleEntry<Double, Double> segment5 = new SimpleEntry<>(0.75, 1.0);
sealedSegments = Arrays.asList(0, 1, 2);
startScaleResponse = streamStore.startScale(SCOPE, stream2, sealedSegments, Arrays.asList(segment3, segment4, segment5), startTs + 20, false, null, executor).get();
segmentCreated = startScaleResponse.getSegmentsCreated();
streamStore.setState(SCOPE, stream2, State.SCALING, null, executor).get();
streamStore.scaleNewSegmentsCreated(SCOPE, stream2, sealedSegments, segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
streamStore.scaleSegmentsSealed(SCOPE, stream2, sealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), segmentCreated, startScaleResponse.getActiveEpoch(), scaleTs, null, executor).get();
// endregion
}
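In the setup above, each SimpleEntry<Double, Double> describes the key-space range [low, high) assigned to a new segment during a scale operation: the entry key is the lower bound and the entry value is the upper bound. The following is a minimal, self-contained sketch of that convention, not pravega code; the class name KeyRangeSketch and the contiguity check are invented for illustration.

import java.util.AbstractMap.SimpleEntry;
import java.util.Arrays;
import java.util.List;

public class KeyRangeSketch {
    public static void main(String[] args) {
        // Each entry models a key-space range [low, high) for a new segment.
        SimpleEntry<Double, Double> segment1 = new SimpleEntry<>(0.5, 0.75);
        SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.75, 1.0);
        List<SimpleEntry<Double, Double>> newRanges = Arrays.asList(segment1, segment2);

        // The new ranges are expected to tile a contiguous span of the key space,
        // here the span [0.5, 1.0) previously covered by the sealed segment.
        double expectedStart = 0.5;
        for (SimpleEntry<Double, Double> range : newRanges) {
            if (range.getKey() != expectedStart || range.getValue() <= range.getKey()) {
                throw new IllegalArgumentException("Ranges are not contiguous: " + newRanges);
            }
            expectedStart = range.getValue();
        }
        System.out.println("New segment ranges: " + newRanges);
    }
}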
Use of java.util.AbstractMap.SimpleEntry in project pravega by pravega.
From the class StreamMetadataStoreTest, method scaleWithTxTest:
@Test
public void scaleWithTxTest() throws Exception {
final String scope = "ScopeScaleWithTx";
final String stream = "StreamScaleWithTx";
final ScalingPolicy policy = ScalingPolicy.fixed(2);
final StreamConfiguration configuration = StreamConfiguration.builder().scope(scope).streamName(stream).scalingPolicy(policy).build();
long start = System.currentTimeMillis();
store.createScope(scope).get();
store.createStream(scope, stream, configuration, start, null, executor).get();
store.setState(scope, stream, State.ACTIVE, null, executor).get();
long scaleTs = System.currentTimeMillis();
SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.5, 0.75);
SimpleEntry<Double, Double> segment3 = new SimpleEntry<>(0.75, 1.0);
List<Integer> scale1SealedSegments = Collections.singletonList(1);
// region Txn created before scale and during scale
// scale with transaction test
VersionedTransactionData tx1 = store.createTransaction(scope, stream, UUID.randomUUID(), 100, 100, 100, null, executor).get();
assertEquals(0, tx1.getEpoch());
StartScaleResponse response = store.startScale(scope, stream, scale1SealedSegments, Arrays.asList(segment2, segment3), scaleTs, false, null, executor).join();
final List<Segment> scale1SegmentsCreated = response.getSegmentsCreated();
final int epoch = response.getActiveEpoch();
assertEquals(0, epoch);
assertNotNull(scale1SegmentsCreated);
store.setState(scope, stream, State.SCALING, null, executor).join();
// assert that txn is created on old epoch
VersionedTransactionData tx2 = store.createTransaction(scope, stream, UUID.randomUUID(), 100, 100, 100, null, executor).get();
assertEquals(0, tx2.getEpoch());
store.scaleNewSegmentsCreated(scope, stream, scale1SealedSegments, scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
VersionedTransactionData tx3 = store.createTransaction(scope, stream, UUID.randomUUID(), 100, 100, 100, null, executor).get();
assertEquals(1, tx3.getEpoch());
// should not delete epoch
DeleteEpochResponse deleteResponse = store.tryDeleteEpochIfScaling(scope, stream, 0, null, executor).get();
assertEquals(false, deleteResponse.isDeleted());
assertEquals(null, deleteResponse.getSegmentsCreated());
assertEquals(null, deleteResponse.getSegmentsSealed());
store.sealTransaction(scope, stream, tx2.getId(), true, Optional.of(tx2.getVersion()), null, executor).get();
// commit the transaction created on the old epoch
store.commitTransaction(scope, stream, tx2.getEpoch(), tx2.getId(), null, executor).get();
// should not delete epoch
deleteResponse = store.tryDeleteEpochIfScaling(scope, stream, 0, null, executor).get();
assertEquals(false, deleteResponse.isDeleted());
store.sealTransaction(scope, stream, tx1.getId(), true, Optional.of(tx1.getVersion()), null, executor).get();
// commit the remaining transaction on the old epoch
store.commitTransaction(scope, stream, tx1.getEpoch(), tx1.getId(), null, executor).get();
// all transactions on the old epoch are now complete, so the epoch should be deleted
deleteResponse = store.tryDeleteEpochIfScaling(scope, stream, 0, null, executor).get();
assertEquals(true, deleteResponse.isDeleted());
store.sealTransaction(scope, stream, tx3.getId(), true, Optional.of(tx3.getVersion()), null, executor).get();
// commit the transaction created on the new epoch
store.commitTransaction(scope, stream, tx3.getEpoch(), tx3.getId(), null, executor).get();
store.scaleSegmentsSealed(scope, stream, scale1SealedSegments.stream().collect(Collectors.toMap(x -> x, x -> 0L)), scale1SegmentsCreated, response.getActiveEpoch(), scaleTs, null, executor).join();
// should not delete epoch
deleteResponse = store.tryDeleteEpochIfScaling(scope, stream, 1, null, executor).get();
assertEquals(false, deleteResponse.isDeleted());
// endregion
// region Txn created and deleted after scale starts
List<Integer> scale2SealedSegments = Collections.singletonList(0);
long scaleTs2 = System.currentTimeMillis();
SimpleEntry<Double, Double> segment4 = new SimpleEntry<>(0.0, 0.25);
SimpleEntry<Double, Double> segment5 = new SimpleEntry<>(0.25, 0.5);
StartScaleResponse response2 = store.startScale(scope, stream, scale2SealedSegments, Arrays.asList(segment4, segment5), scaleTs2, false, null, executor).join();
final List<Segment> scale2SegmentsCreated = response2.getSegmentsCreated();
final int epoch2 = response2.getActiveEpoch();
assertEquals(1, epoch2);
assertNotNull(scale2SegmentsCreated);
VersionedTransactionData txn = store.createTransaction(scope, stream, UUID.randomUUID(), 100, 100, 100, null, executor).get();
assertEquals(1, txn.getEpoch());
store.sealTransaction(scope, stream, txn.getId(), true, Optional.of(txn.getVersion()), null, executor).get();
// commit the transaction created on epoch 1
store.commitTransaction(scope, stream, txn.getEpoch(), txn.getId(), null, executor).get();
// should not delete epoch
deleteResponse = store.tryDeleteEpochIfScaling(scope, stream, 1, null, executor).get();
// verify that epoch is not deleted as new epoch is not yet created
assertEquals(false, deleteResponse.isDeleted());
// verify that new txns can be created and are created on old epoch
VersionedTransactionData txn2 = store.createTransaction(scope, stream, UUID.randomUUID(), 100, 100, 100, null, executor).get();
assertEquals(1, txn2.getEpoch());
store.setState(scope, stream, State.SCALING, null, executor).get();
store.scaleNewSegmentsCreated(scope, stream, scale2SealedSegments, scale2SegmentsCreated, response2.getActiveEpoch(), scaleTs2, null, executor).join();
store.sealTransaction(scope, stream, txn2.getId(), true, Optional.of(txn2.getVersion()), null, executor).get();
// commit the remaining transaction on epoch 1
store.commitTransaction(scope, stream, txn2.getEpoch(), txn2.getId(), null, executor).get();
// now that new segments are created and all transactions on epoch 1 are complete, the old epoch can be deleted.
deleteResponse = store.tryDeleteEpochIfScaling(scope, stream, 1, null, executor).get();
assertEquals(true, deleteResponse.isDeleted());
}
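Both tests above hand the sealed segments to scaleSegmentsSealed as a map built with Collectors.toMap(x -> x, x -> 0L), i.e. every sealed segment number mapped to 0L (presumably the segment's size at sealing time, which these tests do not exercise). A small standalone sketch of that idiom, with invented names:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

public class SealedSegmentsMapSketch {
    public static void main(String[] args) {
        List<Integer> sealedSegments = Arrays.asList(0, 1, 2);
        // Map each sealed segment number to 0L, mirroring the tests above.
        Map<Integer, Long> sealedSegmentSizes = sealedSegments.stream()
                .collect(Collectors.toMap(x -> x, x -> 0L));
        System.out.println(sealedSegmentSizes); // typically prints {0=0, 1=0, 2=0} (HashMap ordering)
    }
}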
Use of java.util.AbstractMap.SimpleEntry in project pravega by pravega.
From the class ZKStreamMetadataStoreTest, method testScaleMetadata:
@Test
public void testScaleMetadata() throws Exception {
String scope = "testScopeScale";
String stream = "testStreamScale";
ScalingPolicy policy = ScalingPolicy.fixed(3);
StreamConfiguration configuration = StreamConfiguration.builder().scope(scope).streamName(stream).scalingPolicy(policy).build();
SimpleEntry<Double, Double> segment1 = new SimpleEntry<>(0.0, 0.5);
SimpleEntry<Double, Double> segment2 = new SimpleEntry<>(0.5, 1.0);
List<SimpleEntry<Double, Double>> newRanges = Arrays.asList(segment1, segment2);
store.createScope(scope).get();
store.createStream(scope, stream, configuration, System.currentTimeMillis(), null, executor).get();
store.setState(scope, stream, State.ACTIVE, null, executor).get();
List<ScaleMetadata> scaleIncidents = store.getScaleMetadata(scope, stream, null, executor).get();
assertTrue(scaleIncidents.size() == 1);
assertTrue(scaleIncidents.get(0).getSegments().size() == 3);
// scale
scale(scope, stream, scaleIncidents.get(0).getSegments(), newRanges);
scaleIncidents = store.getScaleMetadata(scope, stream, null, executor).get();
assertTrue(scaleIncidents.size() == 2);
assertTrue(scaleIncidents.get(0).getSegments().size() == 2);
assertTrue(scaleIncidents.get(1).getSegments().size() == 3);
// scale again
scale(scope, stream, scaleIncidents.get(0).getSegments(), newRanges);
scaleIncidents = store.getScaleMetadata(scope, stream, null, executor).get();
assertTrue(scaleIncidents.size() == 3);
assertTrue(scaleIncidents.get(0).getSegments().size() == 2);
assertTrue(scaleIncidents.get(1).getSegments().size() == 2);
// scale again
scale(scope, stream, scaleIncidents.get(0).getSegments(), newRanges);
scaleIncidents = store.getScaleMetadata(scope, stream, null, executor).get();
assertTrue(scaleIncidents.size() == 4);
assertTrue(scaleIncidents.get(0).getSegments().size() == 2);
assertTrue(scaleIncidents.get(1).getSegments().size() == 2);
}
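The scale(...) helper invoked above is not included in this snippet, so its actual signature and body are unknown. The sketch below is only a hypothetical reconstruction that replays the startScale -> SCALING -> scaleNewSegmentsCreated -> scaleSegmentsSealed sequence used by the other tests; it is meant to sit inside the same test class, reusing its store and executor fields and existing imports, and it assumes Segment exposes a getNumber() accessor.

// Hypothetical sketch of the helper; the real implementation may differ.
private void scale(String scope, String stream, List<Segment> sealedSegments,
                   List<SimpleEntry<Double, Double>> newRanges) throws Exception {
    long scaleTs = System.currentTimeMillis();
    List<Integer> sealedNumbers = sealedSegments.stream()
            .map(Segment::getNumber)
            .collect(Collectors.toList());
    StartScaleResponse response = store.startScale(scope, stream, sealedNumbers, newRanges,
            scaleTs, false, null, executor).get();
    List<Segment> created = response.getSegmentsCreated();
    store.setState(scope, stream, State.SCALING, null, executor).get();
    store.scaleNewSegmentsCreated(scope, stream, sealedNumbers, created,
            response.getActiveEpoch(), scaleTs, null, executor).get();
    store.scaleSegmentsSealed(scope, stream,
            sealedNumbers.stream().collect(Collectors.toMap(x -> x, x -> 0L)),
            created, response.getActiveEpoch(), scaleTs, null, executor).get();
}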
Use of java.util.AbstractMap.SimpleEntry in project linuxtools by eclipse.
From the class SystemTapRegexGenerator, method generateFromPrintf:
/**
* Generate a list of regular expressions that will capture the output of a given .stp script.
* Only output coming from <code>printf</code> statements will be captured.
* @param scriptPath The absolute path of the script to capture the output of.
* @param maxToFind The maximum number of regexs to create and return.
* A negative value indicates no limit.
* @return A list of generated regexs, each paired with the number of capturing groups it has.
*/
public static List<Entry<String, Integer>> generateFromPrintf(IPath scriptPath, int maxToFind) {
List<Entry<String, Integer>> regexs = new ArrayList<>();
if (maxToFind == 0) {
return regexs;
}
String contents = null;
IWorkbench workbench = PlatformUI.getWorkbench();
IWorkspaceRoot root = ResourcesPlugin.getWorkspace().getRoot();
IEditorPart editor = ResourceUtil.findEditor(workbench.getActiveWorkbenchWindow().getActivePage(), root.getFile(scriptPath.makeRelativeTo(root.getLocation())));
if (editor != null) {
// If editor of this file is open, take current file contents.
ITextEditor tEditor = editor.getAdapter(ITextEditor.class);
IDocument document = tEditor.getDocumentProvider().getDocument(tEditor.getEditorInput());
contents = CommentRemover.exec(document.get());
} else {
// If chosen file is not being edited or is outside of the workspace, use the saved contents of the file itself.
contents = CommentRemover.execWithFile(scriptPath.toString());
}
// Now actually search the contents for "printf(...)" statements. (^|[\s({;])printf\("(.+?)",.+\)
Pattern pattern = Pattern.compile("(?<=[^\\w])printf\\(\"(.+?)\",.+?\\)");
Matcher matcher = pattern.matcher(contents);
while (matcher.find() && (maxToFind < 0 || regexs.size() < maxToFind)) {
String regex = null;
// Note: allow optional "long" modifier 'l'. Not captured because it doesn't impact output format.
// Also, don't support variable width/precision modifiers (*).
// TODO: Consider %m & %M support.
Pattern format = Pattern.compile("%([-\\+ \\#0])?(\\d+)?(\\.\\d*)?l?([bcdiopsuxX%])");
// Only capture until newlines to preserve the "column" format.
// Don't try gluing together output from multiple printfs
// since asynchronous prints would make things messy.
String[] printls = matcher.group(1).split("\\\\n");
for (int i = 0; i < printls.length; i++) {
String printl = printls[i];
// Ignore newlines if they are escaped ("\\n").
if (printl.endsWith("\\")) {
printls[i + 1] = printl.concat("\\n" + printls[i + 1]);
continue;
}
Matcher fmatch = format.matcher(printl);
int lastend = 0;
int numColumns = 0;
while (fmatch.find()) {
numColumns++;
char chr = fmatch.group(4) == null ? '\0' : fmatch.group(4).charAt(0);
if (chr == '\0') {
// Skip this statement if an invalid regex is found.
regex = null;
break;
}
char flag = fmatch.group(1) == null ? '\0' : fmatch.group(1).charAt(0);
int width = fmatch.group(2) == null ? 0 : Integer.parseInt(fmatch.group(2));
String precision = fmatch.group(3) == null ? null : fmatch.group(3).substring(1);
// First, add any non-capturing characters.
String pre = addRegexEscapes(printl.substring(lastend, fmatch.start()));
regex = lastend > 0 ? regex.concat(pre) : pre;
lastend = fmatch.end();
// Now add what will be captured.
String target = "(";
if (chr == 'u' || (flag != '#' && chr == 'o')) {
target = target.concat("\\d+");
} else if (chr == 'd' || chr == 'i') {
if (flag == '+') {
target = target.concat("\\+|");
} else if (flag == ' ') {
target = target.concat(" |");
}
target = target.concat("-?\\d+");
} else if (flag == '#' && chr == 'o') {
target = target.concat("0\\d+");
} else if (chr == 'p') {
target = target.concat("0x[a-f0-9]+");
} else if (chr == 'x') {
if (flag == '#') {
target = target.concat("0x");
}
target = target.concat("[a-f0-9]+");
} else if (chr == 'X') {
if (flag == '#') {
target = target.concat("0X");
}
target = target.concat("[A-F0-9]+");
} else if (chr == 'b') {
target = target.concat(".");
} else if (chr == 'c') {
if (flag != '#') {
target = target.concat(".");
} else {
target = target.concat("\\([a-z]|[0-9]{3})|.|\\\\");
}
} else if (chr == 's') {
if (precision != null) {
target = target.concat(".{" + precision + "}");
} else {
target = target.concat(".+");
}
} else {
// Invalid or unhandled format specifier. Skip this regex.
regex = null;
break;
}
target = target.concat(")");
// Add optional space padding to fill out the specified width; ignore it for %b, which uses the width value in a different way.
if (chr != 'b' && --width > 0) {
if (flag == '-') {
target = target.concat(" {0," + width + "}");
} else if (flag != '0' || chr == 's' || chr == 'c') {
target = " {0," + width + "}".concat(target);
}
}
regex = regex.concat(target);
}
if (regex != null) {
// Finally, add the uncaptured remainder of the print statement to the regex.
regexs.add(new SimpleEntry<>(regex.concat(addRegexEscapes(printl.substring(lastend))), numColumns));
}
}
}
return regexs;
}
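To make the returned Entry<String, Integer> shape concrete, here is a small, standalone sketch, not the linuxtools implementation, that converts a heavily simplified printf format string into a capturing regex paired with its group count and then applies it to a sample output line. Only %d, %s and %x are handled, and all class and method names are invented for illustration.

import java.util.AbstractMap.SimpleEntry;
import java.util.Map.Entry;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PrintfRegexSketch {
    // Turn a simplified printf format string into a (regex, captureGroupCount) pair.
    static Entry<String, Integer> toRegex(String format) {
        Matcher m = Pattern.compile("%([dsx])").matcher(format);
        StringBuilder regex = new StringBuilder();
        int last = 0;
        int groups = 0;
        while (m.find()) {
            // Literal text between format specifiers is matched verbatim.
            regex.append(Pattern.quote(format.substring(last, m.start())));
            switch (m.group(1)) {
                case "d": regex.append("(-?\\d+)"); break;
                case "x": regex.append("([a-f0-9]+)"); break;
                default:  regex.append("(.+)"); break;
            }
            groups++;
            last = m.end();
        }
        regex.append(Pattern.quote(format.substring(last)));
        return new SimpleEntry<>(regex.toString(), groups);
    }

    public static void main(String[] args) {
        Entry<String, Integer> result = toRegex("pid=%d comm=%s");
        System.out.println(result.getKey() + " captures " + result.getValue() + " columns");
        Matcher m = Pattern.compile(result.getKey()).matcher("pid=1234 comm=bash");
        if (m.matches()) {
            System.out.println("pid=" + m.group(1) + ", comm=" + m.group(2));
        }
    }
}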