Example use of com.carrotsearch.randomizedtesting.annotations.Repeat in the randomizedtesting project (by randomizedtesting): class TestEventBusSanityCheck, method testArrayQueueReentrance.
// Stress-tests that the EventBus never dispatches to the same handler from two
// threads at once while slaves "steal" suites from a shared queue. Mirrors the
// work-stealing pattern used by JUnit4's slave/aggregator machinery.
@Test
@Repeat(iterations = 100)
public void testArrayQueueReentrance() throws Exception {
// Mockups.
// Random pool of 1..999 fake suite names to hand out to idle slaves.
final List<String> foo = new ArrayList<>();
for (int i = randomIntBetween(2, 1000); --i > 0; ) {
foo.add(randomAsciiOfLength(20));
}
final EventBus aggregatedBus = new EventBus("aggregated");
// Flipped to true if two threads are ever observed inside the handler at once.
final AtomicBoolean hadErrors = new AtomicBoolean();
// Code mirrors JUnit4's behavior.
final Deque<String> stealingQueue = new ArrayDeque<String>(foo);
aggregatedBus.register(new Object() {
// Marker: the thread currently executing onSlaveIdle, or null when idle.
// volatile so a concurrent entrant reliably sees another thread's claim.
volatile Thread foo;
@Subscribe
public void onSlaveIdle(SlaveIdle slave) {
// Reentrancy check: if another thread is already inside, the bus violated
// its single-threaded-dispatch guarantee for this subscriber.
final Thread other = foo;
if (other != null) {
hadErrors.set(true);
throw new RuntimeException("Wtf? two threads in a handler: " + other + " and " + Thread.currentThread());
}
// Claim the handler for this thread; released (set to null) on exit below.
foo = Thread.currentThread();
if (stealingQueue.isEmpty()) {
slave.finished();
} else {
// Hand the idle slave the next suite from the shared queue.
String suiteName = stealingQueue.pop();
slave.newSuite(suiteName);
}
foo = null;
}
});
// stress.
// Post concurrently from 1..10 threads; each post may re-enter the handler.
ExecutorService executor = Executors.newCachedThreadPool();
final List<Callable<Void>> slaves = new ArrayList<>();
for (int i = 0; i < randomIntBetween(1, 10); i++) {
slaves.add(new Callable<Void>() {
@Override
public Void call() throws Exception {
aggregatedBus.post(new SlaveIdle());
return null;
}
});
}
// invokeAll blocks until all posts complete; get() rethrows any task failure.
for (Future<Void> f : executor.invokeAll(slaves)) {
f.get();
}
executor.shutdown();
assertFalse(hadErrors.get());
}
Example use of com.carrotsearch.randomizedtesting.annotations.Repeat in the lucene-solr project (by apache): class TestCloudNestedDocsSort, method test.
/**
 * Verifies that sorting child docs by {@code val_s1} yields parents in the same
 * order as a block-join query sorted by {@code childfield(val_s1)}.
 * Randomizes sort direction and an optional tie-breaker field.
 */
@Test
@Repeat(iterations = 2)
public void test() throws SolrServerException, IOException {
  final boolean asc = random().nextBoolean();
  final String dir = asc ? "asc" : "desc";
  // ^=0 neutralizes scoring so only the sort clause determines order.
  final String parentFilter = "+parentFilter_s:(" + matchingParent + " " + anyValsSpaceDelim(2) + ")^=0";
  String childFilter = "+childFilter_s:(" + matchingChild + " " + anyValsSpaceDelim(4) + ")^=0";
  final String fl = "id,type_s,parent_id_s1,val_s1,score,parentFilter_s,childFilter_s,parentTie_s1";
  String sortClause = "val_s1 " + dir + ", " + "parent_id_s1 " + ascDesc();
  if (rarely()) {
    // Occasionally prepend a tie-breaker to exercise multi-clause sorts.
    sortClause = "parentTie_s1 " + ascDesc() + "," + sortClause;
  }
  final SolrQuery q = new SolrQuery("q", "+type_s:child^=0 " + parentFilter + " " + childFilter, "sort", sortClause, "rows", "" + maxDocs, "fl", fl);
  final QueryResponse children = client.query(q);
  // Same query via block-join, sorting parents by their children's val_s1.
  final SolrQuery bjq = new SolrQuery("q", "{!parent which=type_s:parent}(+type_s:child^=0 " + parentFilter + " " + childFilter + ")", "sort", sortClause.replace("val_s1 ", "childfield(val_s1)"), "rows", "" + maxDocs, "fl", fl);
  final QueryResponse parents = client.query(bjq);
  Set<String> parentIds = new LinkedHashSet<>();
  assertTrue("it can never be empty for sure", parents.getResults().size() > 0);
  // NOTE(review): the outer loop only terminates once the inner pass has drawn
  // every parent via parentIter.next(); the test data presumably guarantees a
  // one-to-one match between distinct child parent ids and parent results.
  for (Iterator<SolrDocument> parentIter = parents.getResults().iterator(); parentIter.hasNext(); ) {
    for (SolrDocument child : children.getResults()) {
      assertEquals("child", child.getFirstValue("type_s"));
      final String parentId = (String) child.getFirstValue("parent_id_s1");
      if (parentIds.add(parentId)) {
        // in children the next parent appears, it should be next at parents
        final SolrDocument parent = parentIter.next();
        assertEquals("parent", parent.getFirstValue("type_s"));
        final String actParentId = "" + parent.get("id");
        if (!actParentId.equals(parentId)) {
          // Dump diagnostics before failing. Clamp the preview length: the
          // original substring(0, 5000) threw StringIndexOutOfBoundsException
          // on dumps shorter than 5000 chars, hiding the real failure.
          final String chDump = children.toString().replace("SolrDocument", "\nSolrDocument");
          System.out.println("\n\n" + chDump.substring(0, Math.min(5000, chDump.length())) + "\n\n");
          System.out.println("\n\n" + chDump + "\n\n");
        }
        assertEquals(actParentId, parentId);
      }
    }
  }
}
Example use of com.carrotsearch.randomizedtesting.annotations.Repeat in the lucene-solr project (by apache): class TestBBoxStrategy, method testOperations.
/**
 * Runs every supported spatial operation against randomly generated shapes,
 * using a randomized spatial context (geo most of the time) and randomly
 * toggling docValues off to cover both predicate code paths.
 */
@Test
@Repeat(iterations = 15)
public void testOperations() throws IOException {
  // Setup: pick the context. One chance in four we build a custom non-geo
  // world; otherwise use geo, which is the more interesting case to test.
  if (random().nextInt(4) == 0) {
    SpatialContextFactory factory = new SpatialContextFactory();
    factory.geo = false;
    factory.worldBounds = new RectangleImpl(-300, 300, -100, 100, null);
    this.ctx = factory.newSpatialContext();
  } else {
    this.ctx = SpatialContext.GEO;
  }
  this.strategy = BBoxStrategy.newInstance(ctx, "bbox");
  // Half the time rebuild the strategy without docValues, so the predicate
  // tests also cover the docValues-disabled path.
  if (random().nextBoolean()) {
    FieldType plainFieldType = new FieldType(((BBoxStrategy) strategy).getFieldType());
    plainFieldType.setDocValuesType(DocValuesType.NONE);
    strategy = new BBoxStrategy(ctx, strategy.getFieldName(), plainFieldType);
  }
  for (SpatialOperation op : SpatialOperation.values()) {
    if (op == SpatialOperation.Overlaps) {
      continue; // unsupported by this strategy
    }
    testOperationRandomShapes(op);
    deleteAll();
    commit();
  }
}
Example use of com.carrotsearch.randomizedtesting.annotations.Repeat in the lucene-solr project (by apache): class TokenSourcesTest, method testRandomizedRoundTrip.
// Round-trips a random token stream through term vectors and asserts that
// TokenSources reconstructs an equivalent stream (terms, offsets, and — when
// positions are stored — position increments and payloads).
@Repeat(iterations = 10)
public //@Seed("947083AB20AB2D4F")
void testRandomizedRoundTrip() throws Exception {
// Build 1..10 distinct random terms plus their BytesRef forms.
final int distinct = TestUtil.nextInt(random(), 1, 10);
String[] terms = new String[distinct];
BytesRef[] termBytes = new BytesRef[distinct];
for (int i = 0; i < distinct; ++i) {
terms[i] = TestUtil.randomRealisticUnicodeString(random());
termBytes[i] = new BytesRef(terms[i]);
}
final BaseTermVectorsFormatTestCase.RandomTokenStream rTokenStream = new BaseTermVectorsFormatTestCase.RandomTokenStream(TestUtil.nextInt(random(), 1, 10), terms, termBytes);
//check to see if the token streams might have non-deterministic testable result
final boolean storeTermVectorPositions = random().nextBoolean();
final int[] startOffsets = rTokenStream.getStartOffsets();
final int[] positionsIncrements = rTokenStream.getPositionsIncrements();
for (int i = 1; i < positionsIncrements.length; i++) {
// With stored positions, a nonzero increment keeps token order deterministic.
if (storeTermVectorPositions && positionsIncrements[i] != 0) {
continue;
}
// than previous token's endOffset? That would increase the testable possibilities.
// Without positions (or at increment 0), equal start offsets make the
// reconstructed token order ambiguous — bail out rather than assert on it.
if (startOffsets[i] == startOffsets[i - 1]) {
if (VERBOSE)
System.out.println("Skipping test because can't easily validate random token-stream is correct.");
return;
}
}
//sanity check itself
assertTokenStreamContents(rTokenStream, rTokenStream.getTerms(), rTokenStream.getStartOffsets(), rTokenStream.getEndOffsets(), rTokenStream.getPositionsIncrements());
// Index a single document whose field carries the random stream as vectors.
Directory dir = newDirectory();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
FieldType myFieldType = new FieldType(TextField.TYPE_NOT_STORED);
myFieldType.setStoreTermVectors(true);
myFieldType.setStoreTermVectorOffsets(true);
myFieldType.setStoreTermVectorPositions(storeTermVectorPositions);
//payloads require positions; it will throw an error otherwise
myFieldType.setStoreTermVectorPayloads(storeTermVectorPositions && random().nextBoolean());
Document doc = new Document();
doc.add(new Field("field", rTokenStream, myFieldType));
writer.addDocument(doc);
IndexReader reader = writer.getReader();
writer.close();
assertEquals(1, reader.numDocs());
// Rebuild a token stream from the stored term vectors of doc 0.
TokenStream vectorTokenStream = TokenSources.getTermVectorTokenStreamOrNull("field", reader.getTermVectors(0), -1);
//sometimes check payloads
PayloadAttribute payloadAttribute = null;
if (myFieldType.storeTermVectorPayloads() && usually()) {
payloadAttribute = vectorTokenStream.addAttribute(PayloadAttribute.class);
}
// Position increments are only comparable when positions were stored.
assertTokenStreamContents(vectorTokenStream, rTokenStream.getTerms(), rTokenStream.getStartOffsets(), rTokenStream.getEndOffsets(), myFieldType.storeTermVectorPositions() ? rTokenStream.getPositionsIncrements() : null);
//test payloads
if (payloadAttribute != null) {
vectorTokenStream.reset();
for (int i = 0; vectorTokenStream.incrementToken(); i++) {
assertEquals(rTokenStream.getPayloads()[i], payloadAttribute.getPayload());
}
}
reader.close();
dir.close();
}
Aggregations