Use of com.carrotsearch.randomizedtesting.RandomizedContext in project lucene-solr by apache.
The class TestRuleTemporaryFilesCleanup, method getPerTestClassTempDir.
Path getPerTestClassTempDir() {
  if (tempDirBase == null) {
    RandomizedContext ctx = RandomizedContext.current();
    Class<?> clazz = ctx.getTargetClass();
    String prefix = clazz.getName();
    prefix = prefix.replaceFirst("^org.apache.lucene.", "lucene.");
    prefix = prefix.replaceFirst("^org.apache.solr.", "solr.");
    int attempt = 0;
    Path f;
    boolean success = false;
    do {
      if (attempt++ >= TEMP_NAME_RETRY_THRESHOLD) {
        throw new RuntimeException("Failed to get a temporary name too many times, check your temp directory and consider manually cleaning it: "
            + javaTempDir.toAbsolutePath());
      }
      f = javaTempDir.resolve(prefix + "_" + ctx.getRunnerSeedAsString()
          + "-" + String.format(Locale.ENGLISH, "%03d", attempt));
      try {
        Files.createDirectory(f);
        success = true;
      } catch (IOException ignore) {
        // Name collision or transient I/O problem; retry with the next attempt counter.
      }
    } while (!success);
    tempDirBase = f;
    registerToRemoveAfterSuite(tempDirBase);
  }
  return tempDirBase;
}
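For orientation, the two RandomizedContext calls the rule relies on are getTargetClass() (the test class owning the context) and getRunnerSeedAsString() (a seed string that is stable for a given -Dtests.seed). A minimal, self-contained sketch of just those calls, with an illustrative class name not taken from lucene-solr:

import org.junit.Test;
import org.junit.runner.RunWith;

import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.RandomizedRunner;

@RunWith(RandomizedRunner.class)
public class SeedStableTempNameSketch {
  @Test
  public void printSeedStableName() {
    RandomizedContext ctx = RandomizedContext.current();
    // Both values are constant for the whole run, so re-running with the same
    // -Dtests.seed rebuilds the same prefix that the rule above uses.
    String name = ctx.getTargetClass().getSimpleName() + "_" + ctx.getRunnerSeedAsString();
    System.out.println("would create: " + name);
  }
}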
Use of com.carrotsearch.randomizedtesting.RandomizedContext in project SearchServices by Alfresco.
The class DistributedPropertyBasedAlfrescoSolrTrackerTest, method testProperty.
@Test
public void testProperty() throws Exception {
  putHandleDefaults();

  int numAcls = 25;
  AclChangeSet bulkAclChangeSet = getAclChangeSet(numAcls);
  List<Acl> bulkAcls = new ArrayList<>();
  List<AclReaders> bulkAclReaders = new ArrayList<>();
  for (int i = 0; i < numAcls; i++) {
    Acl bulkAcl = getAcl(bulkAclChangeSet);
    bulkAcls.add(bulkAcl);
    bulkAclReaders.add(getAclReaders(bulkAclChangeSet, bulkAcl, list("king" + bulkAcl.getId()), list("king" + bulkAcl.getId()), null));
  }
  indexAclChangeSet(bulkAclChangeSet, bulkAcls, bulkAclReaders);

  int numNodes = 1000;
  List<Node> nodes = new ArrayList<>();
  List<NodeMetaData> nodeMetaDatas = new ArrayList<>();
  Transaction bigTxn = getTransaction(0, numNodes);
  RandomizedContext context = RandomizedContext.current();
  Random ints = context.getRandom();
  for (int i = 0; i < numNodes; i++) {
    int aclIndex = i % numAcls;
    Node node = getNode(bigTxn, bulkAcls.get(aclIndex), Node.SolrApiNodeStatus.UPDATED);
    String domain = DOMAINS[ints.nextInt(DOMAINS.length)];
    domainsCount.put(domain, domainsCount.get(domain) + 1);
    // String emailAddress = RANDOM_NAMES[ints.nextInt(RANDOM_NAMES.length)]+ "@"+ domain;
    String emailAddress = "peter.pan" + "@" + domain;
    node.setShardPropertyValue(emailAddress);
    nodes.add(node);
    NodeMetaData nodeMetaData = getNodeMetaData(node, bigTxn, bulkAcls.get(aclIndex), "king", null, false);
    nodeMetaData.getProperties().put(ContentModel.PROP_NAME, new StringPropertyValue(emailAddress));
    nodeMetaDatas.add(nodeMetaData);
  }
  indexTransaction(bigTxn, nodes, nodeMetaDatas);
  waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), numNodes, 100000);
  waitForDocCountAllCores(new TermQuery(new Term(FIELD_DOC_TYPE, SolrInformationServer.DOC_TYPE_ACL)), numAcls, 100000);

  for (int i = 0; i < DOMAINS.length; i++) {
    // We have split by email domain, so those should be co-located on the same shard.
    // I am storing the email address in the cm:name field. This is purely to make it easier to write the TermQuery
    // and doesn't affect the functionality.
    assertCountAndColocation(new TermQuery(new Term("text@s____@{http://www.alfresco.org/model/content/1.0}name", "peter.pan" + "@" + DOMAINS[i])), domainsCount.get(DOMAINS[i]));
  }

  // Now test the fallback.
  nodes.clear();
  nodeMetaDatas.clear();
  deleteByQueryAllClients("*:*");

  // Should now be nothing in the index.
  waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), 0, 100000);

  Transaction bigTxn1 = getTransaction(0, numNodes);
  for (int i = 0; i < numNodes; i++) {
    int aclIndex = i % numAcls;
    Node node = getNode(bigTxn1, bulkAcls.get(aclIndex), Node.SolrApiNodeStatus.UPDATED);
    String domain = DOMAINS[ints.nextInt(DOMAINS.length)];
    domainsCount.put(domain, domainsCount.get(domain) + 1);
    String emailAddress = "peter.pan" + "@" + domain;
    // Don't set the shard property, so indexing falls back to the default sharding.
    // node.setShardPropertyValue(emailAddress);
    nodes.add(node);
    NodeMetaData nodeMetaData = getNodeMetaData(node, bigTxn1, bulkAcls.get(aclIndex), "king", null, false);
    nodeMetaData.getProperties().put(ContentModel.PROP_NAME, new StringPropertyValue(emailAddress));
    nodeMetaDatas.add(nodeMetaData);
  }
  indexTransaction(bigTxn1, nodes, nodeMetaDatas);
  waitForDocCount(new TermQuery(new Term("content@s___t@{http://www.alfresco.org/model/content/1.0}content", "world")), numNodes, 100000);

  // There are 4 shards. We should expect roughly a quarter of the nodes on each shard.
  assertNodesPerShardGreaterThan((int) ((numNodes) * .21));
}
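The only RandomizedContext usage in the test above is pulling the per-test Random, so the domain assigned to each node is reproducible from the printed test seed. A minimal sketch of that pattern, not taken from SearchServices; the DOMAINS array and class name are placeholders:

import java.util.Random;

import org.junit.Test;
import org.junit.runner.RunWith;

import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.RandomizedRunner;

@RunWith(RandomizedRunner.class)
public class ReproducibleDomainPickSketch {
  // Placeholder for the DOMAINS fixture used in the test above.
  private static final String[] DOMAINS = { "alfresco.com", "example.com" };

  @Test
  public void pickDomainsFromTheTestSeed() {
    Random random = RandomizedContext.current().getRandom();
    for (int i = 0; i < 5; i++) {
      // Same -Dtests.seed => same sequence of domains => repeatable shard counts.
      String domain = DOMAINS[random.nextInt(DOMAINS.length)];
      System.out.println("node-" + i + " -> peter.pan@" + domain);
    }
  }
}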
Use of com.carrotsearch.randomizedtesting.RandomizedContext in project randomizedtesting by randomizedtesting.
The class Test002ExtendingRandomizedTest, method getContextByHand.
@Test
public void getContextByHand() {
  RandomizedContext context = RandomizedContext.current();
  Random rnd = context.getRandom();
  System.out.println("Random, next int: " + rnd.nextInt());
}
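A follow-up sketch, assuming only the standard randomizedtesting API: the static helpers inherited from RandomizedTest (randomIntBetween and friends) draw from the same per-test Random that RandomizedContext.current().getRandom() returns, so either style is reproducible under the same seed. The class name below is illustrative.

import java.util.Random;

import org.junit.Test;

import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.RandomizedTest;

public class ContextVersusHelpersSketch extends RandomizedTest {
  @Test
  public void bothStylesShareOneContext() {
    Random viaContext = RandomizedContext.current().getRandom();
    System.out.println("via context: " + viaContext.nextInt(100));
    // Inherited helper; it obtains the same Random from the current context.
    System.out.println("via helper:  " + randomIntBetween(0, 99));
  }
}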
Use of com.carrotsearch.randomizedtesting.RandomizedContext in project randomizedtesting by randomizedtesting.
The class Test011RunawayThreads, method ExecutorServiceContextPropagation.
@ThreadLeakLingering(linger = 2000)
@Test
public void ExecutorServiceContextPropagation() throws Throwable {
  final long seed = Utils.getRunnerSeed();
  final ExecutorService executor = Executors.newCachedThreadPool();
  try {
    executor.submit(new Runnable() {
      public void run() {
        RandomizedContext ctx = RandomizedContext.current();
        Assert.assertEquals(seed, Utils.getSeed(ctx.getRandomness()));
      }
    }).get();
  } catch (ExecutionException e) {
    throw e.getCause();
  } finally {
    executor.shutdown();
    executor.awaitTermination(1, TimeUnit.SECONDS);
  }
}
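The same propagation check can be written against the public API instead of the suite-internal Utils helpers; a sketch, with an illustrative class name and the runner seed compared via getRunnerSeedAsString():

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;

import com.carrotsearch.randomizedtesting.RandomizedContext;
import com.carrotsearch.randomizedtesting.RandomizedRunner;
import com.carrotsearch.randomizedtesting.annotations.ThreadLeakLingering;

@RunWith(RandomizedRunner.class)
public class ContextPropagationSketch {
  @ThreadLeakLingering(linger = 2000)
  @Test
  public void workerThreadSeesTheSameSeed() throws Exception {
    final String seedInTest = RandomizedContext.current().getRunnerSeedAsString();
    final ExecutorService executor = Executors.newSingleThreadExecutor();
    try {
      // The pool thread is created in the test's thread group, so
      // RandomizedContext.current() resolves to the same context there.
      Callable<String> task = new Callable<String>() {
        public String call() {
          return RandomizedContext.current().getRunnerSeedAsString();
        }
      };
      Assert.assertEquals(seedInTest, executor.submit(task).get());
    } finally {
      executor.shutdown();
      executor.awaitTermination(1, TimeUnit.SECONDS);
    }
  }
}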