Example use of org.apache.commons.lang.time.StopWatch in the Apache Whirr project:
class HadoopServiceTeraSortBenchmark, method runTeraSort.
/**
 * Runs a TeraSort job over the "input" directory, writing results to
 * "output", and logs the elapsed wall-clock time in milliseconds.
 *
 * @throws Exception if the TeraSort job fails
 */
private void runTeraSort() throws Exception {
    TeraSort sorter = new TeraSort();
    sorter.setConf(controller.getJobConf());
    LOG.info("Starting TeraSort");
    StopWatch timer = new StopWatch();
    timer.start();
    sorter.run(new String[] { "input", "output" });
    timer.stop();
    LOG.info("TeraSort took {} ms", timer.getTime());
}
Example use of org.apache.commons.lang.time.StopWatch in the Titan project (thinkaurelius):
class VariableLongTest, method readWriteTest.
/**
 * Round-trips longs through the given variable-length codec and verifies both
 * the decoded values and the per-value encoded lengths.
 * Writes every value in [-maxValue, maxValue] (or [0, maxValue] when
 * {@code negative} is false) at steps of {@code jump}, then reads them back —
 * from the end of the buffer when {@code backward} is true — and finally
 * checks the boundary values 0, Long.MAX_VALUE and -Long.MAX_VALUE.
 *
 * @param impl     the read/write codec under test
 * @param maxValue largest magnitude to test; must be a multiple of jump
 * @param jump     step between tested values
 * @param negative whether to also test the negative range
 * @param backward whether to decode starting from the buffer's end
 */
private void readWriteTest(final ReadWriteLong impl, long maxValue, long jump, boolean negative, boolean backward) {
Preconditions.checkArgument(maxValue % jump == 0);
// Upper bound on buffer size: one value per jump step, up to 8 bytes each,
// doubled when the negative range is included. Capped below 256 MB.
long allocate = maxValue / jump * 8 * (negative ? 2 : 1);
Preconditions.checkArgument(allocate < (1 << 28));
WriteBuffer wb = new WriteByteBuffer((int) allocate);
int num = 0;
StopWatch w = new StopWatch();
w.start();
// Encode every value in the tested range, counting how many were written.
for (long i = (negative ? -maxValue : 0); i <= maxValue; i += jump) {
impl.write(wb, i);
num++;
}
// for (int i=0;i<b.remaining();i++) System.out.print(b.get(i)+"|");
w.stop();
ReadBuffer rb = wb.getStaticBuffer().asReadBuffer();
log.info("Writing " + num + " longs in " + rb.length() + " bytes. in time: " + w.getTime());
// Decodes the next value from rb and asserts both the value and that the
// number of bytes consumed matches impl.length() for that value.
final ReadVerify read = new ReadVerify() {
@Override
public void next(ReadBuffer rb, long expected) {
int beforePos = rb.getPosition();
long value = impl.read(rb);
assertEquals(expected, value);
// abs() because a backward read moves the position down, not up.
int length = Math.abs(rb.getPosition() - beforePos);
assertEquals("On: " + expected, length, impl.length(expected));
}
};
if (backward) {
// Backward decoding starts at the last byte and walks toward the front.
// NOTE(review): the loop stops before reaching the first written value
// (condition is != rather than >=) — presumably intentional for the
// backward codec; confirm against the codec's contract.
rb.movePosition(rb.length() - 1);
for (long i = maxValue; i != (negative ? -maxValue : 0); i -= jump) {
read.next(rb, i);
}
} else {
// Forward decoding replays the exact write order.
for (long i = (negative ? -maxValue : 0); i <= maxValue; i += jump) {
read.next(rb, i);
}
}
// Test boundaries
wb = new WriteByteBuffer(512);
impl.write(wb, 0);
impl.write(wb, Long.MAX_VALUE);
if (negative)
impl.write(wb, -Long.MAX_VALUE);
rb = wb.getStaticBuffer().asReadBuffer();
if (backward) {
// Backward reads return the boundary values in reverse write order.
rb.movePosition(rb.length() - 1);
if (negative)
assertEquals(-Long.MAX_VALUE, impl.read(rb));
assertEquals(Long.MAX_VALUE, impl.read(rb));
assertEquals(0, impl.read(rb));
} else {
assertEquals(0, impl.read(rb));
assertEquals(Long.MAX_VALUE, impl.read(rb));
if (negative)
assertEquals(-Long.MAX_VALUE, impl.read(rb));
}
}
Example use of org.apache.commons.lang.time.StopWatch in the MyCoRe project (MyCoRe-Org):
class MCRSolrIndexer, method rebuildMetadataIndex.
/**
 * Rebuilds solr's metadata index by streaming object content to the given
 * Solr server in bulks of {@code BULK_SIZE}.
 *
 * @param list
 *            list of identifiers of the objects to index
 * @param solrClient
 *            solr server to index
 */
public static void rebuildMetadataIndex(List<String> list, SolrClient solrClient) {
    LOGGER.info("Re-building Metadata Index");
    if (list.isEmpty()) {
        LOGGER.info("Sorry, no documents to index");
        return;
    }
    StopWatch swatch = new StopWatch();
    swatch.start();
    int totalCount = list.size();
    LOGGER.info("Sending {} objects to solr for reindexing", totalCount);
    MCRXMLMetadataManager metadataMgr = MCRXMLMetadataManager.instance();
    MCRSolrIndexStatistic statistic = null;
    // Pre-size above BULK_SIZE so the map never rehashes within one bulk.
    HashMap<MCRObjectID, MCRContent> contentMap = new HashMap<>((int) (BULK_SIZE * 1.4));
    int i = 0;
    for (String id : list) {
        i++;
        try {
            LOGGER.debug("Preparing \"{}\" for indexing", id);
            MCRObjectID objId = MCRObjectID.getInstance(id);
            MCRContent content = metadataMgr.retrieveContent(objId);
            contentMap.put(objId, content);
        } catch (Exception ex) {
            LOGGER.error("Error creating index thread for object {}", id, ex);
        }
        // Flush outside the per-object try: previously a failing object (in
        // particular the last one) skipped the flush and silently dropped all
        // content already collected for this bulk.
        if ((i % BULK_SIZE == 0 || totalCount == i) && !contentMap.isEmpty()) {
            try {
                MCRSolrIndexHandler indexHandler = MCRSolrIndexHandlerFactory.getInstance().getIndexHandler(contentMap);
                indexHandler.setCommitWithin(BATCH_AUTO_COMMIT_WITHIN_MS);
                indexHandler.setSolrServer(solrClient);
                statistic = indexHandler.getStatistic();
                submitIndexHandler(indexHandler);
            } catch (Exception ex) {
                LOGGER.error("Error submitting index bulk ending at object {}", id, ex);
            }
            contentMap = new HashMap<>((int) (BULK_SIZE * 1.4));
        }
    }
    swatch.stop();
    long durationInMilliSeconds = swatch.getTime();
    if (statistic != null) {
        statistic.addTime(durationInMilliSeconds);
    }
}
Example use of org.apache.commons.lang.time.StopWatch in the Apache Sling project:
class HealthCheckExecutorImpl, method execute.
/**
* Execute a set of health checks
*/
/**
 * Executes the given set of health checks, logs the total elapsed time at
 * debug level, and returns the results in their natural (ExecutionResult)
 * order.
 */
private List<HealthCheckExecutionResult> execute(final ServiceReference[] healthCheckReferences, HealthCheckExecutionOptions options) {
    final StopWatch timer = new StopWatch();
    timer.start();
    final List<HealthCheckMetadata> descriptors = getHealthCheckMetadata(healthCheckReferences);
    final List<HealthCheckExecutionResult> checkResults = new ArrayList<HealthCheckExecutionResult>();
    createResultsForDescriptors(descriptors, checkResults, options);
    timer.stop();
    if (logger.isDebugEnabled()) {
        logger.debug("Time consumed for all checks: {}", msHumanReadable(timer.getTime()));
    }
    // Sort by the concrete ExecutionResult ordering before returning.
    Collections.sort(checkResults, new Comparator<HealthCheckExecutionResult>() {
        @Override
        public int compare(final HealthCheckExecutionResult left, final HealthCheckExecutionResult right) {
            return ((ExecutionResult) left).compareTo((ExecutionResult) right);
        }
    });
    return checkResults;
}
Example use of org.apache.commons.lang.time.StopWatch in the Apache JSPWiki project:
class RenderingManagerTest, method testCache.
/**
 * Tests the relative speed of the DOM cache with respect to
 * page being parsed every single time.
 * @throws Exception
 */
@Test
public void testCache() throws Exception {
    m_engine.saveText("TestPage", TEST_TEXT);
    StopWatch sw = new StopWatch();
    // System.out.println("DOM cache speed test:");
    sw.start();
    // Uncached path: parse the page from scratch on every iteration.
    for (int i = 0; i < 300; i++) {
        WikiPage page = m_engine.getPage("TestPage");
        String pagedata = m_engine.getPureText(page);
        WikiContext context = new WikiContext(m_engine, page);
        MarkupParser p = m_manager.getParser(context, pagedata);
        WikiDocument d = p.parse();
        String html = m_manager.getHTML(context, d);
        Assert.assertNotNull("noncached got null response", html);
    }
    sw.stop();
    // System.out.println(" Nocache took "+sw);
    long nocachetime = sw.getTime();
    sw.reset();
    sw.start();
    // Cached path: render via the manager, which may reuse the parsed DOM.
    for (int i = 0; i < 300; i++) {
        WikiPage page = m_engine.getPage("TestPage");
        String pagedata = m_engine.getPureText(page);
        WikiContext context = new WikiContext(m_engine, page);
        String html = m_manager.getHTML(context, pagedata);
        Assert.assertNotNull("cached got null response", html);
    }
    sw.stop();
    // System.out.println(" Cache took "+sw);
    // Guard against division by zero: on a fast machine the cached loop can
    // finish in under 1 ms, making getTime() return 0.
    long cachedtime = Math.max(1, sw.getTime());
    long speedup = nocachetime / cachedtime;
    // System.out.println(" Approx speedup: "+speedup+"x");
}
Aggregations