use of org.apache.commons.io.LineIterator in project opennms by OpenNMS.
the class JmxRrdMigratorOfflineTest method executeMigrator.
/**
* Executes the JMX Migrator.
*
* @return the JMX Migrator object
* @throws Exception the exception
*/
private JmxRrdMigratorOffline executeMigrator() throws Exception {
    JmxRrdMigratorOffline jmxMigrator = new JmxRrdMigratorOffline();
    jmxMigrator.preExecute();
    jmxMigrator.execute();
    jmxMigrator.postExecute();
    Assert.assertEquals(60, jmxMigrator.badMetrics.size());
    // Verify graph templates
    File templates = new File("target/home/etc/snmp-graph.properties.d/jvm-graph.properties");
    Pattern defRegex = Pattern.compile("DEF:.+:(.+\\..+):");
    Pattern colRegex = Pattern.compile("\\.columns=(.+)$");
    for (LineIterator it = FileUtils.lineIterator(templates); it.hasNext();) {
        String line = it.next();
        Matcher m = defRegex.matcher(line);
        if (m.find()) {
            String ds = m.group(1);
            if (jmxMigrator.badMetrics.contains(ds)) {
                Assert.fail("Bad metric found");
            }
        }
        m = colRegex.matcher(line);
        if (m.find()) {
            String[] badColumns = m.group(1).split(",(\\s)?");
            if (jmxMigrator.badMetrics.containsAll(Arrays.asList(badColumns))) {
                Assert.fail("Bad metric found");
            }
        }
    }
    // Verify metric definitions
    File metrics = new File("target/home/etc/jmx-datacollection-config.xml");
    Pattern aliasRegex = Pattern.compile("alias=\"([^\"]+\\.[^\"]+)\"");
    for (LineIterator it = FileUtils.lineIterator(metrics); it.hasNext();) {
        String line = it.next();
        Matcher m = aliasRegex.matcher(line);
        if (m.find()) {
            String ds = m.group(1);
            if (jmxMigrator.badMetrics.contains(ds)) {
                Assert.fail("Bad metric found");
            }
        }
    }
    return jmxMigrator;
}
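Note that both verification loops above obtain a LineIterator from FileUtils.lineIterator but never close it, so the underlying reader is left for garbage collection. A minimal sketch of the same scan with deterministic cleanup, assuming a commons-io version where LineIterator implements Closeable (2.2 and later); the class and method names and the file argument are placeholders, not part of the test:

import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;

import java.io.File;
import java.io.IOException;

public class ClosedLineScan {
    public static void scan(File file) throws IOException {
        // try-with-resources closes the iterator (and its reader) even if
        // an assertion or exception interrupts the loop body.
        try (LineIterator it = FileUtils.lineIterator(file, "UTF-8")) {
            while (it.hasNext()) {
                String line = it.nextLine();
                // ... apply the regex checks from the test to 'line' here ...
            }
        }
    }
}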
use of org.apache.commons.io.LineIterator in project pratilipi by Pratilipi.
the class DataAccessorGaeImpl method getPratilipiListTitle.
@Override
public String getPratilipiListTitle(String listName, Language lang) {
    String fileName = "list." + lang.getCode() + "." + listName;
    String listTitle = null;
    try {
        InputStream inputStream = DataAccessor.class.getResource(CURATED_DATA_FOLDER + "/" + fileName).openStream();
        LineIterator it = IOUtils.lineIterator(inputStream, "UTF-8");
        listTitle = it.nextLine().trim();
        LineIterator.closeQuietly(it);
    } catch (NullPointerException | IOException e) {
        logger.log(Level.SEVERE, "Exception while reading from " + listName + ".", e);
    }
    return listTitle;
}
use of org.apache.commons.io.LineIterator in project pratilipi by Pratilipi.
the class SEOTitleUtil method getListPageTitle.
// TYPE: CATEGORY_LIST
public static String getListPageTitle(String listName, Language language) throws UnexpectedServerException {
    String listTitle = null;
    try {
        String fileName = "list." + language.getCode() + "." + listName;
        InputStream inputStream = DataAccessor.class.getResource("curated/" + fileName).openStream();
        LineIterator it = IOUtils.lineIterator(inputStream, "UTF-8");
        listTitle = it.nextLine().trim();
        LineIterator.closeQuietly(it);
    } catch (NullPointerException | IOException e) {
        throw new UnexpectedServerException();
    }
    Map<String, String> dataModel = new HashMap<>();
    if (listTitle.contains("|")) {
        dataModel.put("listTitle", listTitle.substring(0, listTitle.indexOf("|")).trim());
        dataModel.put("listTitleEn", listTitle.substring(listTitle.indexOf("|") + 1).trim());
    } else {
        dataModel.put("listTitle", listTitle);
        dataModel.put("listTitleEn", "");
    }
    return _getPageTitle("seo_list_page", dataModel, language);
}
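The two pratilipi methods duplicate the same first-line read and rely on catching the NullPointerException thrown when getResource returns null for a missing file. A possible consolidation, sketched with a hypothetical readFirstLine helper that replaces the NPE catch with an explicit null check (the class and method names are made up, not part of either class):

import org.apache.commons.io.IOUtils;
import org.apache.commons.io.LineIterator;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class CuratedListTitles {
    // Hypothetical helper consolidating the first-line read duplicated in
    // getPratilipiListTitle and getListPageTitle above.
    static String readFirstLine(Class<?> anchor, String resourcePath) throws IOException {
        URL resource = anchor.getResource(resourcePath);
        if (resource == null) {
            // Explicit check instead of catching NullPointerException.
            throw new FileNotFoundException("Classpath resource not found: " + resourcePath);
        }
        try (InputStream in = resource.openStream()) {
            LineIterator it = IOUtils.lineIterator(in, StandardCharsets.UTF_8);
            return it.hasNext() ? it.nextLine().trim() : null;
        }
    }
}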
use of org.apache.commons.io.LineIterator in project jackrabbit-oak by apache.
the class MarkSweepGarbageCollector method sweep.
/**
 * Sweep phase of gc candidate deletion.
 * <p>
 * Performs the following steps depending upon the type of the blob store; refer
 * {@link org.apache.jackrabbit.oak.plugins.blob.SharedDataStore.Type}:
 *
 * <ul>
 * <li>Shared
 * <ul>
 * <li> Merge all marked references (from the mark phase run independently) available in the data store meta
 * store (from all configured independent repositories).
 * <li> Retrieve all blob ids available.
 * <li> Diff the 2 sets above to retrieve the list of blob ids not used.
 * <li> Delete only blobs created after
 * (earliest time stamp of the marked references - #maxLastModifiedInterval) from the above set.
 * </ul>
 * </li>
 *
 * <li>Default
 * <ul>
 * <li> Mark phase already run.
 * <li> Retrieve all blob ids available.
 * <li> Diff the 2 sets above to retrieve the list of blob ids not used.
 * <li> Delete only blobs created after
 * (time stamp of the marked references - #maxLastModifiedInterval).
 * </ul>
 * </li>
 * </ul>
 *
 * @param fs the garbage collector file state
 * @param markStart the start time of mark to take as reference for deletion
 * @param forceBlobRetrieve whether to force retrieval of the available blob ids
 * @return the number of blobs deleted
 * @throws Exception the exception
 */
protected long sweep(GarbageCollectorFileState fs, long markStart, boolean forceBlobRetrieve) throws Exception {
    long earliestRefAvailTime;
    // Only go ahead if merge succeeded
    try {
        earliestRefAvailTime = GarbageCollectionType.get(blobStore).mergeAllMarkedReferences(blobStore, fs);
        LOG.debug("Earliest reference available for timestamp [{}]", earliestRefAvailTime);
        earliestRefAvailTime = (earliestRefAvailTime < markStart ? earliestRefAvailTime : markStart);
    } catch (Exception e) {
        return 0;
    }
    // Find all blob references after iterating over the whole repository
    (new BlobIdRetriever(fs, forceBlobRetrieve)).call();
    // Calculate the references not used
    difference(fs);
    long count = 0;
    long deleted = 0;
    long lastMaxModifiedTime = getLastMaxModifiedTime(earliestRefAvailTime);
    LOG.debug("Starting sweep phase of the garbage collector");
    LOG.debug("Sweeping blobs with modified time > the configured max deleted time ({}).", timestampToString(lastMaxModifiedTime));
    BufferedWriter removesWriter = null;
    LineIterator iterator = null;
    try {
        removesWriter = Files.newWriter(fs.getGarbage(), Charsets.UTF_8);
        ArrayDeque<String> removesQueue = new ArrayDeque<String>();
        iterator = FileUtils.lineIterator(fs.getGcCandidates(), Charsets.UTF_8.name());
        Iterator<List<String>> partitions = Iterators.partition(iterator, getBatchCount());
        while (partitions.hasNext()) {
            List<String> ids = partitions.next();
            count += ids.size();
            deleted += BlobCollectionType.get(blobStore).sweepInternal(blobStore, ids, removesQueue, lastMaxModifiedTime);
            saveBatchToFile(newArrayList(removesQueue), removesWriter);
            removesQueue.clear();
        }
    } finally {
        LineIterator.closeQuietly(iterator);
        closeQuietly(removesWriter);
    }
    BlobCollectionType.get(blobStore).handleRemoves(blobStore, fs.getGarbage());
    if (count != deleted) {
        LOG.warn("Deleted only [{}] blob entries from the [{}] candidates identified. This may happen if blob modified time is > the max deleted time ({})", deleted, count, timestampToString(lastMaxModifiedTime));
    }
    // Remove all the merged marked references
    GarbageCollectionType.get(blobStore).removeAllMarkedReferences(blobStore);
    LOG.debug("Ending sweep phase of the garbage collector");
    return deleted;
}
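The LineIterator idiom worth noting here is wrapping the iterator in Guava's Iterators.partition, which lets a candidates file far larger than memory be processed in fixed-size batches, with only one batch materialized at a time. A minimal standalone sketch of the same pattern; the file name and batch size of 100 are made up for illustration:

import com.google.common.collect.Iterators;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.LineIterator;

import java.io.File;
import java.util.Iterator;
import java.util.List;

public class BatchedLineProcessing {
    public static void main(String[] args) throws Exception {
        // Hypothetical input file; stands in for fs.getGcCandidates() above.
        File candidates = new File("gc-candidates.txt");
        LineIterator lines = FileUtils.lineIterator(candidates, "UTF-8");
        try {
            // Guava reads ahead at most one batch (here, 100 lines) at a time.
            Iterator<List<String>> batches = Iterators.partition(lines, 100);
            while (batches.hasNext()) {
                List<String> batch = batches.next();
                // ... hand the batch to a bulk operation, e.g. a batched delete ...
                System.out.println("processing batch of " + batch.size() + " lines");
            }
        } finally {
            LineIterator.closeQuietly(lines);
        }
    }
}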
use of org.apache.commons.io.LineIterator in project jackrabbit-oak by apache.
the class LengthCachingDataStore method loadMappingData.
private static Map<String, Long> loadMappingData(File mappingFile) throws FileNotFoundException {
    Map<String, Long> mapping = new HashMap<String, Long>();
    log.info("Reading mapping data from {}", mappingFile.getAbsolutePath());
    LineIterator itr = new LineIterator(Files.newReader(mappingFile, Charsets.UTF_8));
    try {
        while (itr.hasNext()) {
            String line = itr.nextLine();
            int indexOfBar = line.indexOf(SEPARATOR);
            checkState(indexOfBar > 0, "Malformed entry found [%s]", line);
            String length = line.substring(0, indexOfBar);
            String id = line.substring(indexOfBar + 1);
            mapping.put(id.trim(), Long.valueOf(length));
        }
        log.info("Total {} mapping entries found", mapping.size());
    } finally {
        itr.close();
    }
    return mapping;
}
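Unlike the factory methods in the other snippets, this one constructs LineIterator directly from a Reader. A minimal self-contained sketch of the same parse using try-with-resources; it assumes, from the variable name indexOfBar, that SEPARATOR is '|' (not confirmed by the snippet), and the class name is made up:

import com.google.common.base.Charsets;
import com.google.common.io.Files;
import org.apache.commons.io.LineIterator;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

public class MappingFileReader {
    // Assumed from the variable name indexOfBar above; not confirmed by the snippet.
    private static final char SEPARATOR = '|';

    public static Map<String, Long> load(File mappingFile) throws Exception {
        Map<String, Long> mapping = new HashMap<>();
        // LineIterator implements Closeable in commons-io 2.2+, so
        // try-with-resources replaces the explicit finally block above.
        try (LineIterator itr = new LineIterator(Files.newReader(mappingFile, Charsets.UTF_8))) {
            while (itr.hasNext()) {
                String line = itr.nextLine();
                int indexOfBar = line.indexOf(SEPARATOR);
                if (indexOfBar <= 0) {
                    throw new IllegalStateException("Malformed entry found [" + line + "]");
                }
                // Each line is "<length>|<id>"; the map is keyed by id.
                mapping.put(line.substring(indexOfBar + 1).trim(),
                        Long.valueOf(line.substring(0, indexOfBar)));
            }
        }
        return mapping;
    }
}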