Use of org.apache.commons.io.LineIterator in project opennms by OpenNMS:
class JmxRrdMigratorOffline, method fixJmxConfigurationFile.
/**
 * Fixes a JMX configuration file by rewriting data-source aliases that contain
 * dots, and recursively fixes any files pulled in via {@code <import-mbeans>}.
 *
 * <p>The original file is zipped for backup first; the rewritten content is
 * written to a temp file which then replaces the original.
 *
 * @param jmxConfigFile the JMX configuration file
 * @throws OnmsUpgradeException the OpenNMS upgrade exception
 */
private void fixJmxConfigurationFile(File jmxConfigFile) throws OnmsUpgradeException {
    try {
        log("Updating JMX metric definitions on %s\n", jmxConfigFile);
        zipFile(jmxConfigFile);
        backupFiles.add(new File(jmxConfigFile.getAbsolutePath() + ZIP_EXT));
        File outputFile = new File(jmxConfigFile.getCanonicalFile() + ".temp");
        Pattern extRegex = Pattern.compile("import-mbeans[>](.+)[<]");
        // Aliases flagged as "bad" are exactly those containing a dot.
        Pattern aliasRegex = Pattern.compile("alias=\"([^\"]+\\.[^\"]+)\"");
        List<File> externalFiles = new ArrayList<File>();
        LineIterator it = FileUtils.lineIterator(jmxConfigFile);
        // try-with-resources closes the writer even if an exception is thrown
        // mid-loop (the original leaked the FileWriter on failure).
        try (FileWriter w = new FileWriter(outputFile)) {
            while (it.hasNext()) {
                String line = it.next();
                Matcher m = extRegex.matcher(line);
                if (m.find()) {
                    // Remember referenced files; they are fixed recursively below.
                    externalFiles.add(new File(jmxConfigFile.getParentFile(), m.group(1)));
                }
                m = aliasRegex.matcher(line);
                if (m.find()) {
                    String badDs = m.group(1);
                    String fixedDs = getFixedDsName(badDs);
                    log(" Replacing bad alias %s with %s on %s\n", badDs, fixedDs, line.trim());
                    // Literal replacement: replaceAll() would interpret badDs as a
                    // regex, and badDs always contains '.' (matches any character),
                    // which could corrupt unrelated text on the line.
                    line = line.replace(badDs, fixedDs);
                    if (!badMetrics.contains(badDs)) {
                        badMetrics.add(badDs);
                    }
                }
                w.write(line + "\n");
            }
        } finally {
            LineIterator.closeQuietly(it);
        }
        FileUtils.deleteQuietly(jmxConfigFile);
        FileUtils.moveFile(outputFile, jmxConfigFile);
        // Recurse into every <import-mbeans> target found above.
        for (File configFile : externalFiles) {
            fixJmxConfigurationFile(configFile);
        }
    } catch (Exception e) {
        throw new OnmsUpgradeException("Can't fix " + jmxConfigFile + " because " + e.getMessage(), e);
    }
}
Use of org.apache.commons.io.LineIterator in project Asqatasun by Asqatasun:
class CodeGeneratorMojo, method getCsv.
/**
 * Parses {@code dataFile} as CSV: the first line supplies the column headers
 * (split on {@code delimiter}), the remainder is re-assembled and handed to
 * Commons CSV with those headers.
 *
 * <p>Side effects: sets {@code isCriterionPresent} and extracts the available
 * languages from the header row.
 *
 * @return the parsed records, or {@code null} when the file cannot be read,
 *         a header language is unknown, or parsing fails
 */
private Iterable<CSVRecord> getCsv() {
    // we parse the csv file to extract the first line and get the headers
    LineIterator lineIterator;
    try {
        lineIterator = FileUtils.lineIterator(dataFile, Charset.defaultCharset().name());
    } catch (IOException ex) {
        Logger.getLogger(CodeGeneratorMojo.class.getName()).log(Level.SEVERE, null, ex);
        // Bail out immediately: the original code kept going with a null
        // iterator and crashed with an NPE in the while-loop below.
        return null;
    }
    String[] csvHeaders;
    StringBuilder strb = new StringBuilder();
    try {
        // Guard against an empty file instead of letting next() throw.
        csvHeaders = lineIterator.hasNext()
                ? lineIterator.next().split(String.valueOf(delimiter))
                : new String[0];
        isCriterionPresent = extractCriterionFromCsvHeader(csvHeaders);
        try {
            extractAvailableLangsFromCsvHeader(csvHeaders);
        } catch (I18NLanguageNotFoundException ex) {
            Logger.getLogger(CodeGeneratorMojo.class.getName()).log(Level.SEVERE, null, ex);
            return null;
        }
        // from here we just add each line to a builder to re-create the csv
        // content without the first (header) line.
        while (lineIterator.hasNext()) {
            strb.append(lineIterator.next());
            strb.append("\n");
        }
    } finally {
        // The original never closed the iterator, leaking the file handle.
        LineIterator.closeQuietly(lineIterator);
    }
    try {
        Reader in = new StringReader(strb.toString());
        CSVFormat csvf = CSVFormat.newFormat(delimiter).withHeader(csvHeaders);
        return csvf.parse(in);
    } catch (IOException ex) {
        // Also covers FileNotFoundException (a subclass of IOException).
        Logger.getLogger(CodeGeneratorMojo.class.getName()).log(Level.SEVERE, null, ex);
        return null;
    }
}
Use of org.apache.commons.io.LineIterator in project Asqatasun by Asqatasun:
class ExtractCsvAndCopy, method getCsv.
/**
 * Parses {@code DATA_FILE} as CSV: the first line supplies the column headers
 * (stored in the {@code csvHeaders} field, split on {@code DELIMITER}); the
 * remaining lines are re-assembled and parsed with Commons CSV.
 *
 * @return the parsed records, or {@code null} if parsing the assembled content fails
 * @throws IOException if the data file cannot be opened or read
 */
public Iterable<CSVRecord> getCsv() throws IOException {
    // we parse the csv file to extract the first line and get the headers
    LineIterator lineIterator = FileUtils.lineIterator(DATA_FILE);
    StringBuilder strb = new StringBuilder();
    try {
        csvHeaders = lineIterator.next().split(String.valueOf(DELIMITER));
        // Rebuild the csv content without the header line.
        while (lineIterator.hasNext()) {
            strb.append(lineIterator.next());
            strb.append("\n");
        }
    } finally {
        // The original never closed the iterator, leaking the file handle.
        LineIterator.closeQuietly(lineIterator);
    }
    try {
        Reader in = new StringReader(strb.toString());
        CSVFormat csvf = CSVFormat.newFormat(DELIMITER).withHeader(csvHeaders);
        return csvf.parse(in);
    } catch (IOException ex) {
        // Preserves the original contract: parse failure yields null rather
        // than propagating. (FileNotFoundException is a subclass of IOException,
        // so the original's separate catch was redundant.)
        return null;
    }
}
Use of org.apache.commons.io.LineIterator in project goci by EBISPOT:
class GwasJsonController, method getJsonFromFile.
/**
 * Return as a string the content of the gwas.json file pointed out by Constants.JSON_FILE_PATH
 *
 * @return the content of the gwas.json (lines concatenated, newlines dropped)
 * @throws IOException if the file cannot be opened or read
 */
public String getJsonFromFile() throws IOException {
    File jsonFile = new File(Constants.JSON_FILE_PATH);
    // StringBuilder instead of repeated String concatenation: the original
    // rebuilt the whole string on every line, O(n^2) for large files.
    StringBuilder json = new StringBuilder();
    LineIterator it = FileUtils.lineIterator(jsonFile, "UTF-8");
    try {
        while (it.hasNext()) {
            json.append(it.nextLine());
        }
    } finally {
        LineIterator.closeQuietly(it);
    }
    return json.toString();
}
Use of org.apache.commons.io.LineIterator in project ice by JBEI:
class BulkCSVUpload, method getBulkUploadDataFromFile.
// NOTE: this also validates the part data (with the exception of the actual files)
/**
 * Parses a bulk-upload CSV/TSV stream into part data (optionally paired with
 * sample data).
 *
 * <p>The first line is treated as the header row; the separator is guessed
 * from it (tab-separated when it contains a tab and no comma, otherwise
 * comma-separated). Blank lines and comma-only "hole" lines are skipped.
 * Each data row is validated with {@code EntryUtil.validates}; on the first
 * invalid row, {@code invalidFields} is replaced with the failing fields and
 * {@code null} is returned.
 *
 * @param inputStream the raw CSV/TSV content; always closed before returning
 * @return the parsed parts (each with an optional sample), or {@code null}
 *         when a row fails validation
 * @throws IOException if the headers cannot be parsed, or a part number
 *         referenced for linking does not exist
 */
List<PartWithSample> getBulkUploadDataFromFile(InputStream inputStream) throws IOException {
    List<PartWithSample> partDataList = new LinkedList<>();
    // initialize parser to null; when not-null in the loop below, then the header has been parsed
    CSVParser parser = null;
    HashMap<Integer, HeaderValue> headers = null;
    // parse CSV file
    try {
        LineIterator it = IOUtils.lineIterator(inputStream, "UTF-8");
        int index = 0;
        while (it.hasNext()) {
            String line = it.nextLine().trim();
            // check if first time parsing (first line)
            if (parser == null) {
                // to indicate the type of parser to use (tab or comma separated)
                if (line.contains("\t") && !line.contains(","))
                    parser = new CSVParser('\t');
                else
                    parser = new CSVParser();
                // get column headers
                String[] fieldStrArray = parser.parseLine(line);
                headers = processColumnHeaders(fieldStrArray);
                continue;
            }
            // skip any empty lines (holes) in the csv file
            if (StringUtils.isBlank(line) || line.replaceAll(",", "").trim().isEmpty())
                continue;
            // at this point we must have headers since that should be the first item in the file
            if (headers == null)
                throw new IOException("Could not parse file headers");
            // parser != null; process line contents with available headers
            String[] valuesArray = parser.parseLine(line);
            PartData partData = new PartData(addType);
            PartSample partSample = null;
            // a subType means each row describes a main part plus one linked sub-part
            if (subType != null) {
                partData.getLinkedParts().add(new PartData(subType));
            }
            // for each column
            for (int i = 0; i < valuesArray.length; i += 1) {
                HeaderValue headerForColumn = headers.get(i);
                // process sample information
                if (headerForColumn.isSampleField()) {
                    // todo : move to another method
                    // lazily create the sample only when a sample column is present
                    if (partSample == null)
                        partSample = new PartSample();
                    setPartSampleData(((SampleHeaderValue) headerForColumn).getSampleField(), partSample, valuesArray[i]);
                } else {
                    EntryHeaderValue entryHeaderValue = (EntryHeaderValue) headerForColumn;
                    EntryField field = entryHeaderValue.getEntryField();
                    PartData data;
                    String value = valuesArray[i];
                    boolean isSubType = entryHeaderValue.isSubType();
                    // sub-type columns write into the linked part, others into the main part
                    if (isSubType)
                        data = partData.getLinkedParts().get(0);
                    else
                        data = partData;
                    // get the data for the field
                    switch(field) {
                        case ATT_FILENAME:
                            ArrayList<AttachmentInfo> attachments = data.getAttachments();
                            if (attachments == null) {
                                attachments = new ArrayList<>();
                                data.setAttachments(attachments);
                            }
                            // clear() then add(): only the last attachment column
                            // per row is kept for this part
                            attachments.clear();
                            attachments.add(new AttachmentInfo(value));
                            break;
                        case SEQ_FILENAME:
                            data.setSequenceFileName(value);
                            break;
                        case SEQ_TRACE_FILES:
                            // todo
                            break;
                        case EXISTING_PART_NUMBER:
                            // link to an already-existing part looked up by part number
                            Entry entry = DAOFactory.getEntryDAO().getByPartNumber(value);
                            if (entry == null)
                                throw new IOException("Could not locate part number \"" + value + "\" for linking");
                            PartData toLink = entry.toDataTransferObject();
                            data.getLinkedParts().add(toLink);
                            break;
                        default:
                            partData = EntryUtil.setPartDataFromField(partData, value, field, isSubType);
                    }
                }
            }
            // validate; first invalid row aborts the whole upload and reports its fields
            List<EntryField> fields = EntryUtil.validates(partData);
            if (!fields.isEmpty()) {
                invalidFields.clear();
                invalidFields.addAll(fields);
                return null;
            }
            // index counts only data rows actually accepted (header/blank lines excluded)
            partData.setIndex(index);
            PartWithSample partWithSample = new PartWithSample(partSample, partData);
            partDataList.add(partWithSample);
            index += 1;
        }
    } finally {
        IOUtils.closeQuietly(inputStream);
    }
    return partDataList;
}
Aggregations