Example use of org.apache.commons.csv.CSVPrinter in the Apache Hadoop project,
from the class TestFileSystemTimelineReaderImpl, method initializeDataDirectory:
/**
 * Creates the on-disk fixture data used by the timeline reader tests: loads the
 * entity data and writes the app-to-flow mapping CSV (header APP, USER, FLOW,
 * FLOWRUN) under {@code <rootDir>/entities/cluster1}.
 *
 * @param rootDir root directory for the generated test data; the directory is
 *                scheduled for deletion on JVM exit
 * @throws Exception if the entity data or the mapping file cannot be written
 */
public static void initializeDataDirectory(String rootDir) throws Exception {
loadEntityData(rootDir);
// Create app flow mapping file.
CSVFormat format = CSVFormat.DEFAULT.withHeader("APP", "USER", "FLOW", "FLOWRUN");
String appFlowMappingFile = rootDir + File.separator + "entities" + File.separator + "cluster1" + File.separator + FileSystemTimelineReaderImpl.APP_FLOW_MAPPING_FILE;
// Both resources are closed automatically by try-with-resources (innermost
// first); the explicit printer.close() the original carried was redundant.
try (PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(appFlowMappingFile, true)));
CSVPrinter printer = new CSVPrinter(out, format)) {
printer.printRecord("app1", "user1", "flow1", 1);
printer.printRecord("app2", "user1", "flow1,flow", 1);
}
(new File(rootDir)).deleteOnExit();
}
Example use of org.apache.commons.csv.CSVPrinter in the Apache Phoenix project,
from the class CSVFileResultHandler, method open:
@Override
protected void open(String header) throws IOException {
// Guard: a printer already exists, so do not open a second writer.
if (csvPrinter != null) {
return;
}
// Lazily create the printer over the configured result file, then emit the
// header row by splitting it on the Pherf result-file delimiter.
csvPrinter = new CSVPrinter(new PrintWriter(resultFileName), CSVFormat.DEFAULT);
String[] headerColumns = header.split(PherfConstants.RESULT_FILE_DELIMETER);
csvPrinter.printRecord((Object[]) headerColumns);
isClosed = false;
}
Example use of org.apache.commons.csv.CSVPrinter in the Apache Jackrabbit Oak project,
from the class CSVFileBinaryResourceProviderTest, method testGetBinaries:
@Test
public void testGetBinaries() throws Exception {
    StringBuilder sb = new StringBuilder();
    // Build the CSV fixture in memory. Close the printer via try-with-resources
    // so any buffered output is flushed into sb before it is written to disk
    // (the original never closed the printer).
    // Columns: BLOB_ID, LENGTH, JCR_MIMETYPE, JCR_ENCODING, JCR_PATH
    try (CSVPrinter p = new CSVPrinter(sb, CSVFileBinaryResourceProvider.FORMAT)) {
        p.printRecord("a", 123, "text/plain", null, "/a");
        p.printRecord("a2", 123, "text/plain", null, "/a/c");
        p.printRecord("b", null, "text/plain", null, "/b");
        // Row with a null blob id: expected to be skipped by the provider below.
        p.printRecord(null, null, "text/plain", null, "/c");
    }
    File dataFile = temporaryFolder.newFile();
    Files.write(sb, dataFile, Charsets.UTF_8);
    CSVFileBinaryResourceProvider provider = new CSVFileBinaryResourceProvider(dataFile, new MemoryBlobStore());
    // All rows under "/": only the three rows with a non-null blob id survive.
    Map<String, BinaryResource> binaries = provider.getBinaries("/").uniqueIndex(BinarySourceMapper.BY_BLOBID);
    assertEquals(3, binaries.size());
    assertEquals("a", binaries.get("a").getBlobId());
    assertEquals("/a", binaries.get("a").getPath());
    // Path-scoped query: only the row rooted directly at "/a" matches.
    binaries = provider.getBinaries("/a").uniqueIndex(BinarySourceMapper.BY_BLOBID);
    assertEquals(1, binaries.size());
    provider.close();
}
Example use of org.apache.commons.csv.CSVPrinter in the Keycloak project,
from the class PerformanceMeasurement, method printToCSV:
/**
 * Appends one CSV row per collected statistic to
 * {@code <build-dir>/measurements[/<testName>]/<statistic>.csv}, writing the
 * header row only when the file is created for the first time.
 *
 * @param testName optional sub-directory for the measurement files; may be null
 * @throws RuntimeException wrapping any IOException from file creation or writing
 */
public void printToCSV(String testName) {
checkStatisticsNotNull();
for (String statistic : statistics.keySet()) {
File csvFile = new File(PROJECT_BUILD_DIRECTORY + "/measurements" + (testName == null ? "" : "/" + testName), statistic + ".csv");
boolean csvFileCreated = false;
if (!csvFile.exists()) {
try {
csvFile.getParentFile().mkdirs();
csvFileCreated = csvFile.createNewFile();
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
// The printer is a try-with-resources resource (the original only flushed
// it), so it is flushed and closed even if printRecord throws.
try (BufferedWriter writer = new BufferedWriter(new FileWriter(csvFile, true));
CSVPrinter printer = new CSVPrinter(writer, CSVFormat.RFC4180)) {
if (csvFileCreated) {
// New file: emit the column header row once.
printer.printRecord(HEADER);
}
printer.printRecord(toRecord(statistic));
} catch (IOException ex) {
throw new RuntimeException(ex);
}
}
}
Example use of org.apache.commons.csv.CSVPrinter in the Gephi project,
from the class ExporterSpreadsheet, method exportData:
/**
 * Writes the selected node or edge table of the given graph as delimited text
 * to the exporter's writer: one header row, then one row per element.
 *
 * @param graph the graph whose node or edge table is exported
 * @throws Exception if writing to the underlying writer fails
 */
private void exportData(Graph graph) throws Exception {
final CSVFormat format = CSVFormat.DEFAULT.withDelimiter(fieldDelimiter);
try (CSVPrinter csvWriter = new CSVPrinter(writer, format)) {
// Anything other than the NODES table is treated as the edge table.
boolean isEdgeTable = tableToExport != ExportTable.NODES;
Table table = isEdgeTable ? graph.getModel().getEdgeTable() : graph.getModel().getNodeTable();
ElementIterable<? extends Element> rows;
// Emit a "Kind" column only if at least one edge type has a non-empty label.
Object[] edgeLabels = graph.getModel().getEdgeTypeLabels();
boolean includeEdgeKindColumn = false;
for (Object edgeLabel : edgeLabels) {
if (edgeLabel != null && !edgeLabel.toString().isEmpty()) {
includeEdgeKindColumn = true;
}
}
TimeFormat timeFormat = graph.getModel().getTimeFormat();
DateTimeZone timeZone = graph.getModel().getTimeZone();
// Resolve the columns to export: either the explicitly requested ids
// (silently skipping unknown ids) or every column of the table.
List<Column> columns = new ArrayList<>();
if (columnIdsToExport != null) {
for (String columnId : columnIdsToExport) {
Column column = table.getColumn(columnId);
if (column != null) {
columns.add(column);
}
}
} else {
for (Column column : table) {
columns.add(column);
}
}
// Write column headers:
if (isEdgeTable) {
// Fixed edge-topology columns precede the attribute columns.
csvWriter.print("Source");
csvWriter.print("Target");
csvWriter.print("Type");
if (includeEdgeKindColumn) {
csvWriter.print("Kind");
}
}
for (Column column : columns) {
// Use the title only if it's the same as the id (case insensitive):
String columnId = column.getId();
String columnTitle = column.getTitle();
String columnHeader = columnId.equalsIgnoreCase(columnTitle) ? columnTitle : columnId;
csvWriter.print(columnHeader);
}
csvWriter.println();
// Write rows:
if (isEdgeTable) {
rows = graph.getEdges();
} else {
rows = graph.getNodes();
}
for (Element row : rows) {
if (isEdgeTable) {
// Topology values first, mirroring the header order above.
Edge edge = (Edge) row;
csvWriter.print(edge.getSource().getId());
csvWriter.print(edge.getTarget().getId());
csvWriter.print(edge.isDirected() ? "Directed" : "Undirected");
if (includeEdgeKindColumn) {
csvWriter.print(edge.getTypeLabel().toString());
}
}
for (Column column : columns) {
Object value = row.getAttribute(column);
String text;
if (value != null) {
// Numbers use the exporter's number format; everything else goes
// through the attribute printer with the model's time settings.
if (value instanceof Number) {
text = NUMBER_FORMAT.format(value);
} else {
text = AttributeUtils.print(value, timeFormat, timeZone);
}
} else {
// Missing attribute values are exported as empty cells.
text = "";
}
csvWriter.print(text);
}
csvWriter.println();
}
}
}
End of aggregated examples.