use of com.csvreader.CsvReader in project gephi by gephi.
the class AttributeColumnsControllerImpl method importCSVToEdgesTable.
@Override
public void importCSVToEdgesTable(Graph graph, File file, Character separator, Charset charset, String[] columnNames, Class[] columnTypes, boolean createNewNodes) {
    if (columnNames == null || columnNames.length == 0) {
        return;
    }
    if (columnTypes == null || columnNames.length != columnTypes.length) {
        throw new IllegalArgumentException("Column names length must be the same as column types length");
    }
    CsvReader reader = null;
    graph.writeLock();
    try {
        //Prepare attribute columns for the column names, creating the not already existing columns:
        Table edgesTable = graph.getModel().getEdgeTable();
        Column weightColumn = edgesTable.getColumn("Weight");
        boolean isDynamicWeight = weightColumn.isDynamic();
        String idColumnHeader = null;
        String sourceColumnHeader = null;
        String targetColumnHeader = null;
        String typeColumnHeader = null;
        String weightColumnHeader = null;
        //Necessary because of column name case insensitivity, to map each column to its corresponding CSV header.
        HashMap<Column, String> columnHeaders = new HashMap<>();
        for (int i = 0; i < columnNames.length; i++) {
            //Separate the first id column found from the list to use as id. If more are found later, they will not be in the list and will be ignored.
            if (columnNames[i].equalsIgnoreCase("id")) {
                if (idColumnHeader == null) {
                    idColumnHeader = columnNames[i];
                }
            } else if (columnNames[i].equalsIgnoreCase("source") && sourceColumnHeader == null) {
                //Separate the first source column found from the list to use as source node id
                sourceColumnHeader = columnNames[i];
            } else if (columnNames[i].equalsIgnoreCase("target") && targetColumnHeader == null) {
                //Separate the first target column found from the list to use as target node id
                targetColumnHeader = columnNames[i];
            } else if (columnNames[i].equalsIgnoreCase("type") && typeColumnHeader == null) {
                //Separate the first type column found from the list to use as edge type (directed/undirected)
                typeColumnHeader = columnNames[i];
            } else if (edgesTable.hasColumn(columnNames[i])) {
                //Any other existing column:
                Column column = edgesTable.getColumn(columnNames[i]);
                columnHeaders.put(column, columnNames[i]);
                if (column.equals(weightColumn)) {
                    weightColumnHeader = columnNames[i];
                }
            } else {
                //New column:
                Column column = addAttributeColumn(edgesTable, columnNames[i], columnTypes[i]);
                if (column != null) {
                    columnHeaders.put(column, columnNames[i]);
                }
            }
        }
        Set<Column> columnList = columnHeaders.keySet();
        //Create edges:
        GraphElementsController gec = Lookup.getDefault().lookup(GraphElementsController.class);
        reader = new CsvReader(new FileInputStream(file), separator, charset);
        reader.setTrimWhitespace(false);
        reader.readHeaders();
        int recordNumber = 0;
        while (reader.readRecord()) {
            String id = null;
            Edge edge = null;
            String sourceId, targetId;
            Node source, target;
            String type;
            boolean directed;
            recordNumber++;
            sourceId = reader.get(sourceColumnHeader);
            targetId = reader.get(targetColumnHeader);
            if (sourceId == null || sourceId.trim().isEmpty() || targetId == null || targetId.trim().isEmpty()) {
                Logger.getLogger("").log(Level.WARNING, "Ignoring record number {0} due to empty source and/or target node ids", recordNumber);
                //No correct source and target ids were provided, ignore row
                continue;
            }
            source = graph.getNode(sourceId);
            if (source == null) {
                if (createNewNodes) {
                    //Create the node when it does not exist already and the option is enabled
                    source = gec.createNode(null, sourceId, graph);
                } else {
                    //Ignore this edge row, since no new nodes should be created.
                    continue;
                }
            }
            target = graph.getNode(targetId);
            if (target == null) {
                if (createNewNodes) {
                    //Create the node when it does not exist already and the option is enabled
                    target = gec.createNode(null, targetId, graph);
                } else {
                    //Ignore this edge row, since no new nodes should be created.
                    continue;
                }
            }
            if (typeColumnHeader != null) {
                type = reader.get(typeColumnHeader);
                //Undirected if indicated correctly, otherwise always directed:
                if (type != null) {
                    directed = !type.equalsIgnoreCase("undirected");
                } else {
                    directed = true;
                }
            } else {
                //Directed by default when not indicated
                directed = true;
            }
            //Prepare the correct edge to assign the attributes:
            if (idColumnHeader != null) {
                id = reader.get(idColumnHeader);
                if (id == null || id.trim().isEmpty()) {
                    //id null or empty, assign one
                    edge = gec.createEdge(source, target, directed);
                } else {
                    Edge edgeById = graph.getEdge(id);
                    if (edgeById == null) {
                        //Create edge because no edge with that id exists
                        edge = gec.createEdge(id, source, target, directed);
                    }
                }
            } else {
                if (findEdge(graph, null, source, target, directed) == null) {
                    //Only create it if it does not exist
                    edge = gec.createEdge(source, target, directed);
                }
            }
            if (edge != null) {
                //Assign all attributes to the new edge:
                for (Column column : columnList) {
                    setAttributeValue(reader.get(columnHeaders.get(column)), edge, column);
                }
            } else {
                edge = findEdge(graph, id, source, target, directed);
                if (edge != null) {
                    //Increase non-dynamic edge weight by the specified weight (if given), else increase it by 1:
                    if (!isDynamicWeight) {
                        if (weightColumnHeader != null) {
                            String weight = reader.get(weightColumnHeader);
                            try {
                                Float weightFloat = Float.parseFloat(weight);
                                edge.setWeight(edge.getWeight() + weightFloat);
                            } catch (NumberFormatException numberFormatException) {
                                //Not a valid weight, add 1
                                edge.setWeight(edge.getWeight() + 1);
                                Logger.getLogger("").log(Level.WARNING, "Could not parse weight {0}, adding 1", weight);
                            }
                        } else {
                            //Add 1 (weight not specified)
                            edge.setWeight(edge.getWeight() + 1);
                        }
                    }
                } else {
                    Logger.getLogger("").log(Level.WARNING, "Could not add edge [id = {0}, source = {1}, target = {2}, directed = {3}] to the graph and could not find the existing edge to add its weight. Skipping edge", new Object[] { id, source.getId(), target.getId(), directed });
                }
            }
        }
    } catch (FileNotFoundException ex) {
        Logger.getLogger("").log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger("").log(Level.SEVERE, null, ex);
    } finally {
        graph.readUnlockAll();
        graph.writeUnlock();
        if (reader != null) {
            reader.close();
        }
    }
}
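For orientation, here is a minimal, hypothetical sketch of how this edges import might be invoked through Gephi's Data Laboratory API. The controller and graph lookups, the file, and the chosen column names and types are assumptions for illustration; only the importCSVToEdgesTable signature comes from the snippet above.

// Hypothetical caller, not taken from the gephi sources shown above.
import java.io.File;
import java.nio.charset.StandardCharsets;
import org.gephi.datalab.api.AttributeColumnsController;
import org.gephi.graph.api.Graph;
import org.gephi.graph.api.GraphController;
import org.openide.util.Lookup;

public class EdgesCsvImportSketch {
    public static void importEdges(File csvFile) {
        // Assumption: a workspace with a graph model is already open.
        Graph graph = Lookup.getDefault().lookup(GraphController.class).getGraphModel().getGraph();
        AttributeColumnsController controller = Lookup.getDefault().lookup(AttributeColumnsController.class);
        // columnNames must match the CSV headers and be parallel to columnTypes.
        String[] columnNames = {"Source", "Target", "Type", "Weight"};
        Class[] columnTypes = {String.class, String.class, String.class, Double.class};
        controller.importCSVToEdgesTable(graph, csvFile, ',', StandardCharsets.UTF_8,
                columnNames, columnTypes, true /* createNewNodes */);
    }
}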
use of com.csvreader.CsvReader in project gephi by gephi.
the class AttributeColumnsControllerImpl method importCSVToNodesTable.
@Override
public void importCSVToNodesTable(Graph graph, File file, Character separator, Charset charset, String[] columnNames, Class[] columnTypes, boolean assignNewNodeIds) {
    if (columnNames == null || columnNames.length == 0) {
        return;
    }
    if (columnTypes == null || columnNames.length != columnTypes.length) {
        throw new IllegalArgumentException("Column names length must be the same as column types length");
    }
    CsvReader reader = null;
    graph.writeLock();
    try {
        //Prepare attribute columns for the column names, creating the not already existing columns:
        Table nodesTable = graph.getModel().getNodeTable();
        String idColumn = null;
        //Necessary because of column name case insensitivity, to map each column to its corresponding CSV header.
        HashMap<Column, String> columnHeaders = new HashMap<>();
        for (int i = 0; i < columnNames.length; i++) {
            //Separate the first id column found from the list to use as id. If more are found later, they will not be in the list and will be ignored.
            if (columnNames[i].equalsIgnoreCase("id")) {
                if (idColumn == null) {
                    idColumn = columnNames[i];
                }
            } else if (nodesTable.hasColumn(columnNames[i])) {
                Column column = nodesTable.getColumn(columnNames[i]);
                columnHeaders.put(column, columnNames[i]);
            } else {
                Column column = addAttributeColumn(nodesTable, columnNames[i], columnTypes[i]);
                if (column != null) {
                    columnHeaders.put(column, columnNames[i]);
                }
            }
        }
        Set<Column> columnList = columnHeaders.keySet();
        //Create nodes:
        GraphElementsController gec = Lookup.getDefault().lookup(GraphElementsController.class);
        String id;
        Node node;
        reader = new CsvReader(new FileInputStream(file), separator, charset);
        reader.setTrimWhitespace(false);
        reader.readHeaders();
        while (reader.readRecord()) {
            //Prepare the correct node to assign the attributes:
            if (idColumn != null) {
                id = reader.get(idColumn);
                if (id == null || id.isEmpty()) {
                    //id null or empty, assign one
                    node = gec.createNode(null, graph);
                } else {
                    node = graph.getNode(id);
                    if (node != null) {
                        //Node with that id already in graph
                        if (assignNewNodeIds) {
                            node = gec.createNode(null, graph);
                        }
                    } else {
                        //New id in the graph
                        node = gec.createNode(null, id, graph);
                    }
                }
            } else {
                node = gec.createNode(null);
            }
            //Assign attributes to the current node:
            for (Column column : columnList) {
                setAttributeValue(reader.get(columnHeaders.get(column)), node, column);
            }
        }
    } catch (FileNotFoundException ex) {
        Exceptions.printStackTrace(ex);
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    } finally {
        graph.readUnlockAll();
        graph.writeUnlock();
        if (reader != null) {
            reader.close();
        }
    }
}
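Both importers follow the same javacsv access pattern: open the reader with a separator and charset, keep whitespace, read the headers, then address each record's fields by header name. A stripped-down, self-contained sketch of that pattern follows; the file name and header names are placeholders.

import com.csvreader.CsvReader;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;

public class CsvReadPatternSketch {
    public static void main(String[] args) throws Exception {
        CsvReader reader = new CsvReader(new FileInputStream("nodes.csv"), ',', StandardCharsets.UTF_8);
        reader.setTrimWhitespace(false); // keep values exactly as written, as the importers do
        reader.readHeaders();            // the first record becomes the header row
        while (reader.readRecord()) {
            // Fields are addressed by header name, matching the columnHeaders map above.
            String id = reader.get("Id");
            String label = reader.get("Label");
            System.out.println(id + " -> " + label);
        }
        reader.close();
    }
}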
use of com.csvreader.CsvReader in project gephi by gephi.
the class ImportCSVUIVisualPanel2 method loadColumns.
private void loadColumns(JPanel settingsPanel) {
    try {
        columnsCheckBoxes.clear();
        columnsComboBoxes.clear();
        JLabel columnsLabel = new JLabel(getMessage("ImportCSVUIVisualPanel2.columnsLabel.text"));
        settingsPanel.add(columnsLabel, "wrap");
        CsvReader reader = new CsvReader(new FileInputStream(file), separator, charset);
        reader.setTrimWhitespace(false);
        reader.readHeaders();
        final String[] columns = reader.getHeaders();
        reader.close();
        //Only the first source and target columns found will be used as source and target node ids.
        boolean sourceFound = false, targetFound = false, typeFound = false;
        for (int i = 0; i < columns.length; i++) {
            if (columns[i].isEmpty()) {
                //Skip empty column headers:
                continue;
            }
            JCheckBox columnCheckBox = new JCheckBox(columns[i], true);
            Column column = table.getColumn(columns[i]);
            if (column != null) {
                columnCheckBox.setToolTipText(column.getTitle());
            }
            columnsCheckBoxes.add(columnCheckBox);
            settingsPanel.add(columnCheckBox, "wrap");
            JComboBox columnComboBox = new JComboBox();
            columnsComboBoxes.add(columnComboBox);
            fillComboBoxWithColumnTypes(columns[i], columnComboBox);
            settingsPanel.add(columnComboBox, "wrap 15px");
            if (mode == ImportCSVUIWizardAction.Mode.EDGES_TABLE && columns[i].equalsIgnoreCase("source") && !sourceFound) {
                sourceFound = true;
                //Do not allow deselecting the source column:
                columnCheckBox.setEnabled(false);
                columnComboBox.setEnabled(false);
            }
            if (mode == ImportCSVUIWizardAction.Mode.EDGES_TABLE && columns[i].equalsIgnoreCase("target") && !targetFound) {
                targetFound = true;
                //Do not allow deselecting the target column:
                columnCheckBox.setEnabled(false);
                columnComboBox.setEnabled(false);
            }
            if (mode == ImportCSVUIWizardAction.Mode.EDGES_TABLE && columns[i].equalsIgnoreCase("type") && !typeFound) {
                typeFound = true;
                //Do not allow changing the type column's type:
                columnComboBox.setEnabled(false);
            }
        }
    } catch (IOException ex) {
        Exceptions.printStackTrace(ex);
    }
}
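The header-only read used by this panel can also be done without any UI. Below is a minimal sketch, with a placeholder file path, that collects the non-empty headers the same way loadColumns filters them.

import com.csvreader.CsvReader;
import java.io.FileInputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;

public class CsvHeaderSketch {
    public static List<String> readNonEmptyHeaders(String path) throws Exception {
        CsvReader reader = new CsvReader(new FileInputStream(path), ',', StandardCharsets.UTF_8);
        reader.setTrimWhitespace(false);
        reader.readHeaders();
        String[] headers = reader.getHeaders();
        reader.close(); // only the header row is needed
        List<String> result = new ArrayList<>();
        for (String header : headers) {
            if (!header.isEmpty()) { // empty headers are skipped, as in loadColumns above
                result.add(header);
            }
        }
        return result;
    }
}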
use of com.csvreader.CsvReader in project dhis2-core by dhis2.
the class AnalyticsTestUtils method readInputFile.
/**
 * Reads a CSV input file.
 *
 * @param inputFile path to a file on the class path
 * @return list of string arrays, one per data row
 */
public static ArrayList<String[]> readInputFile(String inputFile) throws IOException {
    InputStream input = new ClassPathResource(inputFile).getInputStream();
    assertNotNull("Reading '" + inputFile + "' failed", input);
    CsvReader reader = new CsvReader(input, Charset.forName("UTF-8"));
    // Ignore the first row
    reader.readRecord();
    ArrayList<String[]> lines = new ArrayList<>();
    while (reader.readRecord()) {
        String[] values = reader.getValues();
        lines.add(values);
    }
    return lines;
}
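A hypothetical test that uses this helper might look as follows; the resource path "csv/set1.csv", the expected column count, and the test class itself are assumptions for illustration, and AnalyticsTestUtils is assumed to be imported or on the same package.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

import java.util.ArrayList;
import org.junit.Test;

public class ReadInputFileSketchTest {
    @Test
    public void verifyInputFileIsRead() throws Exception {
        // "csv/set1.csv" is a placeholder resource assumed to be on the test class path.
        ArrayList<String[]> lines = AnalyticsTestUtils.readInputFile("csv/set1.csv");
        // Each entry is one data row split into column values; the first row was skipped by the helper.
        assertFalse(lines.isEmpty());
        // Assumption: the sample file has three columns per row.
        assertEquals(3, lines.get(0).length);
    }
}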
use of com.csvreader.CsvReader in project dhis2-core by dhis2.
the class DefaultDataValueSetService method saveDataValueSetCsv.
@Override
public ImportSummary saveDataValueSetCsv(InputStream in, ImportOptions importOptions, TaskId id) {
    try {
        in = StreamUtils.wrapAndCheckCompressionFormat(in);
        DataValueSet dataValueSet = new StreamingCsvDataValueSet(new CsvReader(in, Charset.forName("UTF-8")));
        return saveDataValueSet(importOptions, id, dataValueSet);
    } catch (Exception ex) {
        log.error(DebugUtils.getStackTrace(ex));
        notifier.clear(id).notify(id, ERROR, "Process failed: " + ex.getMessage(), true);
        return new ImportSummary(ImportStatus.ERROR, "The import process failed: " + ex.getMessage());
    }
}