Example usage of au.gov.asd.tac.constellation.plugins.importexport.ImportDefinition in the constellation project by constellation-app.
Source: the ImportDelimitedIO class, method loadParameterFile.
/**
 * Loads a previously saved delimited-import template (a JSON file named after the
 * encoded template name in {@code delimIoDir}) and applies it to the given controller.
 * <p>
 * The template is rejected (with an alert) when its recorded file parser does not
 * match the controller's current parser, or when its destination schema factory
 * cannot be found. On success the controller's parser, schema-init flag, header
 * flag and destination are set, the template's ImportDefinitions (including row
 * filters, manually created attributes and attribute translators) are rebuilt,
 * and the pane is updated with them. IO failures are logged, not rethrown.
 */
private static void loadParameterFile(final DelimitedImportController importController, final File delimIoDir, final String templName) {
try {
final ObjectMapper mapper = new ObjectMapper();
// Template file name is the encoded template name + ".json" inside the IO directory.
final JsonNode root = mapper.readTree(new File(delimIoDir, FilenameEncoder.encode(templName) + FileExtensionConstants.JSON));
final JsonNode source = root.get(SOURCE);
final String parser = source.get(PARSER).textValue();
final ImportFileParser ifp = ImportFileParser.getParser(parser);
// A template saved for one file parser cannot be applied while another parser is active.
if (!importController.getImportFileParser().getLabel().equals(parser)) {
final String message = String.format("Template is for a different file Parser '%s'.", parser);
NotifyDisplayer.displayAlert(LOAD_TEMPLATE, "File Parser Mismatch", message, Alert.AlertType.ERROR);
return;
}
importController.setImportFileParser(ifp);
final boolean schemaInit = source.get(SCHEMA_INIT).booleanValue();
importController.setSchemaInitialised(schemaInit);
final boolean filesIncludeHeaders = source.get(FILES_INCLUDE_HEADERS).booleanValue();
importController.setfilesIncludeHeaders(filesIncludeHeaders);
// SHOW_ALL_SCHEMA_ATTRIBUTES is optional in older templates; default to false when absent.
final boolean showAllSchemaAttributes = source.get(SHOW_ALL_SCHEMA_ATTRIBUTES) != null && source.get(SHOW_ALL_SCHEMA_ATTRIBUTES).booleanValue();
importController.setShowAllSchemaAttributes(showAllSchemaAttributes);
final String destination = source.get(DESTINATION).textValue();
final SchemaFactory schemaFactory = SchemaFactoryUtilities.getSchemaFactory(destination);
if (schemaFactory != null) {
importController.setDestination(new SchemaDestination(schemaFactory));
// Rebuild each saved run configuration (ImportDefinition) from the JSON array.
final List<ImportDefinition> definitions = new ArrayList<>();
final ArrayNode definitionsArray = (ArrayNode) root.withArray(DEFINITIONS);
for (final JsonNode definitionNode : definitionsArray) {
final int firstRow = definitionNode.get(FIRST_ROW).intValue();
final RowFilter filter = new RowFilter();
// The row filter (script + referenced columns) is optional per definition.
if (definitionNode.has(FILTER)) {
final JsonNode filterNode = definitionNode.get(FILTER);
final String script = filterNode.get(SCRIPT).textValue();
final JsonNode columnsArray = filterNode.withArray(COLUMNS);
final ArrayList<String> columns = new ArrayList<>();
for (final JsonNode column : columnsArray) {
columns.add(column.textValue());
}
filter.setScript(script);
filter.setColumns(columns.toArray(new String[columns.size()]));
}
final ImportDefinition impdef = new ImportDefinition("", firstRow, filter);
final JsonNode attributesNode = definitionNode.get(ATTRIBUTES);
// Restore the attribute mappings for every attribute category (source vertex,
// destination vertex, transaction, ...).
for (final AttributeType attrType : AttributeType.values()) {
final ArrayNode columnArray = (ArrayNode) attributesNode.withArray(attrType.toString());
for (final JsonNode column : columnArray) {
final String columnLabel = column.get(COLUMN_LABEL).textValue();
final String label = column.get(ATTRIBUTE_LABEL).textValue();
if (!importController.hasAttribute(attrType.getElementType(), label)) {
// Manually created attribute.
final String type = column.get(ATTRIBUTE_TYPE).textValue();
final String descr = column.get(ATTRIBUTE_DESCRIPTION).textValue();
final NewAttribute a = new NewAttribute(attrType.getElementType(), type, label, descr);
importController.createManualAttribute(a);
}
final Attribute attribute = importController.getAttribute(attrType.getElementType(), label);
// Translator + its serialized parameter values transform the column on import.
final AttributeTranslator translator = AttributeTranslator.getTranslator(column.get(TRANSLATOR).textValue());
final String args = column.get(TRANSLATOR_ARGS).textValue();
final String defaultValue = column.get(DEFAULT_VALUE).textValue();
final PluginParameters params = translator.createParameters();
translator.setParameterValues(params, args);
final ImportAttributeDefinition iad = new ImportAttributeDefinition(columnLabel, defaultValue, attribute, translator, params);
impdef.addDefinition(attrType, iad);
}
}
definitions.add(impdef);
}
// Suppress clearing of manually added attributes while the pane is refreshed,
// then restore the flag even if update() throws.
importController.setClearManuallyAdded(false);
try {
((DelimitedImportPane) importController.getStage()).update(importController, definitions);
} finally {
importController.setClearManuallyAdded(true);
}
} else {
final String message = String.format("Can't find schema factory '%s'", destination);
NotifyDisplayer.displayAlert(LOAD_TEMPLATE, "Destination Schema Error", message, Alert.AlertType.ERROR);
}
} catch (final IOException ex) {
LOGGER.log(Level.SEVERE, ex.getLocalizedMessage(), ex);
}
}
Example usage of au.gov.asd.tac.constellation.plugins.importexport.ImportDefinition in the constellation project by constellation-app.
Source: the ImportJDBCPlugin class, method edit.
@Override
protected void edit(final GraphWriteMethods graph, final PluginInteraction interaction, final PluginParameters parameters) throws InterruptedException, PluginException {
final JDBCConnection connection = (JDBCConnection) parameters.getParameters().get(CONNECTION_PARAMETER_ID).getObjectValue();
final String query = parameters.getParameters().get(QUERY_PARAMETER_ID).getStringValue();
final List<ImportDefinition> definitions = (List<ImportDefinition>) parameters.getParameters().get(DEFINITIONS_PARAMETER_ID).getObjectValue();
final Boolean initialiseWithSchema = parameters.getParameters().get(SCHEMA_PARAMETER_ID).getBooleanValue();
boolean positionalAtrributesExist = false;
int totalImportedRows = 0;
final String username = parameters.getParameters().get(USERNAME_PARAMETER_ID).getStringValue();
final String password = parameters.getParameters().get(PASSWORD_PARAMETER_ID).getStringValue();
if (connection != null && query != null && !query.isBlank()) {
final List<String[]> data = new ArrayList<>();
try {
try (final Connection dbConnection = connection.getConnection(username, password)) {
try (final PreparedStatement ps = dbConnection.prepareStatement(query)) {
try (final ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
final String[] d = new String[ps.getMetaData().getColumnCount()];
for (int i = 0; i < ps.getMetaData().getColumnCount(); i++) {
d[i] = rs.getString(i + 1);
}
data.add(d);
}
}
}
}
} catch (final MalformedURLException | ClassNotFoundException | SQLException | NoSuchMethodException | InstantiationException | IllegalAccessException | IllegalArgumentException | InvocationTargetException ex) {
return;
}
for (final ImportDefinition definition : definitions) {
if (definition.getDefinitions(AttributeType.SOURCE_VERTEX).isEmpty()) {
if (!definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
totalImportedRows += processVertices(definition, graph, data, AttributeType.DESTINATION_VERTEX, initialiseWithSchema, interaction);
}
} else if (definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
totalImportedRows += processVertices(definition, graph, data, AttributeType.SOURCE_VERTEX, initialiseWithSchema, interaction);
} else {
totalImportedRows += processTransactions(definition, graph, data, initialiseWithSchema, interaction);
}
// Determine if a positional attribute has been defined, if so update the overall flag
final boolean isPositional = attributeDefintionIsPositional(definition.getDefinitions(AttributeType.SOURCE_VERTEX), definition.getDefinitions(AttributeType.DESTINATION_VERTEX));
positionalAtrributesExist = (positionalAtrributesExist || isPositional);
}
displaySummaryAlert(totalImportedRows, data.size(), connection.getConnectionName());
// the graph. This does mean some nodes could sit on top of each other if multiple nodes have the same coordinates.
if (!positionalAtrributesExist) {
interaction.setProgress(1, 1, "Arranging", true);
graph.validateKey(GraphElementType.VERTEX, true);
graph.validateKey(GraphElementType.TRANSACTION, true);
// unfortunately need to arrange with pendants and uncollide because grid arranger works based on selection
final VertexListInclusionGraph vlGraph = new VertexListInclusionGraph(graph, AbstractInclusionGraph.Connections.NONE, new ArrayList<>());
PluginExecutor.startWith(ArrangementPluginRegistry.GRID_COMPOSITE).followedBy(ArrangementPluginRegistry.PENDANTS).followedBy(ArrangementPluginRegistry.UNCOLLIDE).followedBy(InteractiveGraphPluginRegistry.RESET_VIEW).executeNow(vlGraph.getInclusionGraph());
vlGraph.retrieveCoords();
}
}
}
Example usage of au.gov.asd.tac.constellation.plugins.importexport.ImportDefinition in the constellation project by constellation-app.
Source: the ImportDelimitedPlugin class, method edit.
@Override
protected void edit(final GraphWriteMethods graph, final PluginInteraction interaction, final PluginParameters parameters) throws InterruptedException, PluginException {
final ImportFileParser parser = (ImportFileParser) parameters.getParameters().get(PARSER_PARAMETER_ID).getObjectValue();
// files will be a list of file which extends from object type
@SuppressWarnings("unchecked") final List<File> files = (List<File>) parameters.getParameters().get(FILES_PARAMETER_ID).getObjectValue();
// definitions will be a list of import defintions which extends from object type
@SuppressWarnings("unchecked") final List<ImportDefinition> definitions = (List<ImportDefinition>) parameters.getParameters().get(DEFINITIONS_PARAMETER_ID).getObjectValue();
final boolean initialiseWithSchema = parameters.getParameters().get(SCHEMA_PARAMETER_ID).getBooleanValue();
final PluginParameters parserParameters = (PluginParameters) parameters.getParameters().get(PARSER_PARAMETER_IDS_PARAMETER_ID).getObjectValue();
final boolean filesIncludeHeaders = parameters.getParameters().get(FILES_INCLUDE_HEADERS_PARAMETER_ID).getBooleanValue();
boolean positionalAtrributesExist = false;
final List<String> validFiles = new ArrayList<>();
final List<String> emptyFiles = new ArrayList<>();
final List<String> invalidFiles = new ArrayList<>();
final List<String> emptyRunConfigs = new ArrayList<>();
int totalRows = 0;
int totalImportedRows = 0;
int dataSize = 0;
// a minimum) defined
for (final ImportDefinition definition : definitions) {
if (definition.getDefinitions(AttributeType.SOURCE_VERTEX).isEmpty() && definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
emptyRunConfigs.add(definition.getDefinitionName());
}
}
for (final File file : files) {
interaction.setProgress(0, 0, "Reading File: " + file.getName(), true);
List<String[]> data = null;
int importedRowsPerFile = 0;
try {
data = parser.parse(new InputSource(file), parserParameters);
dataSize = filesIncludeHeaders ? data.size() - 1 : data.size();
totalRows = totalRows + Integer.max(0, dataSize);
if (dataSize > 0) {
if (validFiles.isEmpty()) {
validFiles.add(file.getName().concat(" (").concat(Integer.toString(dataSize)).concat(" rows)"));
} else {
validFiles.add(file.getName().concat(" (").concat(Integer.toString(dataSize)).concat(")"));
}
} else {
emptyFiles.add(file.getName());
}
} catch (FileNotFoundException ex) {
final String errorMsg = file.getPath() + " could not be found. Ignoring file during import.";
LOGGER.log(Level.INFO, errorMsg);
invalidFiles.add(file.getName());
} catch (IOException ex) {
final String errorMsg = file.getPath() + " could not be parsed. Removing file during import.";
LOGGER.log(Level.INFO, errorMsg);
invalidFiles.add(file.getName());
}
if (data != null) {
for (final ImportDefinition definition : definitions) {
if (definition.getDefinitions(AttributeType.SOURCE_VERTEX).isEmpty()) {
// Process destination vertexes if defintions are defined, otherwise there is nothing to do.
if (!definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
importedRowsPerFile += processVertices(definition, graph, data, AttributeType.DESTINATION_VERTEX, initialiseWithSchema, interaction, file.getName());
}
} else if (definition.getDefinitions(AttributeType.DESTINATION_VERTEX).isEmpty()) {
// Source defintions exist, but no destination definitions exist. Process the source definitions.
importedRowsPerFile += processVertices(definition, graph, data, AttributeType.SOURCE_VERTEX, initialiseWithSchema, interaction, file.getName());
} else {
// Both source and destination defintions exist, process them.
importedRowsPerFile += processTransactions(definition, graph, data, initialiseWithSchema, interaction, file.getName());
}
// Determine if a positional attribute has been defined, if so update the overall flag
final boolean isPositional = attributeDefintionIsPositional(definition.getDefinitions(AttributeType.SOURCE_VERTEX), definition.getDefinitions(AttributeType.DESTINATION_VERTEX));
positionalAtrributesExist = (positionalAtrributesExist || isPositional);
}
}
totalImportedRows += importedRowsPerFile;
LOGGER.log(Level.INFO, "Imported {0} rows of data from file {1} containing {2} total rows", new Object[] { importedRowsPerFile, file.getPath(), dataSize });
}
displaySummaryAlert(graph.getVertexCount() + graph.getTransactionCount(), totalImportedRows, validFiles, emptyFiles, invalidFiles, emptyRunConfigs);
ConstellationLoggerHelper.importPropertyBuilder(this, GraphRecordStoreUtilities.getVertices(graph, false, false, false).getAll(GraphRecordStoreUtilities.SOURCE + VisualConcept.VertexAttribute.LABEL), files, ConstellationLoggerHelper.SUCCESS);
LOGGER.log(Level.INFO, "Auto arrangement use={0}", (!positionalAtrributesExist));
// the graph. This does mean some nodes could sit on top of each other if multiple nodes have the same coordinates.
if (!positionalAtrributesExist) {
interaction.setProgress(1, 1, "Arranging", true);
graph.validateKey(GraphElementType.VERTEX, true);
graph.validateKey(GraphElementType.TRANSACTION, true);
// unfortunately need to arrange with pendants and uncollide because grid arranger works based on selection
final VertexListInclusionGraph vlGraph = new VertexListInclusionGraph(graph, AbstractInclusionGraph.Connections.NONE, new ArrayList<>());
PluginExecutor.startWith(ArrangementPluginRegistry.GRID_COMPOSITE).followedBy(ArrangementPluginRegistry.PENDANTS).followedBy(ArrangementPluginRegistry.UNCOLLIDE).followedBy(InteractiveGraphPluginRegistry.RESET_VIEW).executeNow(vlGraph.getInclusionGraph());
vlGraph.retrieveCoords();
}
}
Aggregations (end of usage examples).