Use of org.apache.hop.core.logging.ILogChannel in project hop by apache.
Class HopNeo4jPerspective, method refreshResults.
/**
 * Refreshes the executions table by querying the Neo4j logging connection for
 * Execution nodes of type PIPELINE or WORKFLOW, and repopulates the executions
 * combo box with the distinct execution names found.
 *
 * The search name, row limit and "only root executions" flag are read from the
 * dialog widgets before querying. Errors are reported in an ErrorDialog.
 */
private void refreshResults() {
  // See if logging is enabled
  //
  ILogChannel log = hopGui.getLog();
  String searchName = wExecutions.getText();
  int amount = Const.toInt(wAmount.getText(), 50);
  boolean onlyRoot = wOnlyRoot.getSelection();
  try {
    final NeoConnection connection = findLoggingConnection();
    if (connection == null) {
      // No logging connection configured: clear the label and bail out.
      wUsedConnection.setText("");
      return;
    }
    wUsedConnection.setText(Const.NVL(connection.getName(), ""));
    log.logDetailed("Logging workflow information to Neo4j connection : " + connection.getName());

    Map<String, Object> resultsParameters = new HashMap<>();
    final StringBuilder resultsCypher = new StringBuilder();
    resultsCypher.append("MATCH(e:Execution) ");
    resultsCypher.append("WHERE e.type in [ 'PIPELINE', 'WORKFLOW' ] ");
    if (StringUtils.isNotEmpty(searchName)) {
      resultsCypher.append("AND e.name = $name ");
      resultsParameters.put("name", searchName);
    }
    if (onlyRoot) {
      resultsCypher.append("AND e.root = true ");
    }
    resultsCypher.append("RETURN e.id, e.name, e.type, e.linesRead, e.linesWritten, e.linesInput, e.linesOutput, e.linesRejected, e.errors, e.executionStart, e.durationMs ");
    resultsCypher.append("ORDER BY e.executionStart desc ");
    // The limit is an int parsed above (default 50), so concatenation is safe here.
    resultsCypher.append("LIMIT " + amount);

    wResults.clearAll(false);
    try (Driver driver = connection.getDriver(log, hopGui.getVariables())) {
      try (Session session = connection.getSession(log, driver, hopGui.getVariables())) {
        session.readTransaction(tx -> {
          Result result = tx.run(resultsCypher.toString(), resultsParameters);
          while (result.hasNext()) {
            Record record = result.next();
            TableItem item = new TableItem(wResults.table, SWT.NONE);
            // pos is incremented before each setText, so record field i lands in
            // table column i+1 (column 0 presumably holds the row number — see
            // setRowNums() below).
            int pos = 0;
            Value vId = record.get(pos++);
            item.setText(pos, Const.NVL(vId.asString(), ""));
            Value vName = record.get(pos++);
            item.setText(pos, Const.NVL(vName.asString(), ""));
            Value vType = record.get(pos++);
            item.setText(pos, Const.NVL(vType.asString(), ""));
            Value vLinesRead = record.get(pos++);
            item.setText(pos, Long.toString(vLinesRead.asLong(0)));
            Value vLinesWritten = record.get(pos++);
            item.setText(pos, Long.toString(vLinesWritten.asLong(0)));
            Value vLinesInput = record.get(pos++);
            item.setText(pos, Long.toString(vLinesInput.asLong(0)));
            Value vLinesOutput = record.get(pos++);
            item.setText(pos, Long.toString(vLinesOutput.asLong(0)));
            Value vLinesRejected = record.get(pos++);
            item.setText(pos, Long.toString(vLinesRejected.asLong(0)));
            Value vErrors = record.get(pos++);
            long errors = vErrors.asLong(0);
            // Reuse the value parsed above instead of coercing a second time.
            item.setText(pos, Long.toString(errors));
            Value vExecutionStart = record.get(pos++);
            // The stored timestamp contains a literal 'T'; display it as a space.
            item.setText(pos, Const.NVL(vExecutionStart.asString(), "").replace("T", " "));
            Value vDurationMs = record.get(pos++);
            // asLong(0) already yields a primitive long; no explicit boxing needed.
            String durationHMS = LoggingCore.getFancyDurationFromMs(vDurationMs.asLong(0));
            item.setText(pos, durationHMS);
            if (errors != 0) {
              // Highlight failed executions.
              item.setBackground(errorLineBackground);
            }
          }
          wResults.removeEmptyRows();
          wResults.setRowNums();
          wResults.optWidth(true);
          return null;
        });

        // Also populate the executions combo box for pipelines and workflows
        //
        String execCypher = "match(e:Execution) where e.type in ['PIPELINE', 'WORKFLOW'] return distinct e.name order by e.name";
        session.readTransaction(tx -> {
          List<String> list = new ArrayList<>();
          Result result = tx.run(execCypher);
          while (result.hasNext()) {
            Record record = result.next();
            Value value = record.get(0);
            list.add(value.asString());
          }
          wExecutions.setItems(list.toArray(new String[0]));
          return null;
        });
      }
    } finally {
      // setItems() cleared the combo's text; restore the user's search term.
      wExecutions.setText(Const.NVL(searchName, ""));
    }
  } catch (Throwable e) {
    new ErrorDialog(hopGui.getShell(), BaseMessages.getString(PKG, "Neo4jPerspectiveDialog.ErrorSearching.Dialog.Header"), BaseMessages.getString(PKG, "Neo4jPerspectiveDialog.ErrorSearching.Dialog.Message"), e);
  }
}
Use of org.apache.hop.core.logging.ILogChannel in project hop by apache.
Class HopNeo4jPerspective, method analyze.
/**
 * Analyze a log record entry when a user clicks on it.
 *
 * Reads the execution id, name, type and error count from the selected table
 * row, then runs the logging, error-lineage and cypher-statement analyses
 * against the Neo4j logging connection.
 *
 * @param event the selection event; ignored unless its item is a TableItem
 */
private void analyze(Event event) {
  ILogChannel log = hopGui.getLog();
  // Use the log channel instead of a stray System.out debug print.
  log.logDebug("Analyze");
  if (!(event.item instanceof TableItem)) {
    return;
  }
  TableItem item = (TableItem) event.item;
  // Table columns are 1-based: 1=id, 2=name, 3=type, 9=errors
  // (column 0 holds the row number).
  String id = item.getText(1);
  String name = item.getText(2);
  String type = item.getText(3);
  int errors = Const.toInt(item.getText(9), -1);
  try {
    final NeoConnection connection = findLoggingConnection();
    if (connection == null) {
      return;
    }
    log.logDetailed("Logging workflow information to Neo4j connection : " + connection.getName());
    try (Driver driver = connection.getDriver(log, hopGui.getVariables())) {
      try (Session session = connection.getSession(log, driver, hopGui.getVariables())) {
        analyzeLogging(session, id, name, type);
        List<List<HistoryResult>> shortestPaths = analyzeErrorLineage(session, id, name, type, errors);
        analyzeCypherStatements(connection, session, id, name, type, errors, shortestPaths);
      }
    }
  } catch (Exception e) {
    new ErrorDialog(hopGui.getShell(), BaseMessages.getString(PKG, "Neo4jPerspectiveDialog.ErrorAnalyze.Dialog.Header"), BaseMessages.getString(PKG, "Neo4jPerspectiveDialog.ErrorAnalyze.Dialog.Message"), e);
  }
}
Use of org.apache.hop.core.logging.ILogChannel in project hop by apache.
Class PipelineLoggingExtensionPoint, method logEndOfPipeline.
/**
 * Logs the end of a pipeline execution to Neo4j: updates the Execution node of
 * the pipeline itself (status, duration, line counts, logging text), creates
 * one Execution node per transform copy, and records graph-usage relationships
 * left behind by the Neo4j output transforms.
 *
 * @param log the channel to report progress and errors to
 * @param session the shared Neo4j session; access is synchronized
 * @param connection the Neo4j connection metadata (name used for logging)
 * @param pipeline the finished pipeline engine
 * @throws HopException declared for consistency with the other logging
 *     methods; failures inside the transaction are rolled back and logged
 */
private void logEndOfPipeline(final ILogChannel log, final Session session, final NeoConnection connection, final IPipelineEngine<PipelineMeta> pipeline) throws HopException {
  log.logDetailed("Logging execution end of pipeline to Neo4j connection : " + connection.getName());
  final PipelineMeta pipelineMeta = pipeline.getPipelineMeta();
  // The session is shared between extension points, so serialize access to it.
  synchronized (session) {
    session.writeTransaction((TransactionWork<Void>) transaction -> {
      try {
        // Create a new node for each log channel and it's owner
        // Start with the pipeline
        //
        ILogChannel channel = pipeline.getLogChannel();
        Result result = pipeline.getResult();
        String transLogChannelId = pipeline.getLogChannelId();
        String transLoggingText = HopLogStore.getAppender().getBuffer(transLogChannelId, false).toString();
        Date endDate = new Date();
        pipeline.getExtensionDataMap().put(PIPELINE_END_DATE, endDate);
        Date startDate = (Date) pipeline.getExtensionDataMap().get(PIPELINE_START_DATE);
        // Guard against a missing start date (the start extension point may not
        // have fired); fall back to a zero duration instead of an NPE.
        long durationMs = startDate == null ? 0L : endDate.getTime() - startDate.getTime();

        Map<String, Object> transPars = new HashMap<>();
        transPars.put("pipelineName", pipelineMeta.getName());
        transPars.put("type", EXECUTION_TYPE_PIPELINE);
        transPars.put("id", channel.getLogChannelId());
        transPars.put("executionEnd", new SimpleDateFormat("yyyy/MM/dd'T'HH:mm:ss").format(endDate));
        transPars.put("durationMs", durationMs);
        transPars.put("errors", result.getNrErrors());
        transPars.put("linesInput", result.getNrLinesInput());
        transPars.put("linesOutput", result.getNrLinesOutput());
        transPars.put("linesRead", result.getNrLinesRead());
        transPars.put("linesWritten", result.getNrLinesWritten());
        transPars.put("linesRejected", result.getNrLinesRejected());
        transPars.put("loggingText", transLoggingText);
        transPars.put("status", pipeline.getStatusDescription());

        StringBuilder transCypher = new StringBuilder();
        transCypher.append("MATCH (pipeline:Pipeline { name : $pipelineName } ) ");
        transCypher.append("MERGE (exec:Execution { name : $pipelineName, type : $type, id : $id } ) ");
        transCypher.append("SET ");
        transCypher.append(" exec.executionEnd = $executionEnd ");
        transCypher.append(", exec.durationMs = $durationMs ");
        transCypher.append(", exec.status = $status ");
        transCypher.append(", exec.errors = $errors ");
        transCypher.append(", exec.linesInput = $linesInput ");
        transCypher.append(", exec.linesOutput = $linesOutput ");
        transCypher.append(", exec.linesRead = $linesRead ");
        transCypher.append(", exec.linesWritten = $linesWritten ");
        transCypher.append(", exec.linesRejected = $linesRejected ");
        transCypher.append(", exec.loggingText = $loggingText ");
        transCypher.append("MERGE (exec)-[r:EXECUTION_OF_PIPELINE]->(pipeline) ");
        transaction.run(transCypher.toString(), transPars);

        // Graph usage information is left in the extension data map by the
        // Neo4j transform plugins (Neo4j Output and Neo4j Graph Output).
        // It is pipeline-wide, so fetch it once instead of once per transform.
        @SuppressWarnings("unchecked")
        Map<String, Map<String, Set<String>>> usageMap = (Map<String, Map<String, Set<String>>>) pipeline.getExtensionDataMap().get(Defaults.TRANS_NODE_UPDATES_GROUP);

        // Also log every transform copy
        //
        List<TransformMetaDataCombi<ITransform, ITransformMeta, ITransformData>> combis = ((Pipeline) pipeline).getTransforms();
        for (TransformMetaDataCombi combi : combis) {
          String transformLogChannelId = combi.transform.getLogChannel().getLogChannelId();
          String transformLoggingText = HopLogStore.getAppender().getBuffer(transformLogChannelId, false).toString();
          Map<String, Object> transformPars = new HashMap<>();
          transformPars.put("pipelineName", pipelineMeta.getName());
          transformPars.put("name", combi.transformName);
          transformPars.put("type", EXECUTION_TYPE_TRANSFORM);
          transformPars.put("id", transformLogChannelId);
          transformPars.put("transId", transLogChannelId);
          transformPars.put("copy", Long.valueOf(combi.copy));
          transformPars.put("status", combi.transform.getStatus().getDescription());
          transformPars.put("loggingText", transformLoggingText);
          transformPars.put("errors", combi.transform.getErrors());
          transformPars.put("linesRead", combi.transform.getLinesRead());
          transformPars.put("linesWritten", combi.transform.getLinesWritten());
          transformPars.put("linesInput", combi.transform.getLinesInput());
          transformPars.put("linesOutput", combi.transform.getLinesOutput());
          transformPars.put("linesRejected", combi.transform.getLinesRejected());

          StringBuilder transformCypher = new StringBuilder();
          transformCypher.append("MATCH (transform:Transform { pipelineName : $pipelineName, name : $name } ) ");
          transformCypher.append("MERGE (exec:Execution { name : $name, type : $type, id : $id } ) ");
          transformCypher.append("SET ");
          transformCypher.append(" exec.transId = $transId ");
          transformCypher.append(", exec.copy = $copy ");
          transformCypher.append(", exec.status = $status ");
          transformCypher.append(", exec.loggingText = $loggingText ");
          transformCypher.append(", exec.errors = $errors ");
          transformCypher.append(", exec.linesRead = $linesRead ");
          transformCypher.append(", exec.linesWritten = $linesWritten ");
          transformCypher.append(", exec.linesInput = $linesInput ");
          transformCypher.append(", exec.linesOutput = $linesOutput ");
          transformCypher.append(", exec.linesRejected = $linesRejected ");
          transformCypher.append("MERGE (exec)-[r:EXECUTION_OF_TRANSFORM]->(transform) ");
          transaction.run(transformCypher.toString(), transformPars);

          // Log graph usage as well
          //
          if (usageMap != null) {
            for (String graphUsage : usageMap.keySet()) {
              Map<String, Set<String>> transformsMap = usageMap.get(graphUsage);
              Set<String> labels = transformsMap.get(combi.transformName);
              if (labels != null) {
                for (String label : labels) {
                  // Save relationship to GraphUsage node
                  //
                  Map<String, Object> usagePars = new HashMap<>();
                  usagePars.put("transform", combi.transformName);
                  usagePars.put("type", "TRANSFORM");
                  usagePars.put("id", transformLogChannelId);
                  usagePars.put("label", label);
                  usagePars.put("usage", graphUsage);
                  StringBuilder usageCypher = new StringBuilder();
                  usageCypher.append("MATCH (transform:Execution { name : $transform, type : $type, id : $id } ) ");
                  usageCypher.append("MERGE (usage:Usage { usage : $usage, label : $label } ) ");
                  // Relationship types cannot be parameterized in Cypher, hence
                  // the concatenation; graphUsage comes from the plugins' map keys.
                  usageCypher.append("MERGE (transform)-[r:PERFORMS_" + graphUsage + "]->(usage)");
                  transaction.run(usageCypher.toString(), usagePars);
                }
              }
            }
          }
        }
        transaction.commit();
      } catch (Exception e) {
        // Best-effort logging: roll back and report, but don't fail the pipeline.
        transaction.rollback();
        log.logError("Error logging pipeline end", e);
      }
      return null;
    });
  }
}
Use of org.apache.hop.core.logging.ILogChannel in project hop by apache.
Class PipelineLoggingExtensionPoint, method logPipelineMetadata.
/**
 * Writes the static pipeline metadata to Neo4j: the Pipeline node, one
 * Transform node per transform, and PRECEDES relationships for the hops.
 * MERGE statements are used throughout so repeated runs update the graph
 * instead of duplicating it.
 *
 * @param log the channel to report progress and errors to
 * @param session the shared Neo4j session; access is synchronized
 * @param connection the Neo4j connection metadata (name used for logging)
 * @param pipeline the pipeline engine whose metadata is logged
 * @throws HopException declared for consistency with the other logging
 *     methods; failures inside the transaction are rolled back and logged
 */
private void logPipelineMetadata(final ILogChannel log, final Session session, final NeoConnection connection, final IPipelineEngine<PipelineMeta> pipeline) throws HopException {
  log.logDetailed("Logging pipeline metadata to Neo4j connection : " + connection.getName());
  final PipelineMeta pipelineMeta = pipeline.getPipelineMeta();
  // The session is shared between extension points, so serialize access to it.
  synchronized (session) {
    session.writeTransaction((TransactionWork<Void>) transaction -> {
      try {
        // Upsert the pipeline node itself.
        Map<String, Object> pipelineParams = new HashMap<>();
        pipelineParams.put("pipelineName", pipelineMeta.getName());
        pipelineParams.put("description", pipelineMeta.getDescription());
        pipelineParams.put("filename", pipelineMeta.getFilename());
        String pipelineCypher =
            "MERGE (pipeline:Pipeline { name : $pipelineName } ) "
                + "SET pipeline.filename = $filename, pipeline.description = $description ";
        transaction.run(pipelineCypher, pipelineParams);
        log.logDetailed("Pipeline cypher : " + pipelineCypher);

        // Upsert one Transform node per transform and link it to the pipeline.
        for (TransformMeta transformMeta : pipelineMeta.getTransforms()) {
          Map<String, Object> transformParams = new HashMap<>();
          transformParams.put("pipelineName", pipelineMeta.getName());
          transformParams.put("transformName", transformMeta.getName());
          transformParams.put("description", transformMeta.getDescription());
          transformParams.put("pluginId", transformMeta.getPluginId());
          transformParams.put("copies", transformMeta.getCopies(pipeline));
          transformParams.put("locationX", transformMeta.getLocation().x);
          transformParams.put("locationY", transformMeta.getLocation().y);
          String transformCypher =
              "MATCH (pipeline:Pipeline { name : $pipelineName } ) "
                  + "MERGE (transform:Transform { pipelineName : $pipelineName, name : $transformName } ) "
                  + "SET "
                  + " transform.description = $description "
                  + ", transform.pluginId = $pluginId "
                  + ", transform.copies = $copies "
                  + ", transform.locationX = $locationX "
                  + ", transform.locationY = $locationY "
                  + "MERGE (transform)-[rel:TRANSFORM_OF_PIPELINE]->(pipeline) ";
          log.logDetailed("Transform '" + transformMeta.getName() + "' cypher : " + transformCypher);
          transaction.run(transformCypher, transformParams);
        }

        // Link consecutive transforms with PRECEDES relationships for each hop.
        int hopCount = pipelineMeta.nrPipelineHops();
        for (int hopIndex = 0; hopIndex < hopCount; hopIndex++) {
          PipelineHopMeta hopMeta = pipelineMeta.getPipelineHop(hopIndex);
          Map<String, Object> hopParams = new HashMap<>();
          hopParams.put("fromTransform", hopMeta.getFromTransform().getName());
          hopParams.put("toTransform", hopMeta.getToTransform().getName());
          hopParams.put("pipelineName", pipelineMeta.getName());
          String hopCypher =
              "MATCH (from:Transform { pipelineName : $pipelineName, name : $fromTransform }) "
                  + "MATCH (to:Transform { pipelineName : $pipelineName, name : $toTransform }) "
                  + "MERGE (from)-[rel:PRECEDES]->(to) ";
          transaction.run(hopCypher, hopParams);
        }
        transaction.commit();
      } catch (Exception e) {
        // Best-effort logging: roll back and report, but don't fail the pipeline.
        transaction.rollback();
        log.logError("Error logging pipeline metadata", e);
      }
      return null;
    });
  }
}
Use of org.apache.hop.core.logging.ILogChannel in project hop by apache.
Class WorkflowLoggingExtensionPoint, method logStartOfWorkflow.
/**
 * Registers the start of a workflow execution in Neo4j by merging an Execution
 * node (keyed on name, type and log channel id) and linking it to the Workflow
 * node with an EXECUTION_OF_WORKFLOW relationship.
 *
 * @param log the channel to report progress and errors to
 * @param session the shared Neo4j session; access is synchronized
 * @param connection the Neo4j connection metadata (name used for logging)
 * @param workflow the workflow engine that just started
 * @throws HopException declared for consistency with the other logging
 *     methods; failures inside the transaction are rolled back and logged
 */
private void logStartOfWorkflow(final ILogChannel log, final Session session, final NeoConnection connection, final IWorkflowEngine<WorkflowMeta> workflow) throws HopException {
  log.logDetailed("Logging execution start of workflow to Neo4j connection : " + connection.getName());
  final WorkflowMeta workflowMeta = workflow.getWorkflowMeta();
  // The session is shared between extension points, so serialize access to it.
  synchronized (session) {
    session.writeTransaction((TransactionWork<Void>) transaction -> {
      try {
        // The log channel id uniquely identifies this execution run.
        ILogChannel channel = workflow.getLogChannel();
        Date startDate = (Date) workflow.getExtensionDataMap().get(WORKFLOW_START_DATE);

        Map<String, Object> params = new HashMap<>();
        params.put("workflowName", workflowMeta.getName());
        params.put("id", channel.getLogChannelId());
        params.put("type", EXECUTION_TYPE_WORKFLOW);
        // NOTE(review): the pattern mixes '/' separators with a literal 'T';
        // it matches the format written by the pipeline logging code, whose
        // consumers strip the 'T' on display — keep them in sync.
        params.put("executionStart", new SimpleDateFormat("yyyy/MM/dd'T'HH:mm:ss").format(startDate));

        String cypher =
            "MATCH (w:Workflow { name : $workflowName} ) "
                + "MERGE (e:Execution { name : $workflowName, type : $type, id : $id} ) "
                + "SET "
                + " e.executionStart = $executionStart "
                + "MERGE (e)-[r:EXECUTION_OF_WORKFLOW]->(w) ";
        transaction.run(cypher, params);
        transaction.commit();
      } catch (Exception e) {
        // Best-effort logging: roll back and report, but don't fail the workflow.
        transaction.rollback();
        log.logError("Error logging workflow start", e);
      }
      return null;
    });
  }
}
Aggregations