Example from the pentaho-kettle project illustrating a use of org.apache.commons.vfs2.UserAuthenticationData.Type. Snippet: class AutoDoc, method processRow.
/**
 * Processes one incoming row for the 'Auto Documentation Output' step.
 *
 * Each input row names a transformation or job (filename field + file type
 * field) to be documented. In report mode the locations are collected until
 * the input is exhausted, then a single report is rendered to the target
 * file. In METADATA output mode, one output row with the loaded metadata is
 * emitted per input row instead.
 *
 * @param smi step meta, cast to AutoDocMeta
 * @param sdi step data, cast to AutoDocData
 * @return true while more rows may follow, false when processing is done
 * @throws KettleException when a required field is missing, the repository
 *         directory cannot be found, the file type value is unknown, or
 *         report rendering fails
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (AutoDocMeta) smi;
  data = (AutoDocData) sdi;
  Object[] row = getRow();
  if (row == null) {
    if (data.filenames.isEmpty()) {
      // Nothing to see here, move along!
      //
      setOutputDone();
      return false;
    }
    // End of the line, create the documentation...
    //
    FileObject targetFile = KettleVFS.getFileObject(environmentSubstitute(meta.getTargetFilename()));
    String targetFilename = KettleVFS.getFilename(targetFile);
    // Create the report builder
    //
    KettleReportBuilder kettleReportBuilder = new KettleReportBuilder(this, data.filenames, KettleVFS.getFilename(targetFile), meta);
    try {
      // Boot the Pentaho reporting engine once, only if it hasn't been booted yet.
      //
      if (ClassicEngineBoot.getInstance().isBootDone() == false) {
        ObjectUtilities.setClassLoader(getClass().getClassLoader());
        ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT);
        LibLoaderBoot.getInstance().start();
        LibFontBoot.getInstance().start();
        ClassicEngineBoot.getInstance().start();
      }
      // Do the reporting thing...
      //
      kettleReportBuilder.createReport();
      kettleReportBuilder.render();
      // Emit one row containing the target filename.
      Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
      int outputIndex = 0;
      outputRowData[outputIndex++] = targetFilename;
      // Pass along the data to the next steps...
      //
      putRow(data.outputRowMeta, outputRowData);
      // Add the target file to the result file list
      //
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, targetFile, getTransMeta().getName(), toString());
      resultFile.setComment("This file was generated by the 'Auto Documentation Output' step");
      addResultFile(resultFile);
    } catch (Exception e) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToRenderReport"), e);
    }
    setOutputDone();
    return false;
  }
  if (first) {
    first = false;
    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
    // Get the filename field index...
    //
    String filenameField = environmentSubstitute(meta.getFilenameField());
    data.fileNameFieldIndex = getInputRowMeta().indexOfValue(filenameField);
    if (data.fileNameFieldIndex < 0) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.FilenameFieldNotFound", filenameField));
    }
    // Get the file type field index...
    //
    String fileTypeField = environmentSubstitute(meta.getFileTypeField());
    data.fileTypeFieldIndex = getInputRowMeta().indexOfValue(fileTypeField);
    if (data.fileTypeFieldIndex < 0) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.FileTypeFieldNotFound", fileTypeField));
    }
    data.repository = getTrans().getRepository();
    if (data.repository != null) {
      // Load the directory tree once so repository paths can be resolved per row.
      data.tree = data.repository.loadRepositoryDirectoryTree();
    }
    // Initialize the repository information handlers (images, metadata, loading, etc)
    //
    TransformationInformation.init(getTrans().getRepository());
    JobInformation.init(getTrans().getRepository());
  }
  // One more transformation or job to place in the documentation.
  //
  String fileName = getInputRowMeta().getString(row, data.fileNameFieldIndex);
  String fileType = getInputRowMeta().getString(row, data.fileTypeFieldIndex);
  RepositoryObjectType objectType;
  if ("Transformation".equalsIgnoreCase(fileType)) {
    objectType = RepositoryObjectType.TRANSFORMATION;
  } else if ("Job".equalsIgnoreCase(fileType)) {
    objectType = RepositoryObjectType.JOB;
  } else {
    throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnknownFileTypeValue", fileType));
  }
  ReportSubjectLocation location = null;
  if (getTrans().getRepository() == null) {
    // No repository: the location is just a file name on disk.
    switch(objectType) {
      case TRANSFORMATION:
        location = new ReportSubjectLocation(fileName, null, null, objectType);
        break;
      case JOB:
        location = new ReportSubjectLocation(fileName, null, null, objectType);
        break;
      default:
        break;
    }
  } else {
    // Repository-based: split the path into directory and object name.
    int lastSlashIndex = fileName.lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR);
    if (lastSlashIndex < 0) {
      // No directory component given: assume the repository root.
      fileName = RepositoryDirectory.DIRECTORY_SEPARATOR + fileName;
      lastSlashIndex = 0;
    }
    String directoryName = fileName.substring(0, lastSlashIndex + 1);
    String objectName = fileName.substring(lastSlashIndex + 1);
    RepositoryDirectoryInterface directory = data.tree.findDirectory(directoryName);
    if (directory == null) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.RepositoryDirectoryNotFound", directoryName));
    }
    location = new ReportSubjectLocation(null, directory, objectName, objectType);
  }
  if (location == null) {
    throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToDetermineLocation", fileName, fileType));
  }
  if (meta.getOutputType() != OutputType.METADATA) {
    // Add the file location to the list for later processing in one output report
    //
    data.filenames.add(location);
  } else {
    // Load the metadata from the transformation / job...
    // Output it in one row for each input row
    //
    Object[] outputRow = RowDataUtil.resizeArray(row, data.outputRowMeta.size());
    int outputIndex = getInputRowMeta().size();
    List<AreaOwner> imageAreaList = null;
    switch(location.getObjectType()) {
      case TRANSFORMATION:
        TransformationInformation ti = TransformationInformation.getInstance();
        TransMeta transMeta = ti.getTransMeta(location);
        imageAreaList = ti.getImageAreaList(location);
        // TransMeta
        outputRow[outputIndex++] = transMeta;
        break;
      case JOB:
        JobInformation ji = JobInformation.getInstance();
        JobMeta jobMeta = ji.getJobMeta(location);
        imageAreaList = ji.getImageAreaList(location);
        // JobMeta (comment corrected: this branch outputs the JobMeta, not a TransMeta)
        outputRow[outputIndex++] = jobMeta;
        break;
      default:
        break;
    }
    // Name
    if (meta.isIncludingName()) {
      outputRow[outputIndex++] = KettleFileTableModel.getName(location);
    }
    // Description
    if (meta.isIncludingDescription()) {
      outputRow[outputIndex++] = KettleFileTableModel.getDescription(location);
    }
    // Extended Description
    if (meta.isIncludingExtendedDescription()) {
      outputRow[outputIndex++] = KettleFileTableModel.getExtendedDescription(location);
    }
    // created
    if (meta.isIncludingCreated()) {
      outputRow[outputIndex++] = KettleFileTableModel.getCreation(location);
    }
    // modified
    if (meta.isIncludingModified()) {
      outputRow[outputIndex++] = KettleFileTableModel.getModification(location);
    }
    // image: render the transformation/job graph to a PNG byte array
    if (meta.isIncludingImage()) {
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
      try {
        BufferedImage image = KettleFileTableModel.getImage(location);
        ImageIO.write(image, "png", outputStream);
        outputRow[outputIndex++] = outputStream.toByteArray();
      } catch (Exception e) {
        throw new KettleException("Unable to serialize image to PNG", e);
      } finally {
        // NOTE(review): ByteArrayOutputStream.close() is a no-op and never throws,
        // so this catch is dead in practice; if it could throw, throwing here would
        // mask the original exception from the try block — confirm and simplify.
        try {
          outputStream.close();
        } catch (IOException e) {
          throw new KettleException("Unable to serialize image to PNG", e);
        }
      }
    }
    // logging configuration
    if (meta.isIncludingLoggingConfiguration()) {
      outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
    }
    // last execution result
    // NOTE(review): this calls getLogging(location) again, identical to the block
    // above — looks like a copy-paste defect; presumably a "last execution result"
    // accessor was intended. Confirm against KettleFileTableModel before changing.
    if (meta.isIncludingLastExecutionResult()) {
      outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
    }
    // image area list (clickable regions of the rendered image)
    if (meta.isIncludingImageAreaList()) {
      outputRow[outputIndex++] = imageAreaList;
    }
    putRow(data.outputRowMeta, outputRow);
  }
  return true;
}
Example from the pentaho-kettle project illustrating a use of org.apache.commons.vfs2.UserAuthenticationData.Type. Snippet: class GetXMLData, method ReadNextString.
/**
 * Reads the next input row and, when the XML source is taken from a field,
 * loads and parses the XML document (from a file, a URL, or a literal XML
 * string) and applies the configured XPath so nodes can be extracted.
 *
 * @return true when a row was read and the document prepared successfully,
 *         false when the input is exhausted or an unexpected error occurred
 *         (errors are logged, the transformation is stopped and the error
 *         count is incremented)
 */
private boolean ReadNextString() {
  try {
    // Grab another row ...
    data.readrow = getRow();
    if (data.readrow == null) {
      // No more input rows: we are done.
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.FinishedProcessing"));
      }
      return false;
    }
    if (first) {
      first = false;
      data.nrReadRow = getInputRowMeta().size();
      data.inputRowMeta = getInputRowMeta();
      data.outputRowMeta = data.inputRowMeta.clone();
      meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
      // Get total previous fields
      data.totalpreviousfields = data.inputRowMeta.size();
      // For String to <type> conversions, we allocate a conversion meta data row
      // (with Date & Number formatters) as well.
      // Note: the previous version first built an empty RowMeta and iterated over
      // its own (empty) value-meta list — dead code, removed.
      data.convertRowMeta = data.outputRowMeta.cloneToType(ValueMetaInterface.TYPE_STRING);
      // Check that the XML field is provided.
      if (Utils.isEmpty(meta.getXMLField())) {
        logError(BaseMessages.getString(PKG, "GetXMLData.Log.NoField"));
        throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Log.NoField"));
      }
      // Cache the position of the XML field.
      if (data.indexOfXmlField < 0) {
        data.indexOfXmlField = getInputRowMeta().indexOfValue(meta.getXMLField());
        if (data.indexOfXmlField < 0) {
          // The field is unreachable !
          logError(BaseMessages.getString(PKG, "GetXMLData.Log.ErrorFindingField", meta.getXMLField()));
          throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Exception.CouldnotFindField", meta.getXMLField()));
        }
      }
    }
    if (meta.isInFields()) {
      // Get the XML source value from the configured field.
      String fieldValue = getInputRowMeta().getString(data.readrow, data.indexOfXmlField);
      if (log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.XMLStream", meta.getXMLField(), fieldValue));
      }
      if (meta.getIsAFile()) {
        FileObject file = null;
        try {
          // XML source is a file.
          file = KettleVFS.getFileObject(environmentSubstitute(fieldValue), getTransMeta());
          if (meta.isIgnoreEmptyFile() && file.getContent().getSize() == 0) {
            // Empty file and we're told to skip those: recurse to the next row.
            logBasic(BaseMessages.getString(PKG, "GetXMLData.Error.FileSizeZero", "" + file.getName()));
            return ReadNextString();
          }
          // Open the XML document
          if (!setDocument(null, file, false, false)) {
            throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Log.UnableCreateDocument"));
          }
          if (!applyXPath()) {
            throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Log.UnableApplyXPath"));
          }
          addFileToResultFilesname(file);
          if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.LoopFileOccurences", "" + data.nodesize, file.getName().getBaseName()));
          }
        } catch (Exception e) {
          throw new KettleException(e);
        } finally {
          try {
            if (file != null) {
              file.close();
            }
          } catch (Exception e) {
            // Ignore close errors
          }
        }
      } else {
        // XML source is either a URL to fetch or a literal XML string.
        boolean isUrl = false;
        boolean isXmlString = true;
        if (meta.isReadUrl()) {
          isUrl = true;
          isXmlString = false;
        }
        // Open the XML document
        if (!setDocument(fieldValue, null, isXmlString, isUrl)) {
          throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Log.UnableCreateDocument"));
        }
        // Apply XPath and set node list
        if (!applyXPath()) {
          throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Log.UnableApplyXPath"));
        }
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.LoopFileOccurences", "" + data.nodesize));
        }
      }
    }
  } catch (Exception e) {
    logError(BaseMessages.getString(PKG, "GetXMLData.Log.UnexpectedError", e.toString()));
    stopAll();
    logError(Const.getStackTracker(e));
    setErrors(1);
    return false;
  }
  return true;
}
Example from the pentaho-kettle project illustrating a use of org.apache.commons.vfs2.UserAuthenticationData.Type. Snippet: class GetXMLData, method processRow.
/**
 * Fetches the next XML-derived row and forwards it to the next steps.
 * On the first call (when the file list comes from the step settings rather
 * than from an input field) the file list, output row layout and conversion
 * metadata are initialized.
 *
 * @param smi step meta interface
 * @param sdi step data interface
 * @return true while more rows may follow, false when the input is exhausted
 * @throws KettleException when no files are found (and the step is not
 *         configured to tolerate that) or row processing fails
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  // One-time setup when the files are configured on the step itself.
  if (first && !meta.isInFields()) {
    first = false;
    data.files = meta.getFiles(this);
    // Fail fast on an empty file list, unless the step tolerates it.
    if (!meta.isdoNotFailIfNoFile() && data.files.nrOfFiles() == 0) {
      throw new KettleException(BaseMessages.getString(PKG, "GetXMLData.Log.NoFiles"));
    }
    handleMissingFiles();
    // Build the output row layout.
    data.outputRowMeta = new RowMeta();
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
    // Conversion metadata for String -> <type> (carries Date & Number formatters).
    data.convertRowMeta = data.outputRowMeta.cloneToType(ValueMetaInterface.TYPE_STRING);
  }
  // Grab the next row from the XML source.
  Object[] rowData = getXMLRow();
  if (data.errorInRowButContinue) {
    // The row errored but error handling says: carry on without emitting it.
    return true;
  }
  if (rowData == null) {
    // End of data (or fatal error): signal the receivers that we are done.
    setOutputDone();
    return false;
  }
  return putRowOut(rowData);
}
Example from the pentaho-kettle project illustrating a use of org.apache.commons.vfs2.UserAuthenticationData.Type. Snippet: class Spoon, method showPluginInfo.
/**
 * Show a plugin browser: collects the registered plugins of every
 * non-fragment plugin type and displays them in a subject data browser
 * dialog. Any failure is reported in an error dialog.
 */
public void showPluginInfo() {
  try {
    // Gather, per plugin type name, the row layout and the plugin rows.
    Map<String, RowMetaInterface> rowMetaBySubject = new HashMap<>();
    Map<String, List<Object[]>> rowsBySubject = new HashMap<>();
    PluginRegistry pluginRegistry = PluginRegistry.getInstance();
    for (Class<? extends PluginTypeInterface> typeClass : pluginRegistry.getPluginTypes()) {
      PluginTypeInterface pluginType = pluginRegistry.getPluginType(typeClass);
      if (pluginType.isFragment()) {
        // Fragments only extend other plugins; they are not listed on their own.
        continue;
      }
      String subject = pluginType.getName();
      RowBuffer pluginRows = pluginRegistry.getPluginInformation(typeClass);
      rowMetaBySubject.put(subject, pluginRows.getRowMeta());
      rowsBySubject.put(subject, pluginRows.getBuffer());
    }
    // Hand everything over to a generic subject data browser.
    new SubjectDataBrowserDialog(shell, rowMetaBySubject, rowsBySubject, "Plugin browser", "Plugin type").open();
  } catch (Exception e) {
    new ErrorDialog(shell, "Error", "Error listing plugins", e);
  }
}
Example from the pentaho-kettle project illustrating a use of org.apache.commons.vfs2.UserAuthenticationData.Type. Snippet: class Spoon, method setMenu.
/**
 * Builds and attaches the right-click context menu for the given tree,
 * based on the single selected element: when the selection is a Class the
 * corresponding "New ..." popup is chosen; when it is an instance, the
 * element-specific popup is chosen. For database connections, individual
 * menu items (explore / clear cache / share) are enabled and labeled to
 * match the selected connection.
 *
 * @param tree the tree widget whose popup menu is (re)assigned
 */
private synchronized void setMenu(Tree tree) {
  TreeSelection[] objects = getTreeObjects(tree);
  if (objects.length != 1) {
    // Multi-selection is not yet supported; we can do this later when the
    // OSX bug goes away.
    return;
  }
  TreeSelection object = objects[0];
  selectionObject = object.getSelection();
  Object selection = selectionObject;
  selectionObjectParent = object.getParent();
  // Not clicked on a real object: the selection is a Class, meaning a
  // category node ("New ..." menus).
  XulMenupopup spoonMenu = null;
  if (selection instanceof Class<?>) {
    if (selection.equals(TransMeta.class)) {
      // New transformation
      spoonMenu = (XulMenupopup) menuMap.get("trans-class");
    } else if (selection.equals(JobMeta.class)) {
      // New job
      spoonMenu = (XulMenupopup) menuMap.get("job-class");
    } else if (selection.equals(TransHopMeta.class)) {
      // New hop
      spoonMenu = (XulMenupopup) menuMap.get("trans-hop-class");
    } else if (selection.equals(DatabaseMeta.class)) {
      spoonMenu = (XulMenupopup) menuMap.get("database-class");
    } else if (selection.equals(PartitionSchema.class)) {
      // New partition schema
      spoonMenu = (XulMenupopup) menuMap.get("partition-schema-class");
    } else if (selection.equals(ClusterSchema.class)) {
      spoonMenu = (XulMenupopup) menuMap.get("cluster-schema-class");
    } else if (selection.equals(SlaveServer.class)) {
      // NOTE(review): the key here is "slave-cluster-class" while the instance
      // branch below uses "slave-server-inst" — confirm against the XUL menu
      // definitions that this asymmetry is intentional.
      spoonMenu = (XulMenupopup) menuMap.get("slave-cluster-class");
    } else {
      spoonMenu = null;
    }
  } else {
    // Clicked on a concrete object: pick the instance-specific popup.
    if (selection instanceof TransMeta) {
      spoonMenu = (XulMenupopup) menuMap.get("trans-inst");
    } else if (selection instanceof JobMeta) {
      spoonMenu = (XulMenupopup) menuMap.get("job-inst");
    } else if (selection instanceof PluginInterface) {
      spoonMenu = (XulMenupopup) menuMap.get("step-plugin");
    } else if (selection instanceof DatabaseMeta) {
      spoonMenu = (XulMenupopup) menuMap.get("database-inst");
      // disable for now if the connection is an SAP ERP type of database...
      //
      XulMenuitem item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById("database-inst-explore");
      if (item != null) {
        final DatabaseMeta databaseMeta = (DatabaseMeta) selection;
        item.setDisabled(!databaseMeta.isExplorable());
      }
      item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById("database-inst-clear-cache");
      if (item != null) {
        // NOTE(review): this branch casts selectionObject while its siblings cast
        // selection — they reference the same object, but the inconsistency is
        // confusing; consider unifying.
        final DatabaseMeta databaseMeta = (DatabaseMeta) selectionObject;
        // Label becomes "Clear DB cache of <connection name>".
        item.setLabel(BaseMessages.getString(PKG, "Spoon.Menu.Popup.CONNECTIONS.ClearDBCache") + // Clear
        databaseMeta.getName());
      }
      item = (XulMenuitem) mainSpoonContainer.getDocumentRoot().getElementById("database-inst-share");
      if (item != null) {
        // Toggle the share/unshare label depending on the current shared state.
        final DatabaseMeta databaseMeta = (DatabaseMeta) selection;
        if (databaseMeta.isShared()) {
          item.setLabel(BaseMessages.getString(PKG, "Spoon.Menu.Popup.CONNECTIONS.UnShare"));
        } else {
          item.setLabel(BaseMessages.getString(PKG, "Spoon.Menu.Popup.CONNECTIONS.Share"));
        }
      }
    } else if (selection instanceof StepMeta) {
      spoonMenu = (XulMenupopup) menuMap.get("step-inst");
    } else if (selection instanceof JobEntryCopy) {
      spoonMenu = (XulMenupopup) menuMap.get("job-entry-copy-inst");
    } else if (selection instanceof TransHopMeta) {
      spoonMenu = (XulMenupopup) menuMap.get("trans-hop-inst");
    } else if (selection instanceof PartitionSchema) {
      spoonMenu = (XulMenupopup) menuMap.get("partition-schema-inst");
    } else if (selection instanceof ClusterSchema) {
      spoonMenu = (XulMenupopup) menuMap.get("cluster-schema-inst");
    } else if (selection instanceof SlaveServer) {
      spoonMenu = (XulMenupopup) menuMap.get("slave-server-inst");
    }
  }
  if (spoonMenu != null) {
    ConstUI.displayMenu(spoonMenu, tree);
  } else {
    tree.setMenu(null);
  }
  // Let plugins extend/modify the popup menu.
  createPopUpMenuExtension();
}
Aggregations