Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho.
The class AutoDoc, method processRow.
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
  meta = (AutoDocMeta) smi;
  data = (AutoDocData) sdi;

  Object[] row = getRow();
  if (row == null) {
    if (data.filenames.isEmpty()) {
      // Nothing to see here, move along!
      //
      setOutputDone();
      return false;
    }

    // End of the line, create the documentation...
    //
    FileObject targetFile = KettleVFS.getFileObject(environmentSubstitute(meta.getTargetFilename()));
    String targetFilename = KettleVFS.getFilename(targetFile);

    // Create the report builder
    //
    KettleReportBuilder kettleReportBuilder = new KettleReportBuilder(this, data.filenames, targetFilename, meta);

    try {
      // Boot the reporting engine once, before the first report is rendered...
      //
      if (!ClassicEngineBoot.getInstance().isBootDone()) {
        ObjectUtilities.setClassLoader(getClass().getClassLoader());
        ObjectUtilities.setClassLoaderSource(ObjectUtilities.CLASS_CONTEXT);
        LibLoaderBoot.getInstance().start();
        LibFontBoot.getInstance().start();
        ClassicEngineBoot.getInstance().start();
      }

      // Do the reporting thing...
      //
      kettleReportBuilder.createReport();
      kettleReportBuilder.render();

      Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
      int outputIndex = 0;
      outputRowData[outputIndex++] = targetFilename;

      // Pass along the data to the next steps...
      //
      putRow(data.outputRowMeta, outputRowData);

      // Add the target file to the result file list
      //
      ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, targetFile, getTransMeta().getName(), toString());
      resultFile.setComment("This file was generated by the 'Auto Documentation Output' step");
      addResultFile(resultFile);
    } catch (Exception e) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToRenderReport"), e);
    }

    setOutputDone();
    return false;
  }
  if (first) {
    first = false;

    data.outputRowMeta = getInputRowMeta().clone();
    meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);

    // Get the filename field index...
    //
    String filenameField = environmentSubstitute(meta.getFilenameField());
    data.fileNameFieldIndex = getInputRowMeta().indexOfValue(filenameField);
    if (data.fileNameFieldIndex < 0) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.FilenameFieldNotFound", filenameField));
    }

    // Get the file type field index...
    //
    String fileTypeField = environmentSubstitute(meta.getFileTypeField());
    data.fileTypeFieldIndex = getInputRowMeta().indexOfValue(fileTypeField);
    if (data.fileTypeFieldIndex < 0) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.FileTypeFieldNotFound", fileTypeField));
    }

    data.repository = getTrans().getRepository();
    if (data.repository != null) {
      data.tree = data.repository.loadRepositoryDirectoryTree();
    }

    // Initialize the repository information handlers (images, metadata, loading, etc)
    //
    TransformationInformation.init(getTrans().getRepository());
    JobInformation.init(getTrans().getRepository());
  }
  // One more transformation or job to place in the documentation.
  //
  String fileName = getInputRowMeta().getString(row, data.fileNameFieldIndex);
  String fileType = getInputRowMeta().getString(row, data.fileTypeFieldIndex);

  RepositoryObjectType objectType;
  if ("Transformation".equalsIgnoreCase(fileType)) {
    objectType = RepositoryObjectType.TRANSFORMATION;
  } else if ("Job".equalsIgnoreCase(fileType)) {
    objectType = RepositoryObjectType.JOB;
  } else {
    throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnknownFileTypeValue", fileType));
  }

  ReportSubjectLocation location = null;
  if (getTrans().getRepository() == null) {
    switch (objectType) {
      case TRANSFORMATION:
      case JOB:
        // Without a repository, the location is simply the (VFS) filename.
        location = new ReportSubjectLocation(fileName, null, null, objectType);
        break;
      default:
        break;
    }
  } else {
    int lastSlashIndex = fileName.lastIndexOf(RepositoryDirectory.DIRECTORY_SEPARATOR);
    if (lastSlashIndex < 0) {
      fileName = RepositoryDirectory.DIRECTORY_SEPARATOR + fileName;
      lastSlashIndex = 0;
    }
    String directoryName = fileName.substring(0, lastSlashIndex + 1);
    String objectName = fileName.substring(lastSlashIndex + 1);
    RepositoryDirectoryInterface directory = data.tree.findDirectory(directoryName);
    if (directory == null) {
      throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.RepositoryDirectoryNotFound", directoryName));
    }
    location = new ReportSubjectLocation(null, directory, objectName, objectType);
  }
  if (location == null) {
    throw new KettleException(BaseMessages.getString(PKG, "AutoDoc.Exception.UnableToDetermineLocation", fileName, fileType));
  }
  if (meta.getOutputType() != OutputType.METADATA) {
    // Add the file location to the list for later processing in one output report
    //
    data.filenames.add(location);
  } else {
    // Load the metadata from the transformation / job...
    // Output it in one row for each input row
    //
    Object[] outputRow = RowDataUtil.resizeArray(row, data.outputRowMeta.size());
    int outputIndex = getInputRowMeta().size();

    List<AreaOwner> imageAreaList = null;

    switch (location.getObjectType()) {
      case TRANSFORMATION:
        TransformationInformation ti = TransformationInformation.getInstance();
        TransMeta transMeta = ti.getTransMeta(location);
        imageAreaList = ti.getImageAreaList(location);

        // TransMeta
        outputRow[outputIndex++] = transMeta;
        break;
      case JOB:
        JobInformation ji = JobInformation.getInstance();
        JobMeta jobMeta = ji.getJobMeta(location);
        imageAreaList = ji.getImageAreaList(location);

        // JobMeta
        outputRow[outputIndex++] = jobMeta;
        break;
      default:
        break;
    }

    // Name
    if (meta.isIncludingName()) {
      outputRow[outputIndex++] = KettleFileTableModel.getName(location);
    }

    // Description
    if (meta.isIncludingDescription()) {
      outputRow[outputIndex++] = KettleFileTableModel.getDescription(location);
    }

    // Extended Description
    if (meta.isIncludingExtendedDescription()) {
      outputRow[outputIndex++] = KettleFileTableModel.getExtendedDescription(location);
    }

    // Created
    if (meta.isIncludingCreated()) {
      outputRow[outputIndex++] = KettleFileTableModel.getCreation(location);
    }

    // Modified
    if (meta.isIncludingModified()) {
      outputRow[outputIndex++] = KettleFileTableModel.getModification(location);
    }

    // Image
    if (meta.isIncludingImage()) {
      ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
      try {
        BufferedImage image = KettleFileTableModel.getImage(location);
        ImageIO.write(image, "png", outputStream);
        outputRow[outputIndex++] = outputStream.toByteArray();
      } catch (Exception e) {
        throw new KettleException("Unable to serialize image to PNG", e);
      } finally {
        try {
          outputStream.close();
        } catch (IOException e) {
          throw new KettleException("Unable to serialize image to PNG", e);
        }
      }
    }

    // Logging configuration
    if (meta.isIncludingLoggingConfiguration()) {
      outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
    }

    // Last execution result
    if (meta.isIncludingLastExecutionResult()) {
      outputRow[outputIndex++] = KettleFileTableModel.getLogging(location);
    }

    // Image area list
    if (meta.isIncludingImageAreaList()) {
      outputRow[outputIndex++] = imageAreaList;
    }

    putRow(data.outputRowMeta, outputRow);
  }
  return true;
}
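
The method above resolves its target file through KettleVFS and then normalizes it back to a string filename before handing it to the report builder and the result-file list. Below is a minimal standalone sketch of that resolve-then-normalize pattern using plain Commons VFS (no Kettle runtime); the ram:// path and the class name are hypothetical placeholders.

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemManager;
import org.apache.commons.vfs2.VFS;

public class TargetFileSketch {
  public static void main(String[] args) throws Exception {
    FileSystemManager fsManager = VFS.getManager();

    // Resolve the (already variable-substituted) target path to a FileObject.
    // Resolution does not require the file to exist yet.
    FileObject targetFile = fsManager.resolveFile("ram://docs/kettle-autodoc.pdf");

    // A normalized string form of the resolved file, serving the same purpose
    // here that KettleVFS.getFilename(targetFile) serves in the step.
    String targetFilename = targetFile.getName().getURI();
    System.out.println("Report will be written to: " + targetFilename);

    targetFile.close();
  }
}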
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho.
The class JobEntryCheckDbConnectionsIT, method cleanup.
@After
public void cleanup() {
  try {
    FileObject dbFile = KettleVFS.getFileObject(H2_DATABASE + ".h2.db");
    if (dbFile.exists()) {
      System.out.println("deleting file");
      dbFile.delete();
    }
  } catch (KettleFileException | FileSystemException ignored) {
    // Ignore, we tried cleaning up
  }
}
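
For reference, the same exists-then-delete cleanup can be written against plain Commons VFS so it runs without the Kettle test harness; the ram:// file name below is a hypothetical stand-in for H2_DATABASE + ".h2.db".

import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.VFS;

public class CleanupSketch {
  public static void main(String[] args) {
    try {
      FileObject dbFile = VFS.getManager().resolveFile("ram://testdb.h2.db");
      if (dbFile.exists()) {
        // delete() removes only this file; deleting a folder's contents
        // would take deleteAll() or delete(FileSelector) instead.
        dbFile.delete();
      }
      dbFile.close();
    } catch (FileSystemException ignored) {
      // Best-effort cleanup, mirroring the test above.
    }
  }
}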
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho.
The class JobEntryAddResultFilenames, method processFile.
private boolean processFile(String filename, String wildcard, Job parentJob, Result result) {
  boolean rcode = true;
  FileObject filefolder = null;
  String realFilefoldername = environmentSubstitute(filename);
  String realwildcard = environmentSubstitute(wildcard);

  try {
    if (parentJobMeta.getNamedClusterEmbedManager() != null) {
      parentJobMeta.getNamedClusterEmbedManager().passEmbeddedMetastoreKey(this, parentJobMeta.getEmbeddedMetastoreProviderKey());
    }
    filefolder = KettleVFS.getFileObject(realFilefoldername, this);

    if (filefolder.exists()) {
      if (filefolder.getType() == FileType.FILE) {
        // Add filename to Resultfilenames ...
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.AddingFileToResult", filefolder.toString()));
        }
        ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(filefolder.toString(), this), parentJob.getJobname(), toString());
        result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
      } else {
        FileObject[] list = filefolder.findFiles(new TextFileSelector(filefolder.toString(), realwildcard));
        for (int i = 0; i < list.length && !parentJob.isStopped(); i++) {
          // Add filename to Resultfilenames ...
          if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.AddingFileToResult", list[i].toString()));
          }
          ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(list[i].toString(), this), parentJob.getJobname(), toString());
          result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
        }
      }
    } else {
      // The file or folder can not be found
      if (log.isBasic()) {
        logBasic(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.FileCanNotbeFound", realFilefoldername));
      }
      rcode = false;
    }
  } catch (Exception e) {
    rcode = false;
    logError(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.CouldNotProcess", realFilefoldername, e.getMessage()), e);
  } finally {
    if (filefolder != null) {
      try {
        filefolder.close();
        filefolder = null;
      } catch (IOException ex) {
        // Ignore
      }
    }
  }
  return rcode;
}
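
The folder branch above delegates the wildcard match to Kettle's TextFileSelector. A standalone approximation of that selector with plain Commons VFS looks like this, assuming (as the job entry does) that the wildcard is a Java regex applied to each file's base name; the class name and the file:// URI in main are hypothetical.

import java.util.regex.Pattern;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSelectInfo;
import org.apache.commons.vfs2.FileSelector;
import org.apache.commons.vfs2.FileType;
import org.apache.commons.vfs2.VFS;

public class WildcardScanSketch {

  static FileObject[] scan(String folderUri, String wildcardRegex) throws Exception {
    final Pattern pattern = Pattern.compile(wildcardRegex);
    FileObject folder = VFS.getManager().resolveFile(folderUri);

    return folder.findFiles(new FileSelector() {
      @Override
      public boolean includeFile(FileSelectInfo info) throws Exception {
        // Only plain files whose base name matches the wildcard.
        return info.getFile().getType() == FileType.FILE
            && pattern.matcher(info.getFile().getName().getBaseName()).matches();
      }

      @Override
      public boolean traverseDescendents(FileSelectInfo info) {
        // Stay in the top-level folder, like the job entry's single-folder scan.
        return info.getDepth() == 0;
      }
    });
  }

  public static void main(String[] args) throws Exception {
    for (FileObject fo : scan("file:///tmp", ".*\\.txt")) {
      System.out.println(fo.getName().getURI());
    }
  }
}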
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho.
The class JobEntryZipFileIT, method processFile_ReturnsTrue_OnSuccess.
@Test
public void processFile_ReturnsTrue_OnSuccess() throws Exception {
  final String zipPath = createTempZipFileName("pdi-15013");
  final String content = "temp file";
  final File tempFile = createTempFile(content);
  tempFile.deleteOnExit();
  try {
    Result result = new Result();
    JobEntryZipFile entry = new JobEntryZipFile();
    assertTrue(entry.processRowFile(new Job(), result, zipPath, null, null, tempFile.getAbsolutePath(), null, false));

    FileObject zip = KettleVFS.getFileObject(zipPath);
    assertTrue("Zip archive should be created", zip.exists());

    ByteArrayOutputStream os = new ByteArrayOutputStream();
    IOUtils.copy(zip.getContent().getInputStream(), os);
    ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(os.toByteArray()));

    ZipEntry zipEntry = zis.getNextEntry();
    assertEquals("Input file should be put into the archive", tempFile.getName(), zipEntry.getName());

    os.reset();
    IOUtils.copy(zis, os);
    assertEquals("File's content should be equal to original", content, new String(os.toByteArray()));
  } finally {
    tempFile.delete();
    File tempZipFile = new File(zipPath);
    tempZipFile.delete();
  }
}
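
Instead of copying the archive's bytes into a ZipInputStream as the test does, Commons VFS can also address an entry inside the archive directly through its zip: scheme. A small sketch, with hypothetical archive and entry paths:

import java.io.InputStream;
import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.VFS;

public class ZipReadSketch {
  public static void main(String[] args) throws Exception {
    // zip:<archive-uri>!<entry-path> resolves a file *inside* the archive.
    FileObject entry = VFS.getManager().resolveFile("zip:file:///tmp/pdi-15013.zip!/pdi-15013.tmp");

    try (InputStream in = entry.getContent().getInputStream()) {
      String content = IOUtils.toString(in, "UTF-8");
      System.out.println("Entry content: " + content);
    }
    entry.close();
  }
}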
Use of org.apache.commons.vfs2.FileObject in project pentaho-kettle by pentaho.
The class ResourceUtil, method serializeResourceExportInterface.
/**
 * Serializes the referenced resource export interface (Job, Transformation, Mapping, Step, Job Entry, etc.) to a
 * ZIP file.
 *
 * @param zipFilename
 *          The ZIP file to put the content in
 * @param resourceExportInterface
 *          the interface to serialize
 * @param space
 *          the space to use for variable replacement
 * @param repository
 *          the repository to load objects from (or null if not used)
 * @param metaStore
 *          the metastore to load additional object information from (or null if not used)
 * @param injectXML
 *          The XML to inject into the resulting ZIP archive (optional, can be null)
 * @param injectFilename
 *          The name of the file for the XML to inject in the ZIP archive (optional, can be null)
 * @return The full VFS filename reference to the serialized export interface XML file in the ZIP archive.
 * @throws KettleException
 *           in case anything goes wrong during serialization
 */
public static final TopLevelResource serializeResourceExportInterface(String zipFilename, ResourceExportInterface resourceExportInterface, VariableSpace space, Repository repository, IMetaStore metaStore, String injectXML, String injectFilename) throws KettleException {
  ZipOutputStream out = null;
  try {
    Map<String, ResourceDefinition> definitions = new HashMap<String, ResourceDefinition>();

    // In case an extra payload should be injected into the exported ZIP file...
    //
    if (injectXML != null) {
      ResourceDefinition resourceDefinition = new ResourceDefinition(injectFilename, injectXML);
      definitions.put(injectFilename, resourceDefinition);
    }

    ResourceNamingInterface namingInterface = new SequenceResourceNaming();
    String topLevelResource = resourceExportInterface.exportResources(space, definitions, namingInterface, repository, metaStore);

    if (topLevelResource != null && !definitions.isEmpty()) {
      // Create the ZIP file...
      //
      FileObject fileObject = KettleVFS.getFileObject(zipFilename, space);

      // Store the XML in the definitions in a ZIP file...
      //
      out = new ZipOutputStream(KettleVFS.getOutputStream(fileObject, false));

      for (Map.Entry<String, ResourceDefinition> entry : definitions.entrySet()) {
        String filename = entry.getKey();
        ResourceDefinition resourceDefinition = entry.getValue();

        ZipEntry zipEntry = new ZipEntry(resourceDefinition.getFilename());

        String comment = BaseMessages.getString(PKG, "ResourceUtil.SerializeResourceExportInterface.ZipEntryComment.OriginatingFile", filename, Const.NVL(resourceDefinition.getOrigin(), "-"));
        zipEntry.setComment(comment);

        out.putNextEntry(zipEntry);
        out.write(resourceDefinition.getContent().getBytes());
        out.closeEntry();
      }
      String zipURL = fileObject.getName().toString();
      return new TopLevelResource(topLevelResource, zipURL, "zip:" + zipURL + "!" + topLevelResource);
    } else {
      throw new KettleException(BaseMessages.getString(PKG, "ResourceUtil.Exception.NoResourcesFoundToExport"));
    }
  } catch (Exception e) {
    throw new KettleException(BaseMessages.getString(PKG, "ResourceUtil.Exception.ErrorSerializingExportInterface", resourceExportInterface.toString()), e);
  } finally {
    if (out != null) {
      try {
        out.close();
      } catch (IOException e) {
        throw new KettleException(BaseMessages.getString(PKG, "ResourceUtil.Exception.ErrorClosingZipStream", zipFilename));
      }
    }
  }
}
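
The core of the method is the ZIP-writing loop: open a VFS output stream on the target file, wrap it in a java.util.zip.ZipOutputStream, and write one named entry per resource definition. A minimal standalone sketch of that core, with hypothetical file names and content:

import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.VFS;

public class ZipExportSketch {
  public static void main(String[] args) throws Exception {
    FileObject zipFile = VFS.getManager().resolveFile("file:///tmp/export.zip");

    // getContent().getOutputStream(false) overwrites any existing file,
    // mirroring KettleVFS.getOutputStream(fileObject, false) in the method above.
    try (ZipOutputStream out = new ZipOutputStream(zipFile.getContent().getOutputStream(false))) {
      ZipEntry entry = new ZipEntry("transformation.ktr");
      entry.setComment("Originating file: /path/on/disk/transformation.ktr");
      out.putNextEntry(entry);
      out.write("<transformation/>".getBytes("UTF-8"));
      out.closeEntry();
    }

    // The returned reference has the form zip:<url>!<entry>, matching the
    // TopLevelResource built by the method above.
    String zipURL = zipFile.getName().toString();
    System.out.println("zip:" + zipURL + "!" + "transformation.ktr");
    zipFile.close();
  }
}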