Use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho:
class BaseStreamingDialog, method createNewSubtrans.
/**
 * Creates a new sub-transformation from the current dialog state and
 * persists it — to the connected repository if one is available,
 * otherwise to an XML file on disk. On success the dialog's path field
 * is updated and the sub-transformation is opened.
 */
protected void createNewSubtrans() {
  TransMeta subTransMeta = createSubTransMeta();
  boolean persisted = false;
  String subTransPath = null;
  boolean usingRepository = spoonInstance.getRepository() != null;
  if (usingRepository) {
    try {
      persisted = spoonInstance.saveToRepository(subTransMeta);
      subTransPath = getRepositoryRelativePath(subTransMeta.getPathAndName());
    } catch (KettleException e) {
      new ErrorDialog(shell, BaseMessages.getString(PKG, "BaseStreamingDialog.File.Save.Fail.Title"), BaseMessages.getString(PKG, "BaseStreamingDialog.File.Save.Fail.Message"), e);
    }
  } else {
    persisted = spoonInstance.saveXMLFile(subTransMeta, false);
    if (persisted) {
      try {
        subTransPath = getRelativePath(KettleVFS.getFileObject(subTransMeta.getFilename()).toString());
      } catch (KettleFileException e) {
        new ErrorDialog(shell, BaseMessages.getString(PKG, "BaseStreamingDialog.File.Save.Fail.Title"), BaseMessages.getString(PKG, "BaseStreamingDialog.File.Save.Fail.Message"), e);
      }
    }
  }
  // Guard: nothing else to do unless the save succeeded and yielded a path.
  if (!persisted || subTransPath == null) {
    return;
  }
  wTransPath.setText(subTransPath);
  createSubtrans(subTransMeta);
  // One-time popup; its return value decides whether it shows again.
  if (props.showNewSubtransPopup()) {
    NewSubtransDialog popup = new NewSubtransDialog(shell, SWT.NONE);
    props.setShowNewSubtransPopup(!popup.open());
  }
}
Use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho:
class JobMeta, method exportResources.
/**
 * Exports this job (and, recursively, the resources of all its job entries)
 * into the supplied definitions map, keyed for a flat-file export.
 *
 * @param space the variable space used to resolve the filename
 * @param definitions accumulates exported resources; this job is added to it
 * @param namingInterface names exported resources and tracks directory parameters
 * @param repository optional repository to load referenced resources from
 * @param metaStore metastore for non-kettle metadata
 * @return the export name assigned to this job by the naming interface
 * @throws KettleException if the job file cannot be read or resolved
 */
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface namingInterface, Repository repository, IMetaStore metaStore) throws KettleException {
String resourceName = null;
try {
// Handle naming for both repository-based and XML-file-based jobs...
//
String baseName;
String originalPath;
String fullname;
String extension = "kjb";
if (Utils.isEmpty(getFilename())) {
// No filename set: assume the job lives in a repository.
//
originalPath = directory.getPath();
baseName = getName();
fullname = directory.getPath() + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR) + getName() + "." + //
extension;
} else {
// Filename set: assume a file-based job, resolved through VFS.
//
FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
originalPath = fileObject.getParent().getName().getPath();
baseName = fileObject.getName().getBaseName();
fullname = fileObject.getName().getPath();
}
resourceName = namingInterface.nameResource(baseName, originalPath, extension, ResourceNamingInterface.FileNamingType.JOB);
// NOTE(review): lookup is by resourceName but the put below is by fullname —
// looks inconsistent; confirm against ResourceNamingInterface's contract
// before changing, since downstream export code may rely on the current keys.
ResourceDefinition definition = definitions.get(resourceName);
if (definition == null) {
// If we do this once, it will be plenty :-)
//
JobMeta jobMeta = (JobMeta) this.realClone(false);
// All objects get re-located to the root folder,
// but, when exporting, we need to see current directory
// in order to make 'Internal.Entry.Current.Directory' variable work
jobMeta.setRepositoryDirectory(directory);
// Export the resources of every job entry (legacy-compatible path first).
//
for (JobEntryCopy jobEntry : jobMeta.jobcopies) {
compatibleJobEntryExportResources(jobEntry.getEntry(), jobMeta, definitions, namingInterface, repository);
jobEntry.getEntry().exportResources(jobMeta, definitions, namingInterface, repository, metaStore);
}
// Set a number of parameters for all the data files referenced so far...
//
Map<String, String> directoryMap = namingInterface.getDirectoryMap();
if (directoryMap != null) {
for (Map.Entry<String, String> entry : directoryMap.entrySet()) {
jobMeta.addParameterDefinition(entry.getValue(), entry.getKey(), "Data file path discovered during export");
}
}
// At the end, add ourselves to the map...
//
String jobMetaContent = jobMeta.getXML();
definition = new ResourceDefinition(resourceName, jobMetaContent);
// Set the origin: repository path for repository jobs, filename otherwise.
//
if (Utils.isEmpty(this.getFilename())) {
// Repository
definition.setOrigin(fullname);
} else {
definition.setOrigin(this.getFilename());
}
definitions.put(fullname, definition);
}
} catch (FileSystemException | KettleFileException e) {
// Identical handling for both failure modes: wrap with context.
throw new KettleException(BaseMessages.getString(PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", getFilename()), e);
}
return resourceName;
}
Use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho:
class TransMeta, method exportResources.
/**
 * Exports the specified objects to a flat-file system, adding content with filename keys to a set of definitions. The
 * supplied resource naming interface allows the object to name appropriately without worrying about those parts of
 * the implementation specific details.
 *
 * @param space
 *          the variable space to use
 * @param definitions
 *          accumulates exported resources; this transformation is added to it
 * @param resourceNamingInterface
 *          names exported resources and tracks directory parameters
 * @param repository
 *          The repository to optionally load other resources from (to be converted to XML)
 * @param metaStore
 *          the metaStore in which non-kettle metadata could reside.
 *
 * @return the filename of the exported resource
 * @throws KettleException if the transformation file cannot be read or resolved
 */
@Override
public String exportResources(VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore) throws KettleException {
try {
// Handle naming for both repository-based and XML-file-based resources...
//
String baseName;
String originalPath;
String fullname;
String extension = "ktr";
if (Utils.isEmpty(getFilename())) {
// No filename set: assume the transformation lives in a repository.
//
originalPath = directory.getPath();
baseName = getName();
fullname = directory.getPath() + (directory.getPath().endsWith(RepositoryDirectory.DIRECTORY_SEPARATOR) ? "" : RepositoryDirectory.DIRECTORY_SEPARATOR) + getName() + "." + //
extension;
} else {
// Filename set: assume a file-based transformation, resolved through VFS.
//
FileObject fileObject = KettleVFS.getFileObject(space.environmentSubstitute(getFilename()), space);
originalPath = fileObject.getParent().getURL().toString();
baseName = fileObject.getName().getBaseName();
fullname = fileObject.getURL().toString();
}
String exportFileName = resourceNamingInterface.nameResource(baseName, originalPath, extension, ResourceNamingInterface.FileNamingType.TRANSFORMATION);
// NOTE(review): lookup is by exportFileName but the put below is by fullname —
// looks inconsistent; confirm before changing, since export consumers may
// rely on the current keying.
ResourceDefinition definition = definitions.get(exportFileName);
if (definition == null) {
// If we do this once, it will be plenty :-)
//
TransMeta transMeta = (TransMeta) this.realClone(false);
// Export the resources of every step first.
//
for (StepMeta stepMeta : transMeta.getSteps()) {
stepMeta.exportResources(space, definitions, resourceNamingInterface, repository, metaStore);
}
// Change the filename, calling this sets internal variables
// inside of the transformation.
//
transMeta.setFilename(exportFileName);
// All objects get re-located to the root folder
//
transMeta.setRepositoryDirectory(new RepositoryDirectory());
// Set a number of parameters for all the data files referenced so far...
//
Map<String, String> directoryMap = resourceNamingInterface.getDirectoryMap();
if (directoryMap != null) {
for (Map.Entry<String, String> entry : directoryMap.entrySet()) {
transMeta.addParameterDefinition(entry.getValue(), entry.getKey(), "Data file path discovered during export");
}
}
// At the end, add ourselves to the map...
//
String transMetaContent = transMeta.getXML();
definition = new ResourceDefinition(exportFileName, transMetaContent);
// Set the origin: repository path for repository transformations, filename otherwise.
//
if (Utils.isEmpty(this.getFilename())) {
// Repository
definition.setOrigin(fullname);
} else {
definition.setOrigin(this.getFilename());
}
definitions.put(fullname, definition);
}
return exportFileName;
} catch (FileSystemException | KettleFileException e) {
// Identical handling for both failure modes: wrap with context.
throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Exception.ErrorOpeningOrValidatingTheXMLFile", getFilename()), e);
}
}
Use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho:
class JobEntryFileCompare, method equalFileContents.
/**
 * Check whether 2 files have the same contents.
 *
 * @param file1
 *          first file to compare
 * @param file2
 *          second file to compare
 * @return true if the files are byte-for-byte identical, false otherwise
 *
 * @throws KettleFileException
 *           upon IO problems
 */
protected boolean equalFileContents(FileObject file1, FileObject file2) throws KettleFileException {
// Compare the streams byte by byte, using read() == -1 to detect end of
// stream. The previous available()-based loop was unreliable: per the
// InputStream contract, available() is only an estimate and may return 0
// even when more bytes remain, which could report unequal files as equal
// (or vice versa). try-with-resources replaces the manual finally/close.
try (DataInputStream in1 = new DataInputStream(new BufferedInputStream(KettleVFS.getInputStream(KettleVFS.getFilename(file1), this)));
     DataInputStream in2 = new DataInputStream(new BufferedInputStream(KettleVFS.getInputStream(KettleVFS.getFilename(file2), this)))) {
int b1;
int b2;
do {
b1 = in1.read();
b2 = in2.read();
if (b1 != b2) {
// Either a differing byte, or one file ended before the other.
return false;
}
} while (b1 != -1);
// Both streams reached end-of-stream at the same position.
return true;
} catch (IOException e) {
throw new KettleFileException(e);
}
}
Use of org.pentaho.di.core.exception.KettleFileException in project pentaho-kettle by pentaho:
class JobEntryMysqlBulkFile, method execute.
/**
 * Executes this job entry: builds and runs a MySQL
 * "SELECT ... INTO OUTFILE/DUMPFILE" statement that dumps a table
 * (or selected columns of it) to a file on the database server.
 *
 * @param previousResult the result of the previous job entry
 * @param nr the job entry number (not used in this implementation)
 * @return the Result, with setResult(true) only when the dump succeeded
 */
public Result execute(Result previousResult, int nr) {
// Fragments of the generated SQL statement, assembled below.
String LimitNbrLignes = "";
String ListOfColumn = "*";
String strHighPriority = "";
String OutDumpText = "";
String OptionEnclosed = "";
String FieldSeparator = "";
String LinesTerminated = "";
Result result = previousResult;
result.setResult(false);
// Let's check the filename ...
if (filename != null) {
// User has specified a file, We can continue ...
String realFilename = getRealFilename();
File file = new File(realFilename);
// iffileexists encodes the existing-file policy: 2 = fail, 1 = do
// nothing (success), 0 = rename with a timestamp (see branches below).
if (file.exists() && iffileexists == 2) {
// the file exists and user want to Fail
result.setResult(false);
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
} else if (file.exists() && iffileexists == 1) {
// the file exists and user want to do nothing
result.setResult(true);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
}
} else {
if (file.exists() && iffileexists == 0) {
// File exists and user want to renamme it with unique name
// Format Date
// Try to clean filename (without wildcard)
// NOTE(review): only the last 4 characters are inspected, so this
// assumes a '.'-plus-3-character extension; longer extensions are
// not detected and the timestamp is simply appended.
String wildcard = realFilename.substring(realFilename.length() - 4, realFilename.length());
if (wildcard.substring(0, 1).equals(".")) {
// Find wildcard
realFilename = realFilename.substring(0, realFilename.length() - 4) + "_" + StringUtil.getFormattedDateTimeNow(true) + wildcard;
} else {
// did not find wildcard
realFilename = realFilename + "_" + StringUtil.getFormattedDateTimeNow(true);
}
logDebug(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label"));
}
// User has specified an existing file, We can continue ...
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
}
if (connection != null) {
// User has specified a connection, We can continue ...
Database db = new Database(this, connection);
db.shareVariablesWith(this);
try {
db.connect(parentJob.getTransactionId(), null);
// Get schemaname
String realSchemaname = environmentSubstitute(schemaname);
// Get tablename
String realTablename = environmentSubstitute(tablename);
if (db.checkTableExists(realTablename)) {
// The table existe, We can continue ...
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists2.Label"));
}
// Add schemaname (Most the time Schemaname.Tablename)
if (schemaname != null) {
realTablename = realSchemaname + "." + realTablename;
}
// Set the Limit lines
if (Const.toInt(getRealLimitlines(), 0) > 0) {
LimitNbrLignes = "LIMIT " + getRealLimitlines();
}
// Set list of Column, if null get all columns (*)
if (getRealListColumn() != null) {
ListOfColumn = MysqlString(getRealListColumn());
}
// Fields separator (only applies to OUTFILE mode, outdumpvalue == 0)
if (getRealSeparator() != null && outdumpvalue == 0) {
FieldSeparator = "FIELDS TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'";
}
// Lines Terminated by (only applies to OUTFILE mode)
if (getRealLineterminated() != null && outdumpvalue == 0) {
LinesTerminated = "LINES TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'";
}
// High Priority ?
if (isHighPriority()) {
strHighPriority = "HIGH_PRIORITY";
}
if (getRealEnclosed() != null && outdumpvalue == 0) {
if (isOptionEnclosed()) {
OptionEnclosed = "OPTIONALLY ";
}
OptionEnclosed = OptionEnclosed + "ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'";
}
// OutFile or Dumpfile (outdumpvalue selects between the two modes)
if (outdumpvalue == 0) {
OutDumpText = "INTO OUTFILE";
} else {
OutDumpText = "INTO DUMPFILE";
}
// NOTE(review): the statement is assembled by string concatenation;
// table and file names are not escaped or parameterized. This relies
// on the job configuration being trusted input — confirm that is the
// intended trust boundary before exposing these fields externally.
String FILEBulkFile = "SELECT " + strHighPriority + " " + ListOfColumn + " " + OutDumpText + " '" + realFilename + "' " + FieldSeparator + " " + OptionEnclosed + " " + LinesTerminated + " FROM " + realTablename + " " + LimitNbrLignes + " LOCK IN SHARE MODE";
try {
if (log.isDetailed()) {
logDetailed(FILEBulkFile);
}
// Run the SQL
PreparedStatement ps = db.prepareSQL(FILEBulkFile);
ps.execute();
// Everything is OK...we can disconnect now
db.disconnect();
if (isAddFileToResult()) {
// Add filename to output files
ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
}
result.setResult(true);
} catch (SQLException je) {
db.disconnect();
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + je.getMessage());
} catch (KettleFileException e) {
// Failure while registering the result file; the dump itself may
// have succeeded, but the entry is reported as failed.
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + e.getMessage());
result.setNrErrors(1);
}
} else {
// Of course, the table should have been created already before the bulk load operation
db.disconnect();
result.setNrErrors(1);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists2.Label"));
}
} 
} catch (KettleDatabaseException dbe) {
db.disconnect();
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + dbe.getMessage());
}
} else {
// No database connection is defined
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nodatabase.Label"));
}
}
} else {
// No file was specified
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nofilename.Label"));
}
return result;
}
Aggregations