Example use of org.pentaho.di.core.exception.KettleFileException in the pentaho-kettle project (by Pentaho): class KettleVFS, method getTextFileContent.
/**
 * Reads the entire contents of a VFS file into a String.
 *
 * @param vfsFilename the VFS filename (e.g. file:///path/to/file.txt)
 * @param space       the variable space used to resolve the filename, or null
 *                    to resolve it without variable substitution
 * @param charSetName the character set used to decode the file (e.g. "UTF-8")
 * @return the complete text content of the file
 * @throws KettleFileException if the file cannot be opened or read
 */
public static String getTextFileContent(String vfsFilename, VariableSpace space, String charSetName) throws KettleFileException {
  // try-with-resources guarantees the stream and reader are closed even when
  // a read fails; the original leaked both on any exception before close().
  try (InputStream inputStream = (space == null) ? getInputStream(vfsFilename) : getInputStream(vfsFilename, space);
      InputStreamReader reader = new InputStreamReader(inputStream, charSetName)) {
    StringBuilder aBuffer = new StringBuilder();
    int c;
    while ((c = reader.read()) != -1) {
      aBuffer.append((char) c);
    }
    return aBuffer.toString();
  } catch (IOException e) {
    throw new KettleFileException(e);
  }
}
Example use of org.pentaho.di.core.exception.KettleFileException in the pentaho-kettle project (by Pentaho): class DBCache, method saveCache.
/**
 * Serializes the database cache to the cache file on disk.
 * <p>
 * Each cache entry is written followed by its corresponding row metadata.
 * Writing aborts with an exception when an entry has no row metadata.
 *
 * @throws KettleFileException if the cache file is not writable, a cache
 *           entry has no row metadata, or any I/O error occurs while writing
 */
public void saveCache() throws KettleFileException {
  try {
    // Serialization support for the DB cache
    String filename = getFilename();
    File file = new File(filename);
    if (file.exists() && !file.canWrite()) {
      throw new KettleFileException("We can't write to the cache file: " + filename);
    }
    // try-with-resources closes the buffered stream (and the underlying
    // FileOutputStream) on every path; the original also re-wrapped failures
    // in a pointless "new Exception(e)" before the outer wrap below.
    try (DataOutputStream dos = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(file), 10000))) {
      int counter = 0;
      Enumeration<DBCacheEntry> keys = cache.keys();
      while (keys.hasMoreElements()) {
        // Save the database cache entry
        DBCacheEntry entry = keys.nextElement();
        entry.write(dos);
        // Save the corresponding row as well.
        RowMetaInterface rowMeta = get(entry);
        if (rowMeta == null) {
          throw new KettleFileException("The database cache contains an empty row. We can't save this!");
        }
        rowMeta.writeMeta(dos);
        counter++;
      }
      log.logDetailed("We wrote " + counter + " cached rows to the database cache!");
    }
  } catch (Exception e) {
    throw new KettleFileException("Couldn't write to the database cache", e);
  }
}
Example use of org.pentaho.di.core.exception.KettleFileException in the pentaho-kettle project (by Pentaho): class JobEntryZipFile, method determineZipfilenameForDepth.
/**
 * Returns the trailing part of a file name, limited to a given folder depth.
 *
 * @param filename
 *          the filename (full) (/path/to/a/file.txt)
 * @param depth
 *          the depth to get. 0 means: the complete filename, 1: the name only (file.txt), 2: one folder (a/file.txt)
 *          3: two folders (to/a/file.txt) and so on.
 * @return the requested part of the file name up to a certain depth
 * @throws KettleException when the file name cannot be resolved through VFS
 */
private String determineZipfilenameForDepth(String filename, int depth) throws KettleException {
  try {
    if (Utils.isEmpty(filename)) {
      return null;
    }
    if (depth == 0) {
      // Depth 0: the caller wants the complete, untouched filename.
      return filename;
    }
    FileObject fileObject = KettleVFS.getFileObject(filename, this);
    String result = fileObject.getName().getBaseName();
    if (depth == 1) {
      // Depth 1: just the base name, no folders.
      return result;
    }
    // Walk up the parent folders, prepending one folder name per depth level
    // beyond 1, stopping early when the filesystem root is reached.
    FileObject parent = fileObject.getParent();
    for (int level = 1; level < depth && parent != null; level++) {
      result = parent.getName().getBaseName() + "/" + result;
      parent = parent.getParent();
    }
    return result;
  } catch (Exception e) {
    throw new KettleException("Unable to get zip filename '" + filename + "' to depth " + depth, e);
  }
}
Example use of org.pentaho.di.core.exception.KettleFileException in the pentaho-kettle project (by Pentaho): class JobEntryMssqlBulkLoad, method execute.
/**
 * Executes the MS SQL Server bulk load: verifies the input file is a local
 * VFS file, builds a BULK INSERT statement from the entry's settings and
 * runs it on the configured connection.
 *
 * @param previousResult the result of the previous job entry in the job
 * @param nr             the job entry number (not used by this implementation)
 * @return the updated Result; setResult(true) only when the bulk load succeeded
 */
public Result execute(Result previousResult, int nr) {
  String TakeFirstNbrLines = "";
  String LineTerminatedby = "";
  String FieldTerminatedby = "";
  boolean useFieldSeparator = false;
  String UseCodepage = "";
  String ErrorfileName = "";
  Result result = previousResult;
  result.setResult(false);
  String vfsFilename = environmentSubstitute(filename);
  FileObject fileObject = null;
  // Let's check the filename ...
  if (!Utils.isEmpty(vfsFilename)) {
    try {
      // User has specified a file, We can continue ...
      //
      // This is running over VFS but we need a normal file.
      // As such, we're going to verify that it's a local file...
      // We're also going to convert VFS FileObject to File
      //
      fileObject = KettleVFS.getFileObject(vfsFilename, this);
      if (!(fileObject instanceof LocalFile)) {
        // BULK INSERT reads the file server-side; only local files are supported here.
        throw new KettleException(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.OnlyLocalFileSupported", vfsFilename));
      }
      // Convert it to a regular platform specific file name
      String realFilename = KettleVFS.getFilename(fileObject);
      // Here we go... back to the regular scheduled program...
      File file = new File(realFilename);
      if (file.exists() && file.canRead()) {
        // User has specified an existing file, We can continue ...
        if (log.isDetailed()) {
          logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.FileExists.Label", realFilename));
        }
        if (connection != null) {
          // User has specified a connection, We can continue ...
          Database db = new Database(this, connection);
          if (!(db.getDatabaseMeta().getDatabaseInterface() instanceof MSSQLServerDatabaseMeta)) {
            logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.DbNotMSSQL", connection.getDatabaseName()));
            return result;
          }
          db.shareVariablesWith(this);
          try {
            db.connect(parentJob.getTransactionId(), null);
            // Get schemaname
            String realSchemaname = environmentSubstitute(schemaname);
            // Get tablename
            String realTablename = environmentSubstitute(tablename);
            // Add schemaname (Most the time Schemaname.Tablename)
            if (schemaname != null) {
              realTablename = realSchemaname + "." + realTablename;
            }
            if (db.checkTableExists(realTablename)) {
              // The table exists, We can continue ...
              if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobMssqlBulkLoad.TableExists.Label", realTablename));
              }
              // FIELDTERMINATOR
              String Fieldterminator = getRealFieldTerminator();
              if (Utils.isEmpty(Fieldterminator) && (datafiletype.equals("char") || datafiletype.equals("widechar"))) {
                logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FieldTerminatorMissing"));
                return result;
              } else {
                if (datafiletype.equals("char") || datafiletype.equals("widechar")) {
                  useFieldSeparator = true;
                  FieldTerminatedby = "FIELDTERMINATOR='" + Fieldterminator + "'";
                }
              }
              // Check Specific Code page
              if (codepage.equals("Specific")) {
                // FIX: resolve the user-entered code page (the original substituted
                // "codepage", i.e. the literal string "Specific") and test for an
                // empty value (the original tested length() < 0, which is never true).
                String realCodePage = environmentSubstitute(specificcodepage);
                if (Utils.isEmpty(realCodePage)) {
                  logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.SpecificCodePageMissing"));
                  return result;
                } else {
                  UseCodepage = "CODEPAGE = '" + realCodePage + "'";
                }
              } else {
                UseCodepage = "CODEPAGE = '" + codepage + "'";
              }
              // Check Error file
              String realErrorFile = environmentSubstitute(errorfilename);
              if (realErrorFile != null) {
                File errorfile = new File(realErrorFile);
                if (errorfile.exists() && !adddatetime) {
                  // The error file is created when the command is executed. An error occurs
                  // if the file already exists.
                  logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.ErrorFileExists"));
                  return result;
                }
                if (adddatetime) {
                  // Add date time to filename...
                  // FIX: four-digit year "yyyy"; the original pattern "yyyMMdd" produced a
                  // three-digit year.
                  SimpleDateFormat daf = new SimpleDateFormat("yyyyMMdd_HHmmss");
                  String d = daf.format(new Date());
                  ErrorfileName = "ERRORFILE ='" + realErrorFile + "_" + d + "'";
                } else {
                  ErrorfileName = "ERRORFILE ='" + realErrorFile + "'";
                }
              }
              // ROWTERMINATOR
              String Rowterminator = getRealLineterminated();
              if (!Utils.isEmpty(Rowterminator)) {
                LineTerminatedby = "ROWTERMINATOR='" + Rowterminator + "'";
              }
              // Start file at
              if (startfile > 0) {
                TakeFirstNbrLines = "FIRSTROW=" + startfile;
              }
              // End file at
              // FIX: append LASTROW instead of overwriting, so FIRSTROW is not
              // silently discarded when both a start and an end row are set.
              if (endfile > 0) {
                if (TakeFirstNbrLines.length() > 0) {
                  TakeFirstNbrLines = TakeFirstNbrLines + ",";
                }
                TakeFirstNbrLines = TakeFirstNbrLines + "LASTROW=" + endfile;
              }
              // Truncate table?
              String SQLBULKLOAD = "";
              if (truncate) {
                SQLBULKLOAD = "TRUNCATE TABLE " + realTablename + ";";
              }
              // Build BULK Command
              SQLBULKLOAD = SQLBULKLOAD + "BULK INSERT " + realTablename + " FROM " + "'" + realFilename.replace('\\', '/') + "'";
              SQLBULKLOAD = SQLBULKLOAD + " WITH (";
              if (useFieldSeparator) {
                SQLBULKLOAD = SQLBULKLOAD + FieldTerminatedby;
              } else {
                SQLBULKLOAD = SQLBULKLOAD + "DATAFILETYPE ='" + datafiletype + "'";
              }
              if (LineTerminatedby.length() > 0) {
                SQLBULKLOAD = SQLBULKLOAD + "," + LineTerminatedby;
              }
              if (TakeFirstNbrLines.length() > 0) {
                SQLBULKLOAD = SQLBULKLOAD + "," + TakeFirstNbrLines;
              }
              if (UseCodepage.length() > 0) {
                SQLBULKLOAD = SQLBULKLOAD + "," + UseCodepage;
              }
              String realFormatFile = environmentSubstitute(formatfilename);
              if (realFormatFile != null) {
                SQLBULKLOAD = SQLBULKLOAD + ", FORMATFILE='" + realFormatFile + "'";
              }
              if (firetriggers) {
                SQLBULKLOAD = SQLBULKLOAD + ",FIRE_TRIGGERS";
              }
              if (keepnulls) {
                SQLBULKLOAD = SQLBULKLOAD + ",KEEPNULLS";
              }
              if (keepidentity) {
                SQLBULKLOAD = SQLBULKLOAD + ",KEEPIDENTITY";
              }
              if (checkconstraints) {
                SQLBULKLOAD = SQLBULKLOAD + ",CHECK_CONSTRAINTS";
              }
              if (tablock) {
                SQLBULKLOAD = SQLBULKLOAD + ",TABLOCK";
              }
              if (orderby != null) {
                SQLBULKLOAD = SQLBULKLOAD + ",ORDER ( " + orderby + " " + orderdirection + ")";
              }
              if (ErrorfileName.length() > 0) {
                SQLBULKLOAD = SQLBULKLOAD + ", " + ErrorfileName;
              }
              if (maxerrors > 0) {
                SQLBULKLOAD = SQLBULKLOAD + ", MAXERRORS=" + maxerrors;
              }
              if (batchsize > 0) {
                SQLBULKLOAD = SQLBULKLOAD + ", BATCHSIZE=" + batchsize;
              }
              if (rowsperbatch > 0) {
                SQLBULKLOAD = SQLBULKLOAD + ", ROWS_PER_BATCH=" + rowsperbatch;
              }
              // End of Bulk command
              SQLBULKLOAD = SQLBULKLOAD + ")";
              try {
                // Run the SQL
                db.execStatement(SQLBULKLOAD);
                // Everything is OK...we can disconnect now
                db.disconnect();
                if (isAddFileToResult()) {
                  // Add filename to output files
                  ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
                  result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                }
                result.setResult(true);
              } catch (KettleDatabaseException je) {
                result.setNrErrors(1);
                logError("An error occurred executing this job entry : " + je.getMessage(), je);
              } catch (KettleFileException e) {
                logError("An error occurred executing this job entry : " + e.getMessage(), e);
                result.setNrErrors(1);
              } finally {
                if (db != null) {
                  db.disconnect();
                  db = null;
                }
              }
            } else {
              // Of course, the table should have been created already before the bulk load operation
              db.disconnect();
              result.setNrErrors(1);
              logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.TableNotExists", realTablename));
            }
          } catch (KettleDatabaseException dbe) {
            db.disconnect();
            result.setNrErrors(1);
            logError("An error occurred executing this entry: " + dbe.getMessage());
          }
        } else {
          // No database connection is defined
          result.setNrErrors(1);
          logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nodatabase.Label"));
        }
      } else {
        // the file doesn't exist
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Error.FileNotExists", realFilename));
      }
    } catch (Exception e) {
      // An unexpected error occurred
      result.setNrErrors(1);
      logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.UnexpectedError.Label"), e);
    } finally {
      try {
        if (fileObject != null) {
          fileObject.close();
        }
      } catch (Exception e) {
        // Ignore errors closing the VFS file object; the load result is already set.
      }
    }
  } else {
    // No file was specified
    result.setNrErrors(1);
    logError(BaseMessages.getString(PKG, "JobMssqlBulkLoad.Nofilename.Label"));
  }
  return result;
}
Example use of org.pentaho.di.core.exception.KettleFileException in the pentaho-kettle project (by Pentaho): class JobEntryMysqlBulkLoad, method execute.
/**
 * Executes the MySQL bulk load job entry: verifies the input file is a local
 * VFS file, builds a LOAD DATA INFILE statement from the entry's settings and
 * runs it against the configured connection.
 *
 * @param previousResult the result of the previous job entry in the job
 * @param nr the job entry number (not used by this implementation)
 * @return the updated Result; setResult(true) only when the load statement succeeded
 */
public Result execute(Result previousResult, int nr) {
String ReplaceIgnore;
String IgnoreNbrLignes = "";
String ListOfColumn = "";
String LocalExec = "";
String PriorityText = "";
String LineTerminatedby = "";
String FieldTerminatedby = "";
Result result = previousResult;
result.setResult(false);
String vfsFilename = environmentSubstitute(filename);
// Let's check the filename ...
if (!Utils.isEmpty(vfsFilename)) {
try {
// User has specified a file, We can continue ...
//
// This is running over VFS but we need a normal file.
// As such, we're going to verify that it's a local file...
// We're also going to convert VFS FileObject to File
//
FileObject fileObject = KettleVFS.getFileObject(vfsFilename, this);
if (!(fileObject instanceof LocalFile)) {
// Only a local file can be handed to the database as an INFILE path.
throw new KettleException("Only local files are supported at this time, file [" + vfsFilename + "] is not a local file.");
}
// Convert it to a regular platform specific file name
//
String realFilename = KettleVFS.getFilename(fileObject);
// Here we go... back to the regular scheduled program...
//
File file = new File(realFilename);
// When LOCAL INFILE is off, the file is read by the MySQL *server*, so the
// local existence/readability check is intentionally skipped.
if ((file.exists() && file.canRead()) || isLocalInfile() == false) {
// User has specified an existing file, We can continue ...
if (log.isDetailed()) {
logDetailed("File [" + realFilename + "] exists.");
}
if (connection != null) {
// User has specified a connection, We can continue ...
Database db = new Database(this, connection);
db.shareVariablesWith(this);
try {
db.connect(parentJob.getTransactionId(), null);
// Get schemaname
String realSchemaname = environmentSubstitute(schemaname);
// Get tablename
String realTablename = environmentSubstitute(tablename);
// NOTE(review): the existence check runs on the unqualified table name and the
// schema prefix is added only afterwards; the MSSQL variant prefixes first.
// Confirm this ordering is intended.
if (db.checkTableExists(realTablename)) {
// The table existe, We can continue ...
if (log.isDetailed()) {
logDetailed("Table [" + realTablename + "] exists.");
}
// Add schemaname (Most the time Schemaname.Tablename)
if (schemaname != null) {
realTablename = realSchemaname + "." + realTablename;
}
// Set the REPLACE or IGNORE
if (isReplacedata()) {
ReplaceIgnore = "REPLACE";
} else {
ReplaceIgnore = "IGNORE";
}
// Set the IGNORE LINES
if (Const.toInt(getRealIgnorelines(), 0) > 0) {
IgnoreNbrLignes = "IGNORE " + getRealIgnorelines() + " LINES";
}
// Set list of Column
if (getRealListattribut() != null) {
ListOfColumn = "(" + MysqlString(getRealListattribut()) + ")";
}
// Local File execution
if (isLocalInfile()) {
LocalExec = "LOCAL";
}
// Prority
if (prorityvalue == 1) {
// LOW
PriorityText = "LOW_PRIORITY";
} else if (prorityvalue == 2) {
// CONCURRENT
PriorityText = "CONCURRENT";
}
// Fields ....
// Single quotes inside user-supplied terminators are escaped by doubling them.
if (getRealSeparator() != null || getRealEnclosed() != null || getRealEscaped() != null) {
FieldTerminatedby = "FIELDS ";
if (getRealSeparator() != null) {
FieldTerminatedby = FieldTerminatedby + "TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'";
}
if (getRealEnclosed() != null) {
FieldTerminatedby = FieldTerminatedby + " ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'";
}
if (getRealEscaped() != null) {
FieldTerminatedby = FieldTerminatedby + " ESCAPED BY '" + Const.replace(getRealEscaped(), "'", "''") + "'";
}
}
// LINES ...
if (getRealLinestarted() != null || getRealLineterminated() != null) {
LineTerminatedby = "LINES ";
// Line starting By
if (getRealLinestarted() != null) {
LineTerminatedby = LineTerminatedby + "STARTING BY '" + Const.replace(getRealLinestarted(), "'", "''") + "'";
}
// Line terminating By
if (getRealLineterminated() != null) {
LineTerminatedby = LineTerminatedby + " TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'";
}
}
// NOTE(review): the statement is built by string concatenation; the table name
// and file path are not escaped. Values come from the job entry's own settings,
// but verify they cannot carry untrusted input before reuse.
String SQLBULKLOAD = "LOAD DATA " + PriorityText + " " + LocalExec + " INFILE '" + realFilename.replace('\\', '/') + "' " + ReplaceIgnore + " INTO TABLE " + realTablename + " " + FieldTerminatedby + " " + LineTerminatedby + " " + IgnoreNbrLignes + " " + ListOfColumn + ";";
try {
// Run the SQL
db.execStatement(SQLBULKLOAD);
// Everything is OK...we can deconnect now
db.disconnect();
if (isAddFileToResult()) {
// Add zip filename to output files
ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
}
result.setResult(true);
} catch (KettleDatabaseException je) {
db.disconnect();
result.setNrErrors(1);
// NOTE(review): only the message is logged here; the stack trace is lost.
logError("An error occurred executing this job entry : " + je.getMessage());
} catch (KettleFileException e) {
logError("An error occurred executing this job entry : " + e.getMessage());
result.setNrErrors(1);
}
} else {
// Of course, the table should have been created already before the bulk load operation
db.disconnect();
result.setNrErrors(1);
if (log.isDetailed()) {
logDetailed("Table [" + realTablename + "] doesn't exist!");
}
}
} catch (KettleDatabaseException dbe) {
db.disconnect();
result.setNrErrors(1);
logError("An error occurred executing this entry: " + dbe.getMessage());
}
} else {
// No database connection is defined
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.Nodatabase.Label"));
}
} else {
// the file doesn't exist
result.setNrErrors(1);
logError("File [" + realFilename + "] doesn't exist!");
}
} catch (Exception e) {
// An unexpected error occurred
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.UnexpectedError.Label"), e);
}
} else {
// No file was specified
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkLoad.Nofilename.Label"));
}
return result;
}
Aggregations