Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project (by Pentaho): the loadRep method of the JobEntryMssqlBulkLoad class.
/**
 * Restores this MS SQL bulk-load job entry's configuration from the repository.
 *
 * @param rep          repository to read the attributes from
 * @param metaStore    the metastore (not used by this entry)
 * @param id_jobentry  id of the job entry whose attributes are read
 * @param databases    available database connections, used to resolve the "connection" attribute
 * @param slaveServers available slave servers (not used by this entry)
 * @throws KettleException if any attribute cannot be read from the repository
 */
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
        // Target table and input/format files.
        schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
        tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
        filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
        formatfilename = rep.getJobEntryAttributeString(id_jobentry, "formatfilename");
        // Data-file format options.
        datafiletype = rep.getJobEntryAttributeString(id_jobentry, "datafiletype");
        fieldterminator = rep.getJobEntryAttributeString(id_jobentry, "fieldterminator");
        lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
        codepage = rep.getJobEntryAttributeString(id_jobentry, "codepage");
        specificcodepage = rep.getJobEntryAttributeString(id_jobentry, "specificcodepage");
        // Bulk-load behavior flags.
        firetriggers = rep.getJobEntryAttributeBoolean(id_jobentry, "firetriggers");
        checkconstraints = rep.getJobEntryAttributeBoolean(id_jobentry, "checkconstraints");
        keepnulls = rep.getJobEntryAttributeBoolean(id_jobentry, "keepnulls");
        keepidentity = rep.getJobEntryAttributeBoolean(id_jobentry, "keepidentity");
        tablock = rep.getJobEntryAttributeBoolean(id_jobentry, "tablock");
        truncate = rep.getJobEntryAttributeBoolean(id_jobentry, "truncate");
        // File range, ordering and error handling.
        startfile = (int) rep.getJobEntryAttributeInteger(id_jobentry, "startfile");
        endfile = (int) rep.getJobEntryAttributeInteger(id_jobentry, "endfile");
        orderby = rep.getJobEntryAttributeString(id_jobentry, "orderby");
        orderdirection = rep.getJobEntryAttributeString(id_jobentry, "orderdirection");
        errorfilename = rep.getJobEntryAttributeString(id_jobentry, "errorfilename");
        maxerrors = (int) rep.getJobEntryAttributeInteger(id_jobentry, "maxerrors");
        batchsize = (int) rep.getJobEntryAttributeInteger(id_jobentry, "batchsize");
        rowsperbatch = (int) rep.getJobEntryAttributeInteger(id_jobentry, "rowsperbatch");
        // Result-handling flags.
        adddatetime = rep.getJobEntryAttributeBoolean(id_jobentry, "adddatetime");
        addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
        // Resolve the stored connection reference against the known database connections.
        connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to load job entry of type 'MSsql bulk load' from the repository for id_jobentry=" + id_jobentry, dbe);
    }
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project (by Pentaho): the loadRep method of the JobEntryMysqlBulkFile class.
/**
 * Restores this MySQL bulk-file job entry's configuration from the repository.
 *
 * @param rep          repository to read the attributes from
 * @param metaStore    the metastore (not used by this entry)
 * @param id_jobentry  id of the job entry whose attributes are read
 * @param databases    available database connections, used to resolve the "connection" attribute
 * @param slaveServers available slave servers (not used by this entry)
 * @throws KettleException if any attribute cannot be read from the repository
 */
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
        // Target table and output file.
        schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
        tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
        filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
        // Output formatting options.
        separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
        enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
        lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
        limitlines = rep.getJobEntryAttributeString(id_jobentry, "limitlines");
        listcolumn = rep.getJobEntryAttributeString(id_jobentry, "listcolumn");
        // Behavior flags and modes.
        highpriority = rep.getJobEntryAttributeBoolean(id_jobentry, "highpriority");
        optionenclosed = rep.getJobEntryAttributeBoolean(id_jobentry, "optionenclosed");
        outdumpvalue = (int) rep.getJobEntryAttributeInteger(id_jobentry, "outdumpvalue");
        iffileexists = (int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
        addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
        // Resolve the stored connection reference against the known database connections.
        connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    } catch (KettleDatabaseException dbe) {
        // Fix: the message previously said "table exists" — a copy-paste from a different
        // job entry type — which made repository load failures hard to attribute.
        throw new KettleException("Unable to load job entry of type 'Mysql bulk file' from the repository for id_jobentry=" + id_jobentry, dbe);
    }
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project (by Pentaho): the execute method of the JobEntryMysqlBulkFile class.
/**
 * Executes this job entry: exports a MySQL table to a file by building and running a
 * "SELECT ... INTO OUTFILE/DUMPFILE ... LOCK IN SHARE MODE" statement.
 *
 * Behavior depends on iffileexists when the target file already exists:
 * 2 = fail, 1 = do nothing (success), 0 = rename the new file with a timestamp suffix.
 *
 * @param previousResult result of the previous job entry; mutated in place and returned
 * @param nr the job entry number (unused in this method)
 * @return the same Result: setResult(true) on success, nrErrors=1 on any failure
 */
public Result execute(Result previousResult, int nr) {
// Fragments of the final SQL statement, assembled below depending on the options.
String LimitNbrLignes = "";
String ListOfColumn = "*";
String strHighPriority = "";
String OutDumpText = "";
String OptionEnclosed = "";
String FieldSeparator = "";
String LinesTerminated = "";
Result result = previousResult;
result.setResult(false);
// Let's check the filename ...
if (filename != null) {
// User has specified a file, We can continue ...
String realFilename = getRealFilename();
File file = new File(realFilename);
if (file.exists() && iffileexists == 2) {
// the file exists and user want to Fail
result.setResult(false);
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
} else if (file.exists() && iffileexists == 1) {
// the file exists and user want to do nothing
result.setResult(true);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
}
} else {
if (file.exists() && iffileexists == 0) {
// File exists and user want to renamme it with unique name
// Format Date
// Try to clean filename (without wildcard)
// NOTE(review): this heuristic inspects only the last 4 characters, so it handles
// exactly 3-character extensions (".txt", ".csv"); other lengths fall through to
// the plain timestamp-append branch — confirm this is acceptable.
String wildcard = realFilename.substring(realFilename.length() - 4, realFilename.length());
if (wildcard.substring(0, 1).equals(".")) {
// Find wildcard
realFilename = realFilename.substring(0, realFilename.length() - 4) + "_" + StringUtil.getFormattedDateTimeNow(true) + wildcard;
} else {
// did not find wildcard
realFilename = realFilename + "_" + StringUtil.getFormattedDateTimeNow(true);
}
// NOTE(review): both halves of this message use FileNameChange1.Label; the second
// was presumably meant to be FileNameChange2.Label — verify against the bundle.
logDebug(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label"));
}
// User has specified an existing file, We can continue ...
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
}
if (connection != null) {
// User has specified a connection, We can continue ...
Database db = new Database(this, connection);
db.shareVariablesWith(this);
try {
// Join the parent job's transaction so the export participates in its lifecycle.
db.connect(parentJob.getTransactionId(), null);
// Get schemaname
String realSchemaname = environmentSubstitute(schemaname);
// Get tablename
String realTablename = environmentSubstitute(tablename);
if (db.checkTableExists(realTablename)) {
// The table existe, We can continue ...
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists2.Label"));
}
// Add schemaname (Most the time Schemaname.Tablename)
// NOTE(review): the null check is on the raw schemaname, but the prefix uses
// realSchemaname; a schemaname that substitutes to empty still yields ".table" — confirm intended.
if (schemaname != null) {
realTablename = realSchemaname + "." + realTablename;
}
// Set the Limit lines
if (Const.toInt(getRealLimitlines(), 0) > 0) {
LimitNbrLignes = "LIMIT " + getRealLimitlines();
}
// Set list of Column, if null get all columns (*)
if (getRealListColumn() != null) {
ListOfColumn = MysqlString(getRealListColumn());
}
// Fields separator
// Separator/terminator/enclosure clauses only apply to OUTFILE (outdumpvalue == 0);
// DUMPFILE writes a single raw row without formatting.
if (getRealSeparator() != null && outdumpvalue == 0) {
FieldSeparator = "FIELDS TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'";
}
// Lines Terminated by
if (getRealLineterminated() != null && outdumpvalue == 0) {
LinesTerminated = "LINES TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'";
}
// High Priority ?
if (isHighPriority()) {
strHighPriority = "HIGH_PRIORITY";
}
if (getRealEnclosed() != null && outdumpvalue == 0) {
if (isOptionEnclosed()) {
OptionEnclosed = "OPTIONALLY ";
}
OptionEnclosed = OptionEnclosed + "ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'";
}
// OutFile or Dumpfile
if (outdumpvalue == 0) {
OutDumpText = "INTO OUTFILE";
} else {
OutDumpText = "INTO DUMPFILE";
}
// NOTE(review): SQL built by string concatenation; inputs come from job
// configuration (not end users) and quotes are doubled via Const.replace above,
// but table/column names are interpolated unescaped.
String FILEBulkFile = "SELECT " + strHighPriority + " " + ListOfColumn + " " + OutDumpText + " '" + realFilename + "' " + FieldSeparator + " " + OptionEnclosed + " " + LinesTerminated + " FROM " + realTablename + " " + LimitNbrLignes + " LOCK IN SHARE MODE";
try {
if (log.isDetailed()) {
logDetailed(FILEBulkFile);
}
// Run the SQL
// NOTE(review): ps is never closed explicitly; db.disconnect() is relied on
// to release the statement.
PreparedStatement ps = db.prepareSQL(FILEBulkFile);
ps.execute();
// Everything is OK...we can disconnect now
db.disconnect();
if (isAddFileToResult()) {
// Add filename to output files
ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
}
result.setResult(true);
} catch (SQLException je) {
db.disconnect();
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + je.getMessage());
} catch (KettleFileException e) {
// Failure while registering the output file in the result files list.
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + e.getMessage());
result.setNrErrors(1);
}
} else {
// Of course, the table should have been created already before the bulk load operation
db.disconnect();
result.setNrErrors(1);
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists2.Label"));
}
}
} catch (KettleDatabaseException dbe) {
// Connection or metadata failure.
db.disconnect();
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + dbe.getMessage());
}
} else {
// No database connection is defined
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nodatabase.Label"));
}
}
} else {
// No file was specified
result.setNrErrors(1);
logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nofilename.Label"));
}
return result;
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project (by Pentaho): the saveRep method of the JobEntryMysqlBulkLoad class.
/**
 * Persists this MySQL bulk-load job entry's configuration to the repository.
 *
 * @param rep       repository to write the attributes to
 * @param metaStore the metastore (not used by this entry)
 * @param id_job    id of the job this entry belongs to
 * @throws KettleException if any attribute cannot be written to the repository
 */
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
    try {
        // Target table and input file.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        // Input-file format options.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "separator", separator);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "enclosed", enclosed);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "escaped", escaped);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "linestarted", linestarted);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "lineterminated", lineterminated);
        // Load behavior options. ("prorityvalue" misspelling is the persisted key; keep as-is.)
        rep.saveJobEntryAttribute(id_job, getObjectId(), "replacedata", replacedata);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "ignorelines", ignorelines);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "listattribut", listattribut);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "localinfile", localinfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "prorityvalue", prorityvalue);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addfiletoresult);
        // Store the reference to the database connection.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    } catch (KettleDatabaseException dbe) {
        // Fix: the message previously said "Unable to load ... to the repository",
        // but this method saves — the old wording misattributed save failures.
        throw new KettleException("Unable to save job entry of type 'Mysql Bulk Load' to the repository for id_job=" + id_job, dbe);
    }
}
Use of org.pentaho.di.core.exception.KettleDatabaseException in the pentaho-kettle project (by Pentaho): the loadNotePadMeta method of the KettleDatabaseRepositoryNotePadDelegate class.
/**
 * Reads a note (canvas annotation) from the repository and materializes it as a NotePadMeta.
 *
 * @param id_note id of the note to load
 * @return the populated NotePadMeta (never null)
 * @throws KettleException if the note does not exist or the repository read fails
 */
public NotePadMeta loadNotePadMeta(ObjectId id_note) throws KettleException {
    NotePadMeta note = new NotePadMeta();
    try {
        note.setObjectId(id_note);
        RowMetaAndData row = getNote(id_note);
        if (row == null) {
            // Unknown id: undo the id assignment above before failing.
            note.setObjectId(null);
            throw new KettleException("I couldn't find Notepad with id_note=" + id_note + " in the repository.");
        }
        note.setNote(row.getString("VALUE_STR", ""));
        // Canvas geometry.
        note.setLocation(new Point((int) row.getInteger("GUI_LOCATION_X", 0L), (int) row.getInteger("GUI_LOCATION_Y", 0L)));
        note.setWidth((int) row.getInteger("GUI_LOCATION_WIDTH", 0L));
        note.setHeight((int) row.getInteger("GUI_LOCATION_HEIGHT", 0L));
        note.setSelected(false);
        // Font face.
        note.setFontName(row.getString("FONT_NAME", null));
        note.setFontSize((int) row.getInteger("FONT_SIZE", -1));
        note.setFontBold(row.getBoolean("FONT_BOLD", false));
        note.setFontItalic(row.getBoolean("FONT_ITALIC", false));
        // Font color. NOTE(review): the red channel's default is COLOR_RGB_BLACK_BLUE,
        // not COLOR_RGB_BLACK_RED — kept as-is to preserve behavior; confirm intent.
        note.setFontColorRed((int) row.getInteger("FONT_COLOR_RED", NotePadMeta.COLOR_RGB_BLACK_BLUE));
        note.setFontColorGreen((int) row.getInteger("FONT_COLOR_GREEN", NotePadMeta.COLOR_RGB_BLACK_GREEN));
        note.setFontColorBlue((int) row.getInteger("FONT_COLOR_BLUE", NotePadMeta.COLOR_RGB_BLACK_BLUE));
        // Background color.
        note.setBackGroundColorRed((int) row.getInteger("FONT_BACK_GROUND_COLOR_RED", NotePadMeta.COLOR_RGB_DEFAULT_BG_RED));
        note.setBackGroundColorGreen((int) row.getInteger("FONT_BACK_GROUND_COLOR_GREEN", NotePadMeta.COLOR_RGB_DEFAULT_BG_GREEN));
        note.setBackGroundColorBlue((int) row.getInteger("FONT_BACK_GROUND_COLOR_BLUE", NotePadMeta.COLOR_RGB_DEFAULT_BG_BLUE));
        // Border color.
        note.setBorderColorRed((int) row.getInteger("FONT_BORDER_COLOR_RED", NotePadMeta.COLOR_RGB_DEFAULT_BORDER_RED));
        note.setBorderColorGreen((int) row.getInteger("FONT_BORDER_COLOR_GREEN", NotePadMeta.COLOR_RGB_DEFAULT_BORDER_GREEN));
        note.setBorderColorBlue((int) row.getInteger("FONT_BORDER_COLOR_BLUE", NotePadMeta.COLOR_RGB_DEFAULT_BORDER_BLUE));
        note.setDrawShadow(row.getBoolean("DRAW_SHADOW", true));
        return note;
    } catch (KettleDatabaseException dbe) {
        note.setObjectId(null);
        throw new KettleException("Unable to load Notepad from repository (id_note=" + id_note + ")", dbe);
    }
}
Aggregations