use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
In the class JobEntryMSAccessBulkLoad, the method saveRep:
public void saveRep(Repository rep, IMetaStore metaStore, ObjectId id_job) throws KettleException {
  try {
    rep.saveJobEntryAttribute(id_job, getObjectId(), "include_subfolders", include_subfolders);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "add_result_filenames", add_result_filenames);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "is_args_from_previous", is_args_from_previous);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "limit", limit);
    rep.saveJobEntryAttribute(id_job, getObjectId(), "success_condition", success_condition);
    // save the arguments...
    if (source_filefolder != null) {
      for (int i = 0; i < source_filefolder.length; i++) {
        rep.saveJobEntryAttribute(id_job, getObjectId(), i, "source_filefolder", source_filefolder[i]);
        rep.saveJobEntryAttribute(id_job, getObjectId(), i, "source_wildcard", source_wildcard[i]);
        rep.saveJobEntryAttribute(id_job, getObjectId(), i, "delimiter", delimiter[i]);
        rep.saveJobEntryAttribute(id_job, getObjectId(), i, "target_Db", target_Db[i]);
        rep.saveJobEntryAttribute(id_job, getObjectId(), i, "target_table", target_table[i]);
      }
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryMSAccessBulkLoad.Meta.UnableSave", "" + id_job, dbe.getMessage()), dbe);
  }
}
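The read path is symmetric. Below is a minimal sketch of the matching loadRep logic, assuming the standard Repository getters (getJobEntryAttributeBoolean, getJobEntryAttributeString, countNrJobEntryAttributes), the usual org.pentaho.di imports, and the loadRep signature inherited from JobEntryBase; field types and the exact signature are assumptions for illustration, not the project's verbatim code.

  // Sketch only: reads back the attributes written by saveRep above.
  public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
    try {
      include_subfolders = rep.getJobEntryAttributeBoolean(id_jobentry, "include_subfolders");
      add_result_filenames = rep.getJobEntryAttributeBoolean(id_jobentry, "add_result_filenames");
      is_args_from_previous = rep.getJobEntryAttributeBoolean(id_jobentry, "is_args_from_previous");
      limit = rep.getJobEntryAttributeString(id_jobentry, "limit");
      success_condition = rep.getJobEntryAttributeString(id_jobentry, "success_condition");
      // Read back the indexed arguments written in the loop above.
      int nr = rep.countNrJobEntryAttributes(id_jobentry, "source_filefolder");
      source_filefolder = new String[nr];
      source_wildcard = new String[nr];
      delimiter = new String[nr];
      target_Db = new String[nr];
      target_table = new String[nr];
      for (int i = 0; i < nr; i++) {
        source_filefolder[i] = rep.getJobEntryAttributeString(id_jobentry, i, "source_filefolder");
        source_wildcard[i] = rep.getJobEntryAttributeString(id_jobentry, i, "source_wildcard");
        delimiter[i] = rep.getJobEntryAttributeString(id_jobentry, i, "delimiter");
        target_Db[i] = rep.getJobEntryAttributeString(id_jobentry, i, "target_Db");
        target_table[i] = rep.getJobEntryAttributeString(id_jobentry, i, "target_table");
      }
    } catch (KettleException ke) {
      throw new KettleException("Unable to load job entry attributes from the repository (sketch)", ke);
    }
  }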
use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
In the class Database, the method closeProcedureStatement:
public void closeProcedureStatement() throws KettleDatabaseException {
  // procedure call!
  try {
    if (cstmt != null) {
      cstmt.close();
      cstmt = null;
    }
  } catch (SQLException ex) {
    throw new KettleDatabaseException(BaseMessages.getString(PKG, "Database.Exception.ErrorClosingCallableStatement"), ex);
  }
}
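A caller typically releases the CallableStatement in a finally block so the JDBC resource is freed even when the procedure call fails. A minimal sketch, assuming a connected Database instance named db and argument arrays prepared elsewhere; the helper name callAndClean is hypothetical and only uses the two Database methods quoted on this page.

  // Sketch only: always release the callable statement, even when the call fails.
  public RowMetaAndData callAndClean(Database db, String[] arg, String[] argdir, int[] argtype, String resultname, int resulttype) throws KettleDatabaseException {
    try {
      return db.callProcedure(arg, argdir, argtype, resultname, resulttype);
    } finally {
      // Any SQLException raised while closing surfaces as a KettleDatabaseException.
      db.closeProcedureStatement();
    }
  }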
use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
In the class Database, the method closeInsert:
public void closeInsert() throws KettleDatabaseException {
  if (prepStatementInsert != null) {
    try {
      prepStatementInsert.close();
      prepStatementInsert = null;
    } catch (SQLException e) {
      throw new KettleDatabaseException("Error closing insert prepared statement.", e);
    }
  }
}
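closeInsert is the tail end of the prepared-insert life cycle. A minimal sketch of that life cycle follows, assuming the companion Database methods prepareInsert, setValuesInsert and insertRow (named here from memory of the Kettle API; treat the exact signatures, the helper name insertOneRow, and the table name MY_TABLE as assumptions).

  // Sketch only: the full prepared-insert life cycle that closeInsert terminates.
  public void insertOneRow(Database db, RowMetaInterface rowMeta, Object[] rowData) throws KettleDatabaseException {
    db.prepareInsert(rowMeta, "MY_TABLE");   // builds prepStatementInsert
    db.setValuesInsert(rowMeta, rowData);    // binds the row's values to the statement
    db.insertRow();                          // executes the insert
    db.closeInsert();                        // closes prepStatementInsert as shown above
  }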
use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
In the class Database, the method callProcedure:
public RowMetaAndData callProcedure(String[] arg, String[] argdir, int[] argtype, String resultname, int resulttype) throws KettleDatabaseException {
  RowMetaAndData ret;
  try {
    boolean moreResults = cstmt.execute();
    ret = new RowMetaAndData();
    int pos = 1;
    if (resultname != null && resultname.length() != 0) {
      ValueMeta vMeta = new ValueMeta(resultname, resulttype);
      Object v = null;
      switch (resulttype) {
        case ValueMetaInterface.TYPE_BOOLEAN:
          v = Boolean.valueOf(cstmt.getBoolean(pos));
          break;
        case ValueMetaInterface.TYPE_NUMBER:
          v = new Double(cstmt.getDouble(pos));
          break;
        case ValueMetaInterface.TYPE_BIGNUMBER:
          v = cstmt.getBigDecimal(pos);
          break;
        case ValueMetaInterface.TYPE_INTEGER:
          v = Long.valueOf(cstmt.getLong(pos));
          break;
        case ValueMetaInterface.TYPE_STRING:
          v = cstmt.getString(pos);
          break;
        case ValueMetaInterface.TYPE_BINARY:
          if (databaseMeta.supportsGetBlob()) {
            Blob blob = cstmt.getBlob(pos);
            if (blob != null) {
              v = blob.getBytes(1L, (int) blob.length());
            } else {
              v = null;
            }
          } else {
            v = cstmt.getBytes(pos);
          }
          break;
        case ValueMetaInterface.TYPE_DATE:
          if (databaseMeta.supportsTimeStampToDateConversion()) {
            v = cstmt.getTimestamp(pos);
          } else {
            v = cstmt.getDate(pos);
          }
          break;
        default:
          break;
      }
      ret.addValue(vMeta, v);
      pos++;
    }
    for (int i = 0; i < arg.length; i++) {
      if (argdir[i].equalsIgnoreCase("OUT") || argdir[i].equalsIgnoreCase("INOUT")) {
        ValueMetaInterface vMeta = ValueMetaFactory.createValueMeta(arg[i], argtype[i]);
        Object v = null;
        switch (argtype[i]) {
          case ValueMetaInterface.TYPE_BOOLEAN:
            v = Boolean.valueOf(cstmt.getBoolean(pos + i));
            break;
          case ValueMetaInterface.TYPE_NUMBER:
            v = new Double(cstmt.getDouble(pos + i));
            break;
          case ValueMetaInterface.TYPE_BIGNUMBER:
            v = cstmt.getBigDecimal(pos + i);
            break;
          case ValueMetaInterface.TYPE_INTEGER:
            v = Long.valueOf(cstmt.getLong(pos + i));
            break;
          case ValueMetaInterface.TYPE_STRING:
            v = cstmt.getString(pos + i);
            break;
          case ValueMetaInterface.TYPE_BINARY:
            if (databaseMeta.supportsGetBlob()) {
              Blob blob = cstmt.getBlob(pos + i);
              if (blob != null) {
                v = blob.getBytes(1L, (int) blob.length());
              } else {
                v = null;
              }
            } else {
              v = cstmt.getBytes(pos + i);
            }
            break;
          case ValueMetaInterface.TYPE_DATE:
            if (databaseMeta.supportsTimeStampToDateConversion()) {
              v = cstmt.getTimestamp(pos + i);
            } else {
              v = cstmt.getDate(pos + i);
            }
            break;
          default:
            break;
        }
        ret.addValue(vMeta, v);
      }
    }
    ResultSet rs = null;
    int updateCount = -1;
    do {
      rs = null;
      try {
        // Save the result set
        if (moreResults) {
          rs = cstmt.getResultSet();
        } else {
          // Save the update count if it is available (> -1)
          updateCount = cstmt.getUpdateCount();
        }
        moreResults = cstmt.getMoreResults();
      } finally {
        if (rs != null) {
          rs.close();
          rs = null;
        }
      }
    } while (moreResults || (updateCount > -1));
    return ret;
  } catch (Exception ex) {
    throw new KettleDatabaseException("Unable to call procedure", ex);
  }
}
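How the argument arrays map onto JDBC parameter positions is easiest to see with a concrete call. The following hypothetical example uses one IN and one OUT argument plus a numeric return value; it assumes the underlying CallableStatement was prepared and its IN value bound elsewhere, and the helper name exampleCall, the procedure my_proc and its argument names are made up for illustration.

  // Hypothetical invocation shape: {? = call my_proc(?, ?)}
  public void exampleCall(Database db) throws KettleDatabaseException {
    String[] arg = { "customer_id", "total" };
    String[] argdir = { "IN", "OUT" };
    int[] argtype = { ValueMetaInterface.TYPE_INTEGER, ValueMetaInterface.TYPE_NUMBER };
    // Because resultname is non-empty, the return value is read from parameter 1 and pos becomes 2.
    // The OUT argument "total" (i = 1) is then read from parameter pos + i = 3; the IN argument is skipped.
    RowMetaAndData out = db.callProcedure(arg, argdir, argtype, "result", ValueMetaInterface.TYPE_NUMBER);
    // 'out' now carries exactly two values: "result" and "total"; IN arguments never appear in it.
  }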
use of org.pentaho.di.core.exception.KettleDatabaseException in project pentaho-kettle by pentaho.
In the class Database, the method getQueryFields:
public RowMetaInterface getQueryFields(String sql, boolean param, RowMetaInterface inform, Object[] data) throws KettleDatabaseException {
  RowMetaInterface fields;
  DBCache dbcache = DBCache.getInstance();
  DBCacheEntry entry = null;
  //
  if (dbcache != null) {
    entry = new DBCacheEntry(databaseMeta.getName(), sql);
    fields = dbcache.get(entry);
    if (fields != null) {
      return fields;
    }
  }
  if (connection == null) {
    // Cache test without connect.
    return null;
  }
  //
  try {
    if (databaseMeta.supportsPreparedStatementMetadataRetrieval()) {
      // On with the regular program.
      //
      fields = getQueryFieldsFromPreparedStatement(sql);
    } else {
      fields = getQueryFieldsFromDatabaseMetaData();
    }
  } catch (Exception e) {
    /*
     * databaseMeta.getDatabaseType()==DatabaseMeta.TYPE_DATABASE_SYBASEIQ ) {
     */
    fields = getQueryFieldsFallback(sql, param, inform, data);
  }
  // Store in cache!!
  if (dbcache != null && entry != null) {
    if (fields != null) {
      dbcache.put(entry, fields);
    }
  }
  return fields;
}
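In practice this is used to discover the layout of a query without fetching any rows, for example when a step needs its output metadata at design time. A minimal sketch, assuming a connected Database instance db and the usual imports; note that repeat calls with the same SQL are served from DBCache, and that the method returns null when there is no live connection and no cache hit. The helper name and the SQL text are invented for illustration.

  // Sketch only: ask for the field layout of a SELECT without running it for data.
  public void printQueryLayout(Database db) throws KettleDatabaseException {
    RowMetaInterface layout = db.getQueryFields("SELECT id, name, created_at FROM customers", false, null, null);
    if (layout != null) { // null when there is no connection and the cache has no entry
      for (int i = 0; i < layout.size(); i++) {
        ValueMetaInterface field = layout.getValueMeta(i);
        System.out.println(field.getName() + " : " + field.getTypeDesc());
      }
    }
  }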