Use of org.pentaho.di.core.Result in the pentaho-kettle project (by pentaho).
Class JobEntryColumnsExistTest, method jobFail_tableNameIsEmpty:
@Test
public void jobFail_tableNameIsEmpty() throws KettleException {
    // A null table name must make the entry report exactly one error
    // and an overall negative result.
    jobEntry.setTablename(null);
    final Result execResult = jobEntry.execute(new Result(), 0);
    assertEquals("Should be error", 1, execResult.getNrErrors());
    assertFalse("Result should be false", execResult.getResult());
}
Use of org.pentaho.di.core.Result in the pentaho-kettle project (by pentaho).
Class JobEntryColumnsExistTest, method jobFail_columnsArrayIsEmpty:
@Test
public void jobFail_columnsArrayIsEmpty() throws KettleException {
    // Without a columns array the entry must fail with exactly one error
    // and an overall negative result.
    jobEntry.setArguments(null);
    final Result execResult = jobEntry.execute(new Result(), 0);
    assertEquals("Should be error", 1, execResult.getNrErrors());
    assertFalse("Result should be false", execResult.getResult());
}
Use of org.pentaho.di.core.Result in the pentaho-kettle project (by pentaho).
Class Database, method execStatement:
/**
 * Executes a single SQL statement over the current connection.
 * <p/>
 * Comments are stripped from the raw SQL first; the SqlScriptParser respects
 * single-quoted strings, so a double-dash or a multi-line comment appearing
 * inside a quoted literal is kept as part of the string instead of being
 * treated as a comment.
 *
 * @param rawsql the SQL statement to execute, possibly containing comments
 * @param params metadata of the statement parameters, or null when there are none
 * @param data   the parameter values matching {@code params}
 * @return a Result carrying the rows written/updated/deleted for INSERT/UPDATE/DELETE
 * @throws KettleDatabaseException when executing the statement fails
 */
public Result execStatement(String rawsql, RowMetaInterface params, Object[] data) throws KettleDatabaseException {
    Result result = new Result();
    String sql = databaseMeta.getDatabaseInterface().createSqlScriptParser().removeComments(rawsql).trim();
    try {
        boolean resultSet;
        int count;
        if (params != null) {
            // try-with-resources closes the statement even when setValues() or
            // execute() throws; the previous manual close() leaked it on error.
            try (PreparedStatement prepStmt = connection.prepareStatement(databaseMeta.stripCR(sql))) {
                // set the parameters!
                setValues(params, data, prepStmt);
                resultSet = prepStmt.execute();
                count = prepStmt.getUpdateCount();
            }
        } else {
            String sqlStripped = databaseMeta.stripCR(sql);
            try (Statement stmt = connection.createStatement()) {
                resultSet = stmt.execute(sqlStripped);
                count = stmt.getUpdateCount();
            }
        }
        String upperSql = sql.toUpperCase();
        if (!resultSet && count > 0) {
            // Attribute the update count to the right Result counter based on
            // the statement verb.
            if (upperSql.startsWith("INSERT")) {
                result.setNrLinesOutput(count);
            } else if (upperSql.startsWith("UPDATE")) {
                result.setNrLinesUpdated(count);
            } else if (upperSql.startsWith("DELETE")) {
                result.setNrLinesDeleted(count);
            }
        }
        // DDL that changes table structure invalidates cached metadata for
        // this database, so clear the cache.
        if (upperSql.startsWith("ALTER TABLE") || upperSql.startsWith("DROP TABLE") || upperSql.startsWith("CREATE TABLE")) {
            DBCache.getInstance().clear(databaseMeta.getName());
        }
    } catch (SQLException ex) {
        throw new KettleDatabaseException("Couldn't execute SQL: " + sql + Const.CR, ex);
    } catch (Exception e) {
        throw new KettleDatabaseException("Unexpected error executing SQL: " + Const.CR, e);
    }
    return result;
}
Use of org.pentaho.di.core.Result in the pentaho-kettle project (by pentaho).
Class Database, method execStatements:
/**
 * Executes a script of SQL statements, separated by ";", over the existing
 * connection.
 * <p/>
 * SELECT statements are run as queries and their rows are drained and counted
 * (and logged at detailed level); every other statement is delegated to
 * {@link #execStatement(String, RowMetaInterface, Object[])} and its counts
 * are merged into the returned Result, as used by Jobs.
 *
 * @param script The SQL script to be execute
 * @param params Parameters Meta
 * @param data Parameters value
 * @return A result with counts of the number or records updates, inserted, deleted or read.
 * @throws KettleDatabaseException In case an error occurs
 */
public Result execStatements(String script, RowMetaInterface params, Object[] data) throws KettleDatabaseException {
    Result result = new Result();
    SqlScriptParser scriptParser = databaseMeta.getDatabaseInterface().createSqlScriptParser();
    List<String> statements = scriptParser.split(script);
    int executedCount = 0;
    if (statements != null) {
        for (String rawStatement : statements) {
            // Strip single-line and multi-line comments before deciding what
            // kind of statement this is; skip anything that is left empty.
            String scrubbed = scriptParser.removeComments(rawStatement);
            if (Const.onlySpaces(scrubbed)) {
                continue;
            }
            String sql = Const.trim(scrubbed);
            if (sql.toUpperCase().startsWith("SELECT")) {
                // A query: drain the result set, counting every row read.
                if (log.isDetailed()) {
                    log.logDetailed("launch SELECT statement: " + Const.CR + sql);
                }
                executedCount++;
                ResultSet rs = null;
                try {
                    rs = openQuery(sql, params, data);
                    if (rs == null) {
                        if (log.isDebug()) {
                            log.logDebug("Error executing query: " + Const.CR + sql);
                        }
                    } else {
                        for (Object[] row = getRow(rs); row != null; row = getRow(rs)) {
                            result.setNrLinesRead(result.getNrLinesRead() + 1);
                            if (log.isDetailed()) {
                                log.logDetailed(rowMeta.getString(row));
                            }
                        }
                    }
                } catch (KettleValueException e) {
                    // Just pass the error upwards.
                    throw new KettleDatabaseException(e);
                } finally {
                    try {
                        if (rs != null) {
                            rs.close();
                        }
                    } catch (SQLException ex) {
                        if (log.isDebug()) {
                            log.logDebug("Error closing query: " + Const.CR + sql);
                        }
                    }
                }
            } else {
                // Anything else (DDL or DML) goes through execStatement() and
                // its counters are accumulated into the overall result.
                if (log.isDetailed()) {
                    log.logDetailed("launch DDL statement: " + Const.CR + sql);
                }
                executedCount++;
                result.add(execStatement(sql, params, data));
            }
        }
    }
    if (log.isDetailed()) {
        log.logDetailed(executedCount + " statement" + (executedCount == 1 ? "" : "s") + " executed");
    }
    return result;
}
Use of org.pentaho.di.core.Result in the pentaho-kettle project (by pentaho).
Class SimpleEvalNullFieldIT, method testNullField:
@Test
public void testNullField() throws KettleXMLException, IOException, URISyntaxException {
    // Load the job from the test resources and run it to completion.
    JobMeta jm = new JobMeta(new File(SimultaneousJobsAppenderIT.class.getClassLoader().getResource(PKG + jobPath).toURI()).getCanonicalPath(), null);
    Job job = new Job(null, jm);
    job.start();
    job.waitUntilFinished();
    Result result = job.getResult();
    // Check the error count FIRST: when the job fails, failing with its log
    // text is far more diagnostic than a bare assertTrue failure. The original
    // order asserted getResult() before this check, so the log was never shown.
    if (result.getNrErrors() != 0) {
        Assert.fail(result.getLogText());
    }
    Assert.assertTrue("Job result should be true", result.getResult());
}
Aggregations