Use of org.pentaho.di.core.exception.KettleValueException in project pentaho-kettle by pentaho.
Class ValueDataUtilBackwardCompatibilityIT, method testNVL:
public void testNVL() {
  // Test Kettle number types
  assertEquals(Double.valueOf("1.0"), calculate("1", "", ValueMetaInterface.TYPE_NUMBER, CalculatorMetaFunction.CALC_NVL));
  assertEquals(Double.valueOf("2.0"), calculate("", "2", ValueMetaInterface.TYPE_NUMBER, CalculatorMetaFunction.CALC_NVL));
  assertEquals(Double.valueOf("10.0"), calculate("10", "20", ValueMetaInterface.TYPE_NUMBER, CalculatorMetaFunction.CALC_NVL));
  assertEquals(null, calculate("", "", ValueMetaInterface.TYPE_NUMBER, CalculatorMetaFunction.CALC_NVL));

  // Test Kettle string types
  assertEquals("1", calculate("1", "", ValueMetaInterface.TYPE_STRING, CalculatorMetaFunction.CALC_NVL));
  assertEquals("2", calculate("", "2", ValueMetaInterface.TYPE_STRING, CalculatorMetaFunction.CALC_NVL));
  assertEquals("10", calculate("10", "20", ValueMetaInterface.TYPE_STRING, CalculatorMetaFunction.CALC_NVL));
  assertEquals(null, calculate("", "", ValueMetaInterface.TYPE_STRING, CalculatorMetaFunction.CALC_NVL));

  // Test Kettle Integer (Java Long) types
  assertEquals(Long.valueOf("1"), calculate("1", "", ValueMetaInterface.TYPE_INTEGER, CalculatorMetaFunction.CALC_NVL));
  assertEquals(Long.valueOf("2"), calculate("", "2", ValueMetaInterface.TYPE_INTEGER, CalculatorMetaFunction.CALC_NVL));
  assertEquals(Long.valueOf("10"), calculate("10", "20", ValueMetaInterface.TYPE_INTEGER, CalculatorMetaFunction.CALC_NVL));
  assertEquals(null, calculate("", "", ValueMetaInterface.TYPE_INTEGER, CalculatorMetaFunction.CALC_NVL));

  // Test Kettle BigNumber types
  assertEquals(0, new BigDecimal("1").compareTo((BigDecimal) calculate("1", "", ValueMetaInterface.TYPE_BIGNUMBER, CalculatorMetaFunction.CALC_NVL)));
  assertEquals(0, new BigDecimal("2").compareTo((BigDecimal) calculate("", "2", ValueMetaInterface.TYPE_BIGNUMBER, CalculatorMetaFunction.CALC_NVL)));
  assertEquals(0, new BigDecimal("10").compareTo((BigDecimal) calculate("10", "20", ValueMetaInterface.TYPE_BIGNUMBER, CalculatorMetaFunction.CALC_NVL)));
  assertEquals(null, calculate("", "", ValueMetaInterface.TYPE_BIGNUMBER, CalculatorMetaFunction.CALC_NVL));

  // boolean
  assertEquals(true, calculate("true", "", ValueMetaInterface.TYPE_BOOLEAN, CalculatorMetaFunction.CALC_NVL));
  assertEquals(false, calculate("", "false", ValueMetaInterface.TYPE_BOOLEAN, CalculatorMetaFunction.CALC_NVL));
  assertEquals(false, calculate("false", "true", ValueMetaInterface.TYPE_BOOLEAN, CalculatorMetaFunction.CALC_NVL));
  assertEquals(null, calculate("", "", ValueMetaInterface.TYPE_BOOLEAN, CalculatorMetaFunction.CALC_NVL));

  // Test Kettle date
  SimpleDateFormat simpleDateFormat = new SimpleDateFormat(yyyy_MM_dd);
  try {
    assertEquals(simpleDateFormat.parse("2012-04-11"), calculate("2012-04-11", "", ValueMetaInterface.TYPE_DATE, CalculatorMetaFunction.CALC_NVL));
    assertEquals(simpleDateFormat.parse("2012-11-04"), calculate("", "2012-11-04", ValueMetaInterface.TYPE_DATE, CalculatorMetaFunction.CALC_NVL));
    assertEquals(simpleDateFormat.parse("1965-07-01"), calculate("1965-07-01", "1967-04-11", ValueMetaInterface.TYPE_DATE, CalculatorMetaFunction.CALC_NVL));
    assertNull(calculate("", "", ValueMetaInterface.TYPE_DATE, CalculatorMetaFunction.CALC_NVL));
  } catch (ParseException pe) {
    fail(pe.getMessage());
  }
  // assertEquals(0, calculate("", "2012-11-04", ValueMetaInterface.TYPE_DATE, CalculatorMetaFunction.CALC_NVL)));
  // assertEquals(0, calculate("2012-11-04", "2010-04-11", ValueMetaInterface.TYPE_DATE,
  // CalculatorMetaFunction.CALC_NVL)));
  // assertEquals(null, calculate("", "", ValueMetaInterface.TYPE_DATE, CalculatorMetaFunction.CALC_NVL));

  // binary
  ValueMeta stringValueMeta = new ValueMeta("string", ValueMeta.TYPE_STRING);
  try {
    byte[] data = stringValueMeta.getBinary("101");
    byte[] calculated = (byte[]) calculate("101", "", ValueMetaInterface.TYPE_BINARY, CalculatorMetaFunction.CALC_NVL);
    assertTrue(Arrays.equals(data, calculated));

    data = stringValueMeta.getBinary("011");
    calculated = (byte[]) calculate("", "011", ValueMetaInterface.TYPE_BINARY, CalculatorMetaFunction.CALC_NVL);
    assertTrue(Arrays.equals(data, calculated));

    data = stringValueMeta.getBinary("110");
    calculated = (byte[]) calculate("110", "011", ValueMetaInterface.TYPE_BINARY, CalculatorMetaFunction.CALC_NVL);
    assertTrue(Arrays.equals(data, calculated));

    calculated = (byte[]) calculate("", "", ValueMetaInterface.TYPE_BINARY, CalculatorMetaFunction.CALC_NVL);
    assertNull(calculated);
    // assertEquals(binaryValueMeta.convertData(new ValueMeta("dummy", ValueMeta.TYPE_STRING), "101"),
    // calculate("101", "", ValueMetaInterface.TYPE_BINARY, CalculatorMetaFunction.CALC_NVL));
  } catch (KettleValueException kve) {
    fail(kve.getMessage());
  }
}
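Note that the calculate(...) helper used above belongs to the test class and is not shown in this snippet; the sketch below is not its implementation. For orientation only: the Calculator step resolves CALC_NVL through ValueDataUtil, and a minimal direct call could look like this, assuming the four-argument ValueDataUtil.nvl(metaA, dataA, metaB, dataB) form and the newer ValueMetaInteger class (verify both against your Kettle version before relying on them):

  import org.pentaho.di.core.exception.KettleValueException;
  import org.pentaho.di.core.row.ValueDataUtil;
  import org.pentaho.di.core.row.ValueMetaInterface;
  import org.pentaho.di.core.row.value.ValueMetaInteger;

  public class NvlSketch {
    // NVL semantics exercised by testNVL: the first non-null value wins, null when both are null.
    public static void main(String[] args) throws KettleValueException {
      ValueMetaInterface metaA = new ValueMetaInteger("a");
      ValueMetaInterface metaB = new ValueMetaInteger("b");

      System.out.println(ValueDataUtil.nvl(metaA, 10L, metaB, 20L));   // 10
      System.out.println(ValueDataUtil.nvl(metaA, null, metaB, 2L));   // 2
      System.out.println(ValueDataUtil.nvl(metaA, null, metaB, null)); // null
    }
  }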
Use of org.pentaho.di.core.exception.KettleValueException in project pentaho-kettle by pentaho.
Class DatabaseLookupIT, method checkRows:
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 */
public void checkRows(List<RowMetaAndData> rows1, List<RowMetaAndData> rows2) {
  int idx = 1;
  if (rows1.size() != rows2.size()) {
    fail("Number of rows is not the same: " + rows1.size() + " and " + rows2.size());
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();
  while (it1.hasNext() && it2.hasNext()) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();
    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();
    if (rm1.size() != rm2.size()) {
      fail("row nr " + idx + " is not equal");
    }
    int[] fields = new int[r1.length];
    for (int ydx = 0; ydx < r1.length; ydx++) {
      fields[ydx] = ydx;
    }
    try {
      if (rm1.getRowMeta().compare(r1, r2, fields) != 0) {
        fail("row nr " + idx + " is not equal");
      }
    } catch (KettleValueException e) {
      fail("row nr " + idx + " is not equal");
    }
    idx++;
  }
}
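A hedged usage sketch follows; the test method name and the rows are invented for illustration and are not part of DatabaseLookupIT. It assumes the method lives in the same JUnit test class, so checkRows(...) and fail(...) are in scope; in a real IT the "actual" list would be collected from the transformation under test rather than copied from the expected list.

  import java.util.ArrayList;
  import java.util.List;

  import org.pentaho.di.core.RowMetaAndData;
  import org.pentaho.di.core.row.RowMeta;
  import org.pentaho.di.core.row.RowMetaInterface;
  import org.pentaho.di.core.row.value.ValueMetaInteger;
  import org.pentaho.di.core.row.value.ValueMetaString;

  public void testCheckRowsUsageSketch() {
    // Hypothetical rows over a two-field layout.
    RowMetaInterface rowMeta = new RowMeta();
    rowMeta.addValueMeta(new ValueMetaString("name"));
    rowMeta.addValueMeta(new ValueMetaInteger("id"));

    List<RowMetaAndData> expected = new ArrayList<RowMetaAndData>();
    expected.add(new RowMetaAndData(rowMeta, "first", 1L));
    expected.add(new RowMetaAndData(rowMeta, "second", 2L));

    List<RowMetaAndData> actual = new ArrayList<RowMetaAndData>(expected);

    // Compares list size, row width and each field value in order; fails on the first mismatch.
    checkRows(expected, actual);
  }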
Use of org.pentaho.di.core.exception.KettleValueException in project pentaho-kettle by pentaho.
Class DetectLastRowStepIT, method checkRows:
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 *
 * @param rows1
 *          first row set to compare
 * @param rows2
 *          second row set to compare
 */
public void checkRows(List<RowMetaAndData> rows1, List<RowMetaAndData> rows2) {
  int idx = 1;
  if (rows1.size() != rows2.size()) {
    fail("Number of rows is not the same: " + rows1.size() + " and " + rows2.size());
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();
  while (it1.hasNext() && it2.hasNext()) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();
    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();
    if (rm1.size() != rm2.size()) {
      fail("row nr " + idx + " is not equal");
    }
    int[] fields = new int[rm1.size()];
    for (int ydx = 0; ydx < rm1.size(); ydx++) {
      fields[ydx] = ydx;
    }
    try {
      if (rm1.getRowMeta().compare(r1, r2, fields) != 0) {
        fail("row nr " + idx + " is not equal");
      }
    } catch (KettleValueException e) {
      fail("row nr " + idx + " is not equal");
    }
    idx++;
  }
}
Use of org.pentaho.di.core.exception.KettleValueException in project pentaho-kettle by pentaho.
Class ExecSQLRowIT, method checkRows:
/**
 * Check the 2 lists comparing the rows in order. If they are not the same fail the test.
 */
public void checkRows(List<RowMetaAndData> rows1, List<RowMetaAndData> rows2) {
  int idx = 1;
  if (rows1.size() != rows2.size()) {
    fail("Number of rows is not the same: " + rows1.size() + " and " + rows2.size());
  }
  Iterator<RowMetaAndData> it1 = rows1.iterator();
  Iterator<RowMetaAndData> it2 = rows2.iterator();
  while (it1.hasNext() && it2.hasNext()) {
    RowMetaAndData rm1 = it1.next();
    RowMetaAndData rm2 = it2.next();
    Object[] r1 = rm1.getData();
    Object[] r2 = rm2.getData();
    if (rm1.size() != rm2.size()) {
      fail("row nr " + idx + " is not equal");
    }
    int[] fields = new int[r1.length];
    for (int ydx = 0; ydx < r1.length; ydx++) {
      fields[ydx] = ydx;
    }
    try {
      if (rm1.getRowMeta().compare(r1, r2, fields) != 0) {
        fail("row nr " + idx + " is not equal");
      }
    } catch (KettleValueException e) {
      fail("row nr " + idx + " is not equal");
    }
    idx++;
  }
}
Use of org.pentaho.di.core.exception.KettleValueException in project pentaho-kettle by pentaho.
Class SortedMerge, method getRowSorted:
/**
 * We read from all streams in the partition merge mode. For that we need at least one row on all input rowsets...
 * If we don't have a row, we wait for one.
 *
 * TODO: keep the inputRowSets() list sorted and go from there. That should dramatically improve speed as you only
 * need half as many comparisons.
 *
 * @return the next row
 */
private synchronized Object[] getRowSorted() throws KettleException {
  if (first) {
    first = false;

    // Verify that socket connections to all the remote input steps are opened
    // before we start to read/write ...
    //
    openRemoteInputStepSocketsOnce();

    // Read one row from all rowsets...
    //
    data.sortedBuffer = new ArrayList<RowSetRow>();
    data.rowMeta = null;

    // PDI-1212:
    // If one of the inputRowSets holds a null row (the input yields
    // 0 rows), then the null rowSet is removed from the InputRowSet buffer (BaseStep.getRowFrom()),
    // which throws this loop off by one (the next set never gets processed).
    // Instead of modifying BaseStep, I figure reversing the loop here would
    // effect change in fewer areas. If the reverse loop causes a problem, please
    // re-open http://jira.pentaho.com/browse/PDI-1212.
    List<RowSet> inputRowSets = getInputRowSets();
    for (int i = inputRowSets.size() - 1; i >= 0 && !isStopped(); i--) {
      RowSet rowSet = inputRowSets.get(i);
      Object[] row = getRowFrom(rowSet);
      if (row != null) {
        // Add this row to the sortedBuffer...
        // Which is not yet sorted, we'll get to that later.
        //
        data.sortedBuffer.add(new RowSetRow(rowSet, rowSet.getRowMeta(), row));
        if (data.rowMeta == null) {
          data.rowMeta = rowSet.getRowMeta().clone();
        }
        //
        if (data.fieldIndices == null) {
          // Get the indexes of the specified sort fields...
          data.fieldIndices = new int[meta.getFieldName().length];
          for (int f = 0; f < data.fieldIndices.length; f++) {
            data.fieldIndices[f] = data.rowMeta.indexOfValue(meta.getFieldName()[f]);
            if (data.fieldIndices[f] < 0) {
              throw new KettleStepException("Unable to find fieldname [" + meta.getFieldName()[f] + "] in row : " + data.rowMeta);
            }
            data.rowMeta.getValueMeta(data.fieldIndices[f]).setSortedDescending(!meta.getAscending()[f]);
          }
        }
      }

      data.comparator = new Comparator<RowSetRow>() {
        public int compare(RowSetRow o1, RowSetRow o2) {
          try {
            return o1.getRowMeta().compare(o1.getRowData(), o2.getRowData(), data.fieldIndices);
          } catch (KettleValueException e) {
            // TODO see if we should fire off alarms over here... Perhaps throw a RuntimeException.
            return 0;
          }
        }
      };

      // Now sort the sortedBuffer for the first time.
      //
      Collections.sort(data.sortedBuffer, data.comparator);
    }
  }

  //
  if (data.sortedBuffer.isEmpty()) {
    return null;
  }

  // Now that we have all rows sorted, all we need to do is find out what the smallest row is.
  // The smallest row is the first in our case...
  //
  RowSetRow smallestRow = data.sortedBuffer.get(0);
  data.sortedBuffer.remove(0);
  Object[] outputRowData = smallestRow.getRowData();

  // We read another row from the row set where the smallest row came from.
  // That way we exhaust all row sets.
  //
  Object[] extraRow = getRowFrom(smallestRow.getRowSet());
  //
  if (extraRow != null) {
    // Add this one to the sortedBuffer
    //
    RowSetRow add = new RowSetRow(smallestRow.getRowSet(), smallestRow.getRowSet().getRowMeta(), extraRow);
    int index = Collections.binarySearch(data.sortedBuffer, add, data.comparator);
    if (index < 0) {
      data.sortedBuffer.add(-index - 1, add);
    } else {
      data.sortedBuffer.add(index, add);
    }
  }
  //
  if (getTrans().isSafeModeEnabled()) {
    // For checking we need to get data and meta
    //
    safeModeChecking(smallestRow.getRowMeta());
  }
  return outputRowData;
}
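As a standalone illustration of the technique getRowSorted relies on (plain Java, not Kettle API; all names below are invented for the sketch): keep one buffered entry per input, sort the buffer once, then repeatedly pop the smallest entry and re-insert the next value from that same source via Collections.binarySearch, so the buffer never needs a full re-sort.

  import java.util.ArrayList;
  import java.util.Arrays;
  import java.util.Collections;
  import java.util.Comparator;
  import java.util.Iterator;
  import java.util.List;

  /** Simplified k-way merge over pre-sorted sources, mirroring the sortedBuffer idea above. */
  public class SortedMergeSketch {

    public static List<Integer> merge(List<List<Integer>> sources) {
      // One cursor per source, like one buffered row per input RowSet.
      List<Iterator<Integer>> cursors = new ArrayList<Iterator<Integer>>();
      for (List<Integer> s : sources) {
        cursors.add(s.iterator());
      }

      // Each buffer entry is {value, sourceIndex}; at most one entry per source.
      Comparator<int[]> byValue = new Comparator<int[]>() {
        public int compare(int[] a, int[] b) {
          return Integer.compare(a[0], b[0]);
        }
      };
      List<int[]> buffer = new ArrayList<int[]>();
      for (int i = 0; i < cursors.size(); i++) {
        if (cursors.get(i).hasNext()) {
          buffer.add(new int[] { cursors.get(i).next(), i });
        }
      }
      Collections.sort(buffer, byValue); // sorted once, like data.sortedBuffer

      List<Integer> out = new ArrayList<Integer>();
      while (!buffer.isEmpty()) {
        int[] smallest = buffer.remove(0); // the smallest entry is always at index 0
        out.add(smallest[0]);

        // Refill from the source the smallest entry came from, keeping the buffer
        // sorted via binary-search insertion instead of re-sorting it.
        Iterator<Integer> cursor = cursors.get(smallest[1]);
        if (cursor.hasNext()) {
          int[] next = new int[] { cursor.next(), smallest[1] };
          int index = Collections.binarySearch(buffer, next, byValue);
          buffer.add(index < 0 ? -index - 1 : index, next);
        }
      }
      return out;
    }

    public static void main(String[] args) {
      System.out.println(merge(Arrays.asList(
        Arrays.asList(1, 4, 7), Arrays.asList(2, 5, 8), Arrays.asList(3, 6, 9))));
      // -> [1, 2, 3, 4, 5, 6, 7, 8, 9]
    }
  }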