Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
The class GroupByMetaGetFieldsTest, method setup:
@Before
public void setup() throws KettlePluginException {
  rowMeta = spy(new RowMeta());
  groupByMeta = spy(new GroupByMeta());
  mockStatic(ValueMetaFactory.class);
  when(ValueMetaFactory.createValueMeta(anyInt())).thenCallRealMethod();
  when(ValueMetaFactory.createValueMeta(anyString(), anyInt())).thenCallRealMethod();
  // type id 3 is Date, 5 is Integer; -1, -1 leave length and precision unspecified
  when(ValueMetaFactory.createValueMeta("maxDate", 3, -1, -1)).thenReturn(new ValueMetaDate("maxDate"));
  when(ValueMetaFactory.createValueMeta("minDate", 3, -1, -1)).thenReturn(new ValueMetaDate("minDate"));
  when(ValueMetaFactory.createValueMeta("countDate", 5, -1, -1)).thenReturn(new ValueMetaInteger("countDate"));
  when(ValueMetaFactory.getValueMetaName(3)).thenReturn("Date");
  when(ValueMetaFactory.getValueMetaName(5)).thenReturn("Integer");
}
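The stubbed ids 3 and 5 are the standard Kettle type ids ValueMetaInterface.TYPE_DATE and ValueMetaInterface.TYPE_INTEGER. Outside a mocked test the factory is normally called with those constants; a minimal sketch, assuming the Kettle environment (and with it the value meta plugin registry) has already been initialized, e.g. via KettleEnvironment.init():

// Minimal sketch, not part of the test above: the unmocked factory calls that the stubs emulate.
ValueMetaInterface maxDate = ValueMetaFactory.createValueMeta("maxDate", ValueMetaInterface.TYPE_DATE);
ValueMetaInterface countDate = ValueMetaFactory.createValueMeta("countDate", ValueMetaInterface.TYPE_INTEGER);
// getValueMetaName resolves a type id back to its display name
String dateTypeName = ValueMetaFactory.getValueMetaName(ValueMetaInterface.TYPE_DATE); // "Date"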
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
The class PDI4910_DenormaliserTest, method testDeNormalise:
@Test
public void testDeNormalise() throws Exception {
  // init step data
  DenormaliserData stepData = new DenormaliserData();
  stepData.keyFieldNr = 0;
  stepData.keyValue = new HashMap<String, List<Integer>>();
  stepData.keyValue.put("1", Arrays.asList(new Integer[] { 0, 1 }));
  stepData.fieldNameIndex = new int[] { 1, 2 };
  stepData.inputRowMeta = new RowMeta();
  ValueMetaDate outDateField1 = new ValueMetaDate("date_field[yyyy-MM-dd]");
  ValueMetaDate outDateField2 = new ValueMetaDate("date_field[yyyy/MM/dd]");
  stepData.outputRowMeta = new RowMeta();
  stepData.outputRowMeta.addValueMeta(0, outDateField1);
  stepData.outputRowMeta.addValueMeta(1, outDateField2);
  stepData.removeNrs = new int[] {};
  stepData.targetResult = new Object[] { null, null };
  // init step meta: each target field carries the format mask used to parse the incoming string
  DenormaliserMeta stepMeta = new DenormaliserMeta();
  DenormaliserTargetField[] denormaliserTargetFields = new DenormaliserTargetField[2];
  DenormaliserTargetField targetField1 = new DenormaliserTargetField();
  DenormaliserTargetField targetField2 = new DenormaliserTargetField();
  targetField1.setTargetFormat("yyyy-MM-dd");
  targetField2.setTargetFormat("yyyy/MM/dd");
  denormaliserTargetFields[0] = targetField1;
  denormaliserTargetFields[1] = targetField2;
  stepMeta.setDenormaliserTargetField(denormaliserTargetFields);
  // init row meta
  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta(0, new ValueMetaInteger("key"));
  rowMeta.addValueMeta(1, new ValueMetaString("stringDate1"));
  rowMeta.addValueMeta(2, new ValueMetaString("stringDate2"));
  // init row data
  Object[] rowData = new Object[] { 1L, "2000-10-20", "2000/10/20" };
  // init step
  denormaliser = new Denormaliser(mockHelper.stepMeta, stepData, 0, mockHelper.transMeta, mockHelper.trans);
  // inject step meta via reflection, since the private field has no setter
  Field metaField = denormaliser.getClass().getDeclaredField("meta");
  Assert.assertNotNull("Can't find a field 'meta' in class Denormaliser", metaField);
  metaField.setAccessible(true);
  metaField.set(denormaliser, stepMeta);
  // call tested method
  Method deNormalise = denormaliser.getClass().getDeclaredMethod("deNormalise", RowMetaInterface.class, Object[].class);
  Assert.assertNotNull("Can't find a method 'deNormalise' in class Denormaliser", deNormalise);
  deNormalise.setAccessible(true);
  deNormalise.invoke(denormaliser, rowMeta, rowData);
  // verify: both string dates were converted into non-null Date values
  for (Object res : stepData.targetResult) {
    Assert.assertNotNull("Date is null", res);
  }
}
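The two string dates are only parsed because each target field carries a format mask. The same string-to-date conversion can be exercised directly through the value metadata; a minimal sketch, illustrative only and not the Denormaliser internals:

// Minimal sketch: convert a formatted string into a java.util.Date by letting a
// ValueMetaDate pull the value through a ValueMetaString that carries the mask.
ValueMetaString sourceMeta = new ValueMetaString("stringDate1");
sourceMeta.setConversionMask("yyyy-MM-dd");
ValueMetaDate targetMeta = new ValueMetaDate("date_field");
Object parsedDate = targetMeta.convertData(sourceMeta, "2000-10-20"); // a Date for Oct 20, 2000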
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
The class PDI_11152_Test, method testInputLazyConversion:
@Test
public void testInputLazyConversion() throws KettleException {
  Database db = mock(Database.class);
  RowMeta returnRowMeta = new RowMeta();
  doReturn(new Object[] { new Timestamp(System.currentTimeMillis()) }).when(db).getLookup(any(PreparedStatement.class));
  returnRowMeta.addValueMeta(new ValueMetaDate("TimeStamp"));
  doReturn(returnRowMeta).when(db).getReturnRowMeta();
  // lazy conversion: the incoming date is a raw binary string; the parse mask lives on the storage metadata
  ValueMetaString storageMetadata = new ValueMetaString("Date");
  storageMetadata.setConversionMask("yyyy-MM-dd");
  ValueMetaDate valueMeta = new ValueMetaDate("Date");
  valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
  valueMeta.setStorageMetadata(storageMetadata);
  RowMeta inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta(valueMeta);
  UpdateMeta stepMeta = smh.processRowsStepMetaInterface;
  UpdateData stepData = smh.processRowsStepDataInterface;
  stepData.lookupParameterRowMeta = inputRowMeta;
  stepData.db = db;
  stepData.keynrs = stepData.valuenrs = new int[] { 0 };
  stepData.keynrs2 = new int[] { -1 };
  stepData.updateParameterRowMeta = when(mock(RowMeta.class).size()).thenReturn(2).getMock();
  Update step = new Update(smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans);
  step.setInputRowMeta(inputRowMeta);
  // the input row carries the date as bytes, exactly as a lazily converted stream would
  step.addRowSetToInputRowSets(smh.getMockInputRowSet(new Object[] { "2013-12-20".getBytes() }));
  step.init(smh.initStepMetaInterface, smh.initStepDataInterface);
  step.first = false;
  Assert.assertTrue("Failure during row processing", step.processRow(stepMeta, stepData));
}
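Reading such a lazily converted value back out normally goes through the row metadata, which delegates to the value meta and the mask on its storage metadata; a minimal sketch reusing the inputRowMeta built above (the lazyRow variable is illustrative, not part of the test):

// Minimal sketch: resolve a binary-string (lazy conversion) date through the row meta.
// getDate() parses the raw bytes with the yyyy-MM-dd mask held by the storage metadata.
Object[] lazyRow = new Object[] { "2013-12-20".getBytes() };
Date resolvedDate = inputRowMeta.getDate(lazyRow, 0);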
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
The class ReadAllCacheTest, method lookup_DoesNotFind_WithBetweenOperator:
@Test
public void lookup_DoesNotFind_WithBetweenOperator() throws Exception {
  RowMeta meta = keysMeta.clone();
  // BETWEEN on the date key consumes two lookup values (lower and upper bound),
  // so index 3 becomes a second date and the last key's meta is appended at the end
  meta.setValueMeta(3, new ValueMetaDate());
  meta.addValueMeta(new ValueMetaInteger());
  ReadAllCache cache = buildCache("<>,IS NOT NULL,BETWEEN,IS NULL");
  Object[] found = cache.getRowFromCache(meta, new Object[] { -1L, null, new Date(1000), new Date(2000), null });
  assertNull("(1000 <= keys[2] <= 2000) --> none", found);
}
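A date range check like the one this BETWEEN condition expresses can be sketched on plain java.util.Date values with the value meta's compare method; illustrative only, not the ReadAllCache internals:

// Minimal sketch: a BETWEEN-style range check via ValueMetaDate.
// compare() returns a negative, zero or positive int, like Comparable.compareTo().
ValueMetaDate dateMeta = new ValueMetaDate("keyDate");
Date candidate = new Date(200);
boolean between = dateMeta.compare(candidate, new Date(1000)) >= 0
    && dateMeta.compare(candidate, new Date(2000)) <= 0; // false: 200 ms lies below the lower bound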
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
The class ReadAllCacheTest, method setUp:
@Before
public void setUp() {
  stepData = new DatabaseLookupData();
  stepData.conditions = new int[4];
  keysMeta = new RowMeta();
  keysMeta.addValueMeta(new ValueMetaInteger());
  keysMeta.addValueMeta(new ValueMetaString());
  keysMeta.addValueMeta(new ValueMetaDate());
  keysMeta.addValueMeta(new ValueMetaInteger());
  keys = new Object[][] {
    new Object[] { 0L, "0", new Date(0), null },
    new Object[] { 0L, "0", new Date(50), null },
    new Object[] { 2L, "2", new Date(200), null },
    new Object[] { 1L, "1", new Date(100), null },
    new Object[] { 1L, "1", new Date(150), null }
  };
  data = new Object[][] {
    new Object[] { 0 }, new Object[] { 1 }, new Object[] { 2 }, new Object[] { 3 }, new Object[] { 4 }
  };
}