Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
In class PDI3499Test, method testDateLazyConversion:
@Test
public void testDateLazyConversion() throws KettleException {
  Database db = mock(Database.class);
  RowMeta returnRowMeta = new RowMeta();
  doReturn(new Object[] { new Timestamp(System.currentTimeMillis()) }).when(db).getLookup(any(PreparedStatement.class));
  returnRowMeta.addValueMeta(new ValueMetaDate("TimeStamp"));
  doReturn(returnRowMeta).when(db).getReturnRowMeta();

  ValueMetaString storageMetadata = new ValueMetaString("Date");
  storageMetadata.setConversionMask("yyyy-MM-dd");
  ValueMetaDate valueMeta = new ValueMetaDate("Date");
  valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
  valueMeta.setStorageMetadata(storageMetadata);
  RowMeta inputRowMeta = new RowMeta();
  inputRowMeta.addValueMeta(valueMeta);

  InsertUpdateMeta stepMeta = smh.processRowsStepMetaInterface;
  doReturn(new Boolean[] { true }).when(stepMeta).getUpdate();
  InsertUpdateData stepData = smh.processRowsStepDataInterface;
  stepData.lookupParameterRowMeta = inputRowMeta;
  stepData.db = db;
  stepData.keynrs = stepData.valuenrs = new int[] { 0 };
  stepData.keynrs2 = new int[] { -1 };
  stepData.updateParameterRowMeta = when(mock(RowMeta.class).size()).thenReturn(2).getMock();

  InsertUpdate step = new InsertUpdate(smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans);
  step.setInputRowMeta(inputRowMeta);
  step.addRowSetToInputRowSets(smh.getMockInputRowSet(new Object[] { "2013-12-20".getBytes() }));
  step.init(smh.initStepMetaInterface, smh.initStepDataInterface);
  step.first = false;
  step.processRow(stepMeta, stepData);
}
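The test feeds the step a date that is still stored as a binary string, with a ValueMetaString carrying the parse mask as storage metadata. As a point of reference, the conversion it relies on can be sketched in isolation; this is a minimal, hypothetical helper (names are illustrative), assuming the same Kettle core imports as the test plus org.pentaho.di.core.exception.KettleValueException:

// Hypothetical helper, not part of the test above: parse a masked date string
// the same way the step's storage metadata would.
static java.util.Date parseWithMask(String raw) throws KettleValueException {
  // The storage metadata attached to the lazily converted column: a string with a date mask.
  ValueMetaString storageMeta = new ValueMetaString("dateAsText");
  storageMeta.setConversionMask("yyyy-MM-dd"); // same mask the test configures
  ValueMetaDate dateMeta = new ValueMetaDate("date");
  // convertData() delegates parsing to the source meta, so the mask above drives the parse.
  return (java.util.Date) dateMeta.convertData(storageMeta, raw);
}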
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
In class PDI_10836_Test, method testDateInsert:
@Test
public void testDateInsert() throws Exception {
  SalesforceInsert step = new SalesforceInsert(smh.stepMeta, smh.stepDataInterface, 0, smh.transMeta, smh.trans);

  SalesforceInsertMeta meta = smh.initStepMetaInterface;
  doReturn(UUID.randomUUID().toString()).when(meta).getTargetURL();
  doReturn(UUID.randomUUID().toString()).when(meta).getUsername();
  doReturn(UUID.randomUUID().toString()).when(meta).getPassword();
  doReturn(UUID.randomUUID().toString()).when(meta).getModule();
  doReturn(2).when(meta).getBatchSizeInt();
  doReturn(new String[] { "Date" }).when(meta).getUpdateLookup();
  doReturn(new Boolean[] { false }).when(meta).getUseExternalId();

  SalesforceInsertData data = smh.initStepDataInterface;
  data.nrfields = 1;
  data.fieldnrs = new int[] { 0 };
  data.sfBuffer = new SObject[] { null };
  data.outputBuffer = new Object[][] { null };

  step.init(meta, data);

  RowMeta rowMeta = new RowMeta();
  ValueMetaInterface valueMeta = new ValueMetaDate("date");
  valueMeta.setDateFormatTimeZone(TimeZone.getTimeZone("Europe/Minsk"));
  rowMeta.addValueMeta(valueMeta);
  smh.initStepDataInterface.inputRowMeta = rowMeta;

  Calendar minskTime = Calendar.getInstance(valueMeta.getDateFormatTimeZone());
  minskTime.clear();
  minskTime.set(2013, Calendar.OCTOBER, 16);
  Object[] args = new Object[] { minskTime.getTime() };

  Method m = SalesforceInsert.class.getDeclaredMethod("writeToSalesForce", Object[].class);
  m.setAccessible(true);
  m.invoke(step, new Object[] { args });

  DateFormat utc = new SimpleDateFormat("yyyy-MM-dd");
  utc.setTimeZone(TimeZone.getTimeZone("UTC"));

  XmlObject xmlObject = SalesforceConnection.getChildren(data.sfBuffer[0])[0];
  Assert.assertEquals("2013-10-16", utc.format(((Calendar) xmlObject.getValue()).getTime()));
}
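The assertion checks that the Minsk-local calendar day survives the trip into the SObject buffer. The role of the value meta's time zone can be sketched on its own; this is a minimal, hypothetical helper (not part of the test), assuming the value meta applies its conversion mask and date-format time zone when rendering dates, and the same java.util imports as above:

// Hypothetical illustration: format a date through a ValueMetaDate configured like the test's.
static String formatInMinsk(java.util.Date d) throws KettleValueException {
  ValueMetaDate dateMeta = new ValueMetaDate("date");
  dateMeta.setConversionMask("yyyy-MM-dd");
  // The time zone set here is expected to be applied to the date format the meta builds internally.
  dateMeta.setDateFormatTimeZone(TimeZone.getTimeZone("Europe/Minsk"));
  return dateMeta.getString(d); // e.g. "2013-10-16" for a date created in the Minsk zone
}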
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
In class AccessInputMeta, method getFields:
@Override
public void getFields(RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore) throws KettleStepException {
  int i;
  int nr = inputFields == null ? 0 : inputFields.length;
  for (i = 0; i < nr; i++) {
    AccessInputField field = inputFields[i];
    int type = field.getType();
    if (type == ValueMetaInterface.TYPE_NONE) {
      type = ValueMetaInterface.TYPE_STRING;
    }
    ValueMetaInterface v;
    try {
      v = ValueMetaFactory.createValueMeta(space.environmentSubstitute(field.getName()), type);
    } catch (KettlePluginException e) {
      v = new ValueMetaNone(space.environmentSubstitute(field.getName()));
    }
    v.setLength(field.getLength());
    v.setPrecision(field.getPrecision());
    v.setOrigin(name);
    v.setConversionMask(field.getFormat());
    v.setDecimalSymbol(field.getDecimalSymbol());
    v.setGroupingSymbol(field.getGroupSymbol());
    v.setCurrencySymbol(field.getCurrencySymbol());
    v.setTrimType(field.getTrimType());
    r.addValueMeta(v);
  }
  if (includeFilename) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(filenameField));
    v.setLength(250);
    v.setPrecision(-1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (includeTablename) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(tablenameField));
    v.setLength(250);
    v.setPrecision(-1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (includeRowNumber) {
    ValueMetaInterface v = new ValueMetaInteger(space.environmentSubstitute(rowNumberField));
    v.setLength(ValueMetaInterface.DEFAULT_INTEGER_LENGTH, 0);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getShortFileNameField() != null && getShortFileNameField().length() > 0) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(getShortFileNameField()));
    v.setLength(100, -1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getExtensionField() != null && getExtensionField().length() > 0) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(getExtensionField()));
    v.setLength(100, -1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getPathField() != null && getPathField().length() > 0) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(getPathField()));
    v.setLength(100, -1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getSizeField() != null && getSizeField().length() > 0) {
    ValueMetaInterface v = new ValueMetaInteger(space.environmentSubstitute(getSizeField()));
    v.setOrigin(name);
    v.setLength(9);
    r.addValueMeta(v);
  }
  if (isHiddenField() != null && isHiddenField().length() > 0) {
    ValueMetaInterface v = new ValueMetaBoolean(space.environmentSubstitute(isHiddenField()));
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getLastModificationDateField() != null && getLastModificationDateField().length() > 0) {
    ValueMetaInterface v = new ValueMetaDate(space.environmentSubstitute(getLastModificationDateField()));
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getUriField() != null && getUriField().length() > 0) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(getUriField()));
    v.setLength(100, -1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
  if (getRootUriField() != null && getRootUriField().length() > 0) {
    ValueMetaInterface v = new ValueMetaString(space.environmentSubstitute(getRootUriField()));
    v.setLength(100, -1);
    v.setOrigin(name);
    r.addValueMeta(v);
  }
}
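The method follows the usual getFields() pattern: build a ValueMetaInterface per extra output field, configure it, stamp the producing step name via setOrigin(), and append it to the row metadata. A minimal sketch of the date case in isolation (field name, mask, and step name are illustrative, not taken from the step):

// Hypothetical stand-alone version of the "last modification date" branch.
RowMetaInterface row = new RowMeta();
ValueMetaInterface lastModified = new ValueMetaDate("lastModificationDateTime");
lastModified.setConversionMask("yyyy-MM-dd HH:mm:ss"); // how the date should be rendered downstream
lastModified.setOrigin("Access input");                // which step produced the field
row.addValueMeta(lastModified);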
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
In class CombinationLookup, method combiInsert:
/**
 * This inserts new record into a junk dimension
 */
public Long combiInsert(RowMetaInterface rowMeta, Object[] row, Long val_key, Long val_crc) throws KettleDatabaseException {
  String debug = "Combination insert";
  DatabaseMeta databaseMeta = meta.getDatabaseMeta();
  try {
    if (data.prepStatementInsert == null) {
      // first time: construct prepared statement
      debug = "First: construct prepared statement";
      data.insertRowMeta = new RowMeta();
      /*
       * Construct the SQL statement...
       *
       * INSERT INTO d_test(keyfield, [crcfield,] keylookup[]) VALUES(val_key, [val_crc], row values with keynrs[]) ;
       */
      String sql = "";
      sql += "INSERT INTO " + data.schemaTable + ("( ");
      boolean comma = false;
      if (!isAutoIncrement()) {
        // NO AUTOINCREMENT
        sql += databaseMeta.quoteField(meta.getTechnicalKeyField());
        data.insertRowMeta.addValueMeta(new ValueMetaInteger(meta.getTechnicalKeyField()));
        comma = true;
      } else if (databaseMeta.needsPlaceHolder()) {
        // placeholder on informix! Will be replaced in table by real autoinc value.
        sql += "0";
        data.insertRowMeta.addValueMeta(new ValueMetaInteger(meta.getTechnicalKeyField()));
        comma = true;
      }
      if (meta.useHash()) {
        if (comma) {
          sql += ", ";
        }
        sql += databaseMeta.quoteField(meta.getHashField());
        data.insertRowMeta.addValueMeta(new ValueMetaInteger(meta.getHashField()));
        comma = true;
      }
      if (!Utils.isEmpty(meta.getLastUpdateField())) {
        if (comma) {
          sql += ", ";
        }
        sql += databaseMeta.quoteField(meta.getLastUpdateField());
        data.insertRowMeta.addValueMeta(new ValueMetaDate(meta.getLastUpdateField()));
        comma = true;
      }
      for (int i = 0; i < meta.getKeyLookup().length; i++) {
        if (comma) {
          sql += ", ";
        }
        sql += databaseMeta.quoteField(meta.getKeyLookup()[i]);
        data.insertRowMeta.addValueMeta(rowMeta.getValueMeta(data.keynrs[i]));
        comma = true;
      }
      sql += ") VALUES (";
      comma = false;
      if (!isAutoIncrement()) {
        sql += '?';
        comma = true;
      }
      if (meta.useHash()) {
        if (comma) {
          sql += ',';
        }
        sql += '?';
        comma = true;
      }
      if (!Utils.isEmpty(meta.getLastUpdateField())) {
        if (comma) {
          sql += ',';
        }
        sql += '?';
        comma = true;
      }
      for (int i = 0; i < meta.getKeyLookup().length; i++) {
        if (comma) {
          sql += ',';
        } else {
          comma = true;
        }
        sql += '?';
      }
      sql += " )";
      String sqlStatement = sql;
      try {
        debug = "First: prepare statement";
        if (isAutoIncrement() && databaseMeta.supportsAutoGeneratedKeys()) {
          logDetailed("SQL with return keys: " + sqlStatement);
          data.prepStatementInsert = data.db.getConnection().prepareStatement(databaseMeta.stripCR(sqlStatement), Statement.RETURN_GENERATED_KEYS);
        } else {
          logDetailed("SQL without return keys: " + sqlStatement);
          data.prepStatementInsert = data.db.getConnection().prepareStatement(databaseMeta.stripCR(sqlStatement));
        }
      } catch (SQLException ex) {
        throw new KettleDatabaseException("Unable to prepare combi insert statement : " + Const.CR + sqlStatement, ex);
      } catch (Exception ex) {
        throw new KettleDatabaseException("Unable to prepare combi insert statement : " + Const.CR + sqlStatement, ex);
      }
    }
    debug = "Create new insert row rins";
    Object[] insertRow = new Object[data.insertRowMeta.size()];
    int insertIndex = 0;
    if (!isAutoIncrement()) {
      insertRow[insertIndex] = val_key;
      insertIndex++;
    }
    if (meta.useHash()) {
      insertRow[insertIndex] = val_crc;
      insertIndex++;
    }
    if (!Utils.isEmpty(meta.getLastUpdateField())) {
      insertRow[insertIndex] = new Date();
      insertIndex++;
    }
    for (int i = 0; i < data.keynrs.length; i++) {
      insertRow[insertIndex] = row[data.keynrs[i]];
      insertIndex++;
    }
    if (isRowLevel()) {
      logRowlevel("rins=" + data.insertRowMeta.getString(insertRow));
    }
    debug = "Set values on insert";
    // INSERT NEW VALUE!
    data.db.setValues(data.insertRowMeta, insertRow, data.prepStatementInsert);
    debug = "Insert row";
    data.db.insertRow(data.prepStatementInsert);
    debug = "Retrieve key";
    if (isAutoIncrement() && databaseMeta.supportsAutoGeneratedKeys()) {
      ResultSet keys = null;
      try {
        // 1 key
        keys = data.prepStatementInsert.getGeneratedKeys();
        if (keys.next()) {
          val_key = new Long(keys.getLong(1));
        } else {
          throw new KettleDatabaseException("Unable to retrieve auto-increment of combi insert key : " + meta.getTechnicalKeyField() + ", no fields in resultset");
        }
      } catch (SQLException ex) {
        throw new KettleDatabaseException("Unable to retrieve auto-increment of combi insert key : " + meta.getTechnicalKeyField(), ex);
      } finally {
        try {
          if (keys != null) {
            keys.close();
          }
        } catch (SQLException ex) {
          throw new KettleDatabaseException("Unable to retrieve auto-increment of combi insert key : " + meta.getTechnicalKeyField(), ex);
        }
      }
    }
  } catch (Exception e) {
    logError(Const.getStackTracker(e));
    throw new KettleDatabaseException("Unexpected error in combination insert in part [" + debug + "] : " + e.toString(), e);
  }
  return val_key;
}
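The prepared-statement parameters are driven entirely by data.insertRowMeta: each value meta added while the SQL is built must line up, position by position, with the entry placed in the insert row, and the last-update slot pairs a ValueMetaDate with a fresh java.util.Date. A minimal sketch of that pairing outside the step (column names are illustrative, not the step's actual fields):

// Hypothetical illustration of the row-meta/row pairing the insert relies on.
RowMeta insertRowMeta = new RowMeta();
insertRowMeta.addValueMeta(new ValueMetaInteger("tk_id"));     // technical key -> first '?'
insertRowMeta.addValueMeta(new ValueMetaDate("last_updated")); // last update   -> second '?'

Object[] insertRow = new Object[insertRowMeta.size()];
insertRow[0] = Long.valueOf(42L);    // bound as an integer parameter
insertRow[1] = new java.util.Date(); // bound as a date/timestamp parameter

// Database.setValues(insertRowMeta, insertRow, preparedStatement) would bind these in order;
// insertRowMeta.getString(insertRow) yields the readable form used in the row-level log above.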
Use of org.pentaho.di.core.row.value.ValueMetaDate in project pentaho-kettle by pentaho.
In class CombinationLookupMeta, method getSQLStatements:
@Override
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) {
  // default: nothing to do!
  SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null);
  int i;
  if (databaseMeta != null) {
    if (prev != null && prev.size() > 0) {
      if (!Utils.isEmpty(tablename)) {
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tablename);
        Database db = new Database(loggingObject, databaseMeta);
        try {
          boolean doHash = false;
          String cr_table = null;
          db.connect();
          // OK, what do we put in the new table??
          RowMetaInterface fields = new RowMeta();
          // First, the new technical key...
          ValueMetaInterface vkeyfield = new ValueMetaInteger(technicalKeyField);
          vkeyfield.setLength(10);
          vkeyfield.setPrecision(0);
          // Then the hashcode (optional)
          ValueMetaInterface vhashfield = null;
          if (useHash && !Utils.isEmpty(hashField)) {
            vhashfield = new ValueMetaInteger(hashField);
            vhashfield.setLength(15);
            vhashfield.setPrecision(0);
            doHash = true;
          }
          // Then the last update field (optional)
          ValueMetaInterface vLastUpdateField = null;
          if (!Utils.isEmpty(lastUpdateField)) {
            vLastUpdateField = new ValueMetaDate(lastUpdateField);
          }
          if (!db.checkTableExists(schemaTable)) {
            // Add technical key field.
            fields.addValueMeta(vkeyfield);
            // Add the keys only to the table
            if (keyField != null && keyLookup != null) {
              int cnt = keyField.length;
              for (i = 0; i < cnt; i++) {
                String error_field = "";
                // Find the value in the stream
                ValueMetaInterface v = prev.searchValueMeta(keyField[i]);
                if (v != null) {
                  String name = keyLookup[i];
                  ValueMetaInterface newValue = v.clone();
                  newValue.setName(name);
                  if (name.equals(vkeyfield.getName()) || (doHash == true && name.equals(vhashfield.getName()))) {
                    error_field += name;
                  }
                  if (error_field.length() > 0) {
                    retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.NameCollision", error_field));
                  } else {
                    fields.addValueMeta(newValue);
                  }
                }
              }
            }
            if (doHash == true) {
              fields.addValueMeta(vhashfield);
            }
            if (vLastUpdateField != null) {
              fields.addValueMeta(vLastUpdateField);
            }
          } else {
            // Table already exists
            // Get the fields that are in the table now:
            RowMetaInterface tabFields = db.getTableFields(schemaTable);
            // Don't forget to quote these as well...
            databaseMeta.quoteReservedWords(tabFields);
            if (tabFields.searchValueMeta(vkeyfield.getName()) == null) {
              // Add technical key field if it didn't exist yet
              fields.addValueMeta(vkeyfield);
            }
            // Add the already existing fields
            int cnt = tabFields.size();
            for (i = 0; i < cnt; i++) {
              ValueMetaInterface v = tabFields.getValueMeta(i);
              fields.addValueMeta(v);
            }
            // Find the missing fields in the real table
            String[] keyLookup = getKeyLookup();
            String[] keyField = getKeyField();
            if (keyField != null && keyLookup != null) {
              cnt = keyField.length;
              for (i = 0; i < cnt; i++) {
                // Find the value in the stream
                ValueMetaInterface v = prev.searchValueMeta(keyField[i]);
                if (v != null) {
                  ValueMetaInterface newValue = v.clone();
                  newValue.setName(keyLookup[i]);
                  // Does the corresponding name exist in the table
                  if (tabFields.searchValueMeta(newValue.getName()) == null) {
                    // nope --> add
                    fields.addValueMeta(newValue);
                  }
                }
              }
            }
            if (doHash == true && tabFields.searchValueMeta(vhashfield.getName()) == null) {
              // Add hash field
              fields.addValueMeta(vhashfield);
            }
            if (vLastUpdateField != null && tabFields.searchValueMeta(vLastUpdateField.getName()) == null) {
              fields.addValueMeta(vLastUpdateField);
            }
          }
          cr_table = db.getDDL(schemaTable, fields, (CREATION_METHOD_SEQUENCE.equals(getTechKeyCreation()) && sequenceFrom != null && sequenceFrom.length() != 0) ? null : technicalKeyField, CREATION_METHOD_AUTOINC.equals(getTechKeyCreation()), null, true);
          //
          // OK, now let's build the index
          //
          // What fields do we put int the index?
          // Only the hashcode or all fields?
          String cr_index = "";
          String cr_uniq_index = "";
          String[] idx_fields = null;
          if (useHash) {
            if (hashField != null && hashField.length() > 0) {
              idx_fields = new String[] { hashField };
            } else {
              retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.NotHashFieldSpecified"));
            }
          } else {
            // index on all key fields...
            if (!Utils.isEmpty(keyLookup)) {
              int nrfields = keyLookup.length;
              int maxFields = databaseMeta.getMaxColumnsInIndex();
              if (maxFields > 0 && nrfields > maxFields) {
                // For example, oracle indexes are limited to 32 fields...
                nrfields = maxFields;
              }
              idx_fields = new String[nrfields];
              for (i = 0; i < nrfields; i++) {
                idx_fields[i] = keyLookup[i];
              }
            } else {
              retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.NotFieldsSpecified"));
            }
          }
          if (!Utils.isEmpty(technicalKeyField)) {
            String[] techKeyArr = new String[] { technicalKeyField };
            if (!db.checkIndexExists(schemaTable, techKeyArr)) {
              String indexname = "idx_" + tablename + "_pk";
              cr_uniq_index = db.getCreateIndexStatement(schemaTable, indexname, techKeyArr, true, true, false, true);
              cr_uniq_index += Const.CR;
            }
          }
          // OK, now get the create lookup index statement...
          if (!Utils.isEmpty(idx_fields) && !db.checkIndexExists(schemaTable, idx_fields)) {
            String indexname = "idx_" + tablename + "_lookup";
            cr_index = db.getCreateIndexStatement(schemaTable, indexname, idx_fields, false, false, false, true);
            cr_index += Const.CR;
          }
          //
          // Don't forget the sequence (optional)
          //
          String cr_seq = "";
          if (databaseMeta.supportsSequences() && !Utils.isEmpty(sequenceFrom)) {
            if (!db.checkSequenceExists(schemaName, sequenceFrom)) {
              cr_seq += db.getCreateSequenceStatement(schemaName, sequenceFrom, 1L, 1L, -1L, true);
              cr_seq += Const.CR;
            }
          }
          retval.setSQL(transMeta.environmentSubstitute(cr_table + cr_uniq_index + cr_index + cr_seq));
        } catch (KettleException e) {
          retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.ErrorOccurred") + Const.CR + e.getMessage());
        }
      } else {
        retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.NotTableDefined"));
      }
    } else {
      retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.NotReceivingField"));
    }
  } else {
    retval.setError(BaseMessages.getString(PKG, "CombinationLookupMeta.ReturnValue.NotConnectionDefined"));
  }
  return retval;
}
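When the generated DDL has to create or extend the table, the ValueMetaDate built for the last-update field becomes a date/timestamp column whose concrete SQL type is chosen by the database dialect behind DatabaseMeta. A rough, hypothetical sketch of that mapping, assuming KettleEnvironment.init() has registered the database plugins and using placeholder connection settings (names and values here are illustrative, not taken from the step):

// Hypothetical sketch (not from the step): ask a DatabaseMeta how it would render a date column.
static String dateColumnDdl() throws KettleException {
  KettleEnvironment.init(); // registers database and value-meta plugins
  DatabaseMeta databaseMeta = new DatabaseMeta("demo", "MYSQL", "Native", "localhost", "test", "3306", "user", "password");
  ValueMetaInterface lastUpdate = new ValueMetaDate("last_updated");
  // Returns the column clause for this field, e.g. "last_updated DATETIME" on MySQL.
  return databaseMeta.getFieldDefinition(lastUpdate, null, null, false, true, false);
}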