Usage of org.pentaho.di.core.row.RowMeta in project pentaho-kettle: class MondrianHelper, method createFlattenedOutput.
/**
 * Retrieve the rows from the opened query and build a description of the flattened output.
 * Populates {@code rows}, {@code headings} and {@code outputRowMeta}, and caches the
 * resulting row metadata in the {@link DBCache}. The query needs to be opened beforehand.
 *
 * @throws KettleDatabaseException
 *           in case something goes wrong
 *
 * TODO: this is not quite working for our purposes.
 */
public void createFlattenedOutput() throws KettleDatabaseException {
  final Axis[] axes = result.getAxes();
  rows = new ArrayList<>();
  headings = new ArrayList<>();
  // Derive the column headings from the first position of each axis: one
  // heading per hierarchy. If any axis has no positions, the result is empty
  // and we cannot even deduce column headings.
  for (Axis axis : axes) {
    final List<Position> positions = axis.getPositions();
    if (positions.isEmpty()) {
      // even deduce column headings.
      return;
    }
    for (Member member : positions.get(0)) {
      Hierarchy hierarchy = member.getHierarchy();
      headings.add(hierarchy.getUniqueName());
    }
  }
  int[] coords = new int[axes.length];
  outputFlattenedRecurse(result, rows, new ArrayList<>(), coords, 0);
  outputRowMeta = new RowMeta();
  // Derive the value metadata from the first row only: map each cell's Java
  // type onto the corresponding Kettle value type, widening small integral
  // types to Long and Float to Double as Kettle expects.
  if (!rows.isEmpty()) {
    List<Object> rowValues = rows.get(0);
    for (int c = 0; c < rowValues.size(); c++) {
      Object valueData = rowValues.get(c);
      int valueType;
      if (valueData instanceof String) {
        valueType = ValueMetaInterface.TYPE_STRING;
      } else if (valueData instanceof Date) {
        valueType = ValueMetaInterface.TYPE_DATE;
      } else if (valueData instanceof Boolean) {
        valueType = ValueMetaInterface.TYPE_BOOLEAN;
      } else if (valueData instanceof Integer) {
        valueType = ValueMetaInterface.TYPE_INTEGER;
        valueData = Long.valueOf(((Integer) valueData).longValue());
      } else if (valueData instanceof Short) {
        valueType = ValueMetaInterface.TYPE_INTEGER;
        valueData = Long.valueOf(((Short) valueData).longValue());
      } else if (valueData instanceof Byte) {
        valueType = ValueMetaInterface.TYPE_INTEGER;
        valueData = Long.valueOf(((Byte) valueData).longValue());
      } else if (valueData instanceof Long) {
        valueType = ValueMetaInterface.TYPE_INTEGER;
      } else if (valueData instanceof Double) {
        valueType = ValueMetaInterface.TYPE_NUMBER;
      } else if (valueData instanceof Float) {
        valueType = ValueMetaInterface.TYPE_NUMBER;
        valueData = Double.valueOf(((Float) valueData).doubleValue());
      } else if (valueData instanceof BigDecimal) {
        valueType = ValueMetaInterface.TYPE_BIGNUMBER;
      } else {
        throw new KettleDatabaseException(BaseMessages.getString(PKG, "MondrianInputErrorUnhandledType", valueData.getClass().toString()));
      }
      try {
        ValueMetaInterface valueMeta = ValueMetaFactory.createValueMeta(headings.get(c), valueType);
        outputRowMeta.addValueMeta(valueMeta);
        // BUGFIX: store the (possibly widened) value back into column c. The
        // original code used the row index (always 0 here), which clobbered
        // column 0 and left the other columns unconverted.
        rowValues.set(c, valueData);
      } catch (Exception e) {
        throw new KettleDatabaseException(e);
      }
    }
  }
  // Now that we painstakingly found the metadata that comes out of the Mondrian database, cache it please...
  //
  DBCacheEntry cacheEntry = new DBCacheEntry(databaseMeta.getName(), queryString);
  DBCache.getInstance().put(cacheEntry, outputRowMeta);
}
Usage of org.pentaho.di.core.row.RowMeta in project pentaho-kettle: class MondrianHelper, method createRectangularOutput.
/**
 * Outputs one row per tuple on the rows axis. Requires a two-dimensional
 * (tabular) result: axis 0 is columns, axis 1 is rows. Populates
 * {@code headings}, {@code rows} and {@code outputRowMeta}, then caches the
 * row metadata in the {@link DBCache}.
 *
 * @throws KettleDatabaseException
 *           in case some or other error occurs
 */
public void createRectangularOutput() throws KettleDatabaseException {
  final Axis[] axes = result.getAxes();
  if (axes.length != 2) {
    // Rectangular output only makes sense for a two-dimensional result.
    throw new KettleDatabaseException(BaseMessages.getString(PKG, "MondrianInputErrorOnlyTabular"));
  }
  headings = new ArrayList<>();
  rows = new ArrayList<>();
  final Axis rowsAxis = axes[1];
  final Axis columnsAxis = axes[0];
  int rowOrdinal = -1;
  int[] coords = { 0, 0 };
  for (Position rowPos : rowsAxis.getPositions()) {
    ++rowOrdinal;
    coords[1] = rowOrdinal;
    if (rowOrdinal == 0) {
      // First headings are for the members on the rows axis.
      for (Member rowMember : rowPos) {
        headings.add(rowMember.getHierarchy().getUniqueName());
      }
      // Then one heading per column position; for multi-member positions,
      // concatenate the unique names.
      for (Position columnPos : columnsAxis.getPositions()) {
        StringBuilder heading = new StringBuilder();
        for (Member columnMember : columnPos) {
          if (heading.length() > 0) {
            heading.append(", ");
          }
          heading.append(columnMember.getUniqueName());
        }
        headings.add(heading.toString());
      }
    }
    List<Object> rowValues = new ArrayList<>();
    // The first row values describe the members on the rows axis.
    for (Member rowMember : rowPos) {
      rowValues.add(rowMember.getUniqueName());
    }
    // NOTE: Could also output all properties of each cell.
    for (int columnOrdinal = 0; columnOrdinal < columnsAxis.getPositions().size(); ++columnOrdinal) {
      coords[0] = columnOrdinal;
      final Cell cell = result.getCell(coords);
      rowValues.add(cell.getValue());
    }
    rows.add(rowValues);
  }
  outputRowMeta = new RowMeta();
  // Determine the metadata per column. Any cell can be null in a given row, so
  // for each column, keep scanning until we find one line that has an actual value.
  if (!rows.isEmpty()) {
    int columnCount = rows.get(0).size();
    Map<Integer, ValueMetaInterface> valueMetaHash = new HashMap<>();
    for (int i = 0; i < rows.size(); i++) {
      List<Object> rowValues = rows.get(i);
      for (int c = 0; c < rowValues.size(); c++) {
        // Autoboxing instead of the deprecated new Integer(c) constructor.
        if (valueMetaHash.containsKey(c)) {
          // we have this value already
          continue;
        }
        Object valueData = rowValues.get(c);
        if (valueData == null) {
          // skip this row and look for the metadata in a new one
          continue;
        }
        String valueName = headings.get(c);
        ValueMetaInterface valueMeta;
        if (valueData instanceof String) {
          valueMeta = new ValueMetaString(valueName);
        } else if (valueData instanceof Date) {
          valueMeta = new ValueMetaDate(valueName);
        } else if (valueData instanceof Boolean) {
          valueMeta = new ValueMetaBoolean(valueName);
        } else if (valueData instanceof Integer) {
          valueMeta = new ValueMetaInteger(valueName);
          valueData = Long.valueOf(((Integer) valueData).longValue());
        } else if (valueData instanceof Short) {
          valueMeta = new ValueMetaInteger(valueName);
          valueData = Long.valueOf(((Short) valueData).longValue());
        } else if (valueData instanceof Byte) {
          valueMeta = new ValueMetaInteger(valueName);
          valueData = Long.valueOf(((Byte) valueData).longValue());
        } else if (valueData instanceof Long) {
          valueMeta = new ValueMetaInteger(valueName);
        } else if (valueData instanceof Double) {
          valueMeta = new ValueMetaNumber(valueName);
        } else if (valueData instanceof Float) {
          valueMeta = new ValueMetaNumber(valueName);
          valueData = Double.valueOf(((Float) valueData).doubleValue());
        } else if (valueData instanceof BigDecimal) {
          valueMeta = new ValueMetaBigNumber(valueName);
        } else {
          throw new KettleDatabaseException(BaseMessages.getString(PKG, "MondrianInputErrorUnhandledType", valueData.getClass().toString()));
        }
        valueMetaHash.put(c, valueMeta);
      }
      if (valueMetaHash.size() == columnCount) {
        // we're done: every column has a metadata sample
        break;
      }
    }
    // Build the list of valueMetas in column order.
    List<ValueMetaInterface> valueMetaList = new ArrayList<>(columnCount);
    for (int c = 0; c < columnCount; c++) {
      ValueMetaInterface valueMeta = valueMetaHash.get(c);
      if (valueMeta == null) {
        // If the entire column is null, assume the missing data as String.
        // Irrelevant, anyway
        valueMeta = new ValueMetaString(headings.get(c));
      }
      valueMetaList.add(valueMeta);
    }
    outputRowMeta.setValueMetaList(valueMetaList);
  }
  // Now that we painstakingly found the meta data that comes out of the
  // Mondrian database, cache it please...
  //
  DBCacheEntry cacheEntry = new DBCacheEntry(databaseMeta.getName(), queryString);
  DBCache.getInstance().put(cacheEntry, outputRowMeta);
}
Usage of org.pentaho.di.core.row.RowMeta in project pentaho-kettle: class MultiMergeJoin, method processFirstRow.
/**
 * One-time initialization for the multi-way merge join: resolves the input
 * streams (skipping steps whose hops are disabled), reads the first row from
 * each enabled stream, resolves the join key field indexes per stream and
 * builds the merged output row metadata.
 *
 * @param smi step meta interface (cast to MultiMergeJoinMeta)
 * @param sdi step data interface (cast to MultiMergeJoinData)
 * @return true if at least one input stream with an enabled hop was found
 * @throws KettleException if a referenced step, hop or key field cannot be resolved
 */
private boolean processFirstRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
meta = (MultiMergeJoinMeta) smi;
data = (MultiMergeJoinData) sdi;
TransMeta transMeta = getTransMeta();
TransHopMeta transHopMeta;
StepIOMetaInterface stepIOMeta = meta.getStepIOMeta();
List<StreamInterface> infoStreams = stepIOMeta.getInfoStreams();
StreamInterface stream;
StepMeta toStepMeta = meta.getParentStepMeta();
StepMeta fromStepMeta;
ArrayList<String> inputStepNameList = new ArrayList<String>();
String[] inputStepNames = meta.getInputSteps();
String inputStepName;
// First pass: collect the names of input steps whose hop into this step is enabled.
for (int i = 0; i < infoStreams.size(); i++) {
inputStepName = inputStepNames[i];
stream = infoStreams.get(i);
fromStepMeta = stream.getStepMeta();
if (fromStepMeta == null) {
// should not arrive here, should typically have been caught by init.
throw new KettleException(BaseMessages.getString(PKG, "MultiMergeJoin.Log.UnableToFindReferenceStream", inputStepName));
}
// check the hop
transHopMeta = transMeta.findTransHop(fromStepMeta, toStepMeta, true);
// there is no hop: this is unexpected.
if (transHopMeta == null) {
// should not arrive here, should typically have been caught by init.
throw new KettleException(BaseMessages.getString(PKG, "MultiMergeJoin.Log.UnableToFindReferenceStream", inputStepName));
} else if (transHopMeta.isEnabled()) {
inputStepNameList.add(inputStepName);
} else {
// Disabled hop: skip this stream but log it at detailed level.
logDetailed(BaseMessages.getString(PKG, "MultiMergeJoin.Log.IgnoringStep", inputStepName));
}
}
int streamSize = inputStepNameList.size();
if (streamSize == 0) {
// No enabled input streams: nothing to join.
return false;
}
String keyField;
String[] keyFields;
// Allocate all per-stream state arrays, sized by the number of enabled streams.
data.rowSets = new RowSet[streamSize];
RowSet rowSet;
Object[] row;
data.rows = new Object[streamSize][];
data.metas = new RowMetaInterface[streamSize];
data.rowLengths = new int[streamSize];
MultiMergeJoinData.QueueComparator comparator = new MultiMergeJoinData.QueueComparator(data);
data.queue = new PriorityQueue<MultiMergeJoinData.QueueEntry>(streamSize, comparator);
data.results = new ArrayList<List<Object[]>>(streamSize);
MultiMergeJoinData.QueueEntry queueEntry;
data.queueEntries = new MultiMergeJoinData.QueueEntry[streamSize];
data.drainIndices = new int[streamSize];
data.keyNrs = new int[streamSize][];
data.dummy = new Object[streamSize][];
RowMetaInterface rowMeta;
data.outputRowMeta = new RowMeta();
// Second pass: i walks all configured input steps, j counts only enabled ones.
for (int i = 0, j = 0; i < inputStepNames.length; i++) {
inputStepName = inputStepNames[i];
if (!inputStepNameList.contains(inputStepName)) {
// ignore step with disabled hop.
continue;
}
queueEntry = new MultiMergeJoinData.QueueEntry();
queueEntry.index = j;
data.queueEntries[j] = queueEntry;
data.results.add(new ArrayList<Object[]>());
rowSet = findInputRowSet(inputStepName);
if (rowSet == null) {
throw new KettleException(BaseMessages.getString(PKG, "MultiMergeJoin.Exception.UnableToFindSpecifiedStep", inputStepName));
}
data.rowSets[j] = rowSet;
row = getRowFrom(rowSet);
data.rows[j] = row;
if (row == null) {
// Stream delivered no rows: take metadata from the transformation definition instead.
rowMeta = getTransMeta().getStepFields(inputStepName);
data.metas[j] = rowMeta;
} else {
queueEntry.row = row;
rowMeta = rowSet.getRowMeta();
// Resolve the comma-separated key field names (configured per input step i)
// to field indexes within this stream's row.
keyField = meta.getKeyFields()[i];
String[] keyFieldParts = keyField.split(",");
String keyFieldPart;
data.keyNrs[j] = new int[keyFieldParts.length];
for (int k = 0; k < keyFieldParts.length; k++) {
keyFieldPart = keyFieldParts[k];
data.keyNrs[j][k] = rowMeta.indexOfValue(keyFieldPart);
if (data.keyNrs[j][k] < 0) {
String message = BaseMessages.getString(PKG, "MultiMergeJoin.Exception.UnableToFindFieldInReferenceStream", keyFieldPart, inputStepName);
logError(message);
throw new KettleStepException(message);
}
}
data.metas[j] = rowMeta;
// Only streams that actually delivered a first row enter the priority queue.
data.queue.add(data.queueEntries[j]);
}
// The joined output row metadata is the concatenation of all input stream metadata.
data.outputRowMeta.mergeRowMeta(rowMeta.clone());
data.rowLengths[j] = rowMeta.size();
data.dummy[j] = RowDataUtil.allocateRowData(rowMeta.size());
j++;
}
return true;
}
Usage of org.pentaho.di.core.row.RowMeta in project pentaho-kettle: class LDAPConnection, method getFields.
/**
 * Searches the LDAP tree below the given base and builds a RowMeta with one
 * value meta per distinct attribute ID found. The Kettle type of each field is
 * guessed from the first attribute value seen (date, integer, number, else string).
 *
 * @param searchBase the LDAP search base DN
 * @return the row metadata describing the discovered attributes
 * @throws KettleException if the search or attribute retrieval fails
 */
public RowMeta getFields(String searchBase) throws KettleException {
  RowMeta fields = new RowMeta();
  // Tracks attribute IDs already added so each field appears only once.
  List<String> fieldsl = new ArrayList<String>();
  try {
    search(searchBase, null, 0, null, SEARCH_SCOPE_SUBTREE_SCOPE);
    Attributes attributes = null;
    while ((attributes = getAttributes()) != null) {
      NamingEnumeration<? extends Attribute> ne = attributes.getAll();
      while (ne.hasMore()) {
        Attribute attr = ne.next();
        String fieldName = attr.getID();
        if (!fieldsl.contains(fieldName)) {
          fieldsl.add(fieldName);
          // Guess the Kettle type from the first value seen for this attribute.
          String attributeValue = attr.get().toString();
          int valueType;
          if (IsDate(attributeValue)) {
            valueType = ValueMetaInterface.TYPE_DATE;
          } else if (IsInteger(attributeValue)) {
            valueType = ValueMetaInterface.TYPE_INTEGER;
          } else if (IsNumber(attributeValue)) {
            valueType = ValueMetaInterface.TYPE_NUMBER;
          } else {
            valueType = ValueMetaInterface.TYPE_STRING;
          }
          ValueMetaInterface value = ValueMetaFactory.createValueMeta(fieldName, valueType);
          fields.addValueMeta(value);
        }
      }
    }
    return fields;
  } catch (Exception e) {
    // BUGFIX: chain the original exception as the cause instead of discarding
    // it, so the underlying LDAP failure remains diagnosable.
    throw new KettleException(BaseMessages.getString(PKG, "LDAPConnection.Error.RetrievingFields"), e);
  }
}
Usage of org.pentaho.di.core.row.RowMeta in project pentaho-kettle: class LDAPInput, method processRow.
/**
 * Reads one LDAP entry per call and emits it as an output row. On the very
 * first call (static search mode only) it builds the output row metadata,
 * derives the string-typed conversion metadata and runs the LDAP search once.
 *
 * @param smi step meta interface (unused directly; state comes from instance fields)
 * @param sdi step data interface (unused directly; state comes from instance fields)
 * @return true while more rows may follow; false once the input is exhausted or a fatal error occurred
 * @throws KettleException if row production fails and error handling is not enabled
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException {
if (!data.dynamic) {
if (first) {
first = false;
// Create the output row meta-data, populated from the step metadata.
data.outputRowMeta = new RowMeta();
meta.getFields(data.outputRowMeta, getStepname(), null, null, this, repository, metaStore);
// Create convert meta-data objects that will contain Date & Number formatters
//
data.convertRowMeta = data.outputRowMeta.cloneToType(ValueMetaInterface.TYPE_STRING);
// Search records once
search(data.staticSearchBase, data.staticFilter);
}
}
Object[] outputRowData = null;
try {
outputRowData = getOneRow();
if (outputRowData == null) {
// No more entries: signal end of stream to the receivers.
setOutputDone();
return false;
}
// copy row to output rowset(s);
putRow(data.outputRowMeta, outputRowData);
if (log.isRowLevel()) {
logRowlevel(BaseMessages.getString(PKG, "LDAPInput.log.ReadRow"), data.outputRowMeta.getString(outputRowData));
}
if (checkFeedback(getLinesInput())) {
if (log.isDetailed()) {
logDetailed(BaseMessages.getString(PKG, "LDAPInput.log.LineRow") + getLinesInput());
}
}
return true;
} catch (Exception e) {
boolean sendToErrorRow = false;
String errorMessage = null;
if (getStepMeta().isDoingErrorHandling()) {
// Error handling enabled: divert the row instead of stopping the transformation.
sendToErrorRow = true;
errorMessage = e.toString();
} else {
logError(BaseMessages.getString(PKG, "LDAPInput.log.Exception", e.getMessage()));
setErrors(1);
logError(Const.getStackTracker(e));
stopAll();
// signal end to receiver(s)
setOutputDone();
return false;
}
if (sendToErrorRow) {
// Simply add this row to the error row
// NOTE(review): getInputRowMeta() may be null for an input step with no
// incoming stream — confirm the error-handling path tolerates that.
putError(getInputRowMeta(), outputRowData, 1, errorMessage, null, "LDAPINPUT001");
}
}
return true;
}
Aggregations