Usage of `org.pentaho.di.core.exception.KettleXMLException` in the pentaho-kettle project:
the `readData` method of class `PaloDimInputMeta`.
/**
 * Reads this Palo dimension input step's configuration from the given XML step node.
 *
 * @param stepnode  the XML node holding the step settings
 * @param databases shared database connections; the "connection" tag is resolved against them
 * @throws KettleXMLException if the settings cannot be parsed (wraps the underlying cause)
 */
private void readData(final Node stepnode, final List<? extends SharedObjectInterface> databases) throws KettleXMLException {
  try {
    this.databaseMeta = DatabaseMeta.findDatabase(databases, XMLHandler.getTagValue(stepnode, "connection"));
    this.dimension = XMLHandler.getTagValue(stepnode, "dimension");
    // Null-safe: "Y".equals(null) is false, so a missing tag defaults to false.
    // Also reads the tag once instead of twice as the previous nested ternary did.
    this.baseElementsOnly = "Y".equals(XMLHandler.getTagValue(stepnode, "baseElementsOnly"));
    this.levels = new ArrayList<PaloDimensionLevel>();
    // Renamed from "levels" to avoid shadowing the this.levels field assigned above.
    Node levelsNode = XMLHandler.getSubNode(stepnode, "levels");
    int nrLevels = XMLHandler.countNodes(levelsNode, "level");
    for (int i = 0; i < nrLevels; i++) {
      Node levelNode = XMLHandler.getSubNodeByNr(levelsNode, "level", i);
      String levelName = XMLHandler.getTagValue(levelNode, "levelname");
      String levelNumber = XMLHandler.getTagValue(levelNode, "levelnumber");
      String fieldName = XMLHandler.getTagValue(levelNode, "fieldname");
      String fieldType = XMLHandler.getTagValue(levelNode, "fieldtype");
      this.levels.add(new PaloDimensionLevel(levelName, Integer.parseInt(levelNumber), fieldName, fieldType));
    }
  } catch (Exception e) {
    throw new KettleXMLException("Unable to load step info from XML", e);
  }
}
Usage of `org.pentaho.di.core.exception.KettleXMLException` in the pentaho-kettle project:
the `readData` method of class `PaloDimOutputMeta`.
/**
 * Reads this Palo dimension output step's configuration from the given XML step node.
 *
 * @param stepnode  the XML node holding the step settings
 * @param databases shared database connections; the "connection" tag is resolved against them
 * @throws KettleXMLException if the settings cannot be parsed (wraps the underlying cause)
 */
private void readData(final Node stepnode, final List<? extends SharedObjectInterface> databases) throws KettleXMLException {
  try {
    databaseMeta = DatabaseMeta.findDatabase(databases, XMLHandler.getTagValue(stepnode, "connection"));
    dimension = XMLHandler.getTagValue(stepnode, "dimension");
    elementType = XMLHandler.getTagValue(stepnode, "elementtype");
    // "Y".equals(null) is false, so every flag below defaults to false when its tag
    // is absent. The previous code NPE'd on a missing "createdimension" or
    // "cleardimension" tag (surfacing as a spurious KettleXMLException) and read
    // each of the other tags twice via nested ternaries.
    createNewDimension = "Y".equals(XMLHandler.getTagValue(stepnode, "createdimension"));
    clearDimension = "Y".equals(XMLHandler.getTagValue(stepnode, "cleardimension"));
    clearConsolidations = "Y".equals(XMLHandler.getTagValue(stepnode, "clearconsolidations"));
    recreateDimension = "Y".equals(XMLHandler.getTagValue(stepnode, "recreatedimension"));
    enableElementCache = "Y".equals(XMLHandler.getTagValue(stepnode, "enableElementCache"));
    preloadElementCache = "Y".equals(XMLHandler.getTagValue(stepnode, "preloadElementCache"));
    Node levelsNode = XMLHandler.getSubNode(stepnode, "levels");
    int nrLevels = XMLHandler.countNodes(levelsNode, "level");
    for (int i = 0; i < nrLevels; i++) {
      Node levelNode = XMLHandler.getSubNodeByNr(levelsNode, "level", i);
      String levelName = XMLHandler.getTagValue(levelNode, "levelname");
      String levelNumber = XMLHandler.getTagValue(levelNode, "levelnumber");
      String fieldName = XMLHandler.getTagValue(levelNode, "fieldname");
      String fieldType = XMLHandler.getTagValue(levelNode, "fieldtype");
      String consolidationField = XMLHandler.getTagValue(levelNode, "consolidationfieldname");
      this.levels.add(new PaloDimensionLevel(levelName, Integer.parseInt(levelNumber), fieldName, fieldType, consolidationField));
    }
  } catch (Exception e) {
    throw new KettleXMLException("Unable to load step info from XML", e);
  }
}
Usage of `org.pentaho.di.core.exception.KettleXMLException` in the pentaho-kettle project:
the `readData` method of class `LucidDBBulkLoaderMeta`.
/**
 * Parses the LucidDB bulk loader step settings out of the supplied XML node.
 *
 * @param stepnode  the XML node holding the step settings
 * @param databases shared database connections; the "connection" tag is resolved against them
 * @throws KettleXMLException if the settings cannot be parsed (wraps the underlying cause)
 */
private void readData(Node stepnode, List<? extends SharedObjectInterface> databases) throws KettleXMLException {
  try {
    databaseMeta = DatabaseMeta.findDatabase(databases, XMLHandler.getTagValue(stepnode, "connection"));
    // Absent or non-numeric "errors" values fall back to 0.
    maxErrors = Const.toInt(XMLHandler.getTagValue(stepnode, "errors"), 0);
    bufferSize = XMLHandler.getTagValue(stepnode, "buffer_size");
    schemaName = XMLHandler.getTagValue(stepnode, "schema");
    tableName = XMLHandler.getTagValue(stepnode, "table");
    fifoDirectory = XMLHandler.getTagValue(stepnode, "fifo_directory");
    fifoServerName = XMLHandler.getTagValue(stepnode, "fifo_server_name");
    encoding = XMLHandler.getTagValue(stepnode, "encoding");
    final int mappingCount = XMLHandler.countNodes(stepnode, "mapping");
    allocate(mappingCount);
    for (int idx = 0; idx < mappingCount; idx++) {
      Node mappingNode = XMLHandler.getSubNodeByNr(stepnode, "mapping", idx);
      // NOTE(review): the tag names look crossed (table column read from
      // "stream_name", stream field from "field_name"); kept as-is since existing
      // saved transformations depend on these tag names — confirm intent upstream.
      fieldTable[idx] = XMLHandler.getTagValue(mappingNode, "stream_name");
      fieldStream[idx] = XMLHandler.getTagValue(mappingNode, "field_name");
      if (fieldStream[idx] == null) {
        // Default the stream field to the table column name.
        fieldStream[idx] = fieldTable[idx];
      }
      fieldFormatOk[idx] = "Y".equalsIgnoreCase(XMLHandler.getTagValue(mappingNode, "field_format_ok"));
    }
  } catch (Exception e) {
    throw new KettleXMLException(BaseMessages.getString(PKG, "LucidDBBulkLoaderMeta.Exception.UnableToReadStepInfoFromXML"), e);
  }
}
Usage of `org.pentaho.di.core.exception.KettleXMLException` in the pentaho-kettle project:
the `readData` method of class `LucidDBStreamingLoaderMeta`.
/**
 * Parses the LucidDB streaming loader step settings out of the supplied XML node:
 * connection/table coordinates, the FIFO host and port, the load operation, optional
 * custom SQL, and the three parallel mapping lists (keys, fields, tab-enable flags).
 *
 * @param stepnode  the XML node holding the step settings
 * @param databases shared database connections; the "connection" tag is resolved against them
 * @throws KettleXMLException if the settings cannot be parsed (wraps the underlying cause)
 */
private void readData(Node stepnode, List<? extends SharedObjectInterface> databases) throws KettleXMLException {
  try {
    databaseMeta = DatabaseMeta.findDatabase(databases, XMLHandler.getTagValue(stepnode, "connection"));
    schemaName = XMLHandler.getTagValue(stepnode, "schema");
    tableName = XMLHandler.getTagValue(stepnode, "table");
    host = XMLHandler.getTagValue(stepnode, "host");
    port = XMLHandler.getTagValue(stepnode, "port");
    operation = XMLHandler.getTagValue(stepnode, "operation");
    custom_sql = XMLHandler.getTagValue(stepnode, "custom_sql");
    final int keyCount = XMLHandler.countNodes(stepnode, "keys_mapping");
    final int fieldCount = XMLHandler.countNodes(stepnode, "fields_mapping");
    final int tabCount = XMLHandler.countNodes(stepnode, "tab_is_enable_mapping");
    allocate(keyCount, fieldCount, tabCount);
    for (int idx = 0; idx < keyCount; idx++) {
      Node keyNode = XMLHandler.getSubNodeByNr(stepnode, "keys_mapping", idx);
      fieldTableForKeys[idx] = XMLHandler.getTagValue(keyNode, "key_field_name");
      fieldStreamForKeys[idx] = XMLHandler.getTagValue(keyNode, "key_stream_name");
      if (fieldStreamForKeys[idx] == null) {
        // Default the key stream name to the key field name.
        fieldStreamForKeys[idx] = fieldTableForKeys[idx];
      }
    }
    for (int idx = 0; idx < fieldCount; idx++) {
      Node fieldNode = XMLHandler.getSubNodeByNr(stepnode, "fields_mapping", idx);
      fieldTableForFields[idx] = XMLHandler.getTagValue(fieldNode, "field_field_name");
      fieldStreamForFields[idx] = XMLHandler.getTagValue(fieldNode, "field_stream_name");
      if (fieldStreamForFields[idx] == null) {
        // Default the field stream name to the field name.
        fieldStreamForFields[idx] = fieldTableForFields[idx];
      }
      insOrUptFlag[idx] = "Y".equalsIgnoreCase(XMLHandler.getTagValue(fieldNode, "insert_or_update_flag"));
    }
    for (int idx = 0; idx < tabCount; idx++) {
      Node tabNode = XMLHandler.getSubNodeByNr(stepnode, "tab_is_enable_mapping", idx);
      tabIsEnable[idx] = "Y".equalsIgnoreCase(XMLHandler.getTagValue(tabNode, "tab_is_enable"));
    }
  } catch (Exception e) {
    throw new KettleXMLException(BaseMessages.getString(PKG, "LucidDBStreamingLoaderMeta.Exception.UnableToReadStepInfoFromXML"), e);
  }
}
Usage of `org.pentaho.di.core.exception.KettleXMLException` in the pentaho-kettle project:
the `loadXML` method of class `MetaInjectMeta`.
/**
 * Loads this metadata-injection step's configuration from the given XML step node:
 * the target transformation location (by repository id, name, or file), the optional
 * source step with its declared output fields, streaming source/target step names,
 * and the attribute mappings from source fields to target step attributes. Finishes
 * by migrating any pre-7.0 mapping keys.
 *
 * @param stepnode  the XML node holding the step settings
 * @param databases available database connections (unused here, required by the interface)
 * @param metaStore the metastore (unused here, required by the interface)
 * @throws KettleXMLException if the settings cannot be parsed (wraps the underlying cause)
 */
@Override
public void loadXML(Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore) throws KettleXMLException {
  try {
    specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(
        XMLHandler.getTagValue(stepnode, SPECIFICATION_METHOD));
    String transId = XMLHandler.getTagValue(stepnode, TRANS_OBJECT_ID);
    // An empty repository object id means "not specified by id".
    transObjectId = Utils.isEmpty(transId) ? null : new StringObjectId(transId);
    transName = XMLHandler.getTagValue(stepnode, TRANS_NAME);
    fileName = XMLHandler.getTagValue(stepnode, FILENAME);
    directoryPath = XMLHandler.getTagValue(stepnode, DIRECTORY_PATH);
    sourceStepName = XMLHandler.getTagValue(stepnode, SOURCE_STEP);
    Node outputFieldsNode = XMLHandler.getSubNode(stepnode, SOURCE_OUTPUT_FIELDS);
    sourceOutputFields = new ArrayList<MetaInjectOutputField>();
    for (Node fieldNode : XMLHandler.getNodes(outputFieldsNode, SOURCE_OUTPUT_FIELD)) {
      String fieldName = XMLHandler.getTagValue(fieldNode, SOURCE_OUTPUT_FIELD_NAME);
      String typeName = XMLHandler.getTagValue(fieldNode, SOURCE_OUTPUT_FIELD_TYPE);
      // Length/precision default to -1 ("unspecified") when absent or non-numeric.
      int fieldLength = Const.toInt(XMLHandler.getTagValue(fieldNode, SOURCE_OUTPUT_FIELD_LENGTH), -1);
      int fieldPrecision = Const.toInt(XMLHandler.getTagValue(fieldNode, SOURCE_OUTPUT_FIELD_PRECISION), -1);
      int valueType = ValueMetaFactory.getIdForValueMeta(typeName);
      sourceOutputFields.add(new MetaInjectOutputField(fieldName, valueType, fieldLength, fieldPrecision));
    }
    targetFile = XMLHandler.getTagValue(stepnode, TARGET_FILE);
    noExecution = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, NO_EXECUTION));
    streamSourceStepname = XMLHandler.getTagValue(stepnode, STREAM_SOURCE_STEP);
    streamTargetStepname = XMLHandler.getTagValue(stepnode, STREAM_TARGET_STEP);
    Node mappingsNode = XMLHandler.getSubNode(stepnode, MAPPINGS);
    int mappingCount = XMLHandler.countNodes(mappingsNode, MAPPING);
    for (int idx = 0; idx < mappingCount; idx++) {
      Node mappingNode = XMLHandler.getSubNodeByNr(mappingsNode, MAPPING, idx);
      TargetStepAttribute target = new TargetStepAttribute(
          XMLHandler.getTagValue(mappingNode, TARGET_STEP_NAME),
          XMLHandler.getTagValue(mappingNode, TARGET_ATTRIBUTE_KEY),
          "Y".equalsIgnoreCase(XMLHandler.getTagValue(mappingNode, TARGET_DETAIL)));
      SourceStepField source = new SourceStepField(
          XMLHandler.getTagValue(mappingNode, SOURCE_STEP),
          XMLHandler.getTagValue(mappingNode, SOURCE_FIELD));
      targetSourceMapping.put(target, source);
    }
    // Rewrite mapping keys saved by pre-7.0 versions to the current format.
    MetaInjectMigration.migrateFrom70(targetSourceMapping);
  } catch (Exception e) {
    throw new KettleXMLException("Unable to load step info from XML", e);
  }
}
Aggregations