Example usage of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (Pentaho): class StarDomainMetaStoreUtil, method loadStarDomain.
/**
 * Loads a {@link StarDomain} from the metastore by its element id.
 *
 * @param metaStore metastore to read from
 * @param id        id of the stored star-domain element
 * @return the loaded domain, or {@code null} when no element exists for the id
 * @throws MetaStoreException if the metastore lookup fails
 */
public static StarDomain loadStarDomain(DelegatingMetaStore metaStore, String id) throws MetaStoreException {
    // Resolve the element type that describes star domains in this metastore.
    IMetaStoreElementType starDomainType = getStarDomainElementType(metaStore);
    IMetaStoreElement storedElement = metaStore.getElement(namespace, starDomainType, id);
    // Nothing stored under this id: signal absence with null.
    if (storedElement == null) {
        return null;
    }
    // Rehydrate the domain from the stored element's attributes.
    StarDomain domain = new StarDomain();
    domain.setObjectId(new StringObjectId(id));
    domain.setName(storedElement.getName());
    domain.setDescription(getChildString(storedElement, Attribute.ID_STAR_DOMAIN_DESCRIPTION.id));
    return domain;
}
Example usage of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (Pentaho): class JobExecutorMeta, method loadXML.
@Override
/**
 * Reads this Job Executor step's configuration from the given XML step node.
 *
 * Populates the executed-job reference (by method, object id, name or file),
 * the row-grouping settings, the job parameters, and all output target steps
 * and result field names.
 *
 * @param stepnode  XML node holding this step's settings
 * @param databases available database connections (not referenced in this method)
 * @param metaStore metastore handle (not referenced in this method)
 * @throws KettleXMLException if any part of the XML cannot be parsed
 */
@Override
public void loadXML(Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore) throws KettleXMLException {
    try {
        // How the executed job is referenced: filename, repository name, or repository object id.
        specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(tagOf(stepnode, "specification_method"));
        String jobObjectIdText = tagOf(stepnode, "job_object_id");
        // An empty id means "not referenced by object id".
        jobObjectId = Utils.isEmpty(jobObjectIdText) ? null : new StringObjectId(jobObjectIdText);
        jobName = tagOf(stepnode, "job_name");
        fileName = tagOf(stepnode, "filename");
        directoryPath = tagOf(stepnode, "directory_path");

        // Settings for batching incoming rows into grouped job executions.
        groupSize = tagOf(stepnode, "group_size");
        groupField = tagOf(stepnode, "group_field");
        groupTime = tagOf(stepnode, "group_time");

        // Parameters handed down to the executed job.
        Node mappingParametersNode = XMLHandler.getSubNode(stepnode, JobExecutorParameters.XML_TAG);
        parameters = new JobExecutorParameters(mappingParametersNode);

        // Output side: the execution-result target step and its field names.
        executionResultTargetStep = tagOf(stepnode, "execution_result_target_step");
        executionTimeField = tagOf(stepnode, "execution_time_field");
        executionResultField = tagOf(stepnode, "execution_result_field");
        executionNrErrorsField = tagOf(stepnode, "execution_errors_field");
        executionLinesReadField = tagOf(stepnode, "execution_lines_read_field");
        executionLinesWrittenField = tagOf(stepnode, "execution_lines_written_field");
        executionLinesInputField = tagOf(stepnode, "execution_lines_input_field");
        executionLinesOutputField = tagOf(stepnode, "execution_lines_output_field");
        executionLinesRejectedField = tagOf(stepnode, "execution_lines_rejected_field");
        executionLinesUpdatedField = tagOf(stepnode, "execution_lines_updated_field");
        executionLinesDeletedField = tagOf(stepnode, "execution_lines_deleted_field");
        executionFilesRetrievedField = tagOf(stepnode, "execution_files_retrieved_field");
        executionExitStatusField = tagOf(stepnode, "execution_exit_status_field");
        executionLogTextField = tagOf(stepnode, "execution_log_text_field");
        executionLogChannelIdField = tagOf(stepnode, "execution_log_channelid_field");

        // Result-rows target step and its field layout (name/type/length/precision per field).
        resultRowsTargetStep = tagOf(stepnode, "result_rows_target_step");
        int fieldCount = XMLHandler.countNodes(stepnode, "result_rows_field");
        resultRowsField = new String[fieldCount];
        resultRowsType = new int[fieldCount];
        resultRowsLength = new int[fieldCount];
        resultRowsPrecision = new int[fieldCount];
        for (int idx = 0; idx < fieldCount; idx++) {
            Node fieldNode = XMLHandler.getSubNodeByNr(stepnode, "result_rows_field", idx);
            resultRowsField[idx] = tagOf(fieldNode, "name");
            resultRowsType[idx] = ValueMetaFactory.getIdForValueMeta(tagOf(fieldNode, "type"));
            // -1 means "not specified" for length and precision.
            resultRowsLength[idx] = Const.toInt(tagOf(fieldNode, "length"), -1);
            resultRowsPrecision[idx] = Const.toInt(tagOf(fieldNode, "precision"), -1);
        }

        // Result-files target step and the field that carries the file names.
        resultFilesTargetStep = tagOf(stepnode, "result_files_target_step");
        resultFilesFileNameField = tagOf(stepnode, "result_files_file_name_field");
    } catch (Exception e) {
        throw new KettleXMLException(BaseMessages.getString(PKG, "JobExecutorMeta.Exception.ErrorLoadingJobExecutorDetailsFromXML"), e);
    }
}

// Shorthand for reading the text value of a single child tag.
private static String tagOf(Node node, String tagName) {
    return XMLHandler.getTagValue(node, tagName);
}
Example usage of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (Pentaho): class MappingMeta, method loadXML.
/**
 * Reads this Mapping step's configuration from the given XML step node.
 *
 * Populates the sub-transformation reference (by method, object id, name or
 * file), the input/output mapping definitions, and the mapping parameters.
 * Also understands the legacy XML layout (no "mappings" node) for backward
 * compatibility with older saved transformations.
 *
 * @param stepnode  XML node holding this step's settings
 * @param databases available database connections (not referenced in this method)
 * @param metaStore metastore handle (not referenced in this method)
 * @throws KettleXMLException if any part of the XML cannot be parsed
 */
public void loadXML(Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore) throws KettleXMLException {
try {
// How the sub-transformation is referenced: filename, repository name, or repository object id.
String method = XMLHandler.getTagValue(stepnode, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String transId = XMLHandler.getTagValue(stepnode, "trans_object_id");
// An empty id means "not referenced by object id".
transObjectId = Utils.isEmpty(transId) ? null : new StringObjectId(transId);
transName = XMLHandler.getTagValue(stepnode, "trans_name");
fileName = XMLHandler.getTagValue(stepnode, "filename");
directoryPath = XMLHandler.getTagValue(stepnode, "directory_path");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
Node mappingsNode = XMLHandler.getSubNode(stepnode, "mappings");
// Start from a clean slate: this meta object may be re-loaded in place.
inputMappings.clear();
outputMappings.clear();
if (mappingsNode != null) {
// Modern layout: a <mappings> node with explicit input/output definition lists.
// Read all the input mapping definitions...
//
Node inputNode = XMLHandler.getSubNode(mappingsNode, "input");
int nrInputMappings = XMLHandler.countNodes(inputNode, MappingIODefinition.XML_TAG);
for (int i = 0; i < nrInputMappings; i++) {
Node mappingNode = XMLHandler.getSubNodeByNr(inputNode, MappingIODefinition.XML_TAG, i);
MappingIODefinition inputMappingDefinition = new MappingIODefinition(mappingNode);
inputMappings.add(inputMappingDefinition);
}
// ... and all the output mapping definitions.
Node outputNode = XMLHandler.getSubNode(mappingsNode, "output");
int nrOutputMappings = XMLHandler.countNodes(outputNode, MappingIODefinition.XML_TAG);
for (int i = 0; i < nrOutputMappings; i++) {
Node mappingNode = XMLHandler.getSubNodeByNr(outputNode, MappingIODefinition.XML_TAG, i);
MappingIODefinition outputMappingDefinition = new MappingIODefinition(mappingNode);
outputMappings.add(outputMappingDefinition);
}
// Load the mapping parameters too..
//
Node mappingParametersNode = XMLHandler.getSubNode(mappingsNode, MappingParameters.XML_TAG);
mappingParameters = new MappingParameters(mappingParametersNode);
} else {
// backward compatibility...
// Legacy layout: flat <input>/<output> nodes with <connector> field renames.
//
Node inputNode = XMLHandler.getSubNode(stepnode, "input");
Node outputNode = XMLHandler.getSubNode(stepnode, "output");
int nrInput = XMLHandler.countNodes(inputNode, "connector");
int nrOutput = XMLHandler.countNodes(outputNode, "connector");
// null means: auto-detect
//
MappingIODefinition inputMappingDefinition = new MappingIODefinition();
inputMappingDefinition.setMainDataPath(true);
for (int i = 0; i < nrInput; i++) {
Node inputConnector = XMLHandler.getSubNodeByNr(inputNode, "connector", i);
String inputField = XMLHandler.getTagValue(inputConnector, "field");
String inputMapping = XMLHandler.getTagValue(inputConnector, "mapping");
inputMappingDefinition.getValueRenames().add(new MappingValueRename(inputField, inputMapping));
}
// null means: auto-detect
//
MappingIODefinition outputMappingDefinition = new MappingIODefinition();
outputMappingDefinition.setMainDataPath(true);
for (int i = 0; i < nrOutput; i++) {
Node outputConnector = XMLHandler.getSubNodeByNr(outputNode, "connector", i);
String outputField = XMLHandler.getTagValue(outputConnector, "field");
String outputMapping = XMLHandler.getTagValue(outputConnector, "mapping");
// NOTE: on the output side the rename direction is reversed (mapping -> field).
outputMappingDefinition.getValueRenames().add(new MappingValueRename(outputMapping, outputField));
}
// Don't forget to add these to the input and output mapping
// definitions...
//
inputMappings.add(inputMappingDefinition);
outputMappings.add(outputMappingDefinition);
// The default is to have no mapping parameters: the concept didn't
// exist before.
//
mappingParameters = new MappingParameters();
}
// When the flags are absent (older XML), infer multi-input/output from the
// number of definitions; otherwise honor the explicit Y/N value.
String multiInput = XMLHandler.getTagValue(stepnode, "allow_multiple_input");
allowingMultipleInputs = Utils.isEmpty(multiInput) ? inputMappings.size() > 1 : "Y".equalsIgnoreCase(multiInput);
String multiOutput = XMLHandler.getTagValue(stepnode, "allow_multiple_output");
allowingMultipleOutputs = Utils.isEmpty(multiOutput) ? outputMappings.size() > 1 : "Y".equalsIgnoreCase(multiOutput);
} catch (Exception e) {
throw new KettleXMLException(BaseMessages.getString(PKG, "MappingMeta.Exception.ErrorLoadingTransformationStepFromXML"), e);
}
}
Example usage of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (Pentaho): class ClusterDelegate, method dataNodeToElement.
/**
 * Populates a {@link ClusterSchema} from its stored repository data node.
 *
 * @param rootNode the data node holding the persisted cluster schema
 * @param element  the element to populate; must be a {@link ClusterSchema}
 * @throws KettleException if reading the node or resolving slave servers fails
 */
public void dataNodeToElement(DataNode rootNode, RepositoryElementInterface element) throws KettleException {
    ClusterSchema clusterSchema = (ClusterSchema) element;

    // Scalar cluster settings stored directly on the root node.
    clusterSchema.setBasePort(getString(rootNode, PROP_BASE_PORT));
    clusterSchema.setSocketsBufferSize(getString(rootNode, PROP_SOCKETS_BUFFER_SIZE));
    clusterSchema.setSocketsFlushInterval(getString(rootNode, PROP_SOCKETS_FLUSH_INTERVAL));
    clusterSchema.setSocketsCompressed(rootNode.getProperty(PROP_SOCKETS_COMPRESSED).getBoolean());
    clusterSchema.setDynamic(rootNode.getProperty(PROP_DYNAMIC).getBoolean());

    // Slave servers live under the attributes child node, keyed by index
    // ("0", "1", ...); missing indexes are simply skipped.
    DataNode attributesNode = rootNode.getNode(NODE_ATTRIBUTES);
    long slaveCount = attributesNode.getProperty(PROP_NB_SLAVE_SERVERS).getLong();
    for (int slaveIndex = 0; slaveIndex < slaveCount; slaveIndex++) {
        String indexKey = String.valueOf(slaveIndex);
        if (attributesNode.hasProperty(indexKey)) {
            DataNodeRef slaveRef = attributesNode.getProperty(indexKey).getRef();
            // NOTE(review): the id is derived via slaveRef.toString() — presumably the
            // ref's string form is the referenced node id; confirm against DataNodeRef.
            clusterSchema.getSlaveServers().add(findSlaveServer(new StringObjectId(slaveRef.toString())));
        }
    }
}
Example usage of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (Pentaho): class DatabaseDelegate, method assemble.
/**
 * Builds a {@link DatabaseMeta} from a repository file, its node data, and a
 * version summary.
 *
 * @param file    the repository file the connection was loaded from
 * @param data    the file's node data holding the connection attributes
 * @param version version summary used to stamp the object revision
 * @return the assembled database connection metadata
 * @throws KettleException if converting the data node fails
 */
public DatabaseMeta assemble(RepositoryFile file, NodeRepositoryFileData data, VersionSummary version) throws KettleException {
    DatabaseMeta databaseMeta = (DatabaseMeta) dataNodeToElement(data.getNode());

    // The logical connection name is the file name minus its ".kdb" extension, if present.
    String extension = ".kdb";
    String connectionName = file.getName();
    if (connectionName.endsWith(extension)) {
        connectionName = connectionName.substring(0, connectionName.length() - extension.length());
    }
    databaseMeta.setName(connectionName);
    databaseMeta.setDisplayName(file.getTitle());

    // Record repository identity and revision so the object can be tracked/updated.
    databaseMeta.setObjectId(new StringObjectId(file.getId().toString()));
    databaseMeta.setObjectRevision(repo.createObjectRevision(version));

    // Freshly loaded from the repository: no pending changes.
    databaseMeta.clearChanged();
    return databaseMeta;
}
Aggregations — end of collected usage examples.