Use of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (by Pentaho):
the class JobExecutorMeta, method readRep.
/**
 * Loads this Job Executor step's configuration from the repository.
 * <p>
 * The attribute key strings below are the persisted contract for this step; they must
 * match the keys written by the corresponding saveRep implementation exactly.
 *
 * @param rep       repository to read the step attributes from
 * @param metaStore metastore handle (not used directly here)
 * @param id_step   object ID of the step whose attributes are read
 * @param databases shared database connections (not used directly here)
 * @throws KettleException if any repository read fails
 */
@Override
public void readRep(Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases) throws KettleException {
// How the executed job is located: by repository object ID, repository name, or filename.
String method = rep.getStepAttributeString(id_step, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
// An empty stored ID is normalized to null rather than wrapped in a StringObjectId.
String jobId = rep.getStepAttributeString(id_step, "job_object_id");
jobObjectId = Utils.isEmpty(jobId) ? null : new StringObjectId(jobId);
jobName = rep.getStepAttributeString(id_step, "job_name");
fileName = rep.getStepAttributeString(id_step, "filename");
directoryPath = rep.getStepAttributeString(id_step, "directory_path");
// Row-grouping options: how many incoming rows are passed per job execution.
groupSize = rep.getStepAttributeString(id_step, "group_size");
groupField = rep.getStepAttributeString(id_step, "group_field");
groupTime = rep.getStepAttributeString(id_step, "group_time");
parameters = new JobExecutorParameters(rep, id_step);
// Names of the output fields that carry the executed job's result metrics.
executionResultTargetStep = rep.getStepAttributeString(id_step, "execution_result_target_step");
executionResultField = rep.getStepAttributeString(id_step, "execution_result_field");
executionTimeField = rep.getStepAttributeString(id_step, "execution_time_field");
executionNrErrorsField = rep.getStepAttributeString(id_step, "execution_errors_field");
executionLinesReadField = rep.getStepAttributeString(id_step, "execution_lines_read_field");
executionLinesWrittenField = rep.getStepAttributeString(id_step, "execution_lines_written_field");
executionLinesInputField = rep.getStepAttributeString(id_step, "execution_lines_input_field");
executionLinesOutputField = rep.getStepAttributeString(id_step, "execution_lines_output_field");
executionLinesRejectedField = rep.getStepAttributeString(id_step, "execution_lines_rejected_field");
executionLinesUpdatedField = rep.getStepAttributeString(id_step, "execution_lines_updated_field");
executionLinesDeletedField = rep.getStepAttributeString(id_step, "execution_lines_deleted_field");
executionFilesRetrievedField = rep.getStepAttributeString(id_step, "execution_files_retrieved_field");
executionExitStatusField = rep.getStepAttributeString(id_step, "execution_exit_status_field");
executionLogTextField = rep.getStepAttributeString(id_step, "execution_log_text_field");
executionLogChannelIdField = rep.getStepAttributeString(id_step, "execution_log_channelid_field");
resultRowsTargetStep = rep.getStepAttributeString(id_step, "result_rows_target_step");
// Result-rows field layout: parallel arrays of name/type/length/precision, sized by the
// stored count of "result_rows_field_name" entries.
int nrFields = rep.countNrStepAttributes(id_step, "result_rows_field_name");
resultRowsField = new String[nrFields];
resultRowsType = new int[nrFields];
resultRowsLength = new int[nrFields];
resultRowsPrecision = new int[nrFields];
for (int i = 0; i < nrFields; i++) {
resultRowsField[i] = rep.getStepAttributeString(id_step, i, "result_rows_field_name");
// Types are stored by code name and mapped back to the numeric value-meta ID.
resultRowsType[i] = ValueMetaFactory.getIdForValueMeta(rep.getStepAttributeString(id_step, i, "result_rows_field_type"));
resultRowsLength[i] = (int) rep.getStepAttributeInteger(id_step, i, "result_rows_field_length");
resultRowsPrecision[i] = (int) rep.getStepAttributeInteger(id_step, i, "result_rows_field_precision");
}
// Target step and field name for the executed job's result files.
resultFilesTargetStep = rep.getStepAttributeString(id_step, "result_files_target_step");
resultFilesFileNameField = rep.getStepAttributeString(id_step, "result_files_file_name_field");
}
Use of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (by Pentaho):
the class JobEntryJob, method loadRep.
/**
 * Loads this 'job' job entry from the repository.
 * <p>
 * The attribute key strings below are the persisted contract for this entry; they must
 * match the keys written by the corresponding saveRep implementation exactly.
 *
 * @param rep          repository to read the job entry attributes from
 * @param metaStore    metastore handle (not used directly here)
 * @param id_jobentry  object ID of the job entry whose attributes are read
 * @param databases    shared database connections (not used directly here)
 * @param slaveServers known slave servers (not used directly here)
 * @throws KettleException if the entry cannot be read from the repository
 */
@Override
public void loadRep(Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
try {
// How the executed job is located: by repository object ID, repository name, or filename.
String method = rep.getJobEntryAttributeString(id_jobentry, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
// An empty stored ID is normalized to null rather than wrapped in a StringObjectId.
String jobId = rep.getJobEntryAttributeString(id_jobentry, "job_object_id");
jobObjectId = Utils.isEmpty(jobId) ? null : new StringObjectId(jobId);
jobname = rep.getJobEntryAttributeString(id_jobentry, "name");
directory = rep.getJobEntryAttributeString(id_jobentry, "dir_path");
filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
paramsFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "params_from_previous");
execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
// Logging configuration for the executed job.
setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
// Remote execution options; "wait_until_finished" defaults to true when absent.
remoteSlaveServerName = rep.getJobEntryAttributeString(id_jobentry, "slave_server_name");
passingExport = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_export");
waitingToFinish = rep.getJobEntryAttributeBoolean(id_jobentry, "wait_until_finished", true);
followingAbortRemotely = rep.getJobEntryAttributeBoolean(id_jobentry, "follow_abort_remote");
expandingRemoteJob = rep.getJobEntryAttributeBoolean(id_jobentry, "expand_remote_job");
createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "create_parent_folder");
runConfiguration = rep.getJobEntryAttributeString(id_jobentry, "run_configuration");
// How many arguments?
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
allocateArgs(argnr);
// Read all arguments ...
for (int a = 0; a < argnr; a++) {
arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
}
// How many parameters?
int parameternr = rep.countNrJobEntryAttributes(id_jobentry, "parameter_name");
allocateParams(parameternr);
// Read all parameters ...
for (int a = 0; a < parameternr; a++) {
parameters[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_name");
parameterFieldNames[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_stream_name");
parameterValues[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_value");
}
// Defaults to true when the attribute is absent (older repositories).
passingAllParameters = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_all_parameters", true);
} catch (KettleDatabaseException dbe) {
throw new KettleException("Unable to load job entry of type 'job' from the repository with id_jobentry=" + id_jobentry, dbe);
}
}
Use of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (by Pentaho):
the class JobEntryJob, method loadXML.
/**
 * Loads this 'job' job entry from an XML node.
 * <p>
 * The tag names below are the persisted XML contract for this entry; they must match the
 * tags written by the corresponding getXML implementation exactly.
 *
 * @param entrynode    XML node holding the entry definition
 * @param databases    shared database connections (passed to the superclass)
 * @param slaveServers known slave servers (passed to the superclass)
 * @param rep          optional repository connection; when connected and a job name is
 *                     present, the specification method is forced to REPOSITORY_BY_NAME
 * @param metaStore    metastore handle (not used directly here)
 * @throws KettleXMLException if the node cannot be parsed
 */
@Override
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep, IMetaStore metaStore) throws KettleXMLException {
try {
super.loadXML(entrynode, databases, slaveServers);
// How the executed job is located: by repository object ID, repository name, or filename.
String method = XMLHandler.getTagValue(entrynode, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
// An empty stored ID is normalized to null rather than wrapped in a StringObjectId.
String jobId = XMLHandler.getTagValue(entrynode, "job_object_id");
jobObjectId = Utils.isEmpty(jobId) ? null : new StringObjectId(jobId);
filename = XMLHandler.getTagValue(entrynode, "filename");
jobname = XMLHandler.getTagValue(entrynode, "jobname");
// A live repository connection plus a job name overrides the stored specification method.
if (rep != null && rep.isConnected() && !Utils.isEmpty(jobname)) {
specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
}
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
paramsFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "params_from_previous"));
execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row"));
// Logging configuration for the executed job.
setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile"));
addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date"));
addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time"));
logfile = XMLHandler.getTagValue(entrynode, "logfile");
logext = XMLHandler.getTagValue(entrynode, "logext");
logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel"));
setAppendLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_append_logfile"));
remoteSlaveServerName = XMLHandler.getTagValue(entrynode, "slave_server_name");
passingExport = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "pass_export"));
directory = XMLHandler.getTagValue(entrynode, "directory");
createParentFolder = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "create_parent_folder"));
runConfiguration = XMLHandler.getTagValue(entrynode, "run_configuration");
// "wait_until_finished" defaults to true when the tag is absent (older XML).
String wait = XMLHandler.getTagValue(entrynode, "wait_until_finished");
if (Utils.isEmpty(wait)) {
waitingToFinish = true;
} else {
waitingToFinish = "Y".equalsIgnoreCase(wait);
}
followingAbortRemotely = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "follow_abort_remote"));
expandingRemoteJob = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "expand_remote_job"));
// How many arguments? Arguments are stored as numbered tags: argument0, argument1, ...
int argnr = 0;
while (XMLHandler.getTagValue(entrynode, "argument" + argnr) != null) {
argnr++;
}
allocateArgs(argnr);
// Read them all...
for (int a = 0; a < argnr; a++) {
arguments[a] = XMLHandler.getTagValue(entrynode, "argument" + a);
}
// Named parameters live under a <parameters> sub-node.
Node parametersNode = XMLHandler.getSubNode(entrynode, "parameters");
// Defaults to true when the tag is absent (older XML).
String passAll = XMLHandler.getTagValue(parametersNode, "pass_all_parameters");
passingAllParameters = Utils.isEmpty(passAll) || "Y".equalsIgnoreCase(passAll);
int nrParameters = XMLHandler.countNodes(parametersNode, "parameter");
allocateParams(nrParameters);
for (int i = 0; i < nrParameters; i++) {
Node knode = XMLHandler.getSubNodeByNr(parametersNode, "parameter", i);
parameters[i] = XMLHandler.getTagValue(knode, "name");
parameterFieldNames[i] = XMLHandler.getTagValue(knode, "stream_name");
parameterValues[i] = XMLHandler.getTagValue(knode, "value");
}
} catch (KettleXMLException xe) {
throw new KettleXMLException("Unable to load 'job' job entry from XML node", xe);
}
}
Use of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (by Pentaho):
the class JobEntryTrans, method loadXML.
/**
 * Loads this 'trans' (transformation) job entry from an XML node.
 * <p>
 * The tag names below are the persisted XML contract for this entry; they must match the
 * tags written by the corresponding getXML implementation exactly.
 *
 * @param entrynode    XML node holding the entry definition
 * @param databases    shared database connections (passed to the superclass)
 * @param slaveServers known slave servers (passed to the superclass)
 * @param rep          optional repository connection; when connected and a transformation
 *                     name is present, the specification method is forced to
 *                     REPOSITORY_BY_NAME
 * @param metaStore    metastore handle (not used directly here)
 * @throws KettleXMLException if the node cannot be parsed
 */
@Override
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep, IMetaStore metaStore) throws KettleXMLException {
try {
super.loadXML(entrynode, databases, slaveServers);
// How the executed transformation is located: repository object ID, name, or filename.
String method = XMLHandler.getTagValue(entrynode, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
// An empty stored ID is normalized to null rather than wrapped in a StringObjectId.
String transId = XMLHandler.getTagValue(entrynode, "trans_object_id");
transObjectId = Utils.isEmpty(transId) ? null : new StringObjectId(transId);
filename = XMLHandler.getTagValue(entrynode, "filename");
transname = XMLHandler.getTagValue(entrynode, "transname");
directory = XMLHandler.getTagValue(entrynode, "directory");
// A live repository connection plus a transformation name overrides the stored method.
if (rep != null && rep.isConnected() && !Utils.isEmpty(transname)) {
specificationMethod = ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME;
}
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
paramsFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "params_from_previous"));
execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row"));
clearResultRows = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "clear_rows"));
clearResultFiles = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "clear_files"));
// Logging configuration for the executed transformation.
setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile"));
addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date"));
addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time"));
logfile = XMLHandler.getTagValue(entrynode, "logfile");
logext = XMLHandler.getTagValue(entrynode, "logext");
logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel"));
clustering = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "cluster"));
createParentFolder = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "create_parent_folder"));
loggingRemoteWork = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "logging_remote_work"));
runConfiguration = XMLHandler.getTagValue(entrynode, "run_configuration");
remoteSlaveServerName = XMLHandler.getTagValue(entrynode, "slave_server_name");
setAppendLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_append_logfile"));
// "wait_until_finished" defaults to true when the tag is absent (older XML).
String wait = XMLHandler.getTagValue(entrynode, "wait_until_finished");
if (Utils.isEmpty(wait)) {
waitingToFinish = true;
} else {
waitingToFinish = "Y".equalsIgnoreCase(wait);
}
followingAbortRemotely = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "follow_abort_remote"));
// How many arguments? Arguments are stored as numbered tags: argument0, argument1, ...
int argnr = 0;
while (XMLHandler.getTagValue(entrynode, "argument" + argnr) != null) {
argnr++;
}
allocateArgs(argnr);
// Read them all...
for (int a = 0; a < argnr; a++) {
arguments[a] = XMLHandler.getTagValue(entrynode, "argument" + a);
}
// Named parameters live under a <parameters> sub-node.
Node parametersNode = XMLHandler.getSubNode(entrynode, "parameters");
// Defaults to true when the tag is absent (older XML).
String passAll = XMLHandler.getTagValue(parametersNode, "pass_all_parameters");
passingAllParameters = Utils.isEmpty(passAll) || "Y".equalsIgnoreCase(passAll);
int nrParameters = XMLHandler.countNodes(parametersNode, "parameter");
allocateParams(nrParameters);
for (int i = 0; i < nrParameters; i++) {
Node knode = XMLHandler.getSubNodeByNr(parametersNode, "parameter", i);
parameters[i] = XMLHandler.getTagValue(knode, "name");
parameterFieldNames[i] = XMLHandler.getTagValue(knode, "stream_name");
parameterValues[i] = XMLHandler.getTagValue(knode, "value");
}
} catch (KettleException e) {
throw new KettleXMLException("Unable to load job entry of type 'trans' from XML node", e);
}
}
Use of org.pentaho.di.repository.StringObjectId in the pentaho-kettle project (by Pentaho):
the class KettleFileRepository, method getSlaveID.
/**
 * Looks up the object ID of a slave server by name in this file-based repository.
 * <p>
 * In the file repository an object's ID is simply its on-disk name including the
 * slave-server extension, so the candidate ID is built by appending
 * {@code EXT_SLAVE_SERVER} to the given name and matched against the root object IDs.
 *
 * @param name the slave server name to look up
 * @return the matching {@link StringObjectId}, or {@code null} if no such slave server exists
 * @throws KettleException if the root object IDs cannot be listed
 */
@Override
public ObjectId getSlaveID(String name) throws KettleException {
// Only return the ID if the slave server exists.
// The candidate is a String; the original raw Object typing forced a redundant toString().
String slaveID = name + EXT_SLAVE_SERVER;
for (Object rootID : getRootObjectIDs(EXT_SLAVE_SERVER)) {
if (rootID.toString().equals(slaveID)) {
return new StringObjectId(slaveID);
}
}
return null;
}
Aggregations