Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho:
the class KettleDatabaseRepositoryTransDelegate, method loadTransformation.
/**
 * Reads a transformation with the given name from a repository directory and materializes it
 * into the supplied {@link TransMeta} instance (steps, hops, notes, dependencies, partitioning,
 * clustering, error handling, attributes and log tables).
 *
 * @param transMeta
 * the TransMeta instance to populate; it is also the return value
 * @param transname
 * The name of the transformation.
 * @param repdir
 * the path to the repository directory
 * @param monitor
 * The progress monitor to display the progress of the file-open operation in a dialog (may be null)
 * @param setInternalVariables
 * true if you want to set the internal variables based on this transformation information
 * @return the same transMeta instance, fully populated
 * @throws KettleException
 * if the transformation does not exist in the directory, or a (database) error occurs while reading it
 */
public TransMeta loadTransformation(TransMeta transMeta, String transname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor, boolean setInternalVariables) throws KettleException {
transMeta.setRepository(repository);
transMeta.setMetaStore(repository.metaStore);
// Serialize access to the shared repository connection for the whole load.
synchronized (repository) {
try {
// Human-readable "dir/name" used only in progress-monitor messages below.
String pathAndName = repdir.isRoot() ? repdir + transname : repdir + RepositoryDirectory.DIRECTORY_SEPARATOR + transname;
transMeta.setName(transname);
transMeta.setRepositoryDirectory(repdir);
// Get the transformation id
if (log.isDetailed()) {
log.logDetailed(BaseMessages.getString(PKG, "TransMeta.Log.LookingForTransformation", transname, repdir.getPath()));
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingTransformationInfoTask.Title"));
}
transMeta.setObjectId(getTransformationID(transname, repdir.getObjectId()));
if (monitor != null) {
monitor.worked(1);
}
// If no valid id is available in the database, then give error...
if (transMeta.getObjectId() != null) {
// Fetch all child-object IDs up front so the monitor's total work amount can be computed.
ObjectId[] noteids = repository.getTransNoteIDs(transMeta.getObjectId());
ObjectId[] stepids = repository.getStepIDs(transMeta.getObjectId());
ObjectId[] hopids = getTransHopIDs(transMeta.getObjectId());
int nrWork = 3 + noteids.length + stepids.length + hopids.length;
if (monitor != null) {
monitor.beginTask(BaseMessages.getString(PKG, "TransMeta.Monitor.LoadingTransformationTask.Title") + pathAndName, nrWork);
}
if (log.isDetailed()) {
log.logDetailed(BaseMessages.getString(PKG, "TransMeta.Log.LoadingTransformation", transMeta.getName()));
}
// Load the common database connections
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingTheAvailableSharedObjectsTask.Title"));
}
// Shared objects are loaded best-effort: a failure is logged but does not abort the load.
try {
transMeta.setSharedObjects(readTransSharedObjects(transMeta));
} catch (Exception e) {
log.logError(BaseMessages.getString(PKG, "TransMeta.ErrorReadingSharedObjects.Message", e.toString()));
log.logError(Const.getStackTracker(e));
}
if (monitor != null) {
monitor.worked(1);
}
// Load the notes...
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingNoteTask.Title"));
}
for (int i = 0; i < noteids.length; i++) {
NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta(noteids[i]);
if (transMeta.indexOfNote(ni) < 0) {
transMeta.addNote(ni);
}
if (monitor != null) {
monitor.worked(1);
}
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingStepsTask.Title"));
}
// read all the attributes
// Pre-fill the step-attribute buffer so the per-step loads below avoid one query per attribute.
repository.connectionDelegate.fillStepAttributesBuffer(transMeta.getObjectId());
// on one go!
for (int i = 0; i < stepids.length; i++) {
if (log.isDetailed()) {
log.logDetailed(BaseMessages.getString(PKG, "TransMeta.Log.LoadingStepWithID") + stepids[i]);
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingStepTask.Title") + (i + 1) + "/" + (stepids.length));
}
StepMeta stepMeta = repository.stepDelegate.loadStepMeta(stepids[i], transMeta.getDatabases(), transMeta.getPartitionSchemas());
// A step whose plugin is not installed comes back as a "missing" placeholder; track it.
if (stepMeta.isMissing()) {
transMeta.addMissingTrans((MissingTrans) stepMeta.getStepMetaInterface());
}
// In this case, we just add or replace the shared steps.
// The repository is considered "more central"
transMeta.addOrReplaceStep(stepMeta);
if (monitor != null) {
monitor.worked(1);
}
}
if (monitor != null) {
monitor.worked(1);
}
// clear the buffer (should be empty anyway)
repository.connectionDelegate.setStepAttributesBuffer(null);
// Have all StreamValueLookups, etc. reference the correct source steps...
for (int i = 0; i < transMeta.nrSteps(); i++) {
StepMetaInterface sii = transMeta.getStep(i).getStepMetaInterface();
sii.searchInfoAndTargetSteps(transMeta.getSteps());
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.LoadingTransformationDetailsTask.Title"));
}
// Load the header/detail attributes of the transformation itself.
loadRepTrans(transMeta);
if (monitor != null) {
monitor.worked(1);
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingHopTask.Title"));
}
// Hops must be resolved after all steps are present; unresolvable hops come back null and are skipped.
for (int i = 0; i < hopids.length; i++) {
TransHopMeta hi = loadTransHopMeta(hopids[i], transMeta.getSteps());
if (hi != null) {
transMeta.addTransHop(hi);
}
if (monitor != null) {
monitor.worked(1);
}
}
// Re-link each step's partitioning metadata to the freshly loaded partition schemas.
for (int i = 0; i < transMeta.nrSteps(); i++) {
StepPartitioningMeta stepPartitioningMeta = transMeta.getStep(i).getStepPartitioningMeta();
if (stepPartitioningMeta != null) {
stepPartitioningMeta.setPartitionSchemaAfterLoading(transMeta.getPartitionSchemas());
}
}
// Re-link each step's cluster schema the same way.
for (int i = 0; i < transMeta.nrSteps(); i++) {
transMeta.getStep(i).setClusterSchemaAfterLoading(transMeta.getClusterSchemas());
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.ReadingTheDependenciesTask.Title"));
}
ObjectId[] depids = repository.getTransDependencyIDs(transMeta.getObjectId());
for (int i = 0; i < depids.length; i++) {
TransDependency td = loadTransDependency(depids[i], transMeta.getDatabases());
transMeta.addDependency(td);
}
if (monitor != null) {
monitor.worked(1);
}
// Load the group attributes map
//
transMeta.setAttributesMap(loadTransAttributesMap(transMeta.getObjectId()));
// Wire up error handling: a step with the "step_error_handling_source_step" attribute set
// gets its StepErrorMeta attached to the referenced source step.
for (int i = 0; i < transMeta.nrSteps(); i++) {
StepMeta stepMeta = transMeta.getStep(i);
String sourceStep = repository.getStepAttributeString(stepMeta.getObjectId(), "step_error_handling_source_step");
if (sourceStep != null) {
StepErrorMeta stepErrorMeta = repository.stepDelegate.loadStepErrorMeta(transMeta, stepMeta, transMeta.getSteps());
// a bit of a trick, I know.
stepErrorMeta.getSourceStep().setStepErrorMeta(stepErrorMeta);
}
}
// Load all the log tables for the transformation...
//
RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryTransAttribute(repository.connectionDelegate, transMeta.getObjectId());
for (LogTableInterface logTable : transMeta.getLogTables()) {
logTable.loadFromRepository(attributeInterface);
}
if (monitor != null) {
monitor.subTask(BaseMessages.getString(PKG, "TransMeta.Monitor.SortingStepsTask.Title"));
}
transMeta.sortSteps();
if (monitor != null) {
monitor.worked(1);
}
if (monitor != null) {
monitor.done();
}
} else {
throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Exception.TransformationDoesNotExist") + transMeta.getName());
}
if (log.isDetailed()) {
// NOTE(review): the first message logs whether the directory is null, then the second
// unconditionally calls getPath() on it; safe here only because repdir was set above.
log.logDetailed(BaseMessages.getString(PKG, "TransMeta.Log.LoadedTransformation2", transname, String.valueOf(transMeta.getRepositoryDirectory() == null)));
log.logDetailed(BaseMessages.getString(PKG, "TransMeta.Log.LoadedTransformation", transname, transMeta.getRepositoryDirectory().getPath()));
}
// close prepared statements, minimize locking etc.
//
repository.connectionDelegate.closeAttributeLookupPreparedStatements();
return transMeta;
} catch (KettleDatabaseException e) {
log.logError(BaseMessages.getString(PKG, "TransMeta.Log.DatabaseErrorOccuredReadingTransformation") + Const.CR + e);
throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Exception.DatabaseErrorOccuredReadingTransformation"), e);
} catch (Exception e) {
log.logError(BaseMessages.getString(PKG, "TransMeta.Log.DatabaseErrorOccuredReadingTransformation") + Const.CR + e);
throw new KettleException(BaseMessages.getString(PKG, "TransMeta.Exception.DatabaseErrorOccuredReadingTransformation2"), e);
} finally {
// Always (re)initialize variables, even on failure, so the TransMeta is in a usable state.
transMeta.initializeVariablesFrom(null);
if (setInternalVariables) {
transMeta.setInternalKettleVariables();
}
}
}
}
Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho:
the class JobDelegate, method loadJobMetaDetails.
/**
 * Loads the detail attributes of a job (description, version, status, created/modified
 * audit fields, job log table settings and the attributes map) from the given repository
 * node into the supplied {@link JobMeta}.
 *
 * @param rootNode the repository data node holding the job's properties
 * @param jobMeta the JobMeta instance to populate
 * @throws KettleException if any property cannot be read or resolved
 */
protected void loadJobMetaDetails(DataNode rootNode, JobMeta jobMeta) throws KettleException {
  try {
    jobMeta.setExtendedDescription(getString(rootNode, PROP_EXTENDED_DESCRIPTION));
    jobMeta.setJobversion(getString(rootNode, PROP_JOB_VERSION));
    jobMeta.setJobstatus((int) rootNode.getProperty(PROP_JOB_STATUS).getLong());
    jobMeta.getJobLogTable().setTableName(getString(rootNode, PROP_TABLE_NAME_LOG));
    // Audit fields.
    jobMeta.setCreatedUser(getString(rootNode, PROP_CREATED_USER));
    jobMeta.setCreatedDate(getDate(rootNode, PROP_CREATED_DATE));
    jobMeta.setModifiedUser(getString(rootNode, PROP_MODIFIED_USER));
    jobMeta.setModifiedDate(getDate(rootNode, PROP_MODIFIED_DATE));
    if (rootNode.hasProperty(PROP_DATABASE_LOG)) {
      String id = rootNode.getProperty(PROP_DATABASE_LOG).getRef().getId().toString();
      DatabaseMeta conn = DatabaseMeta.findDatabase(jobMeta.getDatabases(), new StringObjectId(id));
      // findDatabase() returns null when the referenced connection is no longer among the
      // job's databases; previously this caused an NPE that surfaced only as the generic
      // "Error loading job details" message. Skip the log-connection name in that case.
      if (conn != null) {
        jobMeta.getJobLogTable().setConnectionName(conn.getName());
      }
    }
    jobMeta.getJobLogTable().setBatchIdUsed(rootNode.getProperty(PROP_USE_BATCH_ID).getBoolean());
    jobMeta.setBatchIdPassed(rootNode.getProperty(PROP_PASS_BATCH_ID).getBoolean());
    jobMeta.getJobLogTable().setLogFieldUsed(rootNode.getProperty(PROP_USE_LOGFIELD).getBoolean());
    jobMeta.getJobLogTable().setLogSizeLimit(getString(rootNode, PROP_LOG_SIZE_LIMIT));
    // Load the logging tables too..
    //
    RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, jobMeta.getDatabases());
    for (LogTableInterface logTable : jobMeta.getLogTables()) {
      logTable.loadFromRepository(attributeInterface);
    }
    // Load the attributes map
    //
    AttributesMapUtil.loadAttributesMap(rootNode, jobMeta);
  } catch (Exception e) {
    throw new KettleException("Error loading job details", e);
  }
}
Use of org.pentaho.di.repository.RepositoryAttributeInterface in project pentaho-kettle by pentaho:
the class TransDelegate, method loadTransformationDetails.
/**
 * Loads the detail attributes of a transformation (description, version, status, log table
 * settings, max-date settings, audit fields, rowset/feedback/performance options and the
 * attributes map) from the given repository node into the supplied {@link TransMeta}.
 *
 * @param rootNode the repository data node holding the transformation's properties
 * @param transMeta the TransMeta instance to populate
 * @throws KettleException if any property cannot be read or resolved
 */
protected void loadTransformationDetails(final DataNode rootNode, final TransMeta transMeta) throws KettleException {
  transMeta.setExtendedDescription(getString(rootNode, PROP_EXTENDED_DESCRIPTION));
  transMeta.setTransversion(getString(rootNode, PROP_TRANS_VERSION));
  transMeta.setTransstatus((int) rootNode.getProperty(PROP_TRANS_STATUS).getLong());
  // Resolve the log-table step references against the already-loaded steps; findStep()
  // returns null for unknown names, which the setters accept.
  if (rootNode.hasProperty(PROP_STEP_READ)) {
    transMeta.getTransLogTable().setStepRead(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_READ)));
  }
  if (rootNode.hasProperty(PROP_STEP_WRITE)) {
    transMeta.getTransLogTable().setStepWritten(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_WRITE)));
  }
  if (rootNode.hasProperty(PROP_STEP_INPUT)) {
    transMeta.getTransLogTable().setStepInput(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_INPUT)));
  }
  if (rootNode.hasProperty(PROP_STEP_OUTPUT)) {
    transMeta.getTransLogTable().setStepOutput(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_OUTPUT)));
  }
  if (rootNode.hasProperty(PROP_STEP_UPDATE)) {
    transMeta.getTransLogTable().setStepUpdate(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_UPDATE)));
  }
  if (rootNode.hasProperty(PROP_STEP_REJECTED)) {
    transMeta.getTransLogTable().setStepRejected(StepMeta.findStep(transMeta.getSteps(), getString(rootNode, PROP_STEP_REJECTED)));
  }
  if (rootNode.hasProperty(PROP_DATABASE_LOG)) {
    String id = rootNode.getProperty(PROP_DATABASE_LOG).getRef().getId().toString();
    DatabaseMeta conn = DatabaseMeta.findDatabase(transMeta.getDatabases(), new StringObjectId(id));
    // findDatabase() returns null when the referenced log connection is no longer among
    // the transformation's databases; previously this caused an NPE here. Skip the
    // log-connection name in that case instead of failing the whole load.
    if (conn != null) {
      transMeta.getTransLogTable().setConnectionName(conn.getName());
    }
  }
  transMeta.getTransLogTable().setTableName(getString(rootNode, PROP_TABLE_NAME_LOG));
  transMeta.getTransLogTable().setBatchIdUsed(rootNode.getProperty(PROP_USE_BATCHID).getBoolean());
  transMeta.getTransLogTable().setLogFieldUsed(rootNode.getProperty(PROP_USE_LOGFIELD).getBoolean());
  if (rootNode.hasProperty(PROP_ID_DATABASE_MAXDATE)) {
    String id = rootNode.getProperty(PROP_ID_DATABASE_MAXDATE).getRef().getId().toString();
    // setMaxDateConnection tolerates a null lookup result, so no guard is needed here.
    transMeta.setMaxDateConnection(DatabaseMeta.findDatabase(transMeta.getDatabases(), new StringObjectId(id)));
  }
  transMeta.setMaxDateTable(getString(rootNode, PROP_TABLE_NAME_MAXDATE));
  transMeta.setMaxDateField(getString(rootNode, PROP_FIELD_NAME_MAXDATE));
  transMeta.setMaxDateOffset(rootNode.getProperty(PROP_OFFSET_MAXDATE).getDouble());
  transMeta.setMaxDateDifference(rootNode.getProperty(PROP_DIFF_MAXDATE).getDouble());
  // Audit fields.
  transMeta.setCreatedUser(getString(rootNode, PROP_CREATED_USER));
  transMeta.setCreatedDate(getDate(rootNode, PROP_CREATED_DATE));
  transMeta.setModifiedUser(getString(rootNode, PROP_MODIFIED_USER));
  transMeta.setModifiedDate(getDate(rootNode, PROP_MODIFIED_DATE));
  // Optional: a stored rowset size of 0 or less falls back to the default.
  transMeta.setSizeRowset(Const.ROWS_IN_ROWSET);
  long val_size_rowset = rootNode.getProperty(PROP_SIZE_ROWSET).getLong();
  if (val_size_rowset > 0) {
    transMeta.setSizeRowset((int) val_size_rowset);
  }
  if (rootNode.hasProperty(PROP_ID_DIRECTORY)) {
    String id_directory = getString(rootNode, PROP_ID_DIRECTORY);
    if (log.isDetailed()) {
      // $NON-NLS-1$
      log.logDetailed(toString(), PROP_ID_DIRECTORY + "=" + id_directory);
    }
    // Set right directory...
    // always reload the
    transMeta.setRepositoryDirectory(repo.findDirectory(new StringObjectId(id_directory)));
    // folder structure
  }
  transMeta.setUsingUniqueConnections(rootNode.getProperty(PROP_UNIQUE_CONNECTIONS).getBoolean());
  // The following three options default to true/enabled when absent from older repositories.
  boolean feedbackShown = true;
  if (rootNode.hasProperty(PROP_FEEDBACK_SHOWN)) {
    feedbackShown = rootNode.getProperty(PROP_FEEDBACK_SHOWN).getBoolean();
  }
  transMeta.setFeedbackShown(feedbackShown);
  transMeta.setFeedbackSize((int) rootNode.getProperty(PROP_FEEDBACK_SIZE).getLong());
  boolean usingThreadPriorityManagement = true;
  if (rootNode.hasProperty(PROP_USING_THREAD_PRIORITIES)) {
    usingThreadPriorityManagement = rootNode.getProperty(PROP_USING_THREAD_PRIORITIES).getBoolean();
  }
  transMeta.setUsingThreadPriorityManagment(usingThreadPriorityManagement);
  transMeta.setSharedObjectsFile(getString(rootNode, PROP_SHARED_FILE));
  String transTypeCode = getString(rootNode, PROP_TRANSFORMATION_TYPE);
  transMeta.setTransformationType(TransformationType.getTransformationTypeByCode(transTypeCode));
  // Performance monitoring for steps...
  //
  boolean capturingStepPerformanceSnapShots = true;
  if (rootNode.hasProperty(PROP_CAPTURE_STEP_PERFORMANCE)) {
    capturingStepPerformanceSnapShots = rootNode.getProperty(PROP_CAPTURE_STEP_PERFORMANCE).getBoolean();
  }
  transMeta.setCapturingStepPerformanceSnapShots(capturingStepPerformanceSnapShots);
  transMeta.setStepPerformanceCapturingDelay(getLong(rootNode, PROP_STEP_PERFORMANCE_CAPTURING_DELAY));
  transMeta.setStepPerformanceCapturingSizeLimit(getString(rootNode, PROP_STEP_PERFORMANCE_CAPTURING_SIZE_LIMIT));
  transMeta.getPerformanceLogTable().setTableName(getString(rootNode, PROP_STEP_PERFORMANCE_LOG_TABLE));
  transMeta.getTransLogTable().setLogSizeLimit(getString(rootNode, PROP_LOG_SIZE_LIMIT));
  // Load the logging tables too..
  //
  RepositoryAttributeInterface attributeInterface = new PurRepositoryAttribute(rootNode, transMeta.getDatabases());
  for (LogTableInterface logTable : transMeta.getLogTables()) {
    logTable.loadFromRepository(attributeInterface);
  }
  AttributesMapUtil.loadAttributesMap(rootNode, transMeta);
}
Aggregations