Use of org.pentaho.di.repository.LongObjectId in project pentaho-kettle by pentaho.
From the class KettleDatabaseRepositoryConnectionDelegate, method getTransAttributesWithPrefix.
public synchronized List<Object[]> getTransAttributesWithPrefix( ObjectId id_transformation, String codePrefix ) throws KettleException {
  // The transformation id is bound as a prepared-statement parameter;
  // the attribute-code prefix is concatenated directly into the LIKE clause.
  String sql = "SELECT *"
    + " FROM " + databaseMeta.getQuotedSchemaTableCombination( null, KettleDatabaseRepository.TABLE_R_TRANS_ATTRIBUTE )
    + " WHERE " + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION ) + " = ?"
    + " AND " + quote( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_CODE ) + " LIKE '" + codePrefix + "%'";
  RowMetaAndData table = new RowMetaAndData();
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_TRANS_ATTRIBUTE_ID_TRANSFORMATION ),
    new LongObjectId( id_transformation ) );
  return callRead( () -> database.getRows( sql, table.getRowMeta(), table.getData(), ResultSet.FETCH_FORWARD, false, 0, null ) );
}
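A minimal usage sketch, not taken from the project source: it assumes a connected KettleDatabaseRepository (whose public connectionDelegate field exposes this method, as the snippets below also do), and the transformation id and "PARAM_" code prefix are made up for illustration.

// Hypothetical usage: read all attribute rows of one transformation
// whose CODE column starts with "PARAM_".
ObjectId transId = new LongObjectId( 42L ); // illustrative id
List<Object[]> rows = repository.connectionDelegate.getTransAttributesWithPrefix( transId, "PARAM_" );
for ( Object[] row : rows ) {
  // each array is one R_TRANS_ATTRIBUTE row in table-column order
  System.out.println( java.util.Arrays.toString( row ) );
}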
Use of org.pentaho.di.repository.LongObjectId in project pentaho-kettle by pentaho.
From the class KettleDatabaseRepositoryDatabaseDelegate, method insertDatabase.
public synchronized ObjectId insertDatabase( String name, String type, String access, String host, String dbname,
    String port, String user, String pass, String servername, String data_tablespace,
    String index_tablespace ) throws KettleException {
  ObjectId id = repository.connectionDelegate.getNextDatabaseID();
  ObjectId id_database_type = getDatabaseTypeID( type );
  if ( id_database_type == null ) {
    // Unknown database type: register it in R_DATABASE_TYPE first.
    id_database_type = repository.connectionDelegate.getNextDatabaseTypeID();
    String tablename = KettleDatabaseRepository.TABLE_R_DATABASE_TYPE;
    RowMetaInterface tableMeta = new RowMeta();
    tableMeta.addValueMeta( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_TYPE_ID_DATABASE_TYPE, 5, 0 ) );
    tableMeta.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_CODE, KettleDatabaseRepository.REP_STRING_CODE_LENGTH, 0 ) );
    tableMeta.addValueMeta( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_TYPE_DESCRIPTION, KettleDatabaseRepository.REP_STRING_LENGTH, 0 ) );
    repository.connectionDelegate.getDatabase().prepareInsert( tableMeta, tablename );
    Object[] tableData = new Object[3];
    int tableIndex = 0;
    tableData[tableIndex++] = new LongObjectId( id_database_type ).longValue();
    tableData[tableIndex++] = type;
    tableData[tableIndex++] = type; // the description defaults to the type code
    repository.connectionDelegate.getDatabase().setValuesInsert( tableMeta, tableData );
    repository.connectionDelegate.getDatabase().insertRow();
    repository.connectionDelegate.getDatabase().closeInsert();
  }
  ObjectId id_database_contype = getDatabaseConTypeID( access );
  RowMetaAndData table = new RowMetaAndData();
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE ), id );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_NAME ), name );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_TYPE ), id_database_type );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_ID_DATABASE_CONTYPE ), id_database_contype );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_HOST_NAME ), host );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATABASE_NAME ), dbname );
  table.addValue( new ValueMetaInteger( KettleDatabaseRepository.FIELD_DATABASE_PORT ), Long.valueOf( Const.toLong( port, -1 ) ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_USERNAME ), user );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_PASSWORD ), Encr.encryptPasswordIfNotUsingVariables( pass ) );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_SERVERNAME ), servername );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_DATA_TBS ), data_tablespace );
  table.addValue( new ValueMetaString( KettleDatabaseRepository.FIELD_DATABASE_INDEX_TBS ), index_tablespace );
  repository.connectionDelegate.getDatabase().prepareInsert( table.getRowMeta(), KettleDatabaseRepository.TABLE_R_DATABASE );
  repository.connectionDelegate.getDatabase().setValuesInsert( table );
  repository.connectionDelegate.getDatabase().insertRow();
  repository.connectionDelegate.getDatabase().closeInsert();
  return id;
}
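A hedged usage sketch: the databaseDelegate reference and every connection value below are illustrative assumptions, not values from the project.

// Hypothetical call: register a MySQL connection in the repository.
ObjectId dbId = databaseDelegate.insertDatabase(
  "sales_dwh",      // name
  "MYSQL",          // database type code
  "Native",         // access type
  "db.example.com", // host
  "sales",          // database name
  "3306",           // port
  "etl_user",       // user
  "secret",         // password; the method encrypts it before the insert
  null,             // server name
  null, null );     // data / index tablespaces
System.out.println( "Inserted connection with id " + dbId );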
Use of org.pentaho.di.repository.LongObjectId in project pentaho-kettle by pentaho.
From the class KettleDatabaseRepositoryDirectoryDelegate, method loadPathToRoot.
public RepositoryDirectoryInterface loadPathToRoot( ObjectId id_directory ) throws KettleException {
  List<RepositoryDirectory> path = new ArrayList<>();
  ObjectId directoryId = id_directory;
  RowMetaAndData directoryRow = getDirectory( directoryId );
  Long parentId = directoryRow.getInteger( 1 );
  //
  // Walk up the tree until the root is reached (the root has no parent id).
  //
  while ( parentId != null && parentId >= 0 ) {
    RepositoryDirectory directory = new RepositoryDirectory();
    // Name of the directory
    directory.setName( directoryRow.getString( 2, null ) );
    directory.setObjectId( directoryId );
    path.add( directory );
    // System.out.println( "+ dir '" + directory.getName() + "'" );
    directoryId = new LongObjectId( parentId );
    directoryRow = getDirectory( directoryId );
    parentId = directoryRow.getInteger( KettleDatabaseRepository.FIELD_DIRECTORY_ID_DIRECTORY_PARENT );
  }
  RepositoryDirectory root = new RepositoryDirectory();
  root.setObjectId( new LongObjectId( 0 ) );
  path.add( root );
  //
  // Link every directory in the collected path to its parent.
  //
  for ( int i = 0; i < path.size() - 1; i++ ) {
    RepositoryDirectory item = path.get( i );
    RepositoryDirectory parent = path.get( i + 1 );
    item.setParent( parent );
    parent.addSubdirectory( item );
  }
  RepositoryDirectory repositoryDirectory = path.get( 0 );
  return repositoryDirectory;
}
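A short usage sketch under the assumption that directoryDelegate is an instance of this class and that a directory with id 7 exists; both are illustrative.

// Hypothetical usage: resolve the chain from directory 7 up to the root.
// The method returns the leaf directory with its parents already linked.
RepositoryDirectoryInterface leaf = directoryDelegate.loadPathToRoot( new LongObjectId( 7L ) );
System.out.println( leaf.getPath() ); // prints something like "/home/project"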
Use of org.pentaho.di.repository.LongObjectId in project pentaho-kettle by pentaho.
From the class KettleDatabaseRepositoryDirectoryDelegate, method loadRepositoryDirectory.
public void loadRepositoryDirectory( RepositoryDirectory repositoryDirectory, ObjectId id_directory ) throws KettleException {
  if ( id_directory == null ) {
    // This is the root directory, id = 0L
    id_directory = new LongObjectId( 0L );
  }
  try {
    RowMetaAndData row = getDirectory( id_directory );
    if ( row != null ) {
      repositoryDirectory.setObjectId( id_directory );
      // The name of the directory
      //
      repositoryDirectory.setName( row.getString( "DIRECTORY_NAME", null ) );
      // The sub-directories?
      //
      ObjectId[] subids = repository.getSubDirectoryIDs( repositoryDirectory.getObjectId() );
      for ( int i = 0; i < subids.length; i++ ) {
        RepositoryDirectory subdir = new RepositoryDirectory();
        loadRepositoryDirectory( subdir, subids[i] );
        repositoryDirectory.addSubdirectory( subdir );
      }
    }
  } catch ( Exception e ) {
    throw new KettleException( BaseMessages.getString( PKG, "Repository.LoadRepositoryDirectory.ErrorLoading.Exception" ), e );
  }
}
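A sketch of how the recursive load might be driven; the directoryDelegate reference is an assumption. Passing null for the id starts at the root, as the guard at the top of the method shows.

// Hypothetical usage: load the whole directory tree from the root down.
RepositoryDirectory root = new RepositoryDirectory();
directoryDelegate.loadRepositoryDirectory( root, null ); // null means the root (id 0)
for ( int i = 0; i < root.getNrSubdirectories(); i++ ) {
  System.out.println( root.getSubdirectory( i ).getName() );
}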
Use of org.pentaho.di.repository.LongObjectId in project pentaho-kettle by pentaho.
From the class KettleDatabaseRepositoryJobDelegate, method loadJobMeta.
/**
 * Load a job from the repository directory it resides in.
 *
 * @param jobname
 *          The name of the job
 * @param repdir
 *          The directory in which the job resides
 * @param monitor
 *          An optional progress monitor (may be null)
 * @return the loaded JobMeta
 * @throws KettleException
 *           if the job cannot be found or read
 */
public JobMeta loadJobMeta( String jobname, RepositoryDirectoryInterface repdir, ProgressMonitorListener monitor ) throws KettleException {
  JobMeta jobMeta = new JobMeta();
  synchronized ( repository ) {
    try {
      // Clear everything...
      jobMeta.clear();
      jobMeta.setRepositoryDirectory( repdir );
      // Get the job id
      jobMeta.setObjectId( getJobID( jobname, repdir.getObjectId() ) );
      // If no valid id is available in the database, then give an error...
      if ( jobMeta.getObjectId() != null ) {
        // Load the notes...
        ObjectId[] noteids = repository.getJobNoteIDs( jobMeta.getObjectId() );
        ObjectId[] jecids = repository.getJobEntryCopyIDs( jobMeta.getObjectId() );
        ObjectId[] hopid = repository.getJobHopIDs( jobMeta.getObjectId() );
        int nrWork = 2 + noteids.length + jecids.length + hopid.length;
        if ( monitor != null ) {
          monitor.beginTask( BaseMessages.getString( PKG, "JobMeta.Monitor.LoadingJob" ) + repdir + Const.FILE_SEPARATOR + jobname, nrWork );
        }
        if ( monitor != null ) {
          monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.ReadingJobInformation" ) );
        }
        RowMetaAndData jobRow = getJob( jobMeta.getObjectId() );
        jobMeta.setName( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_NAME, null ) );
        jobMeta.setDescription( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_DESCRIPTION, null ) );
        jobMeta.setExtendedDescription( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_EXTENDED_DESCRIPTION, null ) );
        jobMeta.setJobversion( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_JOB_VERSION, null ) );
        jobMeta.setJobstatus( Const.toInt( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_JOB_STATUS, null ), -1 ) );
        jobMeta.setCreatedUser( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_CREATED_USER, null ) );
        jobMeta.setCreatedDate( jobRow.getDate( KettleDatabaseRepository.FIELD_JOB_CREATED_DATE, new Date() ) );
        jobMeta.setModifiedUser( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_MODIFIED_USER, null ) );
        jobMeta.setModifiedDate( jobRow.getDate( KettleDatabaseRepository.FIELD_JOB_MODIFIED_DATE, new Date() ) );
        long id_logdb = jobRow.getInteger( KettleDatabaseRepository.FIELD_JOB_ID_DATABASE_LOG, 0 );
        if ( id_logdb > 0 ) {
          // Get the log connection
          //
          DatabaseMeta logDb = repository.loadDatabaseMeta( new LongObjectId( id_logdb ), null );
          jobMeta.getJobLogTable().setConnectionName( logDb.getName() );
          // jobMeta.getJobLogTable().getDatabaseMeta().shareVariablesWith( jobMeta );
        }
        jobMeta.getJobLogTable().setTableName( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_TABLE_NAME_LOG, null ) );
        jobMeta.getJobLogTable().setBatchIdUsed( jobRow.getBoolean( KettleDatabaseRepository.FIELD_JOB_USE_BATCH_ID, false ) );
        jobMeta.getJobLogTable().setLogFieldUsed( jobRow.getBoolean( KettleDatabaseRepository.FIELD_JOB_USE_LOGFIELD, false ) );
        jobMeta.getJobLogTable().setLogSizeLimit( getJobAttributeString( jobMeta.getObjectId(), 0, KettleDatabaseRepository.JOB_ATTRIBUTE_LOG_SIZE_LIMIT ) );
        jobMeta.setBatchIdPassed( jobRow.getBoolean( KettleDatabaseRepository.FIELD_JOB_PASS_BATCH_ID, false ) );
        // Load all the log tables for the job...
        //
        RepositoryAttributeInterface attributeInterface = new KettleDatabaseRepositoryJobAttribute( repository.connectionDelegate, jobMeta.getObjectId() );
        for ( LogTableInterface logTable : jobMeta.getLogTables() ) {
          logTable.loadFromRepository( attributeInterface );
        }
        if ( monitor != null ) {
          monitor.worked( 1 );
        }
        if ( monitor != null ) {
          monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.ReadingAvailableDatabasesFromRepository" ) );
        }
        // Read objects from the shared XML file & the repository
        try {
          jobMeta.setSharedObjectsFile( jobRow.getString( KettleDatabaseRepository.FIELD_JOB_SHARED_FILE, null ) );
          jobMeta.setSharedObjects( repository != null ? repository.readJobMetaSharedObjects( jobMeta ) : jobMeta.readSharedObjects() );
        } catch ( Exception e ) {
          log.logError( BaseMessages.getString( PKG, "JobMeta.ErrorReadingSharedObjects.Message", e.toString() ) );
          log.logError( Const.getStackTracker( e ) );
        }
        if ( monitor != null ) {
          monitor.worked( 1 );
        }
        if ( log.isDetailed() ) {
          log.logDetailed( "Loading " + noteids.length + " notes" );
        }
        for ( int i = 0; i < noteids.length; i++ ) {
          if ( monitor != null ) {
            monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.ReadingNoteNr" ) + ( i + 1 ) + "/" + noteids.length );
          }
          NotePadMeta ni = repository.notePadDelegate.loadNotePadMeta( noteids[i] );
          if ( jobMeta.indexOfNote( ni ) < 0 ) {
            jobMeta.addNote( ni );
          }
          if ( monitor != null ) {
            monitor.worked( 1 );
          }
        }
        // Load the group attributes map
        //
        jobMeta.setAttributesMap( loadJobAttributesMap( jobMeta.getObjectId() ) );
        // Load the job entries...
        //
        // Keep a unique list of job entries to facilitate the loading.
        //
        List<JobEntryInterface> jobentries = new ArrayList<>();
        if ( log.isDetailed() ) {
          log.logDetailed( "Loading " + jecids.length + " job entries" );
        }
        for ( int i = 0; i < jecids.length; i++ ) {
          if ( monitor != null ) {
            monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.ReadingJobEntryNr" ) + ( i + 1 ) + "/" + jecids.length );
          }
          JobEntryCopy jec = repository.jobEntryDelegate.loadJobEntryCopy( jobMeta.getObjectId(), jecids[i],
            jobentries, jobMeta.getDatabases(), jobMeta.getSlaveServers(), jobname );
          if ( jec.isMissing() ) {
            jobMeta.addMissingEntry( (MissingEntry) jec.getEntry() );
          }
          // Also set the copy number: count the job entry copies that
          // already use this job entry.
          //
          int copyNr = 0;
          for ( JobEntryCopy copy : jobMeta.getJobCopies() ) {
            if ( jec.getEntry() == copy.getEntry() ) {
              copyNr++;
            }
          }
          jec.setNr( copyNr );
          int idx = jobMeta.indexOfJobEntry( jec );
          if ( idx < 0 ) {
            if ( jec.getName() != null && jec.getName().length() > 0 ) {
              jobMeta.addJobEntry( jec );
            }
          } else {
            // Replace it!
            jobMeta.setJobEntry( idx, jec );
          }
          if ( monitor != null ) {
            monitor.worked( 1 );
          }
        }
        // Load the hops...
        if ( log.isDetailed() ) {
          log.logDetailed( "Loading " + hopid.length + " job hops" );
        }
        for ( int i = 0; i < hopid.length; i++ ) {
          if ( monitor != null ) {
            monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.ReadingJobHopNr" ) + ( i + 1 ) + "/" + hopid.length );
          }
          JobHopMeta hi = loadJobHopMeta( hopid[i], jobMeta.getJobCopies() );
          jobMeta.getJobhops().add( hi );
          if ( monitor != null ) {
            monitor.worked( 1 );
          }
        }
        loadRepParameters( jobMeta );
        // Finally, clear the changed flags...
        jobMeta.clearChanged();
        if ( monitor != null ) {
          monitor.subTask( BaseMessages.getString( PKG, "JobMeta.Monitor.FinishedLoadOfJob" ) );
        }
        if ( monitor != null ) {
          monitor.done();
        }
        // Close prepared statements, minimize locking, etc.
        //
        repository.connectionDelegate.closeAttributeLookupPreparedStatements();
        return jobMeta;
      } else {
        throw new KettleException( BaseMessages.getString( PKG, "JobMeta.Exception.CanNotFindJob" ) + jobname );
      }
    } catch ( KettleException dbe ) {
      throw new KettleException( BaseMessages.getString( PKG, "JobMeta.Exception.AnErrorOccuredReadingJob", jobname ), dbe );
    } finally {
      jobMeta.initializeVariablesFrom( jobMeta.getParentVariableSpace() );
      jobMeta.setInternalKettleVariables();
    }
  }
}
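A hedged end-to-end sketch: the directory path "/etl" and job name "daily_load" are invented, and jobDelegate is assumed to be an instance of this class wired to a connected repository.

// Hypothetical usage: load a job by name from a repository directory.
RepositoryDirectoryInterface tree = repository.loadRepositoryDirectoryTree();
RepositoryDirectoryInterface etlDir = tree.findDirectory( "/etl" );
JobMeta jobMeta = jobDelegate.loadJobMeta( "daily_load", etlDir, null ); // null: no progress monitor
System.out.println( jobMeta.getName() + " has " + jobMeta.nrJobEntries() + " job entries" );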