use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-kettle by pentaho.
the class SpoonDBDelegate method copyTable.
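copyTable() assembles a transformation in code: a Table Input step that selects every row of the named table from the source connection, a Table Output step that writes them to the same table on the target connection, and, when any field name collides with a reserved word of the target database, an intermediate Select Values step that renames the offending fields.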
public boolean copyTable(DatabaseMeta sourceDBInfo, DatabaseMeta targetDBInfo, String tablename) {
try {
//
// Create a new transformation...
//
TransMeta meta = new TransMeta();
meta.addDatabase(sourceDBInfo);
meta.addDatabase(targetDBInfo);
//
// Add a note
//
// "Reads information from table [" + tablename + "] on database [" + sourceDBInfo + "]"
String note = BaseMessages.getString(PKG, "Spoon.Message.Note.ReadInformationFromTableOnDB", tablename, sourceDBInfo.getDatabaseName()) + Const.CR;
// "Writes the information to table [" + tablename + "] on database [" + targetDBInfo + "]"
note += BaseMessages.getString(PKG, "Spoon.Message.Note.WriteInformationToTableOnDB", tablename, targetDBInfo.getDatabaseName());
NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1);
meta.addNote(ni);
//
// create the source step...
//
// "read
String fromstepname = BaseMessages.getString(PKG, "Spoon.Message.Note.ReadFromTable", tablename);
// from
// ["+tablename+"]";
TableInputMeta tii = new TableInputMeta();
tii.setDatabaseMeta(sourceDBInfo);
tii.setSQL("SELECT * FROM " + tablename);
PluginRegistry registry = PluginRegistry.getInstance();
String fromstepid = registry.getPluginId(StepPluginType.class, tii);
StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
fromstep.setLocation(150, 100);
fromstep.setDraw(true);
fromstep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.ReadInformationFromTableOnDB", tablename, sourceDBInfo.getDatabaseName()));
meta.addStep(fromstep);
//
// add logic to rename fields in case any of the field names contain
// reserved words...
// Use metadata logic in SelectValues, use SelectValueInfo...
//
Database sourceDB = new Database(loggingObject, sourceDBInfo);
sourceDB.shareVariablesWith(meta);
sourceDB.connect();
try {
// Get the fields for the input table...
RowMetaInterface fields = sourceDB.getTableFields(tablename);
// See if we need to deal with reserved words...
int nrReserved = targetDBInfo.getNrReservedWords(fields);
if (nrReserved > 0) {
SelectValuesMeta svi = new SelectValuesMeta();
svi.allocate(0, 0, nrReserved);
int nr = 0;
// CHECKSTYLE:Indentation:OFF
for (int i = 0; i < fields.size(); i++) {
ValueMetaInterface v = fields.getValueMeta(i);
if (targetDBInfo.isReservedWord(v.getName())) {
if (svi.getMeta()[nr] == null) {
svi.getMeta()[nr] = new SelectMetadataChange(svi);
}
svi.getMeta()[nr].setName(v.getName());
svi.getMeta()[nr].setRename(targetDBInfo.quoteField(v.getName()));
nr++;
}
}
String selstepname = BaseMessages.getString(PKG, "Spoon.Message.Note.HandleReservedWords");
String selstepid = registry.getPluginId(StepPluginType.class, svi);
StepMeta selstep = new StepMeta(selstepid, selstepname, svi);
selstep.setLocation(350, 100);
selstep.setDraw(true);
selstep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.RenamesReservedWords", targetDBInfo.getPluginId()));
meta.addStep(selstep);
TransHopMeta shi = new TransHopMeta(fromstep, selstep);
meta.addTransHop(shi);
fromstep = selstep;
}
//
// Create the target step...
//
//
// Add the TableOutputMeta step...
//
String tostepname = BaseMessages.getString(PKG, "Spoon.Message.Note.WriteToTable", tablename);
TableOutputMeta toi = new TableOutputMeta();
toi.setDatabaseMeta(targetDBInfo);
toi.setTableName(tablename);
toi.setCommitSize(200);
toi.setTruncateTable(true);
String tostepid = registry.getPluginId(StepPluginType.class, toi);
StepMeta tostep = new StepMeta(tostepid, tostepname, toi);
tostep.setLocation(550, 100);
tostep.setDraw(true);
tostep.setDescription(BaseMessages.getString(PKG, "Spoon.Message.Note.WriteInformationToTableOnDB2", tablename, targetDBInfo.getDatabaseName()));
meta.addStep(tostep);
//
// Add a hop between the two steps...
//
TransHopMeta hi = new TransHopMeta(fromstep, tostep);
meta.addTransHop(hi);
// OK, if we're still here: overwrite the current transformation...
// Set a name on this generated transformation
//
String name = "Copy table from [" + sourceDBInfo.getName() + "] to [" + targetDBInfo.getName() + "]";
String transName = name;
int nr = 1;
if (spoon.delegates.trans.getTransformation(transName) != null) {
nr++;
transName = name + " " + nr;
}
meta.setName(transName);
spoon.delegates.trans.addTransGraph(meta);
spoon.refreshGraph();
spoon.refreshTree();
} finally {
sourceDB.disconnect();
}
} catch (Exception e) {
new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.Dialog.UnexpectedError.Title"), BaseMessages.getString(PKG, "Spoon.Dialog.UnexpectedError.Message"), new KettleException(e.getMessage(), e));
return false;
}
return true;
}
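A minimal sketch of the pipeline copyTable() builds, using only the Kettle calls that appear in the listing above; buildCopyTrans is a hypothetical helper name, and the note, reserved-word handling, and Spoon UI wiring are omitted for brevity.

import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.plugins.PluginRegistry;
import org.pentaho.di.core.plugins.StepPluginType;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.steps.tableinput.TableInputMeta;
import org.pentaho.di.trans.steps.tableoutput.TableOutputMeta;

public static TransMeta buildCopyTrans(DatabaseMeta source, DatabaseMeta target, String table) {
  TransMeta meta = new TransMeta();
  meta.addDatabase(source);
  meta.addDatabase(target);
  // Source step: read every row from the table.
  TableInputMeta tii = new TableInputMeta();
  tii.setDatabaseMeta(source);
  tii.setSQL("SELECT * FROM " + table);
  PluginRegistry registry = PluginRegistry.getInstance();
  StepMeta from = new StepMeta(registry.getPluginId(StepPluginType.class, tii), "read from [" + table + "]", tii);
  from.setLocation(150, 100);
  from.setDraw(true);
  meta.addStep(from);
  // Target step: truncate first, then write with a commit size of 200 rows.
  TableOutputMeta toi = new TableOutputMeta();
  toi.setDatabaseMeta(target);
  toi.setTableName(table);
  toi.setCommitSize(200);
  toi.setTruncateTable(true);
  StepMeta to = new StepMeta(registry.getPluginId(StepPluginType.class, toi), "write to [" + table + "]", toi);
  to.setLocation(550, 100);
  to.setDraw(true);
  meta.addStep(to);
  // Hop the rows from reader to writer.
  meta.addTransHop(new TransHopMeta(from, to));
  return meta;
}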
use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-kettle by pentaho.
the class SpoonJobDelegate method ripDB.
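ripDB() generates one table-copy transformation per selected table and chains them into a single job: for each table, an optional SQL job entry first runs the generated DDL against the target, then a transformation job entry copies the rows; the whole build runs inside a progress dialog so the user can cancel.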
public JobMeta ripDB(final List<DatabaseMeta> databases, final String jobname, final RepositoryDirectoryInterface repdir, final String directory, final DatabaseMeta sourceDbInfo, final DatabaseMeta targetDbInfo, final String[] tables) {
//
// Create a new job...
//
final JobMeta jobMeta = new JobMeta();
jobMeta.setDatabases(databases);
jobMeta.setFilename(null);
jobMeta.setName(jobname);
if (spoon.getRepository() != null) {
jobMeta.setRepositoryDirectory(repdir);
} else {
jobMeta.setFilename(Const.createFilename(directory, jobname, "." + Const.STRING_JOB_DEFAULT_EXT));
}
spoon.refreshTree();
spoon.refreshGraph();
final Point location = new Point(50, 50);
// The start entry...
final JobEntryCopy start = JobMeta.createStartEntry();
start.setLocation(new Point(location.x, location.y));
start.setDrawn();
jobMeta.addJobEntry(start);
// final Thread parentThread = Thread.currentThread();
// Create a dialog with a progress indicator!
IRunnableWithProgress op = monitor -> {
try {
// This is running in a new process: copy some KettleVariables
// info
// LocalVariables.getInstance().createKettleVariables(Thread.currentThread().getName(),
// parentThread.getName(), true);
monitor.beginTask(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.BuildingNewJob"), tables.length);
monitor.worked(0);
JobEntryCopy previous = start;
// Loop over the table-names...
for (int i = 0; i < tables.length && !monitor.isCanceled(); i++) {
monitor.setTaskName(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ProcessingTable") + tables[i] + "]...");
//
// Create the new transformation...
//
String transname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Transname1") + sourceDbInfo + "].[" + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Transname2") + targetDbInfo + "]";
TransMeta transMeta = new TransMeta();
if (repdir != null) {
transMeta.setRepositoryDirectory(repdir);
} else {
transMeta.setFilename(Const.createFilename(directory, transname, "." + Const.STRING_TRANS_DEFAULT_EXT));
}
// Add the source & target db
transMeta.addDatabase(sourceDbInfo);
transMeta.addDatabase(targetDbInfo);
//
// Add a note
//
String note = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note1") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note2") + sourceDbInfo + "]" + Const.CR;
note += BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note3") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.Note4") + targetDbInfo + "]";
NotePadMeta ni = new NotePadMeta(note, 150, 10, -1, -1);
transMeta.addNote(ni);
//
// Add the TableInputMeta step...
//
String fromstepname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Name") + tables[i] + "]";
TableInputMeta tii = new TableInputMeta();
tii.setDefault();
tii.setDatabaseMeta(sourceDbInfo);
// It's already quoted!
tii.setSQL("SELECT * FROM " + tables[i]);
String fromstepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, tii);
StepMeta fromstep = new StepMeta(fromstepid, fromstepname, tii);
fromstep.setLocation(150, 100);
fromstep.setDraw(true);
fromstep.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Description") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.FromStep.Description2") + sourceDbInfo + "]");
transMeta.addStep(fromstep);
//
// Add the TableOutputMeta step...
//
String tostepname = BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Name") + tables[i] + "]";
TableOutputMeta toi = new TableOutputMeta();
toi.setDatabaseMeta(targetDbInfo);
toi.setTableName(tables[i]);
toi.setCommitSize(100);
toi.setTruncateTable(true);
String tostepid = PluginRegistry.getInstance().getPluginId(StepPluginType.class, toi);
StepMeta tostep = new StepMeta(tostepid, tostepname, toi);
tostep.setLocation(500, 100);
tostep.setDraw(true);
tostep.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Description1") + tables[i] + BaseMessages.getString(PKG, "Spoon.RipDB.Monitor.ToStep.Description2") + targetDbInfo + "]");
transMeta.addStep(tostep);
//
// Add a hop between the two steps...
//
TransHopMeta hi = new TransHopMeta(fromstep, tostep);
transMeta.addTransHop(hi);
//
// Now we generate the SQL needed to run for this
// transformation.
//
// First set the limit to 1 to speed things up!
String tmpSql = tii.getSQL();
tii.setSQL(tii.getSQL() + sourceDbInfo.getLimitClause(1));
String sql;
try {
sql = transMeta.getSQLStatementsString();
} catch (KettleStepException kse) {
throw new InvocationTargetException(kse, BaseMessages.getString(PKG, "Spoon.RipDB.Exception.ErrorGettingSQLFromTransformation") + transMeta + "] : " + kse.getMessage());
}
// remove the limit
tii.setSQL(tmpSql);
//
// Now, save the transformation...
//
boolean ok;
if (spoon.getRepository() != null) {
ok = spoon.saveToRepository(transMeta, false);
} else {
ok = spoon.saveToFile(transMeta);
}
if (!ok) {
throw new InvocationTargetException(new Exception(BaseMessages.getString(PKG, "Spoon.RipDB.Exception.UnableToSaveTransformationToRepository")), BaseMessages.getString(PKG, "Spoon.RipDB.Exception.UnableToSaveTransformationToRepository"));
}
// We can now continue with the population of the job...
// //////////////////////////////////////////////////////////////////////
location.x = 250;
if (i > 0) {
location.y += 100;
}
//
if (!Utils.isEmpty(sql)) {
String jesqlname = BaseMessages.getString(PKG, "Spoon.RipDB.JobEntrySQL.Name") + tables[i] + "]";
JobEntrySQL jesql = new JobEntrySQL(jesqlname);
jesql.setDatabase(targetDbInfo);
jesql.setSQL(sql);
jesql.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.JobEntrySQL.Description") + targetDbInfo + "].[" + tables[i] + "]");
JobEntryCopy jecsql = new JobEntryCopy();
jecsql.setEntry(jesql);
jecsql.setLocation(new Point(location.x, location.y));
jecsql.setDrawn();
jobMeta.addJobEntry(jecsql);
// Add the hop too...
JobHopMeta jhi = new JobHopMeta(previous, jecsql);
jobMeta.addJobHop(jhi);
previous = jecsql;
}
//
// Add the jobentry for the transformation too...
//
String jetransname = BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Name") + tables[i] + "]";
JobEntryTrans jetrans = new JobEntryTrans(jetransname);
jetrans.setTransname(transMeta.getName());
if (spoon.getRepository() != null) {
jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.REPOSITORY_BY_NAME);
jetrans.setDirectory(transMeta.getRepositoryDirectory().getPath());
} else {
jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
jetrans.setFileName(Const.createFilename("${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}", transMeta.getName(), "." + Const.STRING_TRANS_DEFAULT_EXT));
}
JobEntryCopy jectrans = new JobEntryCopy(jetrans);
jectrans.setDescription(BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description1") + Const.CR + BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description2") + sourceDbInfo + "].[" + tables[i] + "]" + Const.CR + BaseMessages.getString(PKG, "Spoon.RipDB.JobEntryTrans.Description3") + targetDbInfo + "].[" + tables[i] + "]");
jectrans.setDrawn();
location.x += 400;
jectrans.setLocation(new Point(location.x, location.y));
jobMeta.addJobEntry(jectrans);
// Add a hop between the last 2 job entries.
JobHopMeta jhi2 = new JobHopMeta(previous, jectrans);
jobMeta.addJobHop(jhi2);
previous = jectrans;
monitor.worked(1);
}
monitor.worked(100);
monitor.done();
} catch (Exception e) {
new ErrorDialog(spoon.getShell(), "Error", "An unexpected error occurred!", e);
}
};
try {
ProgressMonitorDialog pmd = new ProgressMonitorDialog(spoon.getShell());
pmd.run(false, true, op);
} catch (InvocationTargetException | InterruptedException e) {
new ErrorDialog(spoon.getShell(), BaseMessages.getString(PKG, "Spoon.ErrorDialog.RipDB.ErrorRippingTheDatabase.Title"), BaseMessages.getString(PKG, "Spoon.ErrorDialog.RipDB.ErrorRippingTheDatabase.Message"), e);
return null;
} finally {
spoon.refreshGraph();
spoon.refreshTree();
}
return jobMeta;
}
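Stripped of naming, layout, and the repository branch, the per-table wiring inside the loop reduces to the sketch below. Here table stands in for tables[i], sql is the DDL returned by transMeta.getSQLStatementsString(), start comes from JobMeta.createStartEntry(), and the entry names are illustrative; it assumes the file-based (non-repository) case.

// Optional DDL entry, then the copy transformation, chained by hops.
JobEntryCopy previous = start;
if (!Utils.isEmpty(sql)) {
  JobEntrySQL jesql = new JobEntrySQL("create [" + table + "]"); // illustrative name
  jesql.setDatabase(targetDbInfo);
  jesql.setSQL(sql);
  JobEntryCopy jecsql = new JobEntryCopy();
  jecsql.setEntry(jesql);
  jecsql.setDrawn();
  jobMeta.addJobEntry(jecsql);
  jobMeta.addJobHop(new JobHopMeta(previous, jecsql)); // run the DDL first
  previous = jecsql;
}
JobEntryTrans jetrans = new JobEntryTrans("copy [" + table + "]"); // illustrative name
jetrans.setTransname(transMeta.getName());
jetrans.setSpecificationMethod(ObjectLocationSpecificationMethod.FILENAME);
jetrans.setFileName(Const.createFilename("${" + Const.INTERNAL_VARIABLE_JOB_FILENAME_DIRECTORY + "}", transMeta.getName(), "." + Const.STRING_TRANS_DEFAULT_EXT));
JobEntryCopy jectrans = new JobEntryCopy(jetrans);
jectrans.setDrawn();
jobMeta.addJobEntry(jectrans);
jobMeta.addJobHop(new JobHopMeta(previous, jectrans)); // then copy the rows
previous = jectrans;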
use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-kettle by pentaho.
the class TableAgileMart method adjustSchema.
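adjustSchema() asks the step metadata for the DDL needed to bring the target table in line with the rows arriving from the previous step; if any SQL comes back, it is executed against the live connection and the database cache is cleared so stale table layouts are not reused.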
@Override
public boolean adjustSchema() {
TableOutputMeta meta = getMeta();
TableOutputData data = getData();
TransMeta transMeta = getTransMeta();
StepMeta stepMeta = meta.getParentStepMeta();
DBCache dbcache = transMeta.getDbCache();
StringBuilder messageBuffer = new StringBuilder();
try {
RowMetaInterface prev = transMeta.getPrevStepFields(stepMeta.getName());
if (log.isDetailed()) {
logDetailed("Attempting to auto adjust table structure");
}
if (log.isDetailed()) {
logDetailed("getTransMeta: " + getTransMeta());
}
if (log.isDetailed()) {
logDetailed("getStepname: " + getStepname());
}
SQLStatement statement = meta.getSQLStatements(transMeta, stepMeta, prev, repository, metaStore);
if (log.isDetailed()) {
logDetailed("Statement: " + statement);
}
if (log.isDetailed() && statement != null) {
logDetailed("Statement has SQL: " + statement.hasSQL());
}
if (statement != null && statement.hasSQL()) {
String sql = statement.getSQL();
if (log.isDetailed()) {
logDetailed("Trying: " + sql);
}
try {
log.logDetailed("Executing SQL: " + Const.CR + sql);
data.db.execStatement(sql);
// Clear the database cache, in case we're using one...
if (dbcache != null) {
dbcache.clear(data.databaseMeta.getName());
}
} catch (Exception dbe) {
String error = BaseMessages.getString(PKG, "SQLEditor.Log.SQLExecError", sql, dbe.toString());
messageBuffer.append(error).append(Const.CR);
return false;
}
if (log.isDetailed()) {
logDetailed("Successfull: " + sql);
}
} else if (statement == null || statement.getError() == null) {
// there were no changes to be made
return true;
} else {
this.message = statement.getError();
logError(statement.getError());
return false;
}
} catch (Exception e) {
logError("An error ocurred trying to adjust the table schema", e);
}
return true;
}
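The essential pattern, extracted from the method above as a standalone sketch (same API calls as the listing; logging and error handling omitted):

// Ask the step metadata which DDL would reconcile the target table
// with the incoming row layout (e.g. ALTER TABLE ... ADD COLUMN ...).
RowMetaInterface prev = transMeta.getPrevStepFields(stepMeta.getName());
SQLStatement statement = meta.getSQLStatements(transMeta, stepMeta, prev, repository, metaStore);
if (statement != null && statement.hasSQL()) {
  // Execute the generated DDL on the step's live connection.
  data.db.execStatement(statement.getSQL());
  // Invalidate cached table layouts for this connection, if a cache is in use.
  if (dbcache != null) {
    dbcache.clear(data.databaseMeta.getName());
  }
}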
use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-metaverse by pentaho.
the class MetaverseValidationIT method testTableOutputStepNode.
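This integration test checks that the metaverse graph node built for a Table Output step mirrors the step metadata: the target table, schema, and truncate flag; the expected relationship between input and output stream fields; and the connection details recorded on the datasource node.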
@Test
public void testTableOutputStepNode() throws Exception {
// this tests a specific step in a specific transform
TableOutputStepNode node = root.getTableOutputStepNode();
// check the table that it writes to
TableOutputMeta meta = (TableOutputMeta) getStepMeta(node);
String tableName = meta.getTableName();
String schema = meta.getSchemaName();
boolean truncateTable = meta.truncateTable();
DatabaseTableNode databaseTableNode = node.getDatabaseTable();
assertEquals(tableName, databaseTableNode.getName());
assertEquals(schema, node.getSchema());
assertEquals(truncateTable, node.isTruncateTable());
Iterable<StreamFieldNode> inputs = node.getInputStreamFields();
Iterable<StreamFieldNode> outputs = node.getOutputStreamFields();
assertEquals(getIterableSize(inputs) + meta.getFieldDatabase().length, getIterableSize(outputs));
for (StreamFieldNode input : inputs) {
assertEquals(input.getName(), input.getFieldPopulatedByMe().getName());
}
DatasourceNode datasource = node.getDatasource(meta.getDatabaseMeta().getName());
assertEquals(meta.getDatabaseMeta().getHostname(), datasource.getHost());
assertEquals(meta.getDatabaseMeta().getDatabasePortNumberString(), datasource.getPort());
assertEquals(meta.getDatabaseMeta().getUsername(), datasource.getUserName());
assertEquals(meta.getDatabaseMeta().getDatabaseName(), datasource.getDatabaseName());
assertEquals(DictionaryConst.NODE_TYPE_DATASOURCE, datasource.getType());
assertEquals(meta.getTableName(), databaseTableNode.getName());
assertEquals(DictionaryConst.NODE_TYPE_DATA_TABLE, databaseTableNode.getType());
}
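The test relies on a getIterableSize helper that is not part of this excerpt; a plausible implementation, assuming it simply counts elements, would be:

private int getIterableSize(Iterable<?> iterable) {
  int count = 0;
  for (Object ignored : iterable) {
    count++;
  }
  return count;
}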
use of org.pentaho.di.trans.steps.tableoutput.TableOutputMeta in project pentaho-metaverse by pentaho.
the class TableOutputExternalResourceConsumerTest method testGetResourcesFromMeta_static.
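This unit test stubs a TableOutputMeta backed by a mocked JNDI PostgreSQL connection and asserts that the consumer reports exactly one external resource, named after the connection and carrying the (environment-substituted) table and schema as attributes.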
@Test
public void testGetResourcesFromMeta_static() throws Exception {
TableOutputMeta meta = mock(TableOutputMeta.class);
DatabaseMeta dbMeta = mock(DatabaseMeta.class);
DatabaseInterface dbi = mock(DatabaseInterface.class);
when(meta.getDatabaseMeta()).thenReturn(dbMeta);
when(meta.getTableName()).thenReturn("tableName");
when(meta.getSchemaName()).thenReturn("schemaName");
when(meta.getParentStepMeta()).thenReturn(parentStepMeta);
when(parentStepMeta.getParentTransMeta()).thenReturn(parentTransMeta);
when(parentTransMeta.environmentSubstitute("tableName")).thenReturn("tableName");
when(parentTransMeta.environmentSubstitute("schemaName")).thenReturn("schemaName");
when(dbMeta.getAccessTypeDesc()).thenReturn("JNDI");
when(dbMeta.getName()).thenReturn("TestConnection");
when(dbMeta.getDescription()).thenReturn("my conn description");
when(dbMeta.getDatabaseInterface()).thenReturn(dbi);
when(dbi.getPluginId()).thenReturn("POSTGRESQL");
Collection<IExternalResourceInfo> resources = consumer.getResourcesFromMeta(meta, new AnalysisContext(DictionaryConst.CONTEXT_STATIC));
assertEquals(1, resources.size());
IExternalResourceInfo res = resources.iterator().next();
assertEquals("TestConnection", res.getName());
assertEquals("tableName", res.getAttributes().get(DictionaryConst.PROPERTY_TABLE));
assertEquals("schemaName", res.getAttributes().get(DictionaryConst.PROPERTY_SCHEMA));
}
}
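Worth noting as a design choice: every collaborator here (DatabaseMeta, DatabaseInterface, the parent step and transformation metas) is a Mockito mock, so the test needs no live connection and exercises only the consumer's mapping from step metadata to IExternalResourceInfo attributes.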