Example 66 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class PGBulkLoaderMeta method getSQLStatements.

public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore) throws KettleStepException {
    // default: nothing to do!
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null);
    if (databaseMeta != null) {
        if (prev != null && prev.size() > 0) {
            // Copy the row
            RowMetaInterface tableFields = new RowMeta();
            // Now change the field names
            for (int i = 0; i < fieldTable.length; i++) {
                ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
                if (v != null) {
                    ValueMetaInterface tableField = v.clone();
                    tableField.setName(fieldTable[i]);
                    tableFields.addValueMeta(tableField);
                } else {
                    throw new KettleStepException("Unable to find field [" + fieldStream[i] + "] in the input rows");
                }
            }
            if (!Utils.isEmpty(tableName)) {
                Database db = new Database(loggingObject, databaseMeta);
                db.shareVariablesWith(transMeta);
                try {
                    db.connect();
                    String schemaTable = databaseMeta.getQuotedSchemaTableCombination(transMeta.environmentSubstitute(schemaName), transMeta.environmentSubstitute(tableName));
                    String sql = db.getDDL(schemaTable, tableFields, null, false, null, true);
                    if (sql.length() == 0) {
                        retval.setSQL(null);
                    } else {
                        retval.setSQL(sql);
                    }
                } catch (KettleException e) {
                    retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.ErrorOccurred") + e.getMessage());
                }
            } else {
                retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection"));
            }
        } else {
            retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.NotReceivingAnyFields"));
        }
    } else {
        retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.NoConnectionDefined"));
    }
    return retval;
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) KettleStepException(org.pentaho.di.core.exception.KettleStepException) RowMeta(org.pentaho.di.core.row.RowMeta) Database(org.pentaho.di.core.database.Database) RowMetaInterface(org.pentaho.di.core.row.RowMetaInterface) SQLStatement(org.pentaho.di.core.SQLStatement) ValueMetaInterface(org.pentaho.di.core.row.ValueMetaInterface)
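
For orientation, the DDL-generation pattern above can be reduced to a small standalone helper. The sketch below is not part of PGBulkLoaderMeta; it only reuses the calls already shown (connect, getQuotedSchemaTableCombination, getDDL, disconnect). buildDdl is a hypothetical name and any import paths not listed in the example are assumptions.

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.row.RowMetaInterface;

// Hypothetical helper: returns the DDL needed to create/alter schema.table so it
// matches the given field layout, or null when no statement is required.
public static String buildDdl(LoggingObjectInterface parent, DatabaseMeta databaseMeta,
        String schema, String table, RowMetaInterface tableFields) throws KettleException {
    Database db = new Database(parent, databaseMeta);
    try {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schema, table);
        // Same arguments as in getSQLStatements() above.
        String sql = db.getDDL(schemaTable, tableFields, null, false, null, true);
        return (sql == null || sql.length() == 0) ? null : sql;
    } finally {
        // Release the connection explicitly once the DDL has been generated.
        db.disconnect();
    }
}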

Example 67 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class PGBulkLoaderMeta method getRequiredFields.

public RowMetaInterface getRequiredFields(VariableSpace space) throws KettleException {
    String realTableName = space.environmentSubstitute(tableName);
    String realSchemaName = space.environmentSubstitute(schemaName);
    if (databaseMeta != null) {
        Database db = new Database(loggingObject, databaseMeta);
        try {
            db.connect();
            if (!Utils.isEmpty(realTableName)) {
                String schemaTable = databaseMeta.getQuotedSchemaTableCombination(realSchemaName, realTableName);
                // Check if this table exists...
                if (db.checkTableExists(schemaTable)) {
                    return db.getTableFields(schemaTable);
                } else {
                    throw new KettleException(BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.TableNotFound"));
                }
            } else {
                throw new KettleException(BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.TableNotSpecified"));
            }
        } catch (Exception e) {
            throw new KettleException(BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.ErrorGettingFields"), e);
        } finally {
            db.disconnect();
        }
    } else {
        throw new KettleException(BaseMessages.getString(PKG, "GPBulkLoaderMeta.Exception.ConnectionNotDefined"));
    }
}
Also used : KettleException(org.pentaho.di.core.exception.KettleException) Database(org.pentaho.di.core.database.Database) KettleException(org.pentaho.di.core.exception.KettleException) KettleXMLException(org.pentaho.di.core.exception.KettleXMLException) KettleStepException(org.pentaho.di.core.exception.KettleStepException)
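
The connect / checkTableExists / getTableFields sequence in getRequiredFields() can likewise be sketched in isolation. Again, this is only an illustration of the Database calls shown above, not project code; readTableLayout is a hypothetical name and the import paths beyond those listed are assumptions.

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.logging.LoggingObjectInterface;
import org.pentaho.di.core.row.RowMetaInterface;

// Hypothetical helper: returns the field layout of schema.table, failing fast when the table is missing.
public static RowMetaInterface readTableLayout(LoggingObjectInterface parent, DatabaseMeta databaseMeta,
        String schema, String table) throws KettleException {
    Database db = new Database(parent, databaseMeta);
    try {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schema, table);
        if (!db.checkTableExists(schemaTable)) {
            throw new KettleException("Table " + schemaTable + " was not found");
        }
        return db.getTableFields(schemaTable);
    } finally {
        // Mirror the finally block in getRequiredFields(): always disconnect.
        db.disconnect();
    }
}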

Example 68 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class CombinationLookupMetaTest method testProvidesModelerMeta.

@Test
public void testProvidesModelerMeta() throws Exception {
    final RowMeta rowMeta = Mockito.mock(RowMeta.class);
    final CombinationLookupMeta combinationLookupMeta = new CombinationLookupMeta() {

        @Override
        Database createDatabaseObject() {
            return Mockito.mock(Database.class);
        }

        @Override
        protected RowMetaInterface getDatabaseTableFields(Database db, String schemaName, String tableName) throws KettleDatabaseException {
            assertEquals("aSchema", schemaName);
            assertEquals("aDimTable", tableName);
            return rowMeta;
        }
    };
    combinationLookupMeta.setKeyLookup(new String[] { "f1", "f2", "f3" });
    combinationLookupMeta.setKeyField(new String[] { "s4", "s5", "s6" });
    combinationLookupMeta.setSchemaName("aSchema");
    combinationLookupMeta.setTablename("aDimTable");
    final CombinationLookupData dimensionLookupData = new CombinationLookupData();
    assertEquals(rowMeta, combinationLookupMeta.getRowMeta(dimensionLookupData));
    assertEquals(3, combinationLookupMeta.getDatabaseFields().size());
    assertEquals("f1", combinationLookupMeta.getDatabaseFields().get(0));
    assertEquals("f2", combinationLookupMeta.getDatabaseFields().get(1));
    assertEquals("f3", combinationLookupMeta.getDatabaseFields().get(2));
    assertEquals(3, combinationLookupMeta.getStreamFields().size());
    assertEquals("s4", combinationLookupMeta.getStreamFields().get(0));
    assertEquals("s5", combinationLookupMeta.getStreamFields().get(1));
    assertEquals("s6", combinationLookupMeta.getStreamFields().get(2));
}
Also used : RowMeta(org.pentaho.di.core.row.RowMeta) Database(org.pentaho.di.core.database.Database) Test(org.junit.Test)
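
A detail worth noting in this test: CombinationLookupMeta exposes createDatabaseObject() and getDatabaseTableFields() as overridable seams, so the anonymous subclass can return a Mockito mock and a canned RowMeta instead of opening a real connection. That keeps the assertions focused on how the meta class maps key and stream fields rather than on database access.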

Example 69 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class JobEntryMysqlBulkFile method execute.

public Result execute(Result previousResult, int nr) {
    String LimitNbrLignes = "";
    String ListOfColumn = "*";
    String strHighPriority = "";
    String OutDumpText = "";
    String OptionEnclosed = "";
    String FieldSeparator = "";
    String LinesTerminated = "";
    Result result = previousResult;
    result.setResult(false);
    // Let's check the filename ...
    if (filename != null) {
        // The user has specified a file, we can continue ...
        String realFilename = getRealFilename();
        File file = new File(realFilename);
        if (file.exists() && iffileexists == 2) {
            // The file exists and the user wants to fail
            result.setResult(false);
            result.setNrErrors(1);
            logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
        } else if (file.exists() && iffileexists == 1) {
            // The file exists and the user wants to do nothing
            result.setResult(true);
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
            }
        } else {
            if (file.exists() && iffileexists == 0) {
                // The file exists and the user wants to rename it with a unique name
                // Format Date
                // Try to clean filename (without wildcard)
                String wildcard = realFilename.substring(realFilename.length() - 4, realFilename.length());
                if (wildcard.substring(0, 1).equals(".")) {
                    // Found the wildcard (file extension)
                    realFilename = realFilename.substring(0, realFilename.length() - 4) + "_" + StringUtil.getFormattedDateTimeNow(true) + wildcard;
                } else {
                    // did not find wildcard
                    realFilename = realFilename + "_" + StringUtil.getFormattedDateTimeNow(true);
                }
                logDebug(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileNameChange1.Label"));
            }
            // The user has specified an existing file, we can continue ...
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists1.Label") + realFilename + BaseMessages.getString(PKG, "JobMysqlBulkFile.FileExists2.Label"));
            }
            if (connection != null) {
                // The user has specified a connection, we can continue ...
                Database db = new Database(this, connection);
                db.shareVariablesWith(this);
                try {
                    db.connect(parentJob.getTransactionId(), null);
                    // Get schemaname
                    String realSchemaname = environmentSubstitute(schemaname);
                    // Get tablename
                    String realTablename = environmentSubstitute(tablename);
                    if (db.checkTableExists(realTablename)) {
                        // The table exists, we can continue ...
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableExists2.Label"));
                        }
                        // Add the schema name (most of the time Schemaname.Tablename)
                        if (schemaname != null) {
                            realTablename = realSchemaname + "." + realTablename;
                        }
                        // Set the Limit lines
                        if (Const.toInt(getRealLimitlines(), 0) > 0) {
                            LimitNbrLignes = "LIMIT " + getRealLimitlines();
                        }
                        // Set list of Column, if null get all columns (*)
                        if (getRealListColumn() != null) {
                            ListOfColumn = MysqlString(getRealListColumn());
                        }
                        // Fields separator
                        if (getRealSeparator() != null && outdumpvalue == 0) {
                            FieldSeparator = "FIELDS TERMINATED BY '" + Const.replace(getRealSeparator(), "'", "''") + "'";
                        }
                        // Lines Terminated by
                        if (getRealLineterminated() != null && outdumpvalue == 0) {
                            LinesTerminated = "LINES TERMINATED BY '" + Const.replace(getRealLineterminated(), "'", "''") + "'";
                        }
                        // High Priority ?
                        if (isHighPriority()) {
                            strHighPriority = "HIGH_PRIORITY";
                        }
                        if (getRealEnclosed() != null && outdumpvalue == 0) {
                            if (isOptionEnclosed()) {
                                OptionEnclosed = "OPTIONALLY ";
                            }
                            OptionEnclosed = OptionEnclosed + "ENCLOSED BY '" + Const.replace(getRealEnclosed(), "'", "''") + "'";
                        }
                        // OutFile or Dumpfile
                        if (outdumpvalue == 0) {
                            OutDumpText = "INTO OUTFILE";
                        } else {
                            OutDumpText = "INTO DUMPFILE";
                        }
                        String FILEBulkFile = "SELECT " + strHighPriority + " " + ListOfColumn + " " + OutDumpText + " '" + realFilename + "' " + FieldSeparator + " " + OptionEnclosed + " " + LinesTerminated + " FROM " + realTablename + " " + LimitNbrLignes + " LOCK IN SHARE MODE";
                        try {
                            if (log.isDetailed()) {
                                logDetailed(FILEBulkFile);
                            }
                            // Run the SQL
                            PreparedStatement ps = db.prepareSQL(FILEBulkFile);
                            ps.execute();
                            // Everything is OK...we can disconnect now
                            db.disconnect();
                            if (isAddFileToResult()) {
                                // Add filename to output files
                                ResultFile resultFile = new ResultFile(ResultFile.FILE_TYPE_GENERAL, KettleVFS.getFileObject(realFilename, this), parentJob.getJobname(), toString());
                                result.getResultFiles().put(resultFile.getFile().toString(), resultFile);
                            }
                            result.setResult(true);
                        } catch (SQLException je) {
                            db.disconnect();
                            result.setNrErrors(1);
                            logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + je.getMessage());
                        } catch (KettleFileException e) {
                            logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + e.getMessage());
                            result.setNrErrors(1);
                        }
                    } else {
                        // Of course, the table should have been created already before the bulk load operation
                        db.disconnect();
                        result.setNrErrors(1);
                        if (log.isDetailed()) {
                            logDetailed(BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists1.Label") + realTablename + BaseMessages.getString(PKG, "JobMysqlBulkFile.TableNotExists2.Label"));
                        }
                    }
                } catch (KettleDatabaseException dbe) {
                    db.disconnect();
                    result.setNrErrors(1);
                    logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Error.Label") + " " + dbe.getMessage());
                }
            } else {
                // No database connection is defined
                result.setNrErrors(1);
                logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nodatabase.Label"));
            }
        }
    } else {
        // No file was specified
        result.setNrErrors(1);
        logError(BaseMessages.getString(PKG, "JobMysqlBulkFile.Nofilename.Label"));
    }
    return result;
}
Also used : KettleFileException(org.pentaho.di.core.exception.KettleFileException) SQLException(java.sql.SQLException) KettleDatabaseException(org.pentaho.di.core.exception.KettleDatabaseException) Database(org.pentaho.di.core.database.Database) PreparedStatement(java.sql.PreparedStatement) ResultFile(org.pentaho.di.core.ResultFile) File(java.io.File) ResultFile(org.pentaho.di.core.ResultFile) Result(org.pentaho.di.core.Result)
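
The statement assembled in execute() is a MySQL SELECT ... INTO OUTFILE / INTO DUMPFILE export. As a trimmed illustration only (exportTable is a hypothetical name; the table, file, and separators are made up, since the real values come from the job entry's settings), the core of the export step looks roughly like this, assuming an already-connected Database:

import java.sql.PreparedStatement;
import java.sql.SQLException;

import org.pentaho.di.core.database.Database;
import org.pentaho.di.core.exception.KettleDatabaseException;

// Sketch of the export step: build the bulk-export statement and run it through the Kettle connection.
public static void exportTable(Database db) throws KettleDatabaseException, SQLException {
    // Illustrative statement mirroring the one built in execute().
    String sql = "SELECT * INTO OUTFILE '/tmp/customers.txt' "
        + "FIELDS TERMINATED BY ';' LINES TERMINATED BY '\\n' "
        + "FROM customers LIMIT 1000 LOCK IN SHARE MODE";
    PreparedStatement ps = db.prepareSQL(sql);
    try {
        ps.execute();
    } finally {
        ps.close();
    }
}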

Example 70 with Database

use of org.pentaho.di.core.database.Database in project pentaho-kettle by pentaho.

the class SQLFileOutput method init.

public boolean init(StepMetaInterface smi, StepDataInterface sdi) {
    meta = (SQLFileOutputMeta) smi;
    data = (SQLFileOutputData) sdi;
    if (super.init(smi, sdi)) {
        try {
            if (meta.getDatabaseMeta() == null) {
                throw new KettleStepException("The connection is not defined (empty)");
            }
            // Note: this second check is unreachable; the same null condition already threw above.
            if (meta.getDatabaseMeta() == null) {
                logError(BaseMessages.getString(PKG, "SQLFileOutput.Init.ConnectionMissing", getStepname()));
                return false;
            }
            data.db = new Database(this, meta.getDatabaseMeta());
            data.db.shareVariablesWith(this);
            logBasic("Connected to database [" + meta.getDatabaseMeta() + "]");
            if (meta.isCreateParentFolder()) {
                // Check for parent folder
                FileObject parentfolder = null;
                try {
                    // Get parent folder
                    String filename = environmentSubstitute(meta.getFileName());
                    parentfolder = KettleVFS.getFileObject(filename, getTransMeta()).getParent();
                    if (!parentfolder.exists()) {
                        log.logBasic("Folder parent", "Folder parent " + parentfolder.getName() + " does not exist !");
                        parentfolder.createFolder();
                        log.logBasic("Folder parent", "Folder parent was created.");
                    }
                } catch (Exception e) {
                    logError("Couldn't created parent folder " + parentfolder.getName());
                    setErrors(1L);
                    stopAll();
                } finally {
                    if (parentfolder != null) {
                        try {
                            parentfolder.close();
                        } catch (Exception ex) {
                        /* Ignore */
                        }
                    }
                }
            }
            if (!meta.isDoNotOpenNewFileInit()) {
                if (!openNewFile()) {
                    logError("Couldn't open file [" + buildFilename() + "]");
                    setErrors(1L);
                    stopAll();
                }
            }
            tableName = environmentSubstitute(meta.getTablename());
            schemaName = environmentSubstitute(meta.getSchemaName());
            if (Utils.isEmpty(tableName)) {
                throw new KettleStepException("The tablename is not defined (empty)");
            }
            schemaTable = data.db.getDatabaseMeta().getQuotedSchemaTableCombination(schemaName, tableName);
        } catch (Exception e) {
            logError("An error occurred intialising this step: " + e.getMessage());
            stopAll();
            setErrors(1);
        }
        return true;
    }
    return false;
}
Also used : KettleStepException(org.pentaho.di.core.exception.KettleStepException) Database(org.pentaho.di.core.database.Database) FileObject(org.apache.commons.vfs2.FileObject) KettleException(org.pentaho.di.core.exception.KettleException) KettleStepException(org.pentaho.di.core.exception.KettleStepException)

Aggregations

Database (org.pentaho.di.core.database.Database) 238
KettleException (org.pentaho.di.core.exception.KettleException) 135
DatabaseMeta (org.pentaho.di.core.database.DatabaseMeta) 90
RowMetaInterface (org.pentaho.di.core.row.RowMetaInterface) 82
KettleDatabaseException (org.pentaho.di.core.exception.KettleDatabaseException) 62
ValueMetaInterface (org.pentaho.di.core.row.ValueMetaInterface) 46
ErrorDialog (org.pentaho.di.ui.core.dialog.ErrorDialog) 32
KettleStepException (org.pentaho.di.core.exception.KettleStepException) 30
MessageBox (org.eclipse.swt.widgets.MessageBox) 28
CheckResult (org.pentaho.di.core.CheckResult) 25
ValueMetaString (org.pentaho.di.core.row.value.ValueMetaString) 25
KettleXMLException (org.pentaho.di.core.exception.KettleXMLException) 24
RowMeta (org.pentaho.di.core.row.RowMeta) 22
SQLStatement (org.pentaho.di.core.SQLStatement) 21
EnterSelectionDialog (org.pentaho.di.ui.core.dialog.EnterSelectionDialog) 21
Test (org.junit.Test) 20
ArrayList (java.util.ArrayList) 18
KettleValueException (org.pentaho.di.core.exception.KettleValueException) 17
RowMetaAndData (org.pentaho.di.core.RowMetaAndData) 16
ColumnInfo (org.pentaho.di.ui.core.widget.ColumnInfo) 15