Use of org.knime.core.data.blob.BinaryObjectCellFactory in project knime-core by KNIME:
class DatabaseReaderConnection, method createTable.
/**
 * Called from the database port to read the first n-number of rows.
 *
 * @param cachedNoRows number of rows cached for data preview; a negative value reads all rows
 *            using the globally configured fetch size
 * @param cp {@link CredentialsProvider} providing user/password
 * @return buffered data table read from database
 * @throws SQLException if the connection could not be opened
 */
DataTable createTable(final int cachedNoRows, final CredentialsProvider cp) throws SQLException {
    if (m_blobFactory == null) {
        m_blobFactory = new BinaryObjectCellFactory();
    }
    return m_conn.execute(cp, conn -> {
        // remember auto-commit flag so it can be restored after reading
        final boolean autoCommit = conn.getAutoCommit();
        final Statement stmt = initStatement(cp, conn);
        try {
            final String[] oQueries = m_conn.getQuery().split(SQL_QUERY_SEPARATOR);
            if (cachedNoRows < 0) {
                // read everything: propagate the configured fetch size, -1 meaning "driver default"
                final int fetchsize =
                    (DatabaseConnectionSettings.FETCH_SIZE != null) ? DatabaseConnectionSettings.FETCH_SIZE : -1;
                m_conn.getUtility().getStatementManipulator().setFetchSize(stmt, fetchsize);
            } else {
                // preview mode: wrap the last statement in a sub-select so the row limit
                // applies to the final result; alias is unique per reader instance
                final int hashAlias = System.identityHashCode(this);
                final int selectIdx = oQueries.length - 1;
                oQueries[selectIdx] = "SELECT * FROM (" + oQueries[selectIdx] + ") table_" + hashAlias;
                try {
                    // bugfix 2925: may fail, e.g. on sqlite -- treat as best-effort
                    stmt.setMaxRows(cachedNoRows);
                } catch (SQLException ex) {
                    Throwable cause = ExceptionUtils.getRootCause(ex);
                    if (cause == null) {
                        cause = ex;
                    }
                    LOGGER.warn("Can't set max rows on statement, reason: " + cause.getMessage(), ex);
                }
            }
            // execute all except the last query (e.g. setup/DDL statements)
            for (int i = 0; i < oQueries.length - 1; i++) {
                LOGGER.debug("Executing SQL statement as execute: " + oQueries[i]);
                stmt.execute(oQueries[i]);
            }
            final String lastQuery = oQueries[oQueries.length - 1];
            LOGGER.debug("Executing SQL statement as executeQuery: " + lastQuery);
            final ResultSet result = stmt.executeQuery(lastQuery);
            LOGGER.debug("Reading meta data from database ResultSet...");
            m_spec = createTableSpec(result.getMetaData());
            LOGGER.debug("Parsing database ResultSet...");
            final DBRowIterator it = new DBRowIterator(result, true);
            final DataContainer buf = new DataContainer(m_spec);
            while (it.hasNext()) {
                buf.addRowToTable(it.next());
            }
            buf.close();
            return buf.getTable();
        } finally {
            // stmt is a final local assigned before the try and can never be null here,
            // so the former null check was dead code. Commit pending work (if the
            // connection is not auto-committing) before restoring the original flag.
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Use of org.knime.core.data.blob.BinaryObjectCellFactory in project knime-core by KNIME:
class DBReaderImpl, method createRowIteratorConnection.
/**
 * Read data from database using a {@link RowIterator}.
 *
 * @param conn open database {@link Connection} to read from; on success, ownership passes to
 *            the returned object
 * @param exec used for progress info
 * @param cp {@link CredentialsProvider} providing user/password
 * @param useDbRowId <code>true</code> if the row id returned by the database should be used to generate the
 *            KNIME row id
 * @return an object that represents the open database connection. The individual entries are accessible by means
 *         of a {@link RowIterator}.
 * @throws SQLException if the connection could not be opened
 */
private DBRowIterator createRowIteratorConnection(final Connection conn, final ExecutionContext exec,
    final CredentialsProvider cp, final boolean useDbRowId) throws SQLException {
    if (m_blobFactory == null) {
        m_blobFactory = new BinaryObjectCellFactory(exec);
    }
    exec.setMessage("Start reading rows from database...");
    final Statement stmt = initStatement(cp, conn);
    try {
        // propagate the configured fetch size, -1 meaning "driver default"
        final int fetchsize =
            (DatabaseConnectionSettings.FETCH_SIZE != null) ? DatabaseConnectionSettings.FETCH_SIZE : -1;
        final DatabaseQueryConnectionSettings dbConn = getQueryConnection();
        dbConn.getUtility().getStatementManipulator().setFetchSize(stmt, fetchsize);
        final String[] oQueries = dbConn.getQuery().split(SQL_QUERY_SEPARATOR);
        // execute all except the last query (e.g. setup/DDL statements)
        for (int i = 0; i < oQueries.length - 1; i++) {
            LOGGER.debug("Executing SQL statement as execute: " + oQueries[i]);
            stmt.execute(oQueries[i]);
        }
        final String selectQuery = oQueries[oQueries.length - 1];
        LOGGER.debug("Executing SQL statement as executeQuery: " + selectQuery);
        final ResultSet result = stmt.executeQuery(selectQuery);
        LOGGER.debug("Reading meta data from database ResultSet...");
        m_spec = createTableSpec(result.getMetaData());
        LOGGER.debug("Parsing database ResultSet...");
        final RowIterator iterator = createDBRowIterator(m_spec, dbConn, m_blobFactory, useDbRowId, result);
        // the returned object takes ownership of conn, stmt and the open result set
        return new RowIteratorConnection(conn, stmt, m_spec, iterator);
    } catch (SQLException | RuntimeException ex) {
        // close the statement on failure -- on success it is owned by the returned object
        try {
            stmt.close();
        } catch (SQLException closeEx) {
            ex.addSuppressed(closeEx);
        }
        throw ex;
    }
}
Use of org.knime.core.data.blob.BinaryObjectCellFactory in project knime-core by KNIME:
class DataCellToJavaConversionTest, method testBinaryObject.
/**
 * Test BinaryObjectDataCell -> InputStream conversion.
 *
 * @throws Exception When something went wrong
 */
@Test
public void testBinaryObject() throws Exception {
    /* retrieve converter from DataCellToJavaConverterRegistry */
    final Optional<? extends DataCellToJavaConverterFactory<? extends DataValue, InputStream>> factory =
        DataCellToJavaConverterRegistry.getInstance()
            .getConverterFactories(BinaryObjectDataCell.TYPE, InputStream.class).stream().findFirst();
    assertTrue(factory.isPresent());
    final DataCellToJavaConverter<DataCell, InputStream> converter =
        (DataCellToJavaConverter<DataCell, InputStream>)factory.get().create();
    assertNotNull(converter);
    final BinaryObjectCellFactory cellFactory = new BinaryObjectCellFactory();
    /* convert a BinaryObjectDataCell and verify the stream yields the original bytes;
     * try-with-resources guarantees the stream is closed even if an assertion fails */
    try (InputStream stream = converter.convert(cellFactory.create(new byte[]{4, 2}))) {
        assertNotNull(stream);
        // JUnit convention: expected value first, actual value second
        assertEquals(4, stream.read());
        assertEquals(2, stream.read());
    }
}
Use of org.knime.core.data.blob.BinaryObjectCellFactory in project knime-core by KNIME:
class DatabaseReaderConnection, method createRowIteratorConnection.
/**
 * Read data from database using a {@link RowIterator}.
 *
 * @param exec used for progress info
 * @param cp {@link CredentialsProvider} providing user/password
 * @param useDbRowId <code>true</code> if the row id returned by the database should be used to generate the
 *            KNIME row id
 * @return an object that represents the open database connection. The individual entries are accessible by means
 *         of a {@link RowIterator}.
 * @throws SQLException if the connection could not be opened
 * @since 3.1
 */
public RowIteratorConnection createRowIteratorConnection(final ExecutionContext exec,
    final CredentialsProvider cp, final boolean useDbRowId) throws SQLException {
    if (m_blobFactory == null) {
        m_blobFactory = new BinaryObjectCellFactory(exec);
    }
    return m_conn.execute(cp, conn -> {
        exec.setMessage("Start reading rows from database...");
        // remember auto-commit flag; it is handed to the RowIteratorConnection below
        final boolean autoCommit = conn.getAutoCommit();
        final Statement stmt = initStatement(cp, conn);
        try {
            // propagate the configured fetch size, -1 meaning "driver default"
            final int fetchsize =
                (DatabaseConnectionSettings.FETCH_SIZE != null) ? DatabaseConnectionSettings.FETCH_SIZE : -1;
            m_conn.getUtility().getStatementManipulator().setFetchSize(stmt, fetchsize);
            final String[] oQueries = m_conn.getQuery().split(SQL_QUERY_SEPARATOR);
            // execute all except the last query (e.g. setup/DDL statements)
            for (int i = 0; i < oQueries.length - 1; i++) {
                LOGGER.debug("Executing SQL statement as execute: " + oQueries[i]);
                stmt.execute(oQueries[i]);
            }
            final String selectQuery = oQueries[oQueries.length - 1];
            LOGGER.debug("Executing SQL statement as executeQuery: " + selectQuery);
            final ResultSet result = stmt.executeQuery(selectQuery);
            LOGGER.debug("Reading meta data from database ResultSet...");
            m_spec = createTableSpec(result.getMetaData());
            LOGGER.debug("Parsing database ResultSet...");
            // the returned object takes ownership of conn, stmt and the open result set
            return new RowIteratorConnection(conn, stmt, result, m_spec, autoCommit, useDbRowId);
        } catch (SQLException | RuntimeException ex) {
            // close the statement on failure -- on success it is owned by the returned object
            try {
                stmt.close();
            } catch (SQLException closeEx) {
                ex.addSuppressed(closeEx);
            }
            throw ex;
        }
    });
}
Use of org.knime.core.data.blob.BinaryObjectCellFactory in project knime-core by KNIME:
class DBReaderImpl, method createTable.
/**
 * Called from the database port to read the first n-number of rows.
 *
 * @param useDbRowId <code>true</code> if the KNIME row id should based on the db row id
 * @param cachedNoRows number of rows cached for data preview; a negative value reads all rows
 *            using the globally configured fetch size
 * @param cp {@link CredentialsProvider} providing user/password
 * @return buffered data table read from database
 * @throws SQLException if the connection could not be opened
 */
DataTable createTable(final boolean useDbRowId, final int cachedNoRows, final CredentialsProvider cp)
    throws SQLException {
    if (m_blobFactory == null) {
        m_blobFactory = new BinaryObjectCellFactory();
    }
    final DatabaseQueryConnectionSettings dbConn = getQueryConnection();
    return dbConn.execute(cp, conn -> {
        // remember auto-commit flag so it can be restored after reading
        final boolean autoCommit = conn.getAutoCommit();
        final Statement stmt = initStatement(cp, conn);
        try {
            final String[] oQueries = dbConn.getQuery().split(SQL_QUERY_SEPARATOR);
            if (cachedNoRows < 0) {
                // read everything: propagate the configured fetch size, -1 meaning "driver default"
                final int fetchsize =
                    (DatabaseConnectionSettings.FETCH_SIZE != null) ? DatabaseConnectionSettings.FETCH_SIZE : -1;
                dbConn.getUtility().getStatementManipulator().setFetchSize(stmt, fetchsize);
            } else {
                // preview mode: wrap the last statement in a sub-select so the row limit
                // applies to the final result; alias is unique per reader instance
                final int hashAlias = System.identityHashCode(this);
                final int selectIdx = oQueries.length - 1;
                oQueries[selectIdx] = "SELECT * FROM (" + oQueries[selectIdx] + ") table_" + hashAlias;
                try {
                    // bugfix 2925: may fail, e.g. on sqlite -- treat as best-effort
                    stmt.setMaxRows(cachedNoRows);
                } catch (SQLException ex) {
                    Throwable cause = ExceptionUtils.getRootCause(ex);
                    if (cause == null) {
                        cause = ex;
                    }
                    LOGGER.warn("Can't set max rows on statement, reason: " + cause.getMessage(), ex);
                }
            }
            // execute all except the last query (e.g. setup/DDL statements)
            for (int i = 0; i < oQueries.length - 1; i++) {
                LOGGER.debug("Executing SQL statement as execute: " + oQueries[i]);
                stmt.execute(oQueries[i]);
            }
            final String lastQuery = oQueries[oQueries.length - 1];
            LOGGER.debug("Executing SQL statement as executeQuery: " + lastQuery);
            final ResultSet result = stmt.executeQuery(lastQuery);
            LOGGER.debug("Reading meta data from database ResultSet...");
            m_spec = createTableSpec(result.getMetaData());
            LOGGER.debug("Parsing database ResultSet...");
            final RowIterator it = createDBRowIterator(m_spec, dbConn, m_blobFactory, useDbRowId, result);
            final DataContainer buf = new DataContainer(m_spec);
            while (it.hasNext()) {
                buf.addRowToTable(it.next());
            }
            buf.close();
            return buf.getTable();
        } finally {
            // stmt is a final local assigned before the try and can never be null here,
            // so the former null check was dead code. Commit pending work (if the
            // connection is not auto-committing) before restoring the original flag.
            if (!conn.getAutoCommit()) {
                conn.commit();
            }
            DatabaseConnectionSettings.setAutoCommit(conn, autoCommit);
            stmt.close();
        }
    });
}
Aggregations