Use of org.h2.command.dml.Set in project h2database by h2database.
The class FullText, method search.
/**
* Do the search.
*
* @param conn the database connection
* @param text the query
* @param limit the limit
* @param offset the offset
* @param data whether the raw data should be returned
* @return the result set
*/
protected static ResultSet search(Connection conn, String text, int limit,
        int offset, boolean data) throws SQLException {
    SimpleResultSet result = createResultSet(data);
    if (conn.getMetaData().getURL().startsWith("jdbc:columnlist:")) {
        // this is just to query the result set columns
        return result;
    }
    if (text == null || text.trim().length() == 0) {
        return result;
    }
    FullTextSettings setting = FullTextSettings.getInstance(conn);
    if (!setting.isInitialized()) {
        init(conn);
    }
    Set<String> words = new HashSet<>();
    addWords(setting, words, text);
    Set<Integer> rIds = null, lastRowIds;
    PreparedStatement prepSelectMapByWordId = setting.prepare(conn, SELECT_MAP_BY_WORD_ID);
    for (String word : words) {
        lastRowIds = rIds;
        rIds = new HashSet<>();
        Integer wId = setting.getWordId(word);
        if (wId == null) {
            continue;
        }
        prepSelectMapByWordId.setInt(1, wId.intValue());
        ResultSet rs = prepSelectMapByWordId.executeQuery();
        while (rs.next()) {
            Integer rId = rs.getInt(1);
            if (lastRowIds == null || lastRowIds.contains(rId)) {
                rIds.add(rId);
            }
        }
    }
    if (rIds == null || rIds.isEmpty()) {
        return result;
    }
    PreparedStatement prepSelectRowById = setting.prepare(conn, SELECT_ROW_BY_ID);
    int rowCount = 0;
    for (int rowId : rIds) {
        prepSelectRowById.setInt(1, rowId);
        ResultSet rs = prepSelectRowById.executeQuery();
        if (!rs.next()) {
            continue;
        }
        if (offset > 0) {
            offset--;
        } else {
            String key = rs.getString(1);
            int indexId = rs.getInt(2);
            IndexInfo index = setting.getIndexInfo(indexId);
            if (data) {
                Object[][] columnData = parseKey(conn, key);
                result.addRow(index.schema, index.table,
                        columnData[0], columnData[1], 1.0);
            } else {
                String query = StringUtils.quoteIdentifier(index.schema) +
                        "." + StringUtils.quoteIdentifier(index.table) +
                        " WHERE " + key;
                result.addRow(query, 1.0);
            }
            rowCount++;
            if (limit > 0 && rowCount >= limit) {
                break;
            }
        }
    }
    return result;
}
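For context, here is a minimal, hedged sketch (not part of the source above; the table and its contents are invented) of how this search() is normally reached through the SQL interface: FullText.init registers the FT_* functions, FullText.createIndex indexes a table, and FT_SEARCH calls this method with data = false (FT_SEARCH_DATA uses data = true).

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.h2.fulltext.FullText;

public class NativeFullTextExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:test")) {
            Statement stat = conn.createStatement();
            stat.execute("CREATE TABLE DOCS(ID INT PRIMARY KEY, CONTENT VARCHAR)");
            stat.execute("INSERT INTO DOCS VALUES(1, 'hello full text search')");
            // register the FT_* functions and index all columns of the table
            FullText.init(conn);
            FullText.createIndex(conn, "PUBLIC", "DOCS", null);
            // limit = 0 means no limit, offset = 0 starts at the first hit
            ResultSet rs = stat.executeQuery("SELECT * FROM FT_SEARCH('hello', 0, 0)");
            while (rs.next()) {
                // QUERY is e.g. "PUBLIC"."DOCS" WHERE "ID"=1
                System.out.println(rs.getString("QUERY"));
            }
        }
    }
}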
Use of org.h2.command.dml.Set in project h2database by h2database.
The class FullTextLucene, method search.
/**
* Do the search.
*
* @param conn the database connection
* @param text the query
* @param limit the limit
* @param offset the offset
* @param data whether the raw data should be returned
* @return the result set
*/
protected static ResultSet search(Connection conn, String text, int limit,
        int offset, boolean data) throws SQLException {
    SimpleResultSet result = createResultSet(data);
    if (conn.getMetaData().getURL().startsWith("jdbc:columnlist:")) {
        // this is just to query the result set columns
        return result;
    }
    if (text == null || text.trim().length() == 0) {
        return result;
    }
    try {
        IndexAccess access = getIndexAccess(conn);
        // take a reference as the searcher may change
        IndexSearcher searcher = access.getSearcher();
        try {
            // reuse the same analyzer; it's thread-safe;
            // also allows subclasses to control the analyzer used.
            Analyzer analyzer = access.writer.getAnalyzer();
            QueryParser parser = new QueryParser(Version.LUCENE_30,
                    LUCENE_FIELD_DATA, analyzer);
            Query query = parser.parse(text);
            // Lucene 3 insists on a hard limit and will not provide
            // a total hits value. Take at least 100 which is
            // an optimal limit for Lucene as any more
            // will trigger writing results to disk.
            int maxResults = (limit == 0 ? 100 : limit) + offset;
            TopDocs docs = searcher.search(query, maxResults);
            if (limit == 0) {
                limit = docs.totalHits;
            }
            for (int i = 0, len = docs.scoreDocs.length;
                    i < limit && i + offset < docs.totalHits && i + offset < len; i++) {
                ScoreDoc sd = docs.scoreDocs[i + offset];
                Document doc = searcher.doc(sd.doc);
                float score = sd.score;
                String q = doc.get(LUCENE_FIELD_QUERY);
                if (data) {
                    int idx = q.indexOf(" WHERE ");
                    JdbcConnection c = (JdbcConnection) conn;
                    Session session = (Session) c.getSession();
                    Parser p = new Parser(session);
                    String tab = q.substring(0, idx);
                    ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
                    String schemaName = expr.getOriginalTableAliasName();
                    String tableName = expr.getColumnName();
                    q = q.substring(idx + " WHERE ".length());
                    Object[][] columnData = parseKey(conn, q);
                    result.addRow(schemaName, tableName,
                            columnData[0], columnData[1], score);
                } else {
                    result.addRow(q, score);
                }
            }
        } finally {
            access.returnSearcher(searcher);
        }
    } catch (Exception e) {
        throw convertException(e);
    }
    return result;
}
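Usage mirrors the native version. Below is a minimal sketch, again with an invented table, assuming the Lucene jar is on the classpath; FTL_SEARCH reaches this method with data = false and FTL_SEARCH_DATA with data = true.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.h2.fulltext.FullTextLucene;

public class LuceneFullTextExample {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:test")) {
            Statement stat = conn.createStatement();
            stat.execute("CREATE TABLE DOCS(ID INT PRIMARY KEY, CONTENT VARCHAR)");
            stat.execute("INSERT INTO DOCS VALUES(1, 'hello lucene search')");
            // register the FTL_* functions and index all columns of the table
            FullTextLucene.init(conn);
            FullTextLucene.createIndex(conn, "PUBLIC", "DOCS", null);
            ResultSet rs = stat.executeQuery("SELECT * FROM FTL_SEARCH('hello', 0, 0)");
            while (rs.next()) {
                // one row per hit: the query string plus the Lucene score
                System.out.println(rs.getString("QUERY") + " score=" + rs.getFloat("SCORE"));
            }
        }
    }
}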
Use of org.h2.command.dml.Set in project h2database by h2database.
The class SessionRemote, method checkClusterDisableAutoCommit.
private void checkClusterDisableAutoCommit(String serverList) {
    if (autoCommit && transferList.size() > 1) {
        setAutoCommitSend(false);
        CommandInterface c = prepareCommand("SET CLUSTER " + serverList, Integer.MAX_VALUE);
        // this will set autoCommit to false
        c.executeUpdate(false);
        // so we need to switch it on
        autoCommit = true;
        cluster = true;
    }
}
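For orientation, a hedged sketch of when this code runs (host names and ports are illustrative, and the databases are assumed to have already been prepared with the CreateCluster tool): the client enters cluster mode when the JDBC URL lists more than one server, so transferList contains more than one entry.

import java.sql.Connection;
import java.sql.DriverManager;

public class ClusterConnectExample {
    public static void main(String[] args) throws Exception {
        // Two servers in the URL -> transferList.size() > 1, so the remote
        // session sends "SET CLUSTER localhost:9101,localhost:9102" as shown above.
        Connection conn = DriverManager.getConnection(
                "jdbc:h2:tcp://localhost:9101,localhost:9102/~/test", "sa", "sa");
        conn.close();
    }
}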
Use of org.h2.command.dml.Set in project h2database by h2database.
The class BaseIndex, method getCostRangeIndex.
/**
* Calculate the cost for the given mask as if this index was a typical
* b-tree range index. This is the estimated cost required to search one
* row, and then iterate over the given number of rows.
*
* @param masks the IndexCondition search masks, one for each column in the
* table
* @param rowCount the number of rows in the index
* @param filters all joined table filters
* @param filter the current table filter index
* @param sortOrder the sort order
* @param isScanIndex whether this is a "table scan" index
* @param allColumnsSet the set of all columns
* @return the estimated cost
*/
protected final long getCostRangeIndex(int[] masks, long rowCount,
        TableFilter[] filters, int filter, SortOrder sortOrder,
        boolean isScanIndex, HashSet<Column> allColumnsSet) {
    rowCount += Constants.COST_ROW_OFFSET;
    int totalSelectivity = 0;
    long rowsCost = rowCount;
    if (masks != null) {
        for (int i = 0, len = columns.length; i < len; i++) {
            Column column = columns[i];
            int index = column.getColumnId();
            int mask = masks[index];
            if ((mask & IndexCondition.EQUALITY) == IndexCondition.EQUALITY) {
                if (i == columns.length - 1 && getIndexType().isUnique()) {
                    rowsCost = 3;
                    break;
                }
                totalSelectivity = 100 - ((100 - totalSelectivity) *
                        (100 - column.getSelectivity()) / 100);
                long distinctRows = rowCount * totalSelectivity / 100;
                if (distinctRows <= 0) {
                    distinctRows = 1;
                }
                rowsCost = 2 + Math.max(rowCount / distinctRows, 1);
            } else if ((mask & IndexCondition.RANGE) == IndexCondition.RANGE) {
                rowsCost = 2 + rowCount / 4;
                break;
            } else if ((mask & IndexCondition.START) == IndexCondition.START) {
                rowsCost = 2 + rowCount / 3;
                break;
            } else if ((mask & IndexCondition.END) == IndexCondition.END) {
                rowsCost = rowCount / 3;
                break;
            } else {
                break;
            }
        }
    }
    // If the ORDER BY clause matches the ordering of this index,
    // it will be cheaper than another index, so adjust the cost
    // accordingly.
    long sortingCost = 0;
    if (sortOrder != null) {
        sortingCost = 100 + rowCount / 10;
    }
    if (sortOrder != null && !isScanIndex) {
        boolean sortOrderMatches = true;
        int coveringCount = 0;
        int[] sortTypes = sortOrder.getSortTypes();
        TableFilter tableFilter = filters == null ? null : filters[filter];
        for (int i = 0, len = sortTypes.length; i < len; i++) {
            if (i >= indexColumns.length) {
                // The ORDER BY clause has more columns than this index;
                // the index can still be used, it just covers fewer of
                // them, so coveringCount stays lower.
                break;
            }
            Column col = sortOrder.getColumn(i, tableFilter);
            if (col == null) {
                sortOrderMatches = false;
                break;
            }
            IndexColumn indexCol = indexColumns[i];
            if (!col.equals(indexCol.column)) {
                sortOrderMatches = false;
                break;
            }
            int sortType = sortTypes[i];
            if (sortType != indexCol.sortType) {
                sortOrderMatches = false;
                break;
            }
            coveringCount++;
        }
        if (sortOrderMatches) {
            // "coveringCount" makes sure that when we have two
            // or more covering indexes, we choose the one
            // that covers more.
            sortingCost = 100 - coveringCount;
        }
    }
    // If we have two indexes with the same cost, and one of the indexes can
    // satisfy the query without needing to read from the primary table
    // (scan index), make that one slightly lower cost.
    boolean needsToReadFromScanIndex = true;
    if (!isScanIndex && allColumnsSet != null && !allColumnsSet.isEmpty()) {
        boolean foundAllColumnsWeNeed = true;
        for (Column c : allColumnsSet) {
            if (c.getTable() == getTable()) {
                boolean found = false;
                for (Column c2 : columns) {
                    if (c == c2) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    foundAllColumnsWeNeed = false;
                    break;
                }
            }
        }
        if (foundAllColumnsWeNeed) {
            needsToReadFromScanIndex = false;
        }
    }
    long rc;
    if (isScanIndex) {
        rc = rowsCost + sortingCost + 20;
    } else if (needsToReadFromScanIndex) {
        rc = rowsCost + rowsCost + sortingCost + 20;
    } else {
        // The (20-x) calculation makes sure that when we pick a covering
        // index, we pick the covering index that has the smallest number of
        // columns (the more columns we have in index - the higher cost).
        // This is faster because a smaller index will fit into fewer data
        // blocks.
        rc = rowsCost + sortingCost + columns.length;
    }
    return rc;
}
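To make the equality branch above concrete, here is a small worked sketch of its arithmetic. The value 1000 for Constants.COST_ROW_OFFSET and the 50 percent column selectivity are assumptions chosen purely for illustration.

public class IndexCostSketch {
    public static void main(String[] args) {
        long rowCount = 10_000 + 1_000;   // table rows + assumed COST_ROW_OFFSET of 1000
        int totalSelectivity = 0;
        int columnSelectivity = 50;       // assumed: about half of the values are distinct
        // same combination formula as in getCostRangeIndex(): -> 50
        totalSelectivity = 100 - (100 - totalSelectivity) * (100 - columnSelectivity) / 100;
        long distinctRows = Math.max(rowCount * totalSelectivity / 100, 1); // 5500
        long rowsCost = 2 + Math.max(rowCount / distinctRows, 1);           // 2 + 2 = 4
        System.out.println("rowsCost = " + rowsCost);
        // A non-covering use of this index then costs roughly
        // rowsCost + rowsCost + sortingCost + 20; a covering one
        // rowsCost + sortingCost + columns.length.
    }
}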
Use of org.h2.command.dml.Set in project h2database by h2database.
The class LinkedIndex, method update.
/**
* Update a row using an UPDATE statement. This method is to be called if the
* emit updates option is enabled.
*
* @param oldRow the old data
* @param newRow the new data
*/
public void update(Row oldRow, Row newRow) {
    ArrayList<Value> params = New.arrayList();
    StatementBuilder buff = new StatementBuilder("UPDATE ");
    buff.append(targetTableName).append(" SET ");
    for (int i = 0; i < newRow.getColumnCount(); i++) {
        buff.appendExceptFirst(", ");
        buff.append(table.getColumn(i).getSQL()).append('=');
        Value v = newRow.getValue(i);
        if (v == null) {
            buff.append("DEFAULT");
        } else {
            buff.append('?');
            params.add(v);
        }
    }
    buff.append(" WHERE ");
    buff.resetCount();
    for (int i = 0; i < oldRow.getColumnCount(); i++) {
        Column col = table.getColumn(i);
        buff.appendExceptFirst(" AND ");
        buff.append(col.getSQL());
        Value v = oldRow.getValue(i);
        if (isNull(v)) {
            buff.append(" IS NULL");
        } else {
            buff.append('=');
            params.add(v);
            addParameter(buff, col);
        }
    }
    String sql = buff.toString();
    try {
        link.execute(sql, params, true);
    } catch (Exception e) {
        throw TableLink.wrapException(sql, e);
    }
}
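For a concrete picture (an invented example, not from the source): for a linked table T with columns ID and NAME, updating the old row (1, 'a') to the new row (1, 'b') builds roughly the following statement, with the new values bound first for the SET clause and the old values bound afterwards for the WHERE clause; addParameter appends the placeholder for each comparison, typically a plain '?'.

UPDATE T SET ID=?, NAME=? WHERE ID=? AND NAME=?
parameters, in order: 1, 'b', 1, 'a'

A column that is NULL in the old row is matched with IS NULL instead of a parameter, and a null value in the new row is written as DEFAULT rather than bound.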