
Example 1 with Data

Use of org.h2.store.Data in project siena by mandubian.

Class FullText, method createResultSet.

/**
 * Create an empty search result and initialize the columns.
 *
 * @param data true if the result set should contain the primary key data as
 *            an array.
 * @return the empty result set
 */
protected static SimpleResultSet createResultSet(boolean data) {
    SimpleResultSet result = new SimpleResultSet();
    if (data) {
        result.addColumn(FullText.FIELD_SCHEMA, Types.VARCHAR, 0, 0);
        result.addColumn(FullText.FIELD_TABLE, Types.VARCHAR, 0, 0);
        result.addColumn(FullText.FIELD_COLUMNS, Types.ARRAY, 0, 0);
        result.addColumn(FullText.FIELD_KEYS, Types.ARRAY, 0, 0);
    } else {
        result.addColumn(FullText.FIELD_QUERY, Types.VARCHAR, 0, 0);
    }
    result.addColumn(FullText.FIELD_SCORE, Types.FLOAT, 0, 0);
    return result;
}
Also used : SimpleResultSet(org.h2.tools.SimpleResultSet)
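
A minimal usage sketch of the SimpleResultSet API this helper builds on. The column names "QUERY" and "SCORE" are assumed to mirror FullText.FIELD_QUERY and FullText.FIELD_SCORE, and the row content is illustrative only.

import java.sql.SQLException;
import java.sql.Types;
import org.h2.tools.SimpleResultSet;

// Sketch only: builds the same two-column layout as createResultSet(false),
// adds one illustrative row and iterates it like any java.sql.ResultSet.
public class SimpleResultSetSketch {
    public static void main(String[] args) throws SQLException {
        SimpleResultSet rs = new SimpleResultSet();
        // assumed to match FullText.FIELD_QUERY / FullText.FIELD_SCORE
        rs.addColumn("QUERY", Types.VARCHAR, 0, 0);
        rs.addColumn("SCORE", Types.FLOAT, 0, 0);
        rs.addRow("\"PUBLIC\".\"TEST\" WHERE \"ID\"=1", 1.0);
        while (rs.next()) {
            System.out.println(rs.getString("QUERY") + " -> " + rs.getObject("SCORE"));
        }
    }
}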

Example 2 with Data

Use of org.h2.store.Data in project siena by mandubian.

Class FullText, method parseKey.

/**
 * Parse a primary key condition into the primary key columns.
 *
 * @param conn the database connection
 * @param key the primary key condition as a string
 * @return an array containing the column name list and the data list
 */
protected static Object[][] parseKey(Connection conn, String key) {
    ArrayList<String> columns = New.arrayList();
    ArrayList<String> data = New.arrayList();
    JdbcConnection c = (JdbcConnection) conn;
    Session session = (Session) c.getSession();
    Parser p = new Parser(session);
    Expression expr = p.parseExpression(key);
    addColumnData(columns, data, expr);
    Object[] col = new Object[columns.size()];
    columns.toArray(col);
    Object[] dat = new Object[columns.size()];
    data.toArray(dat);
    Object[][] columnData = { col, dat };
    return columnData;
}
Also used : ValueExpression(org.h2.expression.ValueExpression) Expression(org.h2.expression.Expression) JdbcConnection(org.h2.jdbc.JdbcConnection) Session(org.h2.engine.Session) Parser(org.h2.command.Parser)
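
The returned array holds the column names in columnData[0] and the matching values in columnData[1]. Below is a minimal sketch of how a caller might turn that pair back into a key condition; toWhereClause is a hypothetical helper, not part of the siena or H2 source.

// Hypothetical helper for illustration: rebuilds "COL=value AND ..." from the
// parallel arrays produced by parseKey (names in [0], values in [1]).
public class KeyConditionSketch {
    static String toWhereClause(Object[][] columnData) {
        Object[] cols = columnData[0];
        Object[] vals = columnData[1];
        StringBuilder sb = new StringBuilder();
        for (int i = 0; i < cols.length; i++) {
            if (i > 0) {
                sb.append(" AND ");
            }
            sb.append(cols[i]).append('=').append(vals[i]);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        Object[][] columnData = { { "ID", "NAME" }, { "1", "'joe'" } };
        // prints: ID=1 AND NAME='joe'
        System.out.println(toWhereClause(columnData));
    }
}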

Example 3 with Data

Use of org.h2.store.Data in project siena by mandubian.

Class FullText, method search.

/**
 * Do the search.
 *
 * @param conn the database connection
 * @param text the query
 * @param limit the limit
 * @param offset the offset
 * @param data whether the raw data should be returned
 * @return the result set
 */
protected static ResultSet search(Connection conn, String text, int limit, int offset, boolean data) throws SQLException {
    SimpleResultSet result = createResultSet(data);
    if (conn.getMetaData().getURL().startsWith("jdbc:columnlist:")) {
        // this is just to query the result set columns
        return result;
    }
    if (text == null || text.trim().length() == 0) {
        return result;
    }
    FullTextSettings setting = FullTextSettings.getInstance(conn);
    if (!setting.isInitialized()) {
        init(conn);
    }
    HashSet<String> words = New.hashSet();
    addWords(setting, words, text);
    HashSet<Integer> rIds = null, lastRowIds = null;
    HashMap<String, Integer> allWords = setting.getWordList();
    PreparedStatement prepSelectMapByWordId = setting.prepare(conn, SELECT_MAP_BY_WORD_ID);
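    // Intersect the row ids found for each word: a row qualifies only if it contains every search term.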
    for (String word : words) {
        lastRowIds = rIds;
        rIds = New.hashSet();
        Integer wId = allWords.get(word);
        if (wId == null) {
            continue;
        }
        prepSelectMapByWordId.setInt(1, wId.intValue());
        ResultSet rs = prepSelectMapByWordId.executeQuery();
        while (rs.next()) {
            Integer rId = rs.getInt(1);
            if (lastRowIds == null || lastRowIds.contains(rId)) {
                rIds.add(rId);
            }
        }
    }
    if (rIds == null || rIds.size() == 0) {
        return result;
    }
    PreparedStatement prepSelectRowById = setting.prepare(conn, SELECT_ROW_BY_ID);
    int rowCount = 0;
    for (int rowId : rIds) {
        prepSelectRowById.setInt(1, rowId);
        ResultSet rs = prepSelectRowById.executeQuery();
        if (!rs.next()) {
            continue;
        }
        if (offset > 0) {
            offset--;
        } else {
            String key = rs.getString(1);
            int indexId = rs.getInt(2);
            IndexInfo index = setting.getIndexInfo(indexId);
            if (data) {
                /*
                Object[][] columnData = parseKey(conn, key);
                result.addRow(
                        index.schema,
                        index.table,
                        columnData[0],
                        columnData[1],
                        1.0);
                */
                String[] splits = key.split("=");
                String[] col0 = new String[1];
                col0[0] = splits[0];
                String[] col1 = new String[1];
                col1[0] = splits[1];
                result.addRow(index.schema, index.table, col0, col1, 1.0);
            } else {
                String query = StringUtils.quoteIdentifier(index.schema) + "." + StringUtils.quoteIdentifier(index.table) + " WHERE " + key;
                result.addRow(query, 1.0);
            }
            rowCount++;
            if (limit > 0 && rowCount >= limit) {
                break;
            }
        }
    }
    return result;
}
Also used : SimpleResultSet(org.h2.tools.SimpleResultSet) ResultSet(java.sql.ResultSet) PreparedStatement(java.sql.PreparedStatement)
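
For context, the sketch below shows how this kind of search is usually driven through SQL with H2's stock full-text functions (FT_INIT, FT_CREATE_INDEX, FT_SEARCH). It assumes the standard org.h2.fulltext.FullText class; the siena copy may register different aliases, and the in-memory URL, table and data are illustrative.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

// Sketch assuming H2's built-in full-text support; the siena fork may differ.
public class FullTextSearchSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:ft");
             Statement st = conn.createStatement()) {
            st.execute("CREATE ALIAS IF NOT EXISTS FT_INIT FOR \"org.h2.fulltext.FullText.init\"");
            st.execute("CALL FT_INIT()");
            st.execute("CREATE TABLE TEST(ID INT PRIMARY KEY, NAME VARCHAR)");
            st.execute("INSERT INTO TEST VALUES(1, 'Hello World')");
            st.execute("CALL FT_CREATE_INDEX('PUBLIC', 'TEST', NULL)");
            try (ResultSet rs = st.executeQuery("SELECT * FROM FT_SEARCH('Hello', 0, 0)")) {
                while (rs.next()) {
                    // QUERY is a string such as "PUBLIC"."TEST" WHERE "ID"=1
                    System.out.println(rs.getString("QUERY") + " score=" + rs.getObject("SCORE"));
                }
            }
        }
    }
}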

Example 4 with Data

Use of org.h2.store.Data in project ignite by apache.

Class GridH2KeyValueRowOffheap, method incrementRefCount.

/** {@inheritDoc} */
@SuppressWarnings({ "NonPrivateFieldAccessedInSynchronizedContext" })
@Override
public void incrementRefCount() {
    long p = ptr;
    GridUnsafeMemory mem = desc.memory();
    if (p == 0) {
        // Serialize data to offheap memory.
        Value key = peekValue(KEY_COL);
        Value val = peekValue(VAL_COL);
        assert key != null;
        assert val != null;
        Data data = Data.create(null, new byte[SIZE_CALCULATOR.getValueLen(key)]);
        data.writeValue(key);
        int keySize = data.length();
        p = mem.allocate(keySize + OFFSET_KEY);
        // We don't need any synchronization or volatility here because we publish via
        // volatile write to tree node.
        mem.writeInt(p, 1);
        mem.writeLong(p + OFFSET_EXPIRATION, expirationTime);
        mem.writeInt(p + OFFSET_KEY_SIZE, keySize);
        mem.writeBytes(p + OFFSET_KEY, data.getBytes(), 0, keySize);
        data = Data.create(null, new byte[SIZE_CALCULATOR.getValueLen(val)]);
        data.writeValue(val);
        int valSize = data.length();
        long valPtr = mem.allocate(valSize + OFFSET_VALUE);
        mem.writeInt(valPtr, valSize);
        mem.writeBytes(valPtr + OFFSET_VALUE, data.getBytes(), 0, valSize);
        mem.writeLongVolatile(p + OFFSET_VALUE_REF, valPtr);
        ptr = p;
        desc.cache(this);
    } else {
        for (; ; ) {
            int cnt = mem.readIntVolatile(p);
            assert cnt > 0 : cnt;
            if (mem.casInt(p, cnt, cnt + 1))
                break;
        }
    }
}
Also used : Value(org.h2.value.Value) Data(org.h2.store.Data) GridUnsafeMemory(org.apache.ignite.internal.util.offheap.unsafe.GridUnsafeMemory)
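
The sketch below isolates the org.h2.store.Data round trip that the code above performs before copying the bytes off-heap, assuming the H2 1.4.x API that Ignite bundles. The fixed 128-byte buffer stands in for the SIZE_CALCULATOR-based sizing in the real code.

import org.h2.store.Data;
import org.h2.value.Value;
import org.h2.value.ValueInt;
import org.h2.value.ValueString;

// Sketch assuming the H2 1.4.x API bundled with Ignite: serialize two Values with
// org.h2.store.Data, then read them back from the resulting byte array.
public class DataRoundTripSketch {
    public static void main(String[] args) {
        Value key = ValueInt.get(42);
        Value val = ValueString.get("payload");

        // fixed capacity used here for simplicity instead of SIZE_CALCULATOR
        Data out = Data.create(null, new byte[128]);
        out.writeValue(key);
        out.writeValue(val);
        int len = out.length(); // bytes actually written

        Data in = Data.create(null, out.getBytes());
        System.out.println("key=" + in.readValue().getInt()
                + " val=" + in.readValue().getString()
                + " bytes=" + len);
    }
}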

Example 5 with Data

Use of org.h2.store.Data in project ignite by apache.

Class GridSqlQueryParser, method parseCreateTable.

/**
 * Parse {@code CREATE TABLE} statement.
 *
 * @param createTbl {@code CREATE TABLE} statement.
 * @see <a href="http://h2database.com/html/grammar.html#create_table">H2 {@code CREATE TABLE} spec.</a>
 */
private GridSqlCreateTable parseCreateTable(CreateTable createTbl) {
    GridSqlCreateTable res = new GridSqlCreateTable();
    res.templateName(QueryUtils.TEMPLATE_PARTITIONED);
    Query qry = CREATE_TABLE_QUERY.get(createTbl);
    if (qry != null)
        throw new IgniteSQLException("CREATE TABLE ... AS ... syntax is not supported", IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
    List<DefineCommand> constraints = CREATE_TABLE_CONSTRAINTS.get(createTbl);
    if (constraints.size() == 0)
        throw new IgniteSQLException("No PRIMARY KEY defined for CREATE TABLE", IgniteQueryErrorCode.PARSING);
    if (constraints.size() > 1)
        throw new IgniteSQLException("Too many constraints - only PRIMARY KEY is supported for CREATE TABLE", IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
    DefineCommand constraint = constraints.get(0);
    if (!(constraint instanceof AlterTableAddConstraint))
        throw new IgniteSQLException("Unsupported type of constraint for CREATE TABLE - only PRIMARY KEY " + "is supported", IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
    AlterTableAddConstraint alterTbl = (AlterTableAddConstraint) constraint;
    if (alterTbl.getType() != Command.ALTER_TABLE_ADD_CONSTRAINT_PRIMARY_KEY)
        throw new IgniteSQLException("Unsupported type of constraint for CREATE TABLE - only PRIMARY KEY " + "is supported", IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
    Schema schema = SCHEMA_COMMAND_SCHEMA.get(createTbl);
    res.schemaName(schema.getName());
    CreateTableData data = CREATE_TABLE_DATA.get(createTbl);
    LinkedHashMap<String, GridSqlColumn> cols = new LinkedHashMap<>(data.columns.size());
    for (Column col : data.columns) {
        if (col.isAutoIncrement())
            throw new IgniteSQLException("AUTO_INCREMENT columns are not supported [colName=" + col.getName() + ']', IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
        if (!col.isNullable())
            throw new IgniteSQLException("Non nullable columns are forbidden [colName=" + col.getName() + ']', IgniteQueryErrorCode.PARSING);
        if (COLUMN_IS_COMPUTED.get(col))
            throw new IgniteSQLException("Computed columns are not supported [colName=" + col.getName() + ']', IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
        if (col.getDefaultExpression() != null)
            throw new IgniteSQLException("DEFAULT expressions are not supported [colName=" + col.getName() + ']', IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
        if (col.getSequence() != null)
            throw new IgniteSQLException("SEQUENCE columns are not supported [colName=" + col.getName() + ']', IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
        if (col.getSelectivity() != Constants.SELECTIVITY_DEFAULT)
            throw new IgniteSQLException("SELECTIVITY column attr is not supported [colName=" + col.getName() + ']', IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
        if (COLUMN_CHECK_CONSTRAINT.get(col) != null)
            throw new IgniteSQLException("Column CHECK constraints are not supported [colName=" + col.getName() + ']', IgniteQueryErrorCode.UNSUPPORTED_OPERATION);
        GridSqlColumn gridCol = new GridSqlColumn(col, null, col.getName());
        gridCol.resultType(GridSqlType.fromColumn(col));
        cols.put(col.getName(), gridCol);
    }
    if (cols.containsKey(QueryUtils.KEY_FIELD_NAME.toUpperCase()) || cols.containsKey(QueryUtils.VAL_FIELD_NAME.toUpperCase()))
        throw new IgniteSQLException("Direct specification of _KEY and _VAL columns is forbidden", IgniteQueryErrorCode.PARSING);
    IndexColumn[] pkIdxCols = CREATE_TABLE_PK.get(createTbl);
    if (F.isEmpty(pkIdxCols))
        throw new AssertionError("No PRIMARY KEY columns specified");
    LinkedHashSet<String> pkCols = new LinkedHashSet<>();
    for (IndexColumn pkIdxCol : pkIdxCols) {
        GridSqlColumn gridCol = cols.get(pkIdxCol.columnName);
        assert gridCol != null;
        pkCols.add(gridCol.columnName());
    }
    int valColsNum = cols.size() - pkCols.size();
    if (valColsNum == 0)
        throw new IgniteSQLException("No cache value related columns found");
    res.columns(cols);
    res.primaryKeyColumns(pkCols);
    res.tableName(data.tableName);
    res.ifNotExists(CREATE_TABLE_IF_NOT_EXISTS.get(createTbl));
    List<String> extraParams = data.tableEngineParams != null ? new ArrayList<String>() : null;
    if (data.tableEngineParams != null)
        for (String s : data.tableEngineParams) extraParams.addAll(F.asList(s.split(",")));
    res.params(extraParams);
    if (!F.isEmpty(extraParams)) {
        Map<String, String> params = new HashMap<>();
        for (String p : extraParams) {
            String[] parts = p.split(PARAM_NAME_VALUE_SEPARATOR);
            if (parts.length > 2)
                throw new IgniteSQLException("Invalid parameter (key[=value] expected): " + p, IgniteQueryErrorCode.PARSING);
            String name = parts[0].trim().toUpperCase();
            String val = parts.length > 1 ? parts[1].trim() : null;
            if (F.isEmpty(name))
                throw new IgniteSQLException("Invalid parameter (key[=value] expected): " + p, IgniteQueryErrorCode.PARSING);
            if (params.put(name, val) != null)
                throw new IgniteSQLException("Duplicate parameter: " + p, IgniteQueryErrorCode.PARSING);
        }
        for (Map.Entry<String, String> e : params.entrySet()) processExtraParam(e.getKey(), e.getValue(), res);
    }
    return res;
}
Also used : LinkedHashSet(java.util.LinkedHashSet) Query(org.h2.command.dml.Query) LinkedHashMap(java.util.LinkedHashMap) IdentityHashMap(java.util.IdentityHashMap) HashMap(java.util.HashMap) Schema(org.h2.schema.Schema) DefineCommand(org.h2.command.ddl.DefineCommand) IndexColumn(org.h2.table.IndexColumn) GridSqlType.fromColumn(org.apache.ignite.internal.processors.query.h2.sql.GridSqlType.fromColumn) Column(org.h2.table.Column) ExpressionColumn(org.h2.expression.ExpressionColumn) AlterTableAddConstraint(org.h2.command.ddl.AlterTableAddConstraint) CreateTableData(org.h2.command.ddl.CreateTableData) IgniteSQLException(org.apache.ignite.internal.processors.query.IgniteSQLException) Map(java.util.Map)
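
For reference, a minimal sketch of a statement this parser accepts: a single PRIMARY KEY constraint, at least one non-key column, and extra parameters supplied as a comma-separated key=value list in the WITH clause. The thin-driver URL, table and parameter values are illustrative, not taken from the Ignite source.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

// Sketch against Ignite's thin JDBC driver; URL, WITH parameters and table are illustrative.
public class CreateTableSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:ignite:thin://127.0.0.1/");
             Statement st = conn.createStatement()) {
            st.executeUpdate(
                "CREATE TABLE IF NOT EXISTS PERSON (" +
                " ID INT PRIMARY KEY," +
                " NAME VARCHAR" +
                ") WITH \"template=partitioned,backups=1\"");
        }
    }
}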

Aggregations

ResultSet (java.sql.ResultSet): 5
LinkedHashMap (java.util.LinkedHashMap): 4
CacheException (javax.cache.CacheException): 4
IgniteException (org.apache.ignite.IgniteException): 4
SQLException (java.sql.SQLException): 3
ArrayList (java.util.ArrayList): 3
HashMap (java.util.HashMap): 3
List (java.util.List): 3
UUID (java.util.UUID): 3
IgniteCheckedException (org.apache.ignite.IgniteCheckedException): 3
ClusterNode (org.apache.ignite.cluster.ClusterNode): 3
IgniteSQLException (org.apache.ignite.internal.processors.query.IgniteSQLException): 3
Index (org.h2.index.Index): 3
StringReader (java.io.StringReader): 2
PreparedStatement (java.sql.PreparedStatement): 2
Collections.singletonList (java.util.Collections.singletonList): 2
Map (java.util.Map): 2
Random (java.util.Random): 2
IgniteClientDisconnectedException (org.apache.ignite.IgniteClientDisconnectedException): 2
QueryCancelledException (org.apache.ignite.cache.query.QueryCancelledException): 2