Use of org.h2.api.Trigger in project h2database by h2database.
The class TestTriggersConstraints, method testViewTriggerGeneratedKeys.
private void testViewTriggerGeneratedKeys() throws SQLException {
    Connection conn;
    Statement stat;
    conn = getConnection("trigger");
    stat = conn.createStatement();
    stat.execute("drop table if exists test");
    stat.execute("create table test(id int identity)");
    stat.execute("create view test_view as select * from test");
    stat.execute("create trigger test_view_insert " +
            "instead of insert on test_view for each row call \"" +
            TestViewGeneratedKeys.class.getName() + "\"");
    if (!config.memory) {
        // reopen the database to verify the trigger survives a restart
        conn.close();
        conn = getConnection("trigger");
        stat = conn.createStatement();
    }
    PreparedStatement pstat;
    pstat = conn.prepareStatement("insert into test_view values()",
            Statement.RETURN_GENERATED_KEYS);
    int count = pstat.executeUpdate();
    assertEquals(1, count);
    ResultSet gkRs;
    gkRs = stat.executeQuery("select scope_identity()");
    assertTrue(gkRs.next());
    assertEquals(1, gkRs.getInt(1));
    assertFalse(gkRs.next());
    ResultSet rs;
    rs = stat.executeQuery("select * from test");
    assertTrue(rs.next());
    assertFalse(rs.next());
    stat.execute("drop view test_view");
    stat.execute("drop table test");
    conn.close();
}
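The TestViewGeneratedKeys trigger class referenced above is not shown on this page. A minimal sketch of what such an INSTEAD OF trigger could look like, assuming it only forwards the insert to the base table and lets the identity column generate the key (the class name and body here are illustrative, not the project's actual implementation):

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.h2.api.Trigger;

// Hypothetical stand-in for TestViewGeneratedKeys: an INSTEAD OF
// trigger that forwards inserts on the view to the base table.
public class ViewInsertForwarder implements Trigger {

    @Override
    public void init(Connection conn, String schemaName, String triggerName,
            String tableName, boolean before, int type) {
        // nothing to prepare in this sketch
    }

    @Override
    public void fire(Connection conn, Object[] oldRow, Object[] newRow)
            throws SQLException {
        // insert into the base table; the identity column fills itself in
        try (PreparedStatement prep =
                conn.prepareStatement("insert into test values()")) {
            prep.executeUpdate();
        }
    }

    @Override
    public void close() {
        // no resources held
    }

    @Override
    public void remove() {
        // nothing to clean up
    }
}

The generated key is then visible through scope_identity(), which is what the test asserts.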
Use of org.h2.api.Trigger in project h2database by h2database.
The class TestTriggersConstraints, method testTriggerSelectEachRow.
private void testTriggerSelectEachRow() throws SQLException {
    Connection conn;
    Statement stat;
    conn = getConnection("trigger");
    stat = conn.createStatement();
    stat.execute("drop table if exists test");
    stat.execute("create table test(id int)");
    // row-level SELECT triggers are not supported, so this must fail
    assertThrows(ErrorCode.TRIGGER_SELECT_AND_ROW_BASED_NOT_SUPPORTED, stat).
            execute("create trigger test_insert before select on test " +
                    "for each row call \"" + TestTriggerAdapter.class.getName() + "\"");
    conn.close();
}
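The error is specific to the FOR EACH ROW clause: H2 only supports SELECT triggers at statement level, and omitting FOR EACH ROW makes the same statement legal. An illustrative variant under that assumption (whether TestTriggerAdapter itself handles statement-level firing, where the row arrays are null, is not shown on this page):

// Statement-level SELECT triggers are accepted; FOR EACH ROW is what fails.
stat.execute("create trigger test_select before select on test call \"" +
        TestTriggerAdapter.class.getName() + "\"");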
Use of org.h2.api.Trigger in project h2database by h2database.
The class TestSequence, method testConcurrentCreate.
private void testConcurrentCreate() throws Exception {
    deleteDb("sequence");
    final String url = getURL("sequence;MULTI_THREADED=1;LOCK_TIMEOUT=2000", true);
    Connection conn = getConnection(url);
    Task[] tasks = new Task[2];
    try {
        Statement stat = conn.createStatement();
        stat.execute("create table dummy(id bigint primary key)");
        stat.execute("create table test(id bigint primary key)");
        stat.execute("create sequence test_seq cache 2");
        for (int i = 0; i < tasks.length; i++) {
            final int x = i;
            tasks[i] = new Task() {
                @Override
                public void call() throws Exception {
                    try (Connection conn = getConnection(url)) {
                        PreparedStatement prep = conn.prepareStatement(
                                "insert into test(id) values(next value for test_seq)");
                        PreparedStatement prep2 = conn.prepareStatement("delete from test");
                        while (!stop) {
                            prep.execute();
                            // occasionally clear the table ...
                            if (Math.random() < 0.01) {
                                prep2.execute();
                            }
                            // ... and occasionally race a trigger create/drop
                            if (Math.random() < 0.01) {
                                createDropTrigger(conn);
                            }
                        }
                    }
                }

                private void createDropTrigger(Connection conn) throws Exception {
                    String triggerName = "t_" + x;
                    Statement stat = conn.createStatement();
                    stat.execute("create trigger " + triggerName +
                            " before insert on dummy call \"" +
                            TriggerTest.class.getName() + "\"");
                    stat.execute("drop trigger " + triggerName);
                }
            }.execute();
        }
        Thread.sleep(1000);
        for (Task t : tasks) {
            t.get();
        }
    } finally {
        for (Task t : tasks) {
            t.join();
        }
        conn.close();
    }
}
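TriggerTest here only needs to exist so that create trigger / drop trigger can race against the sequence inserts. A plausible no-op implementation of org.h2.api.Trigger (the project's actual TriggerTest class may differ):

import java.sql.Connection;
import org.h2.api.Trigger;

// Hypothetical stand-in for the TriggerTest class used above: the
// concurrency test only exercises create/drop, so every callback is a no-op.
public class TriggerTest implements Trigger {

    @Override
    public void init(Connection conn, String schemaName, String triggerName,
            String tableName, boolean before, int type) {
        // nothing to set up
    }

    @Override
    public void fire(Connection conn, Object[] oldRow, Object[] newRow) {
        // never fires in this test: nothing inserts into the dummy table
    }

    @Override
    public void close() {
        // no resources held
    }

    @Override
    public void remove() {
        // nothing to clean up
    }
}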
Use of org.h2.api.Trigger in project siena by mandubian.
The class FullText, method removeAllTriggers.
/**
* Remove all triggers that start with the given prefix.
*
* @param conn the database connection
* @param prefix the prefix
*/
protected static void removeAllTriggers(Connection conn, String prefix) throws SQLException {
    Statement stat = conn.createStatement();
    ResultSet rs = stat.executeQuery("SELECT * FROM INFORMATION_SCHEMA.TRIGGERS");
    Statement stat2 = conn.createStatement();
    while (rs.next()) {
        String schema = rs.getString("TRIGGER_SCHEMA");
        String name = rs.getString("TRIGGER_NAME");
        if (name.startsWith(prefix)) {
            name = StringUtils.quoteIdentifier(schema) + "." + StringUtils.quoteIdentifier(name);
            stat2.execute("DROP TRIGGER " + name);
        }
    }
}
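A hedged usage sketch: since the method is protected, a real caller would sit in the same package or a subclass. The "FT_" prefix below matches the trigger prefix that stock H2's FullText module uses, but it is an assumption here, not something shown in the siena source above.

// Illustrative call site (prefix value assumed)
try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:ft")) {
    FullText.removeAllTriggers(conn, "FT_");
}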
Use of org.h2.api.Trigger in project frostwire by frostwire.
The class FullTextLucene2, method search.
/**
* Do the search.
*
* @param conn the database connection
* @param text the query
* @param limit the limit
* @param offset the offset
* @param data whether the raw data should be returned
* @return the result set
*/
protected static ResultSet search(Connection conn, String text, int limit,
        int offset, boolean data) throws SQLException {
    SimpleResultSet result = createResultSet(data);
    if (conn.getMetaData().getURL().startsWith("jdbc:columnlist:")) {
        // this is just to query the result set columns
        return result;
    }
    if (text == null || text.trim().length() == 0) {
        return result;
    }
    try {
        IndexAccess access = getIndexAccess(conn);
        /*## LUCENE2 ##
        access.modifier.flush();
        String path = getIndexPath(conn);
        IndexReader reader = IndexReader.open(path);
        Analyzer analyzer = new StandardAnalyzer();
        Searcher searcher = new IndexSearcher(reader);
        QueryParser parser = new QueryParser(LUCENE_FIELD_DATA, analyzer);
        Query query = parser.parse(text);
        Hits hits = searcher.search(query);
        int max = hits.length();
        if (limit == 0) {
            limit = max;
        }
        for (int i = 0; i < limit && i + offset < max; i++) {
            Document doc = hits.doc(i + offset);
            float score = hits.score(i + offset);
        //*/
        // ## LUCENE3 ##
        // take a reference as the searcher may change
        Searcher searcher = access.searcher;
        // reuse the same analyzer; it's thread-safe;
        // also allows subclasses to control the analyzer used.
        Analyzer analyzer = access.writer.getAnalyzer();
        QueryParser parser = new QueryParser(Version.LUCENE_30,
                LUCENE_FIELD_DATA, analyzer);
        Query query = parser.parse(text);
        // Lucene 3 insists on a hard limit and will not provide
        // a total hits value. Take at least 100 which is
        // an optimal limit for Lucene as any more
        // will trigger writing results to disk.
        int maxResults = (limit == 0 ? 100 : limit) + offset;
        TopDocs docs = searcher.search(query, maxResults);
        if (limit == 0) {
            limit = docs.totalHits;
        }
        for (int i = 0, len = docs.scoreDocs.length;
                i < limit && i + offset < docs.totalHits && i + offset < len; i++) {
            ScoreDoc sd = docs.scoreDocs[i + offset];
            Document doc = searcher.doc(sd.doc);
            float score = sd.score;
            // */
            String q = doc.get(LUCENE_FIELD_QUERY);
            if (data) {
                int idx = q.indexOf(" WHERE ");
                JdbcConnection c = (JdbcConnection) conn;
                Session session = (Session) c.getSession();
                Parser p = new Parser(session);
                String tab = q.substring(0, idx);
                ExpressionColumn expr = (ExpressionColumn) p.parseExpression(tab);
                String schemaName = expr.getOriginalTableAliasName();
                String tableName = expr.getColumnName();
                q = q.substring(idx + " WHERE ".length());
                Object[][] columnData = parseKey(conn, q);
                result.addRow(schemaName, tableName, columnData[0], columnData[1], score);
            } else {
                result.addRow(q, score);
            }
        }
        /*## LUCENE2 ##
        // TODO keep it open if possible
        reader.close();
        //*/
    } catch (Exception e) {
        throw convertException(e);
    }
    return result;
}
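In stock H2, this search method backs the FTL_SEARCH / FTL_SEARCH_DATA SQL aliases. Assuming FullTextLucene2 registers its aliases the same way, a caller could query the index as sketched below (the alias name and result columns follow stock H2 and are assumptions, not confirmed by the frostwire source shown here):

// Hedged usage sketch: query through the SQL alias, which in stock H2
// is created as FTL_SEARCH(text, limit, offset).
PreparedStatement prep = conn.prepareStatement(
        "SELECT QUERY, SCORE FROM FTL_SEARCH(?, ?, ?)");
prep.setString(1, "hello");
prep.setInt(2, 10); // limit: at most 10 rows
prep.setInt(3, 0);  // offset: start at the first hit
try (ResultSet rs = prep.executeQuery()) {
    while (rs.next()) {
        System.out.println(rs.getString("QUERY") + " score=" + rs.getFloat("SCORE"));
    }
}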