Use of org.apache.activemq.store.kahadb.KahaDBStore in project activemq-artemis by apache.
The class ThreeBrokerStompTemporaryQueueTest, method configurePersistenceAdapter:
protected void configurePersistenceAdapter(BrokerService broker) throws IOException {
   File dataFileDir = new File("target/test-amq-data/kahadb/" + broker.getBrokerName());
   KahaDBStore kaha = new KahaDBStore();
   kaha.setDirectory(dataFileDir);
   broker.setPersistenceAdapter(kaha);
}
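For context, a minimal sketch of how a hook like this is typically exercised: build a BrokerService, run it through configurePersistenceAdapter, and start it. The broker name and the main method below are illustrative and not part of the test.

import java.io.File;
import java.io.IOException;

import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.store.kahadb.KahaDBStore;

public class KahaDBBrokerSketch {

   // Mirrors the test hook above: one KahaDB directory per broker name.
   protected void configurePersistenceAdapter(BrokerService broker) throws IOException {
      File dataFileDir = new File("target/test-amq-data/kahadb/" + broker.getBrokerName());
      KahaDBStore kaha = new KahaDBStore();
      kaha.setDirectory(dataFileDir);
      broker.setPersistenceAdapter(kaha);
   }

   public static void main(String[] args) throws Exception {
      BrokerService broker = new BrokerService();
      broker.setBrokerName("BrokerA");   // illustrative broker name
      broker.setUseJmx(false);
      new KahaDBBrokerSketch().configurePersistenceAdapter(broker);
      broker.start();
      broker.waitUntilStarted();
      // ... exercise the broker ...
      broker.stop();
      broker.waitUntilStopped();
   }
}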
Use of org.apache.activemq.store.kahadb.KahaDBStore in project activemq-artemis by apache.
The class TwoBrokerVirtualDestDinamicallyIncludedDestTest, method configurePersistenceAdapter:
protected void configurePersistenceAdapter(BrokerService broker) throws IOException {
   File dataFileDir = new File("target/test-amq-data/kahadb/" + broker.getBrokerName());
   KahaDBStore kaha = new KahaDBStore();
   kaha.setDirectory(dataFileDir);
   kaha.deleteAllMessages();
   broker.setPersistenceAdapter(kaha);
}
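The only difference from the previous hook is the deleteAllMessages() call, which flags the store to purge any existing journal and index when the adapter starts, so each run begins empty. A hedged sketch of additional KahaDBStore tuning that tests sometimes apply at the same point; the setters exist on the store, but the values shown are illustrative:

import java.io.File;
import java.io.IOException;

import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.store.kahadb.KahaDBStore;

public class TunedKahaDBSketch {

   protected void configurePersistenceAdapter(BrokerService broker) throws IOException {
      KahaDBStore kaha = new KahaDBStore();
      kaha.setDirectory(new File("target/test-amq-data/kahadb/" + broker.getBrokerName()));
      kaha.deleteAllMessages();                  // purge journal and index when the adapter starts
      kaha.setJournalMaxFileLength(1024 * 1024); // smaller journal files for tests (illustrative value)
      kaha.setIndexWriteBatchSize(100);          // flush index writes in smaller batches (illustrative value)
      kaha.setEnableJournalDiskSyncs(false);     // trade durability for test speed
      broker.setPersistenceAdapter(kaha);
   }
}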
Use of org.apache.activemq.store.kahadb.KahaDBStore in project activemq-artemis by apache.
The class DurableSubsOfflineSelectorConcurrentConsumeIndexUseTest, method testIndexPageUsage:
public void testIndexPageUsage() throws Exception {
   Connection con = createConnection();
   Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
   session.close();
   session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
   session.close();
   session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   session.createDurableSubscriber(topic, "all", null, true);
   session.close();
   session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   session.createDurableSubscriber(topic, "all2", null, true);
   session.close();
   con.close();

   // send messages
   final CountDownLatch goOn = new CountDownLatch(1);
   Thread sendThread = new Thread() {
      @Override
      public void run() {
         try {
            final Connection sendCon = createConnection("send");
            final Session sendSession = sendCon.createSession(false, Session.AUTO_ACKNOWLEDGE);
            final MessageProducer producer = sendSession.createProducer(null);
            for (int i = 0; i < messageCount; i++) {
               boolean filter = i % 2 == 1;
               Message message = sendSession.createMessage();
               message.setStringProperty("filter", filter ? "true" : "false");
               producer.send(topic, message);
               if (i > 0 && i % 10000 == 0) {
                  LOG.info("Sent:" + i);
               }
               if (i > messageCount / 2) {
                  goOn.countDown();
               }
            }
            sendSession.close();
            sendCon.close();
         } catch (Exception e) {
            exceptions.add(e);
         }
      }
   };
   sendThread.start();
   goOn.await(5, TimeUnit.MINUTES);
   LOG.info("Activating consumers");

   // consume messages in parallel
   con = createConnection();
   session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   MessageConsumer consumerTrue = session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
   Listener listenerT = new Listener();
   consumerTrue.setMessageListener(listenerT);
   MessageConsumer consumerFalse = session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
   Listener listenerF = new Listener();
   consumerFalse.setMessageListener(listenerF);
   MessageConsumer consumerAll = session.createDurableSubscriber(topic, "all", null, true);
   Listener listenerA = new Listener();
   consumerAll.setMessageListener(listenerA);
   MessageConsumer consumerAll2 = session.createDurableSubscriber(topic, "all2", null, true);
   Listener listenerA2 = new Listener();
   consumerAll2.setMessageListener(listenerA2);
   waitFor(listenerA, messageCount);
   assertEquals(messageCount, listenerA.count);
   waitFor(listenerA2, messageCount);
   assertEquals(messageCount, listenerA2.count);
   assertEquals(messageCount / 2, listenerT.count);
   assertEquals(messageCount / 2, listenerF.count);
   consumerTrue.close();
   session.unsubscribe("true");
   consumerFalse.close();
   session.unsubscribe("false");
   consumerAll.close();
   session.unsubscribe("all");
   session.close();
   con.close();

   PersistenceAdapter persistenceAdapter = broker.getPersistenceAdapter();
   if (persistenceAdapter instanceof KahaDBPersistenceAdapter) {
      final KahaDBStore store = ((KahaDBPersistenceAdapter) persistenceAdapter).getStore();
      LOG.info("Store page count: " + store.getPageFile().getPageCount());
      LOG.info("Store free page count: " + store.getPageFile().getFreePageCount());
      LOG.info("Store page in-use: " + (store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount()));
      assertTrue("no leak of pages, always use just 11", Wait.waitFor(new Wait.Condition() {
         @Override
         public boolean isSatisified() throws Exception {
            return 11 == store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount();
         }
      }, TimeUnit.SECONDS.toMillis(10)));
   }
}
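The test relies on helpers defined elsewhere in the test class (createConnection, messageCount, exceptions, topic, broker, LOG, Listener, waitFor). A minimal sketch of what the Listener and waitFor pieces could look like, assuming a simple counting listener; the actual implementations in the test may differ:

import java.util.concurrent.TimeUnit;

import javax.jms.Message;
import javax.jms.MessageListener;

import org.apache.activemq.util.Wait;

class ListenerSketch {

   // Counts delivered messages; the assertions above compare this count to messageCount.
   static class Listener implements MessageListener {
      volatile int count = 0;

      @Override
      public void onMessage(Message message) {
         count++;
      }
   }

   // Blocks until the listener has seen at least the expected number of messages.
   static void waitFor(final Listener listener, final int expected) throws Exception {
      Wait.waitFor(new Wait.Condition() {
         @Override
         public boolean isSatisified() throws Exception {
            return listener.count >= expected;
         }
      }, TimeUnit.MINUTES.toMillis(10));
   }
}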
Use of org.apache.activemq.store.kahadb.KahaDBStore in project activemq-artemis by apache.
The class DurableSubscriberNonPersistentMessageTest, method setUp:
@Override
protected void setUp() throws Exception {
   super.setUp();
   broker = new BrokerService();
   TransportConnector transportConnector = broker.addConnector("tcp://localhost:0");
   KahaDBStore store = new KahaDBStore();
   store.setDirectory(new File("data"));
   broker.setPersistenceAdapter(store);
   broker.start();
   brokerURL = "failover:(" + transportConnector.getPublishableConnectString() + ")";
   consumerBrokerURL = brokerURL + "?jms.prefetchPolicy.all=100";
   mbeanServer = ManagementFactory.getPlatformMBeanServer();
}
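The consumerBrokerURL built above caps consumer prefetch through the jms.prefetchPolicy.all connection option. A minimal sketch of how a consumer connection might be created from that URL; the client ID and the helper method are illustrative, not taken from the test:

import javax.jms.Connection;

import org.apache.activemq.ActiveMQConnectionFactory;

public class ConsumerConnectionSketch {

   // consumerBrokerURL comes from the setUp above,
   // e.g. "failover:(tcp://localhost:61616)?jms.prefetchPolicy.all=100"
   static Connection connect(String consumerBrokerURL) throws Exception {
      ActiveMQConnectionFactory factory = new ActiveMQConnectionFactory(consumerBrokerURL);
      Connection connection = factory.createConnection();
      connection.setClientID("durable-client");   // durable subscribers require a client ID
      connection.start();
      return connection;
   }
}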
Use of org.apache.activemq.store.kahadb.KahaDBStore in project activemq-artemis by apache.
The class DurableSubsOfflineSelectorIndexUseTest, method testIndexPageUsage:
public void testIndexPageUsage() throws Exception {
   Connection con = createConnection();
   Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
   session.close();
   session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
   session.close();
   con.close();

   // send messages
   final Connection sendCon = createConnection("send");
   final Session sendSession = sendCon.createSession(false, Session.AUTO_ACKNOWLEDGE);
   final MessageProducer producer = sendSession.createProducer(null);
   Thread sendThread = new Thread() {
      @Override
      public void run() {
         try {
            for (int i = 0; i < messageCount; i++) {
               boolean filter = i % 2 == 1;
               Message message = sendSession.createMessage();
               message.setStringProperty("filter", filter ? "true" : "false");
               producer.send(topic, message);
               if (i > 0 && i % 1000 == 0) {
                  LOG.info("Sent:" + i);
               }
            }
            sendSession.close();
            sendCon.close();
         } catch (Exception e) {
            exceptions.add(e);
         }
      }
   };
   sendThread.start();
   sendThread.join();

   // settle with sent messages
   TimeUnit.SECONDS.sleep(4);

   // consume messages
   con = createConnection();
   session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
   MessageConsumer consumerTrue = session.createDurableSubscriber(topic, "true", "filter = 'true'", true);
   Listener listenerT = new Listener();
   consumerTrue.setMessageListener(listenerT);
   waitFor(listenerT, messageCount / 2);
   MessageConsumer consumerFalse = session.createDurableSubscriber(topic, "false", "filter = 'false'", true);
   Listener listenerF = new Listener();
   consumerFalse.setMessageListener(listenerF);
   waitFor(listenerF, messageCount / 2);
   assertEquals(messageCount / 2, listenerT.count);
   assertEquals(messageCount / 2, listenerF.count);
   consumerTrue.close();
   session.unsubscribe("true");
   consumerFalse.close();
   session.unsubscribe("false");
   session.close();
   con.close();

   PersistenceAdapter persistenceAdapter = broker.getPersistenceAdapter();
   if (persistenceAdapter instanceof KahaDBPersistenceAdapter) {
      final KahaDBStore store = ((KahaDBPersistenceAdapter) persistenceAdapter).getStore();
      LOG.info("Store page count: " + store.getPageFile().getPageCount());
      LOG.info("Store free page count: " + store.getPageFile().getFreePageCount());
      LOG.info("Store page in-use: " + (store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount()));
      assertTrue("no leak of pages, always use just 10", Wait.waitFor(new Wait.Condition() {
         @Override
         public boolean isSatisified() throws Exception {
            return 10 == store.getPageFile().getPageCount() - store.getPageFile().getFreePageCount();
         }
      }, TimeUnit.SECONDS.toMillis(10)));
   }
}
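Like the concurrent variant above, this test only performs the page-count check when the broker's persistence adapter is a KahaDBPersistenceAdapter. A minimal sketch of a broker setup under which that branch is taken; the broker name and data directory are illustrative, not taken from the test:

import java.io.File;

import org.apache.activemq.broker.BrokerService;
import org.apache.activemq.store.kahadb.KahaDBPersistenceAdapter;

public class KahaDBIndexTestBrokerSketch {

   static BrokerService createBroker() throws Exception {
      BrokerService brokerService = new BrokerService();
      brokerService.setBrokerName("indexUseBroker");   // illustrative name
      brokerService.setDeleteAllMessagesOnStartup(true);
      KahaDBPersistenceAdapter kahadb = new KahaDBPersistenceAdapter();
      kahadb.setDirectory(new File("target/test-amq-data/kahadb/indexUse"));   // illustrative path
      brokerService.setPersistenceAdapter(kahadb);
      brokerService.addConnector("tcp://localhost:0");
      return brokerService;
   }
}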