Example use of org.hibernate.Cache from the hibernate-orm project (Hibernate): class ReadWriteTest, method testEntityCacheContentsAfterEvictAll.
// Verifies that evicting all entity regions empties the second-level cache,
// that containsEntity then reports the entries as gone, and that a subsequent
// load repopulates the cache via a recorded miss.
@Test
public void testEntityCacheContentsAfterEvictAll() throws Exception {
    final List<Citizen> citizens = saveSomeCitizens();
    withTxSession(session -> {
        final Cache secondLevelCache = session.getSessionFactory().getCache();
        final Statistics statistics = sessionFactory().getStatistics();
        final SecondLevelCacheStatistics citizenCacheStats =
                statistics.getSecondLevelCacheStatistics(Citizen.class.getName());

        // Both saved citizens should be cached, accounting for exactly two puts.
        assertTrue("2lc entity cache is expected to contain Citizen id = " + citizens.get(0).getId(),
                secondLevelCache.containsEntity(Citizen.class, citizens.get(0).getId()));
        assertTrue("2lc entity cache is expected to contain Citizen id = " + citizens.get(1).getId(),
                secondLevelCache.containsEntity(Citizen.class, citizens.get(1).getId()));
        assertEquals(2, citizenCacheStats.getPutCount());

        // Evict everything, then advance the test clock so the eviction is observable.
        secondLevelCache.evictEntityRegions();
        TIME_SERVICE.advance(1);

        assertEquals(0, citizenCacheStats.getElementCountInMemory());
        assertFalse("2lc entity cache is expected to not contain Citizen id = " + citizens.get(0).getId(),
                secondLevelCache.containsEntity(Citizen.class, citizens.get(0).getId()));
        assertFalse("2lc entity cache is expected to not contain Citizen id = " + citizens.get(1).getId(),
                secondLevelCache.containsEntity(Citizen.class, citizens.get(1).getId()));

        // Loading after the full eviction must hit the database, i.e. count as a miss.
        final Citizen reloaded = session.load(Citizen.class, citizens.get(0).getId());
        assertNotNull(reloaded);
        assertNotNull(reloaded.getFirstname());
        assertEquals(1, citizenCacheStats.getMissCount());

        markRollbackOnly(session);
    });
}
Example use of org.hibernate.Cache from the hibernate-orm project (Hibernate): class ReadWriteTest, method testMultipleEvictAll.
// Evicting all entity regions repeatedly must be safe: back-to-back within a
// single transaction and again in a later transaction before deleting the data.
@Test
public void testMultipleEvictAll() throws Exception {
    final List<Citizen> citizens = saveSomeCitizens();

    // Two consecutive evictions inside the same transaction.
    withTxSession(session -> {
        final Cache secondLevelCache = session.getSessionFactory().getCache();
        secondLevelCache.evictEntityRegions();
        secondLevelCache.evictEntityRegions();
    });

    // A third eviction in a fresh transaction, followed by cleanup of the test data.
    withTxSession(session -> {
        final Cache secondLevelCache = session.getSessionFactory().getCache();
        secondLevelCache.evictEntityRegions();
        session.delete(session.load(Citizen.class, citizens.get(0).getId()));
        session.delete(session.load(Citizen.class, citizens.get(1).getId()));
    });
}
Example use of org.hibernate.Cache from the uPortal project (Jasig): class PortalRawEventsAggregatorImpl, method evictAggregates.
/**
 * Evicts the given entities, and every collection role they own, from the
 * Hibernate second-level cache, then logs how many of each were evicted.
 *
 * @param entitiesToEvict map of entity class to the identifiers to evict
 */
@AggrEventsTransactional
@Override
public void evictAggregates(Map<Class<?>, Collection<Serializable>> entitiesToEvict) {
    final Session session = getEntityManager().unwrap(Session.class);
    final SessionFactory sessionFactory = session.getSessionFactory();
    final Cache cache = sessionFactory.getCache();

    int entityEvictions = 0;
    int collectionEvictions = 0;
    for (final Entry<Class<?>, Collection<Serializable>> entry : entitiesToEvict.entrySet()) {
        final Class<?> entityClass = entry.getKey();
        // Collection roles belonging to this entity are evicted alongside each id.
        final List<String> collectionRoles = getCollectionRoles(sessionFactory, entityClass);
        for (final Serializable id : entry.getValue()) {
            cache.evictEntity(entityClass, id);
            entityEvictions++;
            for (final String collectionRole : collectionRoles) {
                cache.evictCollection(collectionRole, id);
                collectionEvictions++;
            }
        }
    }
    logger.debug("Evicted {} entities and {} collections from hibernate caches", entityEvictions, collectionEvictions);
}
Example use of org.hibernate.Cache from the uPortal project (Jasig): class PortalRawEventsAggregatorImpl, method doAggregateRawEventsInternal.
// Runs one batch of raw portal-event aggregation. Must only be called while the
// current thread/server owns AGGREGATION_LOCK_NAME. Returns null if dimension
// population failed, an empty-complete result if there are no events at all,
// otherwise the count of processed events plus the aggregated date range.
private EventProcessingResult doAggregateRawEventsInternal() {
if (!this.clusterLockService.isLockOwner(AGGREGATION_LOCK_NAME)) {
throw new IllegalStateException("The cluster lock " + AGGREGATION_LOCK_NAME + " must be owned by the current thread and server");
}
if (!this.portalEventDimensionPopulator.isCheckedDimensions()) {
// First time aggregation has happened, run populateDimensions to ensure enough
// dimension data exists
final boolean populatedDimensions = this.portalEventAggregationManager.populateDimensions();
if (!populatedDimensions) {
this.logger.warn("Aborting raw event aggregation, populateDimensions returned false so the state of date/time dimensions is unknown");
return null;
}
}
// Flush any dimension creation before aggregation
final EntityManager entityManager = this.getEntityManager();
entityManager.flush();
// COMMIT flush mode: avoid implicit flushes during the (read-heavy) aggregation queries.
entityManager.setFlushMode(FlushModeType.COMMIT);
final IEventAggregatorStatus eventAggregatorStatus = eventAggregationManagementDao.getEventAggregatorStatus(IEventAggregatorStatus.ProcessingType.AGGREGATION, true);
// Update status with current server name
final String serverName = this.portalInfoProvider.getUniqueServerName();
final String previousServerName = eventAggregatorStatus.getServerName();
if (previousServerName != null && !serverName.equals(previousServerName)) {
// Aggregation moved to a different server: the local second-level cache may hold
// stale aggregation entities, so evict all entity regions before continuing.
this.logger.debug("Last aggregation run on {} clearing all aggregation caches", previousServerName);
final Session session = getEntityManager().unwrap(Session.class);
final Cache cache = session.getSessionFactory().getCache();
cache.evictEntityRegions();
}
eventAggregatorStatus.setServerName(serverName);
// Calculate date range for aggregation
DateTime lastAggregated = eventAggregatorStatus.getLastEventDate();
if (lastAggregated == null) {
lastAggregated = portalEventDao.getOldestPortalEventTimestamp();
// No portal events to aggregate, skip aggregation
if (lastAggregated == null) {
return new EventProcessingResult(0, null, null, true);
}
// First time aggregation has run, initialize the CLEAN_UNCLOSED status to save catch-up
// time
final IEventAggregatorStatus cleanUnclosedStatus = eventAggregationManagementDao.getEventAggregatorStatus(IEventAggregatorStatus.ProcessingType.CLEAN_UNCLOSED, true);
AggregationIntervalInfo oldestMinuteInterval = this.intervalHelper.getIntervalInfo(AggregationInterval.MINUTE, lastAggregated);
// Start CLEAN_UNCLOSED one minute before the oldest event's minute interval.
cleanUnclosedStatus.setLastEventDate(oldestMinuteInterval.getStart().minusMinutes(1));
eventAggregationManagementDao.updateEventAggregatorStatus(cleanUnclosedStatus);
}
// Upper bound: now minus the configured delay, rounded down to the whole second
// (presumably the delay leaves in-flight events time to land — TODO confirm).
final DateTime newestEventTime = DateTime.now().minus(this.aggregationDelay).secondOfMinute().roundFloorCopy();
final Thread currentThread = Thread.currentThread();
final String currentName = currentThread.getName();
final MutableInt events = new MutableInt();
// Holder updated by AggregateEventsHandler with the last event date actually processed.
final MutableObject lastEventDate = new MutableObject(newestEventTime);
boolean complete;
try {
// Temporarily rename the thread so the in-progress range shows up in thread dumps.
currentThread.setName(currentName + "-" + lastAggregated + "_" + newestEventTime);
logger.debug("Starting aggregation of events between {} (inc) and {} (exc)", lastAggregated, newestEventTime);
// Do aggregation, capturing the start and end dates
eventAggregatorStatus.setLastStart(DateTime.now());
complete = portalEventDao.aggregatePortalEvents(lastAggregated, newestEventTime, this.eventAggregationBatchSize, new AggregateEventsHandler(events, lastEventDate, eventAggregatorStatus));
eventAggregatorStatus.setLastEventDate((DateTime) lastEventDate.getValue());
eventAggregatorStatus.setLastEnd(DateTime.now());
} finally {
// Always restore the original thread name, even if aggregation throws.
currentThread.setName(currentName);
}
// Store the results of the aggregation
eventAggregationManagementDao.updateEventAggregatorStatus(eventAggregatorStatus);
// A full batch (events == batch size) means more events may remain, so not complete.
complete = complete && (this.eventAggregationBatchSize <= 0 || events.intValue() < this.eventAggregationBatchSize);
return new EventProcessingResult(events.intValue(), lastAggregated, eventAggregatorStatus.getLastEventDate(), complete);
}
Aggregations