Example usage of org.apereo.portal.concurrency.CallableWithoutResult in the uPortal project (Jasig): the importDataDirectory method of the JaxbPortalDataHandlerService class.
/**
 * Imports every supported data file found under the given directory, one data type at a
 * time in the configured {@code dataKeyImportOrder}, writing a per-type summary to
 * {@code data-import.txt} in the log directory.
 *
 * @param directory root directory to scan for importable files; must exist
 * @param pattern optional Ant-style include pattern; when {@code null} the configured
 *        {@code dataFileIncludes} are used instead (excludes always apply)
 * @param options batch options, may be {@code null}; consulted for failOnError and (via
 *        {@code determineLogDirectory}) for where reports are written
 * @throws IllegalArgumentException if {@code directory} does not exist
 * @throws RuntimeException if the report file cannot be created, if imports fail and
 *         failOnError is in effect, or if interrupted while waiting for imports
 * @throws IllegalStateException if files were found whose PortalDataKey is not listed in
 *         {@code dataKeyImportOrder}
 */
@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
if (!directory.exists()) {
throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
}
// Create the file filter to use when searching for files to import
final FileFilter fileFilter;
if (pattern != null) {
// A caller-supplied pattern overrides the configured includes; excludes still apply
fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
} else {
fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
}
// Determine the parent directory to log to
final File logDirectory = determineLogDirectory(options, "import");
// Setup reporting file
final File importReport = new File(logDirectory, "data-import.txt");
final PrintWriter reportWriter;
try {
// Periodic flushing keeps the report file readable while a long import is running.
// NOTE(review): the units of the 500 argument are not visible here — confirm against
// PeriodicFlushingBufferedWriter (presumably milliseconds).
reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
} catch (IOException e) {
throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
}
// Convert directory to URI String to provide better logging output
final URI directoryUri = directory.toURI();
final String directoryUriStr = directoryUri.toString();
// Thread-local base dir — presumably read by downstream import code to resolve/log
// paths relative to the import root (TODO confirm); cleared in the finally block below
IMPORT_BASE_DIR.set(directoryUriStr);
try {
// Scan the specified directory for files to import
logger.info("Scanning for files to Import from: {}", directory);
final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
final long resourceCount = fileProcessor.getResourceCount();
logger.info("Found {} files to Import from: {}", resourceCount, directory);
// See if the import should fail on error; defaults to true when no options are given
final boolean failOnError = options != null ? options.isFailOnError() : true;
// Map of files to import, grouped by type
final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();
// Import the data files one type at a time in the configured order; entries are
// removed from dataToImport so any leftovers can be detected after the loop
for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
final Queue<Resource> files = dataToImport.remove(portalDataKey);
if (files == null) {
// No files of this type were found in the directory
continue;
}
final Queue<ImportFuture<?>> importFutures = new LinkedList<>();
final List<FutureHolder<?>> failedFutures = new LinkedList<>();
final int fileCount = files.size();
logger.info("Importing {} files of type {}", fileCount, portalDataKey);
// Report line format: <dataKey>,<fileCount>
reportWriter.println(portalDataKey + "," + fileCount);
while (!files.isEmpty()) {
final Resource file = files.poll();
// Check for completed futures on every iteration, needed to fail as fast as
// possible on an import exception
final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, false);
failedFutures.addAll(newFailed);
// -1 until the task starts; then the start nanoTime; finally the elapsed nanos
final AtomicLong importTime = new AtomicLong(-1);
// Create import task
final Callable<Object> task = new CallableWithoutResult() {
@Override
protected void callWithoutResult() {
// Re-establish the base-dir thread-local on the worker thread
IMPORT_BASE_DIR.set(directoryUriStr);
importTime.set(System.nanoTime());
try {
importData(file, portalDataKey);
} finally {
importTime.set(System.nanoTime() - importTime.get());
IMPORT_BASE_DIR.remove();
}
}
};
// Submit the import task
final Future<?> importFuture = this.importExportThreadPool.submit(task);
// Add the future for tracking
importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
}
// Wait for all of the imports of this type to complete before starting the next type
final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, true);
failedFutures.addAll(newFailed);
if (failOnError && !failedFutures.isEmpty()) {
throw new RuntimeException(failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n" + "\tPer-entity exception logs and a full report can be found in " + // location clickable in some shells
logDirectory.toURI() + "\n");
}
reportWriter.flush();
}
// Anything still in the map had no position in the configured import order — fail loudly
if (!dataToImport.isEmpty()) {
throw new IllegalStateException("The following PortalDataKeys are not listed in the dataTypeImportOrder List: " + dataToImport.keySet());
}
logger.info("For a detailed report on the data import see " + importReport);
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while waiting for entities to import", e);
} finally {
IOUtils.closeQuietly(reportWriter);
IMPORT_BASE_DIR.remove();
}
}
Example usage of org.apereo.portal.concurrency.CallableWithoutResult in the uPortal project (Jasig): the testAllDefinitionDaoMethods method of the JpaPortletDaoTest class.
/**
 * Exercises the portlet-definition DAO end to end: create two definitions, retrieve them
 * by id, fname, and name, then add, remove, and verify portlet preferences.
 *
 * <p>Each step runs inside its own {@code execute(...)} callback so each block operates on
 * state persisted by the previous one. Uses the diamond operator for the expected-prefs
 * lists to match the file's modern generics style.
 */
@Test
public void testAllDefinitionDaoMethods() throws Exception {
    final IPortletDefinitionId portletDefinitionId =
            execute(
                    new Callable<IPortletDefinitionId>() {
                        @Override
                        public IPortletDefinitionId call() {
                            final IPortletType channelType =
                                    jpaChannelTypeDao.createPortletType("BaseType", "foobar");
                            // Create a definition
                            final IPortletDefinition chanDef1 =
                                    new PortletDefinitionImpl(
                                            channelType,
                                            "fname1",
                                            "Test Portlet 1",
                                            "Test Portlet 1 Title",
                                            "/context1",
                                            "portletName1",
                                            false);
                            jpaPortletDefinitionDao.savePortletDefinition(chanDef1);
                            // Try all of the retrieval options
                            final IPortletDefinition portDef1a =
                                    jpaPortletDefinitionDao.getPortletDefinition(
                                            chanDef1.getPortletDefinitionId());
                            // NOTE(review): this second save of chanDef1 looks redundant —
                            // presumably it verifies that re-saving an unchanged definition
                            // is harmless; confirm and either document or remove upstream.
                            jpaPortletDefinitionDao.savePortletDefinition(chanDef1);
                            assertEquals(chanDef1, portDef1a);
                            // Create a second definition with the same app/portlet
                            final IPortletDefinition chanDef2 =
                                    new PortletDefinitionImpl(
                                            channelType,
                                            "fname2",
                                            "Test Portlet 2",
                                            "Test Portlet 2 Title",
                                            "/uPortal",
                                            "portletName2",
                                            true);
                            jpaPortletDefinitionDao.savePortletDefinition(chanDef2);
                            return chanDef2.getPortletDefinitionId();
                        }
                    });
    execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final IPortletDefinition chanDef2 =
                            jpaPortletDefinitionDao.getPortletDefinitionByFname("fname2");
                    // Add some preferences
                    final List<IPortletPreference> prefsList2 = chanDef2.getPortletPreferences();
                    prefsList2.add(new PortletPreferenceImpl("prefName1", false, "val1", "val2"));
                    prefsList2.add(new PortletPreferenceImpl("prefName2", true, "val3", "val4"));
                    jpaPortletDefinitionDao.savePortletDefinition(chanDef2);
                }
            });
    execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final IPortletDefinition chanDef2 =
                            jpaPortletDefinitionDao.getPortletDefinitionByFname("fname2");
                    // verify preferences persisted by the previous block
                    final List<IPortletPreference> prefsList2 = chanDef2.getPortletPreferences();
                    assertEquals(2, prefsList2.size());
                }
            });
    execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    // Check prefs, remove one and add another
                    final IPortletDefinition portDef3 =
                            jpaPortletDefinitionDao.getPortletDefinitionByName("Test Portlet 2");
                    final List<IPortletPreference> prefsList3 = portDef3.getPortletPreferences();
                    final List<IPortletPreference> expectedPrefsList3 = new ArrayList<>();
                    expectedPrefsList3.add(
                            new PortletPreferenceImpl("prefName1", false, "val1", "val2"));
                    expectedPrefsList3.add(
                            new PortletPreferenceImpl("prefName2", true, "val3", "val4"));
                    assertEquals(expectedPrefsList3, prefsList3);
                    prefsList3.remove(1);
                    prefsList3.add(new PortletPreferenceImpl("prefName3", false, "val5", "val6"));
                    jpaPortletDefinitionDao.savePortletDefinition(portDef3);
                }
            });
    execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    // Check prefs reflect the remove/add from the previous block
                    final IPortletDefinition portDef4 =
                            jpaPortletDefinitionDao.getPortletDefinition(portletDefinitionId);
                    final List<IPortletPreference> prefsList4 = portDef4.getPortletPreferences();
                    final List<IPortletPreference> expectedPrefsList4 = new ArrayList<>();
                    expectedPrefsList4.add(
                            new PortletPreferenceImpl("prefName1", false, "val1", "val2"));
                    expectedPrefsList4.add(
                            new PortletPreferenceImpl("prefName3", false, "val5", "val6"));
                    assertEquals(expectedPrefsList4, prefsList4);
                }
            });
}
Example usage of org.apereo.portal.concurrency.CallableWithoutResult in the uPortal project (Jasig): the testVersionBadSql method of the JpaVersionDaoTest class.
/**
 * Verifies the version DAO tolerates schema drift: after the LOCAL_VER column is dropped
 * out from under it, getVersion still returns the previously stored major/minor/patch
 * values with a null local version.
 */
@Test
public void testVersionBadSql() {
    final String productName = "TEST_VERSION";
    // Seed an initial version row for the product and sanity-check it
    this.execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    Version v = versionDao.getVersion(productName);
                    assertNull(v);
                    v = versionDao.setVersion(productName, 1, 2, 3, null);
                    assertNotNull(v);
                    assertEquals(1, v.getMajor());
                    assertEquals(2, v.getMinor());
                    assertEquals(3, v.getPatch());
                    assertNull(v.getLocal());
                }
            });
    // Simulate schema drift by removing the LOCAL_VER column
    jdbcOperations.execute("ALTER TABLE UP_VERSION DROP LOCAL_VER");
    try {
        // The column no longer exists — the DAO must still return the stored version
        this.execute(
                new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        final Version reloaded = versionDao.getVersion(productName);
                        assertNotNull(reloaded);
                        assertEquals(1, reloaded.getMajor());
                        assertEquals(2, reloaded.getMinor());
                        assertEquals(3, reloaded.getPatch());
                        assertNull(reloaded.getLocal());
                    }
                });
    } finally {
        // Restore the column so subsequent tests see the expected schema
        jdbcOperations.execute("ALTER TABLE UP_VERSION ADD COLUMN LOCAL_VER INTEGER");
    }
}
Example usage of org.apereo.portal.concurrency.CallableWithoutResult in the uPortal project (Jasig): the testUnclosedBaseAggregationRangeQuery method of the JpaBaseAggregationDaoTest class.
/**
 * Verifies that {@code getUnclosedAggregations} returns only aggregations whose interval
 * was never marked complete, and that closing them empties the result of a second query.
 *
 * <p>Group A aggregations are only closed when {@code aggrs % 4 == 0} (i.e. the first of
 * the two five-minute intervals), so exactly one Group A aggregation remains unclosed.
 * Fix: the long literal now uses an uppercase {@code L} suffix — lowercase {@code l} is
 * easily misread as the digit 1.
 */
@Test
public final void testUnclosedBaseAggregationRangeQuery() throws Exception {
    final IEntityGroup entityGroupA = mock(IEntityGroup.class);
    when(entityGroupA.getServiceName()).thenReturn(new CompositeName("local"));
    when(entityGroupA.getName()).thenReturn("Group A");
    when(compositeGroupService.findGroup("local.0")).thenReturn(entityGroupA);
    final IEntityGroup entityGroupB = mock(IEntityGroup.class);
    when(entityGroupB.getServiceName()).thenReturn(new CompositeName("local"));
    when(entityGroupB.getName()).thenReturn("Group B");
    when(compositeGroupService.findGroup("local.1")).thenReturn(entityGroupB);
    // Counts aggregations created inside the callback below
    final MutableInt aggrs = new MutableInt();
    // Create 10 minutes of aggregations
    final DateTime start =
            new DateTime(1326734644000L, DateTimeZone.UTC).minuteOfDay().roundFloorCopy();
    final DateTime end = start.plusMinutes(10);
    final AggregationInterval interval = AggregationInterval.FIVE_MINUTE;
    final MutableObject startObj = new MutableObject();
    final MutableObject endObj = new MutableObject();
    this.executeInTransaction(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    // Fixed seed keeps the generated update counts deterministic
                    final Random r = new Random(0);
                    final AggregatedGroupMapping groupA =
                            aggregatedGroupLookupDao.getGroupMapping("local.0");
                    final AggregatedGroupMapping groupB =
                            aggregatedGroupLookupDao.getGroupMapping("local.1");
                    populateDateTimeDimensions(
                            start,
                            end,
                            new FunctionWithoutResult<Tuple<DateDimension, TimeDimension>>() {
                                @Override
                                protected void applyWithoutResult(
                                        Tuple<DateDimension, TimeDimension> input) {
                                    final TimeDimension td = input.second;
                                    final DateDimension dd = input.first;
                                    final DateTime instant = td.getTime().toDateTime(dd.getDate());
                                    // Track the first and last instants seen
                                    if (startObj.getValue() == null) {
                                        startObj.setValue(instant);
                                    }
                                    endObj.setValue(instant);
                                    // Only act on instants that begin a five-minute interval
                                    if (instant.equals(interval.determineStart(instant))) {
                                        final AggregationIntervalInfo intervalInfo =
                                                aggregationIntervalHelper.getIntervalInfo(
                                                        interval, instant);
                                        final T baseAggregationA =
                                                getAggregationDao()
                                                        .createAggregation(
                                                                createAggregationKey(
                                                                        intervalInfo, groupA));
                                        final T baseAggregationB =
                                                getAggregationDao()
                                                        .createAggregation(
                                                                createAggregationKey(
                                                                        intervalInfo, groupB));
                                        // NOTE(review): r.nextInt(50) is re-evaluated every
                                        // iteration, so the loop runs a random-but-seeded,
                                        // shifting number of times — presumably intentional
                                        // fuzzing of the update count; confirm.
                                        for (int u = 0; u < r.nextInt(50); u++) {
                                            updateAggregation(intervalInfo, baseAggregationA, r);
                                            updateAggregation(intervalInfo, baseAggregationB, r);
                                        }
                                        // Close Group A only on every other interval; Group B
                                        // is always closed — leaving exactly one A unclosed
                                        if (aggrs.intValue() % 4 == 0) {
                                            baseAggregationA.intervalComplete(5);
                                        }
                                        baseAggregationB.intervalComplete(5);
                                        getAggregationDao().updateAggregation(baseAggregationA);
                                        getAggregationDao().updateAggregation(baseAggregationB);
                                        aggrs.add(2);
                                    }
                                }
                            });
                }
            });
    // Verify all aggrs created: 2 intervals x 2 groups
    assertEquals(4, aggrs.intValue());
    // Find the single unclosed aggregation and close it
    this.execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final Collection<T> baseAggregations =
                            getAggregationDao()
                                    .getUnclosedAggregations(
                                            start.minusDays(1), end.plusDays(1), interval);
                    assertEquals(1, baseAggregations.size());
                    for (final T baseAggregationImpl : baseAggregations) {
                        baseAggregationImpl.intervalComplete(5);
                        getAggregationDao().updateAggregation(baseAggregationImpl);
                    }
                }
            });
    // After closing, no unclosed aggregations remain
    this.execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final Collection<T> baseAggregations =
                            getAggregationDao()
                                    .getUnclosedAggregations(
                                            start.minusDays(1), end.plusDays(1), interval);
                    assertEquals(0, baseAggregations.size());
                }
            });
}
Example usage of org.apereo.portal.concurrency.CallableWithoutResult in the uPortal project (Jasig): the testBaseAggregationLifecycle method of the JpaBaseAggregationDaoTest class.
/**
 * Walks an aggregation through its full lifecycle — create, update, complete — verifying
 * after each phase that the DAO returns exactly the expected aggregations for both the
 * five-minute and hour intervals.
 *
 * <p>Fixes: uppercase {@code L} long-literal suffix (lowercase {@code l} reads as the
 * digit 1) and diamond operator on the {@code HashMap} constructions.
 */
@Test
public final void testBaseAggregationLifecycle() throws Exception {
    final IEntityGroup entityGroupA = mock(IEntityGroup.class);
    when(entityGroupA.getServiceName()).thenReturn(new CompositeName("local"));
    when(entityGroupA.getName()).thenReturn("Group A");
    when(compositeGroupService.findGroup("local.0")).thenReturn(entityGroupA);
    final IEntityGroup entityGroupB = mock(IEntityGroup.class);
    when(entityGroupB.getServiceName()).thenReturn(new CompositeName("local"));
    when(entityGroupB.getName()).thenReturn("Group B");
    when(compositeGroupService.findGroup("local.1")).thenReturn(entityGroupB);
    final DateTime instant = // just a random time
            new DateTime(1326734644000L, DateTimeZone.UTC);
    // Create required date and time dimensions
    populateDateTimeDimensions(instant.minusHours(2), instant.plusHours(2), null);
    // Create aggregations
    final Map<K, T> createdAggrs =
            this.executeInTransaction(
                    new Callable<Map<K, T>>() {
                        @Override
                        public Map<K, T> call() throws Exception {
                            final AggregatedGroupMapping groupA =
                                    aggregatedGroupLookupDao.getGroupMapping("local.0");
                            final AggregatedGroupMapping groupB =
                                    aggregatedGroupLookupDao.getGroupMapping("local.1");
                            final AggregationIntervalInfo fiveMinuteInfo =
                                    aggregationIntervalHelper.getIntervalInfo(
                                            AggregationInterval.FIVE_MINUTE, instant);
                            final AggregationIntervalInfo hourInfo =
                                    aggregationIntervalHelper.getIntervalInfo(
                                            AggregationInterval.HOUR, instant);
                            final Map<K, T> fiveMinGroupA =
                                    createAggregations(fiveMinuteInfo, groupA);
                            final Map<K, T> fiveMinGroupB =
                                    createAggregations(fiveMinuteInfo, groupB);
                            final Map<K, T> hourGroupA = createAggregations(hourInfo, groupA);
                            final Map<K, T> aggrs = new HashMap<>(fiveMinGroupA);
                            aggrs.putAll(fiveMinGroupB);
                            aggrs.putAll(hourGroupA);
                            return aggrs;
                        }
                    });
    // Verify aggregations were created
    this.execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final AggregationIntervalInfo fiveMinuteInfo =
                            aggregationIntervalHelper.getIntervalInfo(
                                    AggregationInterval.FIVE_MINUTE, instant);
                    final AggregationIntervalInfo hourInfo =
                            aggregationIntervalHelper.getIntervalInfo(
                                    AggregationInterval.HOUR, instant);
                    final Map<K, T> fiveMinGroup =
                            getAggregationDao()
                                    .getAggregationsForInterval(
                                            fiveMinuteInfo.getDateDimension(),
                                            fiveMinuteInfo.getTimeDimension(),
                                            fiveMinuteInfo.getAggregationInterval());
                    final Map<K, T> hourGroup =
                            getAggregationDao()
                                    .getAggregationsForInterval(
                                            hourInfo.getDateDimension(),
                                            hourInfo.getTimeDimension(),
                                            hourInfo.getAggregationInterval());
                    final Map<K, T> foundAggrs = new HashMap<>(fiveMinGroup);
                    foundAggrs.putAll(hourGroup);
                    assertEquals("Aggregations not created as expected", createdAggrs, foundAggrs);
                }
            });
    // Update Aggregations
    final Map<K, T> updatedAggrs =
            this.executeInTransaction(
                    new Callable<Map<K, T>>() {
                        @Override
                        public Map<K, T> call() throws Exception {
                            // Fixed seed keeps the random updates deterministic
                            final Random r = new Random(0);
                            final AggregationIntervalInfo fiveMinuteInfo =
                                    aggregationIntervalHelper.getIntervalInfo(
                                            AggregationInterval.FIVE_MINUTE, instant);
                            final AggregationIntervalInfo hourInfo =
                                    aggregationIntervalHelper.getIntervalInfo(
                                            AggregationInterval.HOUR, instant);
                            final Map<K, T> fiveMinGroup =
                                    getAggregationDao()
                                            .getAggregationsForInterval(
                                                    fiveMinuteInfo.getDateDimension(),
                                                    fiveMinuteInfo.getTimeDimension(),
                                                    fiveMinuteInfo.getAggregationInterval());
                            final Map<K, T> hourGroup =
                                    getAggregationDao()
                                            .getAggregationsForInterval(
                                                    hourInfo.getDateDimension(),
                                                    hourInfo.getTimeDimension(),
                                                    hourInfo.getAggregationInterval());
                            final Map<K, T> updatedAggrs = new HashMap<>();
                            for (final Entry<K, T> aggrEntry : fiveMinGroup.entrySet()) {
                                final T aggr = aggrEntry.getValue();
                                updateAggregation(fiveMinuteInfo, aggr, r);
                                getAggregationDao().updateAggregation(aggr);
                                updatedAggrs.put(aggrEntry.getKey(), aggr);
                            }
                            for (final Entry<K, T> aggrEntry : hourGroup.entrySet()) {
                                final T aggr = aggrEntry.getValue();
                                updateAggregation(hourInfo, aggr, r);
                                getAggregationDao().updateAggregation(aggr);
                                updatedAggrs.put(aggrEntry.getKey(), aggr);
                            }
                            return updatedAggrs;
                        }
                    });
    // Verify aggregations were updated
    this.execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final AggregationIntervalInfo fiveMinuteInfo =
                            aggregationIntervalHelper.getIntervalInfo(
                                    AggregationInterval.FIVE_MINUTE, instant);
                    final AggregationIntervalInfo hourInfo =
                            aggregationIntervalHelper.getIntervalInfo(
                                    AggregationInterval.HOUR, instant);
                    final Map<K, T> fiveMinGroup =
                            getAggregationDao()
                                    .getAggregationsForInterval(
                                            fiveMinuteInfo.getDateDimension(),
                                            fiveMinuteInfo.getTimeDimension(),
                                            fiveMinuteInfo.getAggregationInterval());
                    final Map<K, T> hourGroup =
                            getAggregationDao()
                                    .getAggregationsForInterval(
                                            hourInfo.getDateDimension(),
                                            hourInfo.getTimeDimension(),
                                            hourInfo.getAggregationInterval());
                    final Map<K, T> foundAggrs = new HashMap<>(fiveMinGroup);
                    foundAggrs.putAll(hourGroup);
                    assertEquals("Aggregations not updated as expected", updatedAggrs, foundAggrs);
                }
            });
    // Complete intervals
    final Map<K, T> completeAggrs =
            this.executeInTransaction(
                    new Callable<Map<K, T>>() {
                        @Override
                        public Map<K, T> call() throws Exception {
                            final AggregationIntervalInfo fiveMinuteInfo =
                                    aggregationIntervalHelper.getIntervalInfo(
                                            AggregationInterval.FIVE_MINUTE, instant);
                            final AggregationIntervalInfo hourInfo =
                                    aggregationIntervalHelper.getIntervalInfo(
                                            AggregationInterval.HOUR, instant);
                            final Map<K, T> fiveMinGroup =
                                    getAggregationDao()
                                            .getAggregationsForInterval(
                                                    fiveMinuteInfo.getDateDimension(),
                                                    fiveMinuteInfo.getTimeDimension(),
                                                    fiveMinuteInfo.getAggregationInterval());
                            final Map<K, T> hourGroup =
                                    getAggregationDao()
                                            .getAggregationsForInterval(
                                                    hourInfo.getDateDimension(),
                                                    hourInfo.getTimeDimension(),
                                                    hourInfo.getAggregationInterval());
                            final Map<K, T> completeAggrs = new HashMap<>();
                            // Interval durations in minutes: 5 for FIVE_MINUTE, 60 for HOUR
                            for (final Entry<K, T> aggrEntry : fiveMinGroup.entrySet()) {
                                final T aggr = aggrEntry.getValue();
                                aggr.intervalComplete(5);
                                getAggregationDao().updateAggregation(aggr);
                                completeAggrs.put(aggrEntry.getKey(), aggr);
                            }
                            for (final Entry<K, T> aggrEntry : hourGroup.entrySet()) {
                                final T aggr = aggrEntry.getValue();
                                aggr.intervalComplete(60);
                                getAggregationDao().updateAggregation(aggr);
                                completeAggrs.put(aggrEntry.getKey(), aggr);
                            }
                            return completeAggrs;
                        }
                    });
    // Verify aggregations were completed
    this.execute(
            new CallableWithoutResult() {
                @Override
                protected void callWithoutResult() {
                    final AggregationIntervalInfo fiveMinuteInfo =
                            aggregationIntervalHelper.getIntervalInfo(
                                    AggregationInterval.FIVE_MINUTE, instant);
                    final AggregationIntervalInfo hourInfo =
                            aggregationIntervalHelper.getIntervalInfo(
                                    AggregationInterval.HOUR, instant);
                    final Map<K, T> fiveMinGroup =
                            getAggregationDao()
                                    .getAggregationsForInterval(
                                            fiveMinuteInfo.getDateDimension(),
                                            fiveMinuteInfo.getTimeDimension(),
                                            fiveMinuteInfo.getAggregationInterval());
                    final Map<K, T> hourGroup =
                            getAggregationDao()
                                    .getAggregationsForInterval(
                                            hourInfo.getDateDimension(),
                                            hourInfo.getTimeDimension(),
                                            hourInfo.getAggregationInterval());
                    final Map<K, T> foundAggrs = new HashMap<>(fiveMinGroup);
                    foundAggrs.putAll(hourGroup);
                    assertEquals(
                            "Aggregations not completed as expected", completeAggrs, foundAggrs);
                }
            });
}
Aggregations