Example usage of org.apereo.portal.utils.PeriodicFlushingBufferedWriter in the uPortal project (by Jasig):
the importDataDirectory method of the JaxbPortalDataHandlerService class.
/**
 * Imports all portal data files found under the specified directory.
 * <p>
 * Files are matched with an Ant-style pattern filter (either the caller-supplied
 * {@code pattern} or the configured {@code dataFileIncludes}), grouped by
 * {@link PortalDataKey}, and then imported type-by-type in the order defined by
 * {@code dataKeyImportOrder}. Imports within a type run concurrently on
 * {@code importExportThreadPool}. A per-run report is written to
 * {@code data-import.txt} in the log directory resolved from {@code options}.
 *
 * @param directory the directory to scan for data files; must exist
 * @param pattern   optional Ant-style include pattern; when {@code null} the
 *                  configured {@code dataFileIncludes} set is used
 * @param options   batch options (fail-fast flag, log directory); may be
 *                  {@code null}, in which case failOnError defaults to true
 * @throws IllegalArgumentException if {@code directory} does not exist
 * @throws IllegalStateException    if files were found whose data key is not
 *                                  listed in {@code dataKeyImportOrder}
 * @throws RuntimeException         if the report file cannot be created, if an
 *                                  import fails while failOnError is set, or if
 *                                  the calling thread is interrupted while
 *                                  waiting for imports to complete
 */
@Override
public void importDataDirectory(File directory, String pattern, final BatchImportOptions options) {
    if (!directory.exists()) {
        throw new IllegalArgumentException("The specified directory '" + directory + "' does not exist");
    }

    // Create the file filter to use when searching for files to import
    final FileFilter fileFilter;
    if (pattern != null) {
        fileFilter = new AntPatternFileFilter(true, false, pattern, this.dataFileExcludes);
    } else {
        fileFilter = new AntPatternFileFilter(true, false, this.dataFileIncludes, this.dataFileExcludes);
    }

    // Determine the parent directory to log to
    final File logDirectory = determineLogDirectory(options, "import");

    // Setup reporting file; the writer flushes periodically (every 500ms) so
    // progress is visible in the report while the import is still running
    final File importReport = new File(logDirectory, "data-import.txt");
    final PrintWriter reportWriter;
    try {
        reportWriter = new PrintWriter(new PeriodicFlushingBufferedWriter(500, new FileWriter(importReport)));
    } catch (IOException e) {
        throw new RuntimeException("Failed to create FileWriter for: " + importReport, e);
    }

    // Convert directory to URI String to provide better logging output; the
    // base dir is published via a ThreadLocal so worker tasks can log
    // file paths relative to it
    final URI directoryUri = directory.toURI();
    final String directoryUriStr = directoryUri.toString();
    IMPORT_BASE_DIR.set(directoryUriStr);
    try {
        // Scan the specified directory for files to import
        logger.info("Scanning for files to Import from: {}", directory);
        final PortalDataKeyFileProcessor fileProcessor = new PortalDataKeyFileProcessor(this.dataKeyTypes, options);
        this.directoryScanner.scanDirectoryNoResults(directory, fileFilter, fileProcessor);
        final long resourceCount = fileProcessor.getResourceCount();
        logger.info("Found {} files to Import from: {}", resourceCount, directory);

        // See if the import should fail on error
        final boolean failOnError = options != null ? options.isFailOnError() : true;

        // Map of files to import, grouped by type
        final ConcurrentMap<PortalDataKey, Queue<Resource>> dataToImport = fileProcessor.getDataToImport();

        // Import the data files, one data-key type at a time, in the configured order
        for (final PortalDataKey portalDataKey : this.dataKeyImportOrder) {
            final Queue<Resource> files = dataToImport.remove(portalDataKey);
            if (files == null) {
                continue;
            }

            final Queue<ImportFuture<?>> importFutures = new LinkedList<ImportFuture<?>>();
            final List<FutureHolder<?>> failedFutures = new LinkedList<FutureHolder<?>>();

            final int fileCount = files.size();
            logger.info("Importing {} files of type {}", fileCount, portalDataKey);
            reportWriter.println(portalDataKey + "," + fileCount);

            while (!files.isEmpty()) {
                final Resource file = files.poll();

                // Check for completed futures on every iteration, needed to fail as
                // fast as possible on an import exception
                final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, false);
                failedFutures.addAll(newFailed);

                // -1 marks "not started"; the worker overwrites it with the elapsed time
                final AtomicLong importTime = new AtomicLong(-1);

                // Create import task
                final Callable<Object> task = new CallableWithoutResult() {
                    @Override
                    protected void callWithoutResult() {
                        // Re-publish the base dir on the worker thread's own ThreadLocal
                        IMPORT_BASE_DIR.set(directoryUriStr);
                        importTime.set(System.nanoTime());
                        try {
                            importData(file, portalDataKey);
                        } finally {
                            importTime.set(System.nanoTime() - importTime.get());
                            IMPORT_BASE_DIR.remove();
                        }
                    }
                };

                // Submit the import task
                final Future<?> importFuture = this.importExportThreadPool.submit(task);

                // Add the future for tracking
                importFutures.offer(new ImportFuture(importFuture, file, portalDataKey, importTime));
            }

            // Wait for all of the imports on of this type to complete
            final List<FutureHolder<?>> newFailed = waitForFutures(importFutures, reportWriter, logDirectory, true);
            failedFutures.addAll(newFailed);

            if (failOnError && !failedFutures.isEmpty()) {
                throw new RuntimeException(failedFutures.size() + " " + portalDataKey + " entities failed to import.\n\n" + "\tPer entity exception logs and a full report can be found in " + logDirectory + "\n");
            }

            reportWriter.flush();
        }

        // Any keys left over were found on disk but are not in the configured order list
        if (!dataToImport.isEmpty()) {
            throw new IllegalStateException("The following PortalDataKeys are not listed in the dataTypeImportOrder List: " + dataToImport.keySet());
        }

        logger.info("For a detailed report on the data import see " + importReport);
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers up the stack can observe it
        Thread.currentThread().interrupt();
        throw new RuntimeException("Interrupted while waiting for entities to import", e);
    } finally {
        IOUtils.closeQuietly(reportWriter);
        IMPORT_BASE_DIR.remove();
    }
}
Aggregations