Use of org.apache.commons.compress.archivers.ArchiveInputStream in the Apache Stanbol project:
class ConfigUtils, method getArchiveInputStream.
/**
 * Wraps the given stream in the {@link ArchiveInputStream} implementation that
 * matches the archive format derived from {@code solrArchiveName}.
 * <p>
 * The format is looked up from the file extension via
 * {@code SUPPORTED_SOLR_ARCHIVE_FORMAT}; when the name has no extension it is
 * interpreted as the format identifier itself. "zip" archives are read
 * directly, while "gz" and "bz2" are treated as compressed tar archives.
 *
 * @param solrArchiveName the archive file name (or a bare format identifier)
 * @param is the raw stream containing the archive data
 * @return an archive stream ready for reading entries
 * @throws IOException if wrapping the stream fails (e.g. invalid gzip header)
 * @throws IllegalStateException if the resolved format is not supported
 */
public static ArchiveInputStream getArchiveInputStream(String solrArchiveName, InputStream is) throws IOException {
    final String extension = FilenameUtils.getExtension(solrArchiveName);
    final String archiveFormat;
    if (extension == null || extension.isEmpty()) {
        // no extension present: the caller already passed the format itself
        archiveFormat = solrArchiveName;
    } else {
        archiveFormat = SUPPORTED_SOLR_ARCHIVE_FORMAT.get(extension);
    }
    if ("zip".equals(archiveFormat)) {
        return new ZipArchiveInputStream(is);
    }
    // everything but zip is a compressed tar archive
    final InputStream decompressed;
    if ("gz".equals(archiveFormat)) {
        decompressed = new GZIPInputStream(is);
    } else if ("bz2".equals(archiveFormat)) {
        decompressed = new BZip2CompressorInputStream(is);
    } else {
        throw new IllegalStateException("Unsupported compression format " + archiveFormat + "!. " + "Please report this to stanbol-dev mailing list!");
    }
    return new TarArchiveInputStream(decompressed);
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the cSploit Android project:
class UpdateService, method verifyArchiveIntegrity.
/**
 * Checks that the downloaded archive is valid by reading it from start to end,
 * updating the progress notification along the way. When the current task has
 * {@code skipRoot} set, it also enforces that the archive contains exactly one
 * top-level directory and nothing else at its root.
 *
 * @throws RuntimeException if there is no archive to test or the file is missing
 * @throws KeyException if the archive is corrupted or (with {@code skipRoot}) empty
 * @throws CancellationException if the service stops ({@code mRunning} false) mid-check
 */
private void verifyArchiveIntegrity() throws RuntimeException, KeyException {
File f;
long total;
short old_percentage, percentage;
CountingInputStream counter;
ArchiveInputStream is;
byte[] buffer;
String rootDirectory;
Logger.info("verifying archive integrity");
// Fail fast when there is nothing to verify.
if (mCurrentTask == null || mCurrentTask.path == null)
throw new RuntimeException("no archive to test");
// Switch the ongoing notification to an indeterminate "checking" state.
mBuilder.setContentTitle(getString(R.string.checking)).setSmallIcon(android.R.drawable.ic_popup_sync).setContentText("").setContentInfo("").setProgress(100, 0, true);
mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
f = new File(mCurrentTask.path);
try {
// Wrap the file stream so bytes-read can be reported as progress.
counter = new CountingInputStream(new FileInputStream(f));
} catch (FileNotFoundException e) {
throw new RuntimeException(String.format("archive '%s' does not exists", mCurrentTask.path));
}
try {
// openArchiveStream presumably picks the right decoder for the task's
// archive format — TODO confirm against its definition.
is = openArchiveStream(counter);
ArchiveEntry entry;
buffer = new byte[2048];
total = f.length();
old_percentage = -1;
rootDirectory = null;
// consume the archive
while (mRunning && (entry = is.getNextEntry()) != null) {
// Without skipRoot we only need the entries to parse; skip the root checks.
if (!mCurrentTask.skipRoot)
continue;
String name = entry.getName();
if (rootDirectory == null) {
// First entry establishes the expected single root directory.
if (name.contains("/")) {
rootDirectory = name.substring(0, name.indexOf('/'));
} else if (entry.isDirectory()) {
rootDirectory = name;
} else {
throw new IOException(String.format("archive '%s' contains files under it's root", mCurrentTask.path));
}
} else {
// Every subsequent entry must live under that same root.
if (!name.startsWith(rootDirectory)) {
throw new IOException("multiple directories found in the archive root");
}
}
}
// NOTE(review): by this point getNextEntry() has returned null, so this
// read loop appears to terminate immediately and the percentage
// notification below is likely never shown — verify intent.
while (mRunning && is.read(buffer) > 0) {
percentage = (short) (((double) counter.getBytesRead() / total) * 100);
if (percentage != old_percentage) {
mBuilder.setProgress(100, percentage, false).setContentInfo(percentage + "%");
mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
old_percentage = percentage;
}
}
} catch (IOException e) {
// Any read/parse failure is treated as a corrupted archive.
throw new KeyException("corrupted archive: " + e.getMessage());
} finally {
try {
counter.close();
} catch (IOException ignore) {
}
}
if (!mRunning)
throw new CancellationException("archive integrity check cancelled");
// skipRoot with no root directory found means the archive had no entries.
if (mCurrentTask.skipRoot && rootDirectory == null)
throw new KeyException(String.format("archive '%s' is empty", mCurrentTask.path));
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the Neo4j project:
class Loader, method openArchiveIn.
/**
 * Opens the given dump file as a gzip-compressed tar archive stream.
 *
 * @param archive path to the dump file to open
 * @return a tar archive stream reading the decompressed contents
 * @throws IOException if the file cannot be opened or read
 * @throws IncorrectFormat if the file is not a valid gzip stream
 */
private static ArchiveInputStream openArchiveIn(Path archive) throws IOException, IncorrectFormat {
    final InputStream source = Files.newInputStream(archive);
    final GzipCompressorInputStream gzip;
    try {
        gzip = new GzipCompressorInputStream(source);
    } catch (IOException e) {
        // Not gzip data: release the file handle before reporting the bad format.
        source.close();
        throw new IncorrectFormat(archive, e);
    }
    return new TarArchiveInputStream(gzip);
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the uPortal project (Jasig):
class JaxbPortalDataHandlerService, method importDataArchive.
/**
 * Extracts the archive resource into a temporary directory and then runs the
 * batch-import process on the extracted files. The temporary directory is
 * always deleted afterwards, even on failure.
 *
 * @param resource the archive resource, used for error reporting
 * @param resourceStream the already-opened archive stream to extract
 * @param options batch-import options forwarded to {@code importDataDirectory}
 * @throws RuntimeException if extraction fails (wraps the underlying IOException)
 */
protected void importDataArchive(final Resource resource, final ArchiveInputStream resourceStream, BatchImportOptions options) {
    final File tempDir = Files.createTempDir();
    try {
        // Canonical prefix used to reject entries that would escape tempDir.
        final String tempDirPrefix = tempDir.getCanonicalPath() + File.separator;
        ArchiveEntry archiveEntry;
        while ((archiveEntry = resourceStream.getNextEntry()) != null) {
            final File entryFile = new File(tempDir, archiveEntry.getName());
            // Guard against "Zip Slip": an entry name containing ".." must not
            // resolve to a path outside the extraction directory.
            if (!entryFile.getCanonicalPath().startsWith(tempDirPrefix)) {
                throw new IOException("Archive entry '" + archiveEntry.getName() + "' would be extracted outside of " + tempDir);
            }
            if (archiveEntry.isDirectory()) {
                entryFile.mkdirs();
            } else {
                entryFile.getParentFile().mkdirs();
                // CloseShieldInputStream keeps Files.copy from closing the
                // shared archive stream after each entry.
                Files.copy(new InputSupplier<InputStream>() {
                    @Override
                    public InputStream getInput() throws IOException {
                        return new CloseShieldInputStream(resourceStream);
                    }
                }, entryFile);
            }
        }
        importDataDirectory(tempDir, null, options);
    } catch (IOException e) {
        throw new RuntimeException("Failed to extract data from '" + resource + "' to '" + tempDir + "' for batch import.", e);
    } finally {
        FileUtils.deleteQuietly(tempDir);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the POL-POM-5 (PlayOnLinux) project:
class Tar, method uncompress.
/**
 * Uncompresses a tar archive into the given output directory, preserving POSIX
 * file permissions and symbolic links, and reporting progress per entry.
 *
 * @param inputStream the raw tar stream to read entries from
 * @param countingInputStream counts the number of bytes extracted, used to compute progress
 * @param outputDir the directory where files should be extracted
 * @param finalSize the expected total size in bytes, used as the progress denominator
 * @param stateCallback receives a progress update after each extracted entry
 * @return a list of extracted files and directories
 * @throws ArchiveException if the extraction process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream, final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar", inputStream)) {
        // Canonical prefix used to reject entries that would escape outputDir.
        final String outputDirPrefix = outputDir.getCanonicalPath() + File.separator;
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            // Guard against "Zip Slip": an entry name containing ".." must not
            // resolve to a path outside the extraction directory.
            if (!outputFile.getCanonicalPath().startsWith(outputDirPrefix)) {
                throw new IOException("Entry '" + entry.getName() + "' would be extracted outside of " + outputDir);
            }
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to createPrefix output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(), entry.getMode()));
                if (entry.isSymbolicLink()) {
                    // NOTE(review): the link target is taken from the archive as-is
                    // and may point outside outputDir — confirm this is acceptable.
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()), Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);
                        // Restore the POSIX permission bits recorded in the tar header.
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()), fileUtilities.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            stateCallback.accept(new ProgressEntity.Builder().withPercent((double) countingInputStream.getCount() / (double) finalSize * (double) 100).withProgressText("Extracting " + outputFile.getName()).build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
Aggregations