Use of org.apache.commons.compress.archivers.ArchiveInputStream in project haikudepotserver (haiku): class PkgScreenshotImportArchiveJobRunner, method consumeScreenshotArchiveEntries.
/**
 * Streams the supplied gzip-compressed tar archive and hands every entry whose
 * path matches {@code PATTERN_PATH} to the supplied consumer.
 *
 * @param byteSource supplies the gzipped tar data
 * @param archiveEntryConsumer receives each matching entry together with its
 *        parsed package name and ordering value
 * @return the number of entries that matched the pattern and were consumed
 * @throws IOException if the archive cannot be opened or read
 */
private int consumeScreenshotArchiveEntries(ByteSource byteSource, Consumer<ArchiveEntryWithPkgNameAndOrdering> archiveEntryConsumer) throws IOException {
    int matchedCount = 0;
    try (InputStream rawInput = byteSource.openStream();
            GZIPInputStream gunzippedInput = new GZIPInputStream(rawInput);
            ArchiveInputStream tarInput = new TarArchiveInputStream(gunzippedInput)) {
        for (ArchiveEntry entry = tarInput.getNextEntry(); null != entry; entry = tarInput.getNextEntry()) {
            Matcher pathMatcher = PATTERN_PATH.matcher(entry.getName());
            if (!pathMatcher.matches()) {
                continue;
            }
            // the tar stream is currently positioned at this entry's payload, so
            // the consumer may read the entry data directly from it.
            archiveEntryConsumer.accept(new ArchiveEntryWithPkgNameAndOrdering(
                    tarInput,
                    entry,
                    pathMatcher.group(GROUP_PKGNAME),
                    Integer.parseInt(pathMatcher.group(GROUP_LEAFNAME))));
            matchedCount++;
        }
    }
    return matchedCount;
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project invesdwin-context (invesdwin): class ADecompressingInputStream, method read.
/**
 * Reads one byte from the decompressing delegate. When the delegate is an
 * archive stream and the first read returns end-of-stream, this advances the
 * archive to its first real entry (skipping directories and unnamed entries)
 * exactly once, then retries the read.
 *
 * @return the next byte of data, or -1 when the stream is exhausted
 * @throws IOException if reading the delegate or its entry headers fails
 */
@Override
public int read() throws IOException {
    int value = super.read();
    final InputStream delegate = getDelegate();
    if (value != -1 || !(delegate instanceof ArchiveInputStream)) {
        return value;
    }
    if (oneArchiveEntryAlreadyExtraced) {
        // only a single archive entry is ever extracted; further EOFs are final
        return value;
    }
    final ArchiveInputStream archiveInput = (ArchiveInputStream) delegate;
    // skip directory entries and entries without a name until a file entry appears
    ArchiveEntry candidate = archiveInput.getNextEntry();
    while (candidate != null && (candidate.isDirectory() || Strings.isBlank(candidate.getName()))) {
        candidate = archiveInput.getNextEntry();
    }
    if (candidate == null) {
        // the archive holds no usable entry; report end-of-stream
        return value;
    }
    oneArchiveEntryAlreadyExtraced = true;
    return super.read();
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project structr (structr): class FileHelper, method unarchive.
/**
 * Unarchives a file into the (optional) folder with the given folder id.
 *
 * @param securityContext the current security context used for all app operations
 * @param file the archive file to unpack; if {@code null} the call is a no-op
 * @param parentFolderId id of an existing folder to unpack into, or {@code null}
 *        to unpack into the archive file's own parent folder
 * @throws ArchiveException if the archive format cannot be detected or read
 * @throws IOException if reading the archive content fails
 * @throws FrameworkException if a Structr transaction fails
 */
public static void unarchive(final SecurityContext securityContext, final File file, final String parentFolderId) throws ArchiveException, IOException, FrameworkException {
    if (file == null) {
        logger.error("Unable to unarchive file (file parameter was null).");
        return;
    }
    Folder existingParentFolder = null;
    final App app = StructrApp.getInstance(securityContext);
    final String fileName = file.getName();
    if (parentFolderId != null) {
        try (final Tx tx = app.tx(true, true, true)) {
            // search for existing parent folder
            existingParentFolder = app.get(Folder.class, parentFolderId);
            String parentFolderName = null;
            String msgString = "Unarchiving file {}";
            if (existingParentFolder != null) {
                parentFolderName = existingParentFolder.getName();
                msgString += " into existing folder {}.";
            }
            logger.info(msgString, new Object[] { fileName, parentFolderName });
            tx.success();
        }
    } else {
        existingParentFolder = file.getParent();
    }
    // try-with-resources: previously this stream was never closed on the 7z
    // path (and leaked entirely if detect() threw). Closing it twice on the
    // ZIP path (here and via ZipArchiveInputStream) is harmless.
    try (final BufferedInputStream bufferedIs = new BufferedInputStream(file.getInputStream())) {
        switch (ArchiveStreamFactory.detect(bufferedIs)) {
            // 7z doesn't support streaming
            case ArchiveStreamFactory.SEVEN_Z:
            {
                int overallCount = 0;
                logger.info("7-Zip archive format detected");
                // SevenZFile was previously never closed; it now participates in
                // the try-with-resources and is released even on failure.
                try (final Tx outertx = app.tx();
                        final SevenZFile sevenZFile = new SevenZFile(file.getFileOnDisk())) {
                    SevenZArchiveEntry sevenZEntry = sevenZFile.getNextEntry();
                    while (sevenZEntry != null) {
                        // commit in batches of 50 entries to keep transactions small
                        try (final Tx tx = app.tx(true, true, false)) {
                            int count = 0;
                            while (sevenZEntry != null && count++ < 50) {
                                final String entryPath = "/" + PathHelper.clean(sevenZEntry.getName());
                                logger.info("Entry path: {}", entryPath);
                                if (sevenZEntry.isDirectory()) {
                                    handleDirectory(securityContext, existingParentFolder, entryPath);
                                } else {
                                    // SevenZFile.read() may return fewer bytes than
                                    // requested, so loop until the entry is fully read
                                    // (the previous single read could truncate data).
                                    final byte[] buf = new byte[(int) sevenZEntry.getSize()];
                                    int offset = 0;
                                    while (offset < buf.length) {
                                        final int read = sevenZFile.read(buf, offset, buf.length - offset);
                                        if (read < 0) {
                                            break;
                                        }
                                        offset += read;
                                    }
                                    try (final ByteArrayInputStream in = new ByteArrayInputStream(buf)) {
                                        handleFile(securityContext, in, existingParentFolder, entryPath);
                                    }
                                }
                                sevenZEntry = sevenZFile.getNextEntry();
                                overallCount++;
                            }
                            logger.info("Committing transaction after {} entries.", overallCount);
                            tx.success();
                        }
                    }
                    logger.info("Unarchived {} files.", overallCount);
                    outertx.success();
                }
                break;
            }
            // ZIP needs special treatment to support "unsupported feature data descriptor"
            case ArchiveStreamFactory.ZIP:
            {
                logger.info("Zip archive format detected");
                try (final ZipArchiveInputStream in = new ZipArchiveInputStream(bufferedIs, null, false, true)) {
                    handleArchiveInputStream(in, app, securityContext, existingParentFolder);
                }
                break;
            }
            default:
            {
                logger.info("Default archive format detected");
                try (final ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(bufferedIs)) {
                    handleArchiveInputStream(in, app, securityContext, existingParentFolder);
                }
            }
        }
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project jbehave-core (jbehave): class ZipFileArchiver, method unarchive.
/**
 * Unpacks the given zip archive into the target directory, extracting each
 * entry via {@code unzipEntry}.
 *
 * @param archive the zip file to unpack
 * @param directory the directory to extract entries into
 * @throws FileUnarchiveFailedException wrapping any failure while reading or
 *         extracting the archive
 */
public void unarchive(File archive, File directory) {
    // try-with-resources replaces the manual close(...) calls in finally,
    // guaranteeing both streams are closed in the right order even when
    // createArchiveInputStream or entry extraction fails.
    try (InputStream is = new FileInputStream(archive);
            ArchiveInputStream in = factory.createArchiveInputStream(ARCHIVER_NAME, is)) {
        ZipArchiveEntry entry = null;
        while ((entry = (ZipArchiveEntry) in.getNextEntry()) != null) {
            unzipEntry(entry, in, directory);
        }
    } catch (Exception e) {
        throw new FileUnarchiveFailedException(archive, directory, e);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project phoenicis (PhoenicisOrg): class Tar, method uncompress.
/**
 * Uncompresses a tar archive into the given output directory.
 *
 * @param inputStream the raw tar content to read entries from
 * @param countingInputStream counts the number of bytes extracted, used for
 *        progress reporting
 * @param outputDir the directory where files should be extracted
 * @param finalSize the expected total size in bytes, used to compute the
 *        progress percentage
 * @param stateCallback receives a progress update after each extracted entry
 * @return a list of extracted files
 * @throws ArchiveException if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream, final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar", inputStream)) {
        final String canonicalOutputDir = outputDir.getCanonicalPath();
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            // guard against "zip slip": reject entries (e.g. "../../evil") whose
            // resolved path would escape the output directory
            final String canonicalOutputFile = outputFile.getCanonicalPath();
            if (!canonicalOutputFile.equals(canonicalOutputDir)
                    && !canonicalOutputFile.startsWith(canonicalOutputDir + File.separator)) {
                throw new IOException("Archive entry escapes the output directory: " + entry.getName());
            }
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to createPrefix output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(), entry.getMode()));
                // some archives omit directory entries; make sure the parent exists
                if (outputFile.getParentFile() != null) {
                    Files.createDirectories(outputFile.getParentFile().toPath());
                }
                if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()), Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()), fileUtilities.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            stateCallback.accept(new ProgressEntity.Builder()
                    .withPercent((double) countingInputStream.getCount() / (double) finalSize * (double) 100)
                    .withProgressText("Extracting " + outputFile.getName())
                    .build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
Aggregations