Use of org.apache.commons.compress.archivers.ArchiveInputStream in the phoenicis project by PhoenicisOrg.
From the class Tar, method uncompress.
/**
 * Uncompresses a tar archive into the given output directory.
 *
 * @param inputStream the archive stream to read tar entries from
 * @param countingInputStream counts the number of bytes extracted so far, used for progress reporting
 * @param outputDir the directory where files should be extracted
 * @param finalSize the expected total number of bytes, used as the denominator of the progress percentage
 * @param stateCallback invoked after each extracted entry with the current progress
 * @return the list of extracted files
 * @throws ArchiveException if the process fails, or if an entry would be written outside of outputDir
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream, final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar", inputStream)) {
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            // Guard against "zip slip": refuse entries whose name escapes outputDir via "../" components.
            // The thrown IOException is wrapped into an ArchiveException by the catch below.
            if (!outputFile.toPath().normalize().startsWith(outputDir.toPath().normalize())) {
                throw new IOException("Archive entry is outside of the target directory: " + entry.getName());
            }
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to createPrefix output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(), entry.getMode()));
                // A tar entry is not guaranteed to be preceded by its parent directory entry,
                // so make sure the parent directory exists before writing.
                final File parentDir = outputFile.getParentFile();
                if (parentDir != null && !parentDir.exists()) {
                    Files.createDirectories(parentDir.toPath());
                }
                if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()), Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);
                        // Restore the POSIX permissions recorded in the tar header.
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()), fileUtilities.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            stateCallback.accept(new ProgressEntity.Builder().withPercent((double) countingInputStream.getCount() / (double) finalSize * (double) 100).withProgressText("Extracting " + outputFile.getName()).build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the jbehave-core project by jbehave.
From the class ZipFileArchiver, method unarchive.
/**
 * Extracts every entry of the given zip archive into the target directory.
 *
 * @param archive the zip file to read
 * @param directory the directory receiving the extracted entries
 * @throws FileUnarchiveFailedException if opening, reading, or extracting the archive fails
 */
public void unarchive(File archive, File directory) {
    InputStream is = null;
    ArchiveInputStream in = null;
    try {
        is = new FileInputStream(archive);
        in = factory.createArchiveInputStream(ARCHIVER_NAME, is);
        ZipArchiveEntry entry = null;
        while ((entry = (ZipArchiveEntry) in.getNextEntry()) != null) {
            unzipEntry(entry, in, directory);
        }
    } catch (Exception e) {
        throw new FileUnarchiveFailedException(archive, directory, e);
    } finally {
        // Close the wrapping archive stream before the underlying file stream,
        // so the wrapper can release its state against a still-open source.
        close(in);
        close(is);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the hopsworks project by logicalclocks.
From the class WebDriverFactory, method extractZip.
/**
 * Extracts a zip archive into the given destination directory.
 *
 * @param sourceFilePath the zip file to extract
 * @param destinationFilePath the directory receiving the extracted entries
 * @throws IOException if reading the archive or writing an entry fails, or if an entry
 *         would resolve outside of the destination directory
 * @throws ArchiveException if the archive stream cannot be created
 */
public static void extractZip(File sourceFilePath, File destinationFilePath) throws IOException, ArchiveException {
    // try-with-resources guarantees both streams are closed even when extraction fails.
    try (InputStream is = new FileInputStream(sourceFilePath);
            ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream("zip", is)) {
        ZipArchiveEntry entry;
        while ((entry = (ZipArchiveEntry) in.getNextEntry()) != null) {
            File outputFile = new File(destinationFilePath, entry.getName());
            // Guard against "zip slip": reject entries escaping the destination via "../".
            if (!outputFile.getCanonicalPath().startsWith(destinationFilePath.getCanonicalPath() + File.separator)) {
                throw new IOException("Archive entry is outside of the destination directory: " + entry.getName());
            }
            if (entry.isDirectory()) {
                // Directory entries carry no data; just materialize the directory.
                outputFile.mkdirs();
            } else {
                // Entries are not guaranteed to be preceded by their parent directory entry.
                File parent = outputFile.getParentFile();
                if (parent != null) {
                    parent.mkdirs();
                }
                // Close each entry's output stream even if the copy fails.
                try (OutputStream out = new FileOutputStream(outputFile)) {
                    IOUtils.copy(in, out);
                }
            }
        }
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the hutool project by dromara.
From the class StreamExtractor, method extractInternal.
/**
 * Extracts (releases) the archive into the given target directory.
 *
 * @param targetDir the destination directory; must either not exist yet or be a directory
 * @param filter optional entry filter selecting which entries to extract; an entry is
 *        extracted only when {@link Filter#accept(Object)} returns true. May be null,
 *        meaning no filtering.
 * @throws IOException on any I/O failure
 */
private void extractInternal(File targetDir, Filter<ArchiveEntry> filter) throws IOException {
    Assert.isTrue(targetDir != null && (!targetDir.exists() || targetDir.isDirectory()), "target must be dir.");
    final ArchiveInputStream archiveStream = this.in;
    ArchiveEntry currentEntry;
    while ((currentEntry = archiveStream.getNextEntry()) != null) {
        // Skip entries rejected by the caller-supplied filter.
        if (filter != null && !filter.accept(currentEntry)) {
            continue;
        }
        // Skip entries whose data cannot be read from this stream.
        if (!archiveStream.canReadEntryData(currentEntry)) {
            continue;
        }
        final File extracted = FileUtil.file(targetDir, currentEntry.getName());
        if (currentEntry.isDirectory()) {
            // Recreate the directory structure for directory entries.
            // noinspection ResultOfMethodCallIgnored
            extracted.mkdirs();
        } else {
            // Write the entry's bytes to the target file (no append).
            FileUtil.writeFromStream(archiveStream, extracted, false);
        }
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in the ozone project by apache.
From the class TarContainerPacker, method unpackContainerData.
/**
 * Given an input stream (tar file), extracts the contained data into the
 * db and chunks directories defined by the container.
 *
 * @param container container which defines the destination structure.
 * @param input the input stream.
 * @return the raw bytes of the container descriptor file, or null when the
 *         archive contained no descriptor entry
 * @throws IOException if extraction or decompression fails
 */
@Override
public byte[] unpackContainerData(Container<KeyValueContainerData> container, InputStream input) throws IOException {
    final KeyValueContainerData containerData = container.getContainerData();
    final Path dbRoot = getDbPath(containerData);
    final Path chunksRoot = Paths.get(containerData.getChunksPath());
    byte[] descriptorFileContent = null;
    try (InputStream decompressed = decompress(input);
            ArchiveInputStream archiveInput = untar(decompressed)) {
        ArchiveEntry entry;
        while ((entry = archiveInput.getNextEntry()) != null) {
            final String name = entry.getName();
            final long size = entry.getSize();
            if (name.startsWith(DB_DIR_NAME + "/")) {
                // Database entries land under the db root.
                final Path destination = dbRoot.resolve(name.substring(DB_DIR_NAME.length() + 1));
                extractEntry(entry, archiveInput, size, dbRoot, destination);
            } else if (name.startsWith(CHUNKS_DIR_NAME + "/")) {
                // Chunk entries land under the chunks root.
                final Path destination = chunksRoot.resolve(name.substring(CHUNKS_DIR_NAME.length() + 1));
                extractEntry(entry, archiveInput, size, chunksRoot, destination);
            } else if (CONTAINER_FILE_NAME.equals(name)) {
                // Keep the descriptor bytes only; the container file itself is
                // unpacked in a separate step by unpackContainerDescriptor.
                descriptorFileContent = readEntry(archiveInput, size);
            } else {
                throw new IllegalArgumentException("Unknown entry in the tar file: " + name);
            }
        }
        return descriptorFileContent;
    } catch (CompressorException e) {
        throw new IOException("Can't uncompress the given container: " + container.getContainerData().getContainerID(), e);
    }
}
Aggregations