use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project phoenicis by PhoenicisOrg.
the class Zip method uncompress.
/**
 * Uncompress a zip archive
 *
 * @param inputStream the archive input stream
 * @param countingInputStream to count the number of bytes extracted
 * @param outputDir The directory where files should be extracted
 * @param finalSize the expected total size, used to report progress
 * @param stateCallback callback notified of extraction progress
 * @return A list of extracted files
 * @throws ArchiveException if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream,
        final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream zipInputStream = new ArchiveStreamFactory().createArchiveInputStream("zip", inputStream)) {
        ZipArchiveEntry entry;
        while ((entry = (ZipArchiveEntry) zipInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to create output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s.", outputFile.getAbsolutePath()));
                outputFile.getParentFile().mkdirs();
                try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                    IOUtils.copy(zipInputStream, outputFileStream);
                }
            }
            uncompressedFiles.add(outputFile);
            // report extraction progress based on the number of bytes read so far
            stateCallback.accept(new ProgressEntity.Builder()
                    .withPercent((double) countingInputStream.getCount() / (double) finalSize * 100.0)
                    .withProgressText("Extracting " + outputFile.getName())
                    .build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
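The factory can also detect the archive format from the stream contents instead of hard-coding "zip". The following is a minimal, self-contained sketch (not taken from either project) that lists the entries of an arbitrary archive; the no-argument-format overload of createArchiveInputStream requires a stream that supports mark/reset, hence the BufferedInputStream.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class ArchiveListing {
    // Lists the entry names of an archive whose format is auto-detected by ArchiveStreamFactory.
    public static void listEntries(String archivePath) throws IOException, ArchiveException {
        try (InputStream raw = Files.newInputStream(Paths.get(archivePath));
                InputStream buffered = new BufferedInputStream(raw);
                ArchiveInputStream archive = new ArchiveStreamFactory().createArchiveInputStream(buffered)) {
            ArchiveEntry entry;
            while ((entry = archive.getNextEntry()) != null) {
                System.out.println(entry.getName() + (entry.isDirectory() ? "/" : ""));
            }
        }
    }
}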
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project jbpm-work-items by kiegroup.
the class ArchiveWorkItemHandler method executeWorkItem.
public void executeWorkItem(WorkItem workItem, WorkItemManager manager) {
    String archive = (String) workItem.getParameter("Archive");
    List<File> files = (List<File>) workItem.getParameter("Files");
    try {
        try (OutputStream outputStream = new FileOutputStream(new File(archive));
                ArchiveOutputStream os = new ArchiveStreamFactory().createArchiveOutputStream("tar", outputStream)) {
            if (files != null) {
                for (File file : files) {
                    // entry metadata; note that the entry name and mode are hard-coded in this handler
                    final TarArchiveEntry entry = new TarArchiveEntry("testdata/test1.xml");
                    entry.setModTime(0);
                    entry.setSize(file.length());
                    entry.setUserId(0);
                    entry.setGroupId(0);
                    entry.setMode(0100000);
                    os.putArchiveEntry(entry);
                    try (InputStream fileStream = new FileInputStream(file)) {
                        IOUtils.copy(fileStream, os);
                    }
                    // each entry must be closed before the next one is added
                    os.closeArchiveEntry();
                }
            }
        }
        manager.completeWorkItem(workItem.getId(), null);
    } catch (Throwable cause) {
        handleException(cause);
        manager.abortWorkItem(workItem.getId());
    }
}
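For comparison, here is a minimal, hedged sketch of writing a tar archive with the same factory, independent of the work item handler above. It derives each entry's metadata from the file itself and takes the entry name from file.getName(), which is an assumption rather than the handler's behavior.

import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.util.List;

import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.utils.IOUtils;

public class TarWriter {
    // Writes the given files into a tar archive, one entry per file.
    public static void writeTar(File archive, List<File> files) throws IOException, ArchiveException {
        try (OutputStream out = Files.newOutputStream(archive.toPath());
                ArchiveOutputStream tar = new ArchiveStreamFactory()
                        .createArchiveOutputStream(ArchiveStreamFactory.TAR, out)) {
            for (File file : files) {
                // the File-based constructor fills in size, modification time and mode from the file
                TarArchiveEntry entry = new TarArchiveEntry(file, file.getName());
                tar.putArchiveEntry(entry);
                try (InputStream in = Files.newInputStream(file.toPath())) {
                    IOUtils.copy(in, tar);
                }
                // close each entry before starting the next one
                tar.closeArchiveEntry();
            }
            tar.finish();
        }
    }
}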
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project POL-POM-5 by PlayOnLinux.
the class Tar method uncompress.
/**
 * Uncompress a tar
 *
 * @param inputStream the archive input stream
 * @param countingInputStream to count the number of bytes extracted
 * @param outputDir The directory where files should be extracted
 * @param finalSize the expected total size, used to report progress
 * @param stateCallback callback notified of extraction progress
 * @return A list of extracted files
 * @throws ArchiveException if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream,
        final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream tarInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar", inputStream)) {
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to create output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(), entry.getMode()));
                if (entry.isSymbolicLink()) {
                    // recreate symbolic links instead of copying their targets
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()), Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(tarInputStream, outputFileStream);
                        // restore the POSIX permissions stored in the tar entry mode
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()),
                                fileUtilities.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            // report extraction progress based on the number of bytes read so far
            stateCallback.accept(new ProgressEntity.Builder()
                    .withPercent((double) countingInputStream.getCount() / (double) finalSize * 100.0)
                    .withProgressText("Extracting " + outputFile.getName())
                    .build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
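The call to fileUtilities.octToPosixFilePermission(entry.getMode()) relies on a project-specific helper whose implementation is not shown here. As an illustration only (a hypothetical sketch, not the project's actual code), a conversion from tar mode bits to java.nio PosixFilePermission values could look like this:

import java.nio.file.attribute.PosixFilePermission;
import java.util.EnumSet;
import java.util.Set;

public class TarModes {
    // Hypothetical helper: maps the permission bits of a tar entry mode to PosixFilePermission values.
    public static Set<PosixFilePermission> fromMode(int mode) {
        Set<PosixFilePermission> permissions = EnumSet.noneOf(PosixFilePermission.class);
        if ((mode & 0400) != 0) permissions.add(PosixFilePermission.OWNER_READ);
        if ((mode & 0200) != 0) permissions.add(PosixFilePermission.OWNER_WRITE);
        if ((mode & 0100) != 0) permissions.add(PosixFilePermission.OWNER_EXECUTE);
        if ((mode & 0040) != 0) permissions.add(PosixFilePermission.GROUP_READ);
        if ((mode & 0020) != 0) permissions.add(PosixFilePermission.GROUP_WRITE);
        if ((mode & 0010) != 0) permissions.add(PosixFilePermission.GROUP_EXECUTE);
        if ((mode & 0004) != 0) permissions.add(PosixFilePermission.OTHERS_READ);
        if ((mode & 0002) != 0) permissions.add(PosixFilePermission.OTHERS_WRITE);
        if ((mode & 0001) != 0) permissions.add(PosixFilePermission.OTHERS_EXECUTE);
        return permissions;
    }
}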