Use of org.apache.commons.compress.archivers.ArchiveEntry in project packr by libgdx.
From the class ArchiveUtilsTest, method testArchiveDuplicateEntry:
/**
* Adds the same entry to a Zip file to ensure that extraction handles duplicates properly.
*/
@Test
public void testArchiveDuplicateEntry(@TempDir Path tempDir) throws IOException, ArchiveException, CompressorException {
    String someFilename = "some-file.txt";
    Path someFilePath = tempDir.resolve(someFilename);
    Files.write(someFilePath, "Hello world\n".getBytes(StandardCharsets.UTF_8));
    Path archiveZip = tempDir.resolve("archive.zip");
    // Create an archive, add entry, update file, add same entry
    try (OutputStream fileOutputStream = new BufferedOutputStream(Files.newOutputStream(archiveZip));
         ArchiveOutputStream archiveOutputStream = new ArchiveStreamFactory().createArchiveOutputStream(ZIP.getCommonsCompressName(), fileOutputStream)) {
        // Create an entry for some file
        ArchiveEntry entry = archiveOutputStream.createArchiveEntry(someFilePath.toFile(), someFilename);
        archiveOutputStream.putArchiveEntry(entry);
        Files.copy(someFilePath, archiveOutputStream);
        archiveOutputStream.closeArchiveEntry();
        // Update some file, and put it into the archive again
        Files.write(someFilePath, "Good bye\n".getBytes(StandardCharsets.UTF_8));
        entry = archiveOutputStream.createArchiveEntry(someFilePath.toFile(), someFilename);
        archiveOutputStream.putArchiveEntry(entry);
        Files.copy(someFilePath, archiveOutputStream);
        archiveOutputStream.closeArchiveEntry();
        archiveOutputStream.finish();
    }
    Path extractionDirectory = tempDir.resolve("extract");
    Files.createDirectories(extractionDirectory);
    ArchiveUtils.extractArchive(archiveZip, extractionDirectory);
    assertEquals(new String(Files.readAllBytes(tempDir.resolve(someFilename)), StandardCharsets.UTF_8),
            new String(Files.readAllBytes(extractionDirectory.resolve(someFilename)), StandardCharsets.UTF_8),
            "Extracted file contents should have matched original");
}
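For reference, the duplicate entries written above can also be inspected directly with commons-compress's ZipFile, whose getEntries(String) returns every entry sharing a name in the order they appear in the archive, so the last one carries the updated contents. A minimal sketch, not part of the packr test; it reuses the archiveZip path and someFilename from above and the org.apache.commons.compress.utils.IOUtils helper:

try (ZipFile zipFile = new ZipFile(archiveZip.toFile())) {
    for (ZipArchiveEntry duplicate : zipFile.getEntries(someFilename)) {
        try (InputStream in = zipFile.getInputStream(duplicate)) {
            // the two entries hold "Hello world\n" and "Good bye\n" respectively
            System.out.println(new String(IOUtils.toByteArray(in), StandardCharsets.UTF_8));
        }
    }
}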
Use of org.apache.commons.compress.archivers.ArchiveEntry in project camel by apache.
From the class TarAggregationStrategy, method addEntryToTar:
private void addEntryToTar(File source, String entryName, byte[] buffer, int length) throws IOException, ArchiveException {
    // Move the current archive aside, then rebuild it with the new entry appended
    File tmpTar = File.createTempFile(source.getName(), null, parentDir);
    tmpTar.delete();
    if (!source.renameTo(tmpTar)) {
        throw new IOException("Cannot create temp file: " + source.getName());
    }
    FileInputStream fis = new FileInputStream(tmpTar);
    TarArchiveInputStream tin = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream(ArchiveStreamFactory.TAR, fis);
    TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(source));
    tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
    // copy the existing entries
    ArchiveEntry nextEntry;
    while ((nextEntry = tin.getNextEntry()) != null) {
        tos.putArchiveEntry(nextEntry);
        IOUtils.copy(tin, tos);
        tos.closeArchiveEntry();
    }
    // Create new entry
    TarArchiveEntry entry = new TarArchiveEntry(entryName);
    entry.setSize(length);
    tos.putArchiveEntry(entry);
    tos.write(buffer, 0, length);
    tos.closeArchiveEntry();
    IOHelper.close(fis, tin, tos);
    LOG.trace("Deleting temporary file: {}", tmpTar);
    FileUtil.deleteFile(tmpTar);
}
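Tar archives cannot be updated in place through this stream API, which is why the strategy renames the original file aside and rewrites every existing entry plus the new one. A hedged sketch of the same copy-then-append pattern using try-with-resources so the streams are closed even when copying fails; the method and variable names are illustrative, not part of Camel:

static void appendEntry(File existingTar, File rewrittenTar, String entryName, byte[] data) throws IOException {
    try (TarArchiveInputStream in = new TarArchiveInputStream(new FileInputStream(existingTar));
         TarArchiveOutputStream out = new TarArchiveOutputStream(new FileOutputStream(rewrittenTar))) {
        out.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        // copy every entry from the old archive into the new one
        TarArchiveEntry existing;
        while ((existing = in.getNextTarEntry()) != null) {
            out.putArchiveEntry(existing);
            IOUtils.copy(in, out);
            out.closeArchiveEntry();
        }
        // then append the extra entry
        TarArchiveEntry added = new TarArchiveEntry(entryName);
        added.setSize(data.length);
        out.putArchiveEntry(added);
        out.write(data);
        out.closeArchiveEntry();
    }
}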
Use of org.apache.commons.compress.archivers.ArchiveEntry in project neo4j by neo4j.
From the class Dumper, method withEntry:
private void withEntry(ThrowingAction<IOException> operation, Path root, ArchiveOutputStream stream, Path file) throws IOException {
    ArchiveEntry entry = createEntry(file, root, stream);
    stream.putArchiveEntry(entry);
    // run the supplied write operation while the entry is open, then close it
    operation.apply();
    stream.closeArchiveEntry();
}
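The helper wraps the standard commons-compress entry lifecycle (putArchiveEntry, write, closeArchiveEntry) around an arbitrary write operation. A hypothetical caller, shown only to illustrate the call shape (createEntry and ThrowingAction are neo4j internals; file, root and stream are assumed to be in scope):

withEntry(() -> Files.copy(file, stream), root, stream, file);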
Use of org.apache.commons.compress.archivers.ArchiveEntry in project karaf by apache.
From the class RunMojo, method extract:
private static void extract(ArchiveInputStream is, File targetDir) throws IOException {
    try {
        if (targetDir.exists()) {
            FileUtils.forceDelete(targetDir);
        }
        targetDir.mkdirs();
        ArchiveEntry entry = is.getNextEntry();
        while (entry != null) {
            String name = entry.getName();
            // strip the leading path segment (the archive's top-level directory)
            name = name.substring(name.indexOf("/") + 1);
            File file = new File(targetDir, name);
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                OutputStream os = new FileOutputStream(file);
                try {
                    IOUtils.copy(is, os);
                } finally {
                    IOUtils.closeQuietly(os);
                }
            }
            entry = is.getNextEntry();
        }
    } finally {
        is.close();
    }
}
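A hedged usage sketch, assuming the Karaf distribution arrives as a gzip-compressed tar; the path and target directory are illustrative, while TarArchiveInputStream and GzipCompressorInputStream are standard commons-compress classes:

try (TarArchiveInputStream tar = new TarArchiveInputStream(
        new GzipCompressorInputStream(new FileInputStream("target/karaf.tar.gz")))) {
    extract(tar, new File("target/karaf-base"));
}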
Use of org.apache.commons.compress.archivers.ArchiveEntry in project tika by apache.
From the class CXFTestBase, method readArchiveFromStream:
protected Map<String, String> readArchiveFromStream(ArchiveInputStream zip) throws IOException {
    Map<String, String> data = new HashMap<String, String>();
    while (true) {
        ArchiveEntry entry = zip.getNextEntry();
        if (entry == null) {
            break;
        }
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        IOUtils.copy(zip, bos);
        data.put(entry.getName(), DigestUtils.md5Hex(bos.toByteArray()));
    }
    return data;
}
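A hedged usage sketch, assuming the archive bytes come back from a Tika endpoint as an InputStream; responseStream and the printout are illustrative:

try (ZipArchiveInputStream zip = new ZipArchiveInputStream(responseStream)) {
    Map<String, String> md5ByName = readArchiveFromStream(zip);
    md5ByName.forEach((name, md5) -> System.out.println(name + " -> " + md5));
}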