use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project caffeine by ben-manes.
the class AbstractTraceReader method readFile.
/** Returns the input stream, decompressing if required. */
private InputStream readFile(String filePath) throws IOException {
  BufferedInputStream input = new BufferedInputStream(openFile(filePath), BUFFER_SIZE);
  input.mark(100);

  // Probe the stream format by attempting each decoder in turn; a failed
  // probe resets the buffered stream to the mark so the next one can retry.
  try {
    return new XZInputStream(input);
  } catch (IOException e) {
    input.reset();
  }
  try {
    return new CompressorStreamFactory().createCompressorInputStream(input);
  } catch (CompressorException e) {
    input.reset();
  }
  try {
    return new ArchiveStreamFactory().createArchiveInputStream(input);
  } catch (ArchiveException e) {
    input.reset();
  }
  return input;
}
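Each decoder above reads only the leading magic bytes before failing, and each catch block rewinds the buffered stream to the mark so the next factory sees the same bytes. A minimal single-format sketch of the same idea (the class and method names here are illustrative, not from the Caffeine source):

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class CompressionSniffer {
  /** Illustrative sketch: returns a decompressing stream, or the raw bytes. */
  public static InputStream open(String filePath) throws IOException {
    // Buffering is required: format auto-detection needs mark/reset support.
    InputStream input = new BufferedInputStream(Files.newInputStream(Paths.get(filePath)));
    try {
      // Detects gzip, bzip2, xz, etc. from the leading magic bytes.
      return new CompressorStreamFactory().createCompressorInputStream(input);
    } catch (CompressorException e) {
      // Unrecognized format; detection rewinds the stream, so fall back to the raw bytes.
      return input;
    }
  }
}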
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project camel by apache.
the class TarAggregationStrategy method addFileToTar.
private void addFileToTar(File source, File file, String fileName) throws IOException, ArchiveException {
    File tmpTar = File.createTempFile(source.getName(), null, parentDir);
    tmpTar.delete();
    if (!source.renameTo(tmpTar)) {
        throw new IOException("Could not make temp file (" + source.getName() + ")");
    }

    FileInputStream fis = new FileInputStream(tmpTar);
    TarArchiveInputStream tin = (TarArchiveInputStream) new ArchiveStreamFactory()
        .createArchiveInputStream(ArchiveStreamFactory.TAR, fis);
    TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(source));
    tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
    InputStream in = new FileInputStream(file);

    // copy the existing entries
    ArchiveEntry nextEntry;
    while ((nextEntry = tin.getNextEntry()) != null) {
        tos.putArchiveEntry(nextEntry);
        IOUtils.copy(tin, tos);
        tos.closeArchiveEntry();
    }

    // add the new entry
    TarArchiveEntry entry = new TarArchiveEntry(fileName == null ? file.getName() : fileName);
    entry.setSize(file.length());
    tos.putArchiveEntry(entry);
    IOUtils.copy(in, tos);
    tos.closeArchiveEntry();

    IOHelper.close(fis, in, tin, tos);
    LOG.trace("Deleting temporary file: {}", tmpTar);
    FileUtil.deleteFile(tmpTar);
}
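Note that the streams above are closed only on the success path, via IOHelper.close. The append step can be written more defensively with try-with-resources, as in the following sketch (names are illustrative; this is not the Camel implementation):

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.utils.IOUtils;

public class TarAppendSketch {
    /** Illustrative sketch: writes a single file into a fresh tar archive. */
    public static void writeTar(File archive, File file) throws IOException {
        try (TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(archive));
                FileInputStream in = new FileInputStream(file)) {
            // POSIX mode lifts the 100-character limit on entry names.
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            TarArchiveEntry entry = new TarArchiveEntry(file.getName());
            entry.setSize(file.length());
            tos.putArchiveEntry(entry);
            IOUtils.copy(in, tos);
            tos.closeArchiveEntry();
        }
    }
}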
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project tika by apache.
the class PackageParserTest method testCoverage.
@Test
public void testCoverage() throws Exception {
    // Test that PackageParser covers every input stream format handled by
    // ArchiveStreamFactory. When commons-compress is upgraded and a new
    // stream type is added, this catches any format we aren't handling yet.
    ArchiveStreamFactory archiveStreamFactory = new ArchiveStreamFactory(StandardCharsets.UTF_8.name());
    PackageParser packageParser = new PackageParser();
    ParseContext parseContext = new ParseContext();
    for (String name : archiveStreamFactory.getInputStreamArchiveNames()) {
        MediaType mt = PackageParser.getMediaType(name);
        // octet-stream means PackageParser has no media type mapped for this name
        if (mt.equals(MediaType.OCTET_STREAM)) {
            fail("getting octet-stream for: " + name);
        }
        if (!packageParser.getSupportedTypes(parseContext).contains(mt)) {
            fail("PackageParser should support: " + mt.toString());
        }
    }
}
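For reference, the archive names the test iterates over can be listed directly; the exact set depends on the commons-compress version on the classpath (a standalone sketch, not part of the Tika test):

import java.nio.charset.StandardCharsets;

import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class ListArchiveNames {
    public static void main(String[] args) {
        ArchiveStreamFactory factory = new ArchiveStreamFactory(StandardCharsets.UTF_8.name());
        // Typically prints names such as ar, arj, cpio, dump, jar, tar, zip, 7z.
        factory.getInputStreamArchiveNames().forEach(System.out::println);
    }
}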
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project halyard by spinnaker.
the class BackupService method untarHalconfig.
private void untarHalconfig(String halconfigDir, String halconfigTar) {
    FileInputStream tarInput = null;
    TarArchiveInputStream tarArchiveInputStream = null;
    try {
        tarInput = new FileInputStream(new File(halconfigTar));
        tarArchiveInputStream = (TarArchiveInputStream) new ArchiveStreamFactory()
            .createArchiveInputStream("tar", tarInput);
    } catch (IOException | ArchiveException e) {
        throw new HalException(Problem.Severity.FATAL, "Failed to open backup: " + e.getMessage(), e);
    }

    try {
        ArchiveEntry archiveEntry = tarArchiveInputStream.getNextEntry();
        while (archiveEntry != null) {
            String entryName = archiveEntry.getName();
            Path outputPath = Paths.get(halconfigDir, entryName);
            File outputFile = outputPath.toFile();

            // Create any missing parent directories before writing the entry.
            if (!outputFile.getParentFile().exists()) {
                outputFile.getParentFile().mkdirs();
            }

            if (archiveEntry.isDirectory()) {
                outputFile.mkdir();
            } else {
                Files.copy(tarArchiveInputStream, outputPath, REPLACE_EXISTING);
            }

            archiveEntry = tarArchiveInputStream.getNextEntry();
        }
    } catch (IOException e) {
        throw new HalException(Problem.Severity.FATAL, "Failed to read archive entry: " + e.getMessage(), e);
    }
}
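The same read-entry/copy loop generalizes beyond Halyard; a compact standalone version, using NIO for directory creation and closing the stream with try-with-resources (names are illustrative):

import java.io.BufferedInputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class UntarSketch {
    public static void extract(String tarPath, String destDir) throws IOException, ArchiveException {
        try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(
                "tar", new BufferedInputStream(Files.newInputStream(Paths.get(tarPath))))) {
            ArchiveEntry entry;
            while ((entry = ais.getNextEntry()) != null) {
                Path out = Paths.get(destDir, entry.getName());
                if (entry.isDirectory()) {
                    Files.createDirectories(out);
                } else {
                    Files.createDirectories(out.getParent());
                    // Copies only the current entry: the stream reports EOF at the entry boundary.
                    Files.copy(ais, out, StandardCopyOption.REPLACE_EXISTING);
                }
            }
        }
    }
}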
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project halyard by spinnaker.
the class RegistryBackedArchiveProfileBuilder method build.
public List<Profile> build(DeploymentConfiguration deploymentConfiguration, String baseOutputPath,
        SpinnakerArtifact artifact, String archiveName) {
    String version = artifactService.getArtifactVersion(deploymentConfiguration.getName(), artifact);

    InputStream is;
    try {
        is = profileRegistry.readArchiveProfile(artifact.getName(), version, archiveName);
    } catch (IOException e) {
        throw new HalException(Problem.Severity.FATAL,
            "Error retrieving contents of archive " + archiveName + ".tar.gz", e);
    }

    TarArchiveInputStream tis;
    try {
        tis = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream("tar", is);
    } catch (ArchiveException e) {
        throw new HalException(Problem.Severity.FATAL, "Failed to unpack tar archive", e);
    }

    try {
        List<Profile> result = new ArrayList<>();
        ArchiveEntry profileEntry = tis.getNextEntry();
        while (profileEntry != null) {
            if (profileEntry.isDirectory()) {
                profileEntry = tis.getNextEntry();
                continue;
            }

            String entryName = profileEntry.getName();
            String profileName = String.join("/", artifact.getName(), archiveName, entryName);
            String outputPath = Paths.get(baseOutputPath, archiveName, entryName).toString();
            String contents = IOUtils.toString(tis);

            // Wrap each entry's contents in an anonymous ProfileFactory so the
            // resulting Profile is built the same way as registry-backed profiles.
            result.add((new ProfileFactory() {
                @Override
                protected void setProfile(Profile profile, DeploymentConfiguration deploymentConfiguration,
                        SpinnakerRuntimeSettings endpoints) {
                    profile.setContents(profile.getBaseContents());
                }

                @Override
                protected Profile getBaseProfile(String name, String version, String outputFile) {
                    return new Profile(name, version, outputFile, contents);
                }

                @Override
                protected boolean showEditWarning() {
                    return false;
                }

                @Override
                protected ArtifactService getArtifactService() {
                    return artifactService;
                }

                @Override
                public SpinnakerArtifact getArtifact() {
                    return artifact;
                }

                @Override
                protected String commentPrefix() {
                    return null;
                }
            }).getProfile(profileName, outputPath, deploymentConfiguration, null));

            profileEntry = tis.getNextEntry();
        }

        return result;
    } catch (IOException e) {
        throw new HalException(Problem.Severity.FATAL, "Failed to read profile entry", e);
    }
}
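The key detail in build is that IOUtils.toString(tis) reads only the current entry, because a TarArchiveInputStream reports end-of-stream at each entry boundary. That behavior can be sketched in isolation (helper class is illustrative; charset made explicit):

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveInputStream;
import org.apache.commons.io.IOUtils;

public class TarEntryText {
    /** Illustrative sketch: returns the text of each regular file, keyed by entry name. */
    public static Map<String, String> readAll(TarArchiveInputStream tis) throws IOException {
        Map<String, String> contents = new LinkedHashMap<>();
        ArchiveEntry entry;
        while ((entry = tis.getNextEntry()) != null) {
            if (!entry.isDirectory()) {
                // Reading "to the end" stops at the current entry's boundary.
                contents.put(entry.getName(), IOUtils.toString(tis, StandardCharsets.UTF_8));
            }
        }
        return contents;
    }
}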