use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project coprhd-controller by CoprHD.
the class CustomServicesAnsibleResourceDAO method getPlaybooks.
private StringSet getPlaybooks(final byte[] archive) {
    try (final TarArchiveInputStream tarIn = new TarArchiveInputStream(
            new GzipCompressorInputStream(new ByteArrayInputStream(archive)))) {
        TarArchiveEntry entry = tarIn.getNextTarEntry();
        final StringSet playbooks = new StringSet();
        while (entry != null) {
            if (entry.isFile() && entry.getName().toLowerCase().endsWith(".yml")) {
                final java.nio.file.Path playbookPath =
                        FileSystems.getDefault().getPath(entry.getName()).normalize();
                if (null != playbookPath && playbookPath.getNameCount() >= 0) {
                    playbooks.add(playbookPath.toString());
                }
            }
            entry = tarIn.getNextTarEntry();
        }
        return playbooks;
    } catch (final IOException e) {
        throw InternalServerErrorException.internalServerErrors.genericApisvcError(
                "Invalid ansible archive. The archive needs to be in 'tar.gz' format. "
                        + "Create the tar using the command 'tar -zcvf tar_name directory_path_to_tar' and then upload",
                e);
    }
}
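The archive passed in is expected to be a gzip-compressed tar. A minimal sketch of building such a byte array with Commons Compress, for example in a test; the playbook name and contents below are illustrative and not part of the CoprHD code:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream;

// Hypothetical helper: builds a one-entry tar.gz archive in memory.
static byte[] buildSamplePlaybookArchive() throws IOException {
    final byte[] playbook = "- hosts: all\n  tasks: []\n".getBytes(StandardCharsets.UTF_8);
    final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (TarArchiveOutputStream tarOut =
            new TarArchiveOutputStream(new GzipCompressorOutputStream(bytes))) {
        final TarArchiveEntry entry = new TarArchiveEntry("roles/site.yml");
        entry.setSize(playbook.length);        // size must be set before the entry header is written
        tarOut.putArchiveEntry(entry);
        tarOut.write(playbook);
        tarOut.closeArchiveEntry();
    }
    return bytes.toByteArray();
}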
use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project coprhd-controller by CoprHD.
the class CustomServicesLocalAnsibleExecution method uncompressArchive.
private void uncompressArchive(final byte[] ansibleArchive, final List<String> fileList, final List<String> pathList) {
    try (final TarArchiveInputStream tarIn = new TarArchiveInputStream(
            new GzipCompressorInputStream(new ByteArrayInputStream(ansibleArchive)))) {
        TarArchiveEntry entry = tarIn.getNextTarEntry();
        while (entry != null) {
            final File curTarget = new File(orderDir, entry.getName());
            if (entry.isDirectory()) {
                curTarget.mkdirs();
            } else {
                final File parent = curTarget.getParentFile();
                if (!parent.exists()) {
                    parent.mkdirs();
                }
                try (final OutputStream out = new FileOutputStream(curTarget)) {
                    IOUtils.copy(tarIn, out);
                }
                // Add file name and file path for softlinks
                fileList.add(curTarget.getName());
                pathList.add(curTarget.getAbsolutePath().replaceFirst(CustomServicesConstants.CHROOT_DIR + "/", ""));
            }
            entry = tarIn.getNextTarEntry();
        }
    } catch (final IOException e) {
        ExecutionUtils.currentContext().logError("customServicesOperationExecution.logStatus",
                step.getId(), step.getFriendlyName(), "Invalid Ansible archive");
        logger.error("Exception:", e);
        throw InternalServerErrorException.internalServerErrors.genericApisvcError("Invalid Ansible archive", e);
    }
}
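This extraction writes each entry directly under orderDir without checking that the entry name stays inside it. A hedged sketch of a containment check, along the lines of the canonical-path guard used in the Vaadin flow example below; the helper name is invented for illustration:

import java.io.File;
import java.io.IOException;

// Hypothetical helper: resolves a tar entry name under the extraction root and
// rejects names (e.g. containing "..") that would land outside it.
static File resolveInside(final File root, final String entryName) throws IOException {
    final File target = new File(root, entryName);
    final String rootPath = root.getCanonicalPath() + File.separator;
    if (!target.getCanonicalPath().startsWith(rootPath)) {
        throw new IOException("Tar entry escapes extraction root: " + entryName);
    }
    return target;
}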
use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project java by kubernetes-client.
the class Copy method copyDirectoryFromPodAsync.
public Future<Integer> copyDirectoryFromPodAsync(String namespace, String pod, String container, String srcPath, Path destination)
        throws IOException, ApiException {
    final Process proc = this.exec(namespace, pod,
            new String[] { "sh", "-c", "tar cz - " + srcPath + " | base64" }, container, false, false);
    try (InputStream is = new Base64InputStream(new BufferedInputStream(proc.getInputStream()));
            ArchiveInputStream archive = new TarArchiveInputStream(new GzipCompressorInputStream(is))) {
        for (ArchiveEntry entry = archive.getNextEntry(); entry != null; entry = archive.getNextEntry()) {
            if (!archive.canReadEntryData(entry)) {
                log.error("Can't read: " + entry);
                continue;
            }
            String normalName = FilenameUtils.normalize(entry.getName());
            if (normalName == null) {
                throw new IOException("Invalid entry: " + entry.getName());
            }
            File f = new File(destination.toFile(), normalName);
            if (entry.isDirectory()) {
                if (!f.isDirectory() && !f.mkdirs()) {
                    throw new IOException("create directory failed: " + f);
                }
            } else {
                File parent = f.getParentFile();
                if (!parent.isDirectory() && !parent.mkdirs()) {
                    throw new IOException("create directory failed: " + parent);
                }
                try (OutputStream fs = new FileOutputStream(f)) {
                    Streams.copy(archive, fs);
                    fs.flush();
                }
            }
        }
    }
    return new ProcessFuture(proc);
}
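A usage sketch for the method above; the namespace, pod, container and paths are placeholders, and the exact package of Copy depends on the kubernetes-client version in use:

// import io.kubernetes.client.Copy;  // or the util package, depending on client version
import java.nio.file.Paths;
import java.util.concurrent.Future;

// Usage sketch only; all literal values are illustrative.
static void copyLogs(Copy copy) throws Exception {
    Future<Integer> done = copy.copyDirectoryFromPodAsync(
            "default", "my-pod", "main", "/var/log/app", Paths.get("/tmp/app-logs"));
    Integer exit = done.get();  // resolves once the in-pod "tar cz ... | base64" process exits
    System.out.println("tar exit code: " + exit);
}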
use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project flow by vaadin.
the class DefaultArchiveExtractor method extractGzipTarArchive.
private void extractGzipTarArchive(File archive, File destinationDirectory) throws IOException {
    try (FileInputStream fis = new FileInputStream(archive);
            GzipCompressorInputStream gis = new GzipCompressorInputStream(fis);
            TarArchiveInputStream tarIn = new TarArchiveInputStream(gis)) {
        TarArchiveEntry tarEntry = tarIn.getNextTarEntry();
        String canonicalDestinationDirectory = destinationDirectory.getCanonicalPath();
        while (tarEntry != null) {
            // Create a file for this tarEntry
            final File destPath = new File(destinationDirectory + File.separator + tarEntry.getName());
            prepDestination(destPath, tarEntry.isDirectory());
            if (!startsWithPath(destPath.getCanonicalPath(), canonicalDestinationDirectory)) {
                throw new IOException("Expanding " + tarEntry.getName()
                        + " would create file outside of " + canonicalDestinationDirectory);
            }
            copyTarFileContents(tarIn, tarEntry, destPath);
            tarEntry = tarIn.getNextTarEntry();
        }
    }
}
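prepDestination, startsWithPath and copyTarFileContents are helpers defined elsewhere in DefaultArchiveExtractor and are not shown here. Hedged sketches of what the first two plausibly do; the real flow implementations may differ:

import java.io.File;
import java.io.IOException;

// Sketch: create the directory for a directory entry, or the parent directory for a file entry.
static void prepDestination(File path, boolean directory) throws IOException {
    if (directory) {
        path.mkdirs();  // ignore the result; existence is re-checked by later writes
    } else if (!path.getParentFile().isDirectory() && !path.getParentFile().mkdirs()) {
        throw new IOException("Could not create directory " + path.getParentFile());
    }
}

// Sketch: treat the destination directory as a path prefix, not a plain string prefix,
// so "/out-other" is not accepted when the root is "/out".
static boolean startsWithPath(String destPath, String destDir) {
    return destPath.equals(destDir) || destPath.startsWith(destDir + File.separator);
}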
use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project arctic-sea by 52North.
the class FileDownloader method gunzipFile.
public static void gunzipFile(String filePath) throws IOException {
    File file = new File(filePath);
    String outPath = null;
    final byte[] buff = new byte[1024];
    if (!file.getName().endsWith("gz")) {
        throw new IOException("File does not end with the .gz extension");
    } else {
        outPath = file.getAbsolutePath().substring(0, file.getAbsolutePath().length() - 3);
    }
    try (FileOutputStream fout = new FileOutputStream(outPath);
            FileInputStream fin = new FileInputStream(file);
            BufferedInputStream bin = new BufferedInputStream(fin);
            GzipCompressorInputStream gzipin = new GzipCompressorInputStream(bin)) {
        int n = 0;
        while (-1 != (n = gzipin.read(buff))) {
            fout.write(buff, 0, n);
        }
        LOG.debug("Extracted file path {}", outPath);
    } catch (IOException e) {
        throw e;
    }
}
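The manual read/write loop above can also be written with Commons Compress' IOUtils; a minimal equivalent sketch with placeholder parameter names:

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream;
import org.apache.commons.compress.utils.IOUtils;

// Equivalent sketch: stream the decompressed bytes straight to the output file.
static void gunzip(String inPath, String outPath) throws IOException {
    try (GzipCompressorInputStream gzipIn = new GzipCompressorInputStream(
            new BufferedInputStream(new FileInputStream(inPath)));
            FileOutputStream out = new FileOutputStream(outPath)) {
        IOUtils.copy(gzipIn, out);
    }
}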