Example 1 with CompressorException

Use of org.apache.commons.compress.compressors.CompressorException in project caffeine by ben-manes.

The class AbstractTraceReader, method readFile.

/** Returns the input stream, decompressing if required. */
private InputStream readFile(String filePath) throws IOException {
    BufferedInputStream input = new BufferedInputStream(openFile(filePath), BUFFER_SIZE);
    // Mark the stream so each failed format probe can rewind to the beginning.
    input.mark(100);
    try {
        // First probe: XZ (via the XZ for Java library).
        return new XZInputStream(input);
    } catch (IOException e) {
        input.reset();
    }
    try {
        // Second probe: any compression format Commons Compress can auto-detect.
        return new CompressorStreamFactory().createCompressorInputStream(input);
    } catch (CompressorException e) {
        input.reset();
    }
    try {
        // Third probe: an uncompressed archive (tar, zip, ...).
        return new ArchiveStreamFactory().createArchiveInputStream(input);
    } catch (ArchiveException e) {
        input.reset();
    }
    // Plain, uncompressed file.
    return input;
}
Also used: ArchiveStreamFactory (org.apache.commons.compress.archivers.ArchiveStreamFactory), XZInputStream (org.tukaani.xz.XZInputStream), BufferedInputStream (java.io.BufferedInputStream), CompressorException (org.apache.commons.compress.compressors.CompressorException), CompressorStreamFactory (org.apache.commons.compress.compressors.CompressorStreamFactory), IOException (java.io.IOException), ArchiveException (org.apache.commons.compress.archivers.ArchiveException)
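
As a quick standalone illustration of the same probe-and-rewind idea, the sketch below wraps a file in a BufferedInputStream (auto-detection needs mark/reset support) and falls back to the raw bytes when CompressorStreamFactory does not recognize the format. The class name MaybeCompressed and the use of java.nio.file are illustrative and not part of the caffeine project.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class MaybeCompressed {

    /** Opens a file, transparently decompressing it when Commons Compress recognizes the format. */
    static InputStream open(String path) throws IOException {
        // Auto-detection needs mark/reset support, hence the BufferedInputStream wrapper.
        BufferedInputStream in = new BufferedInputStream(Files.newInputStream(Paths.get(path)));
        in.mark(64);
        try {
            return new CompressorStreamFactory().createCompressorInputStream(in);
        } catch (CompressorException e) {
            // Not a recognized compression format: rewind and hand back the raw bytes.
            in.reset();
            return in;
        }
    }
}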

Example 2 with CompressorException

Use of org.apache.commons.compress.compressors.CompressorException in project gitblit by gitblit.

The class CompressionUtils, method tar.

/**
	 * Compresses/archives the contents of the tree at the (optionally)
	 * specified revision and the (optionally) specified basepath to the
	 * supplied output stream.
	 *
	 * @param algorithm
	 *            compression algorithm for tar (optional)
	 * @param repository
	 * @param basePath
	 *            if unspecified, entire repository is assumed.
	 * @param objectId
	 *            if unspecified, HEAD is assumed.
	 * @param os
	 * @return true if the repository was successfully archived to the supplied
	 *         output stream
	 */
private static boolean tar(String algorithm, Repository repository, IFilestoreManager filestoreManager, String basePath, String objectId, OutputStream os) {
    RevCommit commit = JGitUtils.getCommit(repository, objectId);
    if (commit == null) {
        return false;
    }
    OutputStream cos = os;
    if (!StringUtils.isEmpty(algorithm)) {
        try {
            cos = new CompressorStreamFactory().createCompressorOutputStream(algorithm, os);
        } catch (CompressorException e1) {
            error(e1, repository, "{0} failed to open {1} stream", algorithm);
        }
    }
    boolean success = false;
    RevWalk rw = new RevWalk(repository);
    TreeWalk tw = new TreeWalk(repository);
    try {
        tw.reset();
        tw.addTree(commit.getTree());
        TarArchiveOutputStream tos = new TarArchiveOutputStream(cos);
        tos.setAddPaxHeadersForNonAsciiNames(true);
        tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        if (!StringUtils.isEmpty(basePath)) {
            PathFilter f = PathFilter.create(basePath);
            tw.setFilter(f);
        }
        tw.setRecursive(true);
        MutableObjectId id = new MutableObjectId();
        long modified = commit.getAuthorIdent().getWhen().getTime();
        while (tw.next()) {
            FileMode mode = tw.getFileMode(0);
            if (mode == FileMode.GITLINK || mode == FileMode.TREE) {
                continue;
            }
            tw.getObjectId(id, 0);
            ObjectLoader loader = repository.open(id);
            if (FileMode.SYMLINK == mode) {
                TarArchiveEntry entry = new TarArchiveEntry(tw.getPathString(), TarArchiveEntry.LF_SYMLINK);
                ByteArrayOutputStream bos = new ByteArrayOutputStream();
                loader.copyTo(bos);
                entry.setLinkName(bos.toString());
                entry.setModTime(modified);
                tos.putArchiveEntry(entry);
                tos.closeArchiveEntry();
            } else {
                TarArchiveEntry entry = new TarArchiveEntry(tw.getPathString());
                entry.setMode(mode.getBits());
                entry.setModTime(modified);
                FilestoreModel filestoreItem = null;
                if (JGitUtils.isPossibleFilestoreItem(loader.getSize())) {
                    filestoreItem = JGitUtils.getFilestoreItem(tw.getObjectReader().open(id));
                }
                final long size = (filestoreItem == null) ? loader.getSize() : filestoreItem.getSize();
                entry.setSize(size);
                tos.putArchiveEntry(entry);
                if (filestoreItem == null) {
                    //Copy repository stored file
                    loader.copyTo(tos);
                } else {
                    //Copy filestore file
                    try (FileInputStream streamIn = new FileInputStream(filestoreManager.getStoragePath(filestoreItem.oid))) {
                        IOUtils.copyLarge(streamIn, tos);
                    } catch (Throwable e) {
                        LOGGER.error(MessageFormat.format("Failed to archive filestore item {0}", filestoreItem.oid), e);
                        //Handle as per other errors 
                        throw e;
                    }
                }
                tos.closeArchiveEntry();
            }
        }
        tos.finish();
        tos.close();
        cos.close();
        success = true;
    } catch (IOException e) {
        error(e, repository, "{0} failed to {1} stream files from commit {2}", algorithm, commit.getName());
    } finally {
        tw.close();
        rw.dispose();
    }
    return success;
}
Also used: FileMode (org.eclipse.jgit.lib.FileMode), PathFilter (org.eclipse.jgit.treewalk.filter.PathFilter), FilestoreModel (com.gitblit.models.FilestoreModel), ByteArrayOutputStream (java.io.ByteArrayOutputStream), TarArchiveOutputStream (org.apache.commons.compress.archivers.tar.TarArchiveOutputStream), ZipArchiveOutputStream (org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream), OutputStream (java.io.OutputStream), CompressorStreamFactory (org.apache.commons.compress.compressors.CompressorStreamFactory), IOException (java.io.IOException), RevWalk (org.eclipse.jgit.revwalk.RevWalk), TarArchiveEntry (org.apache.commons.compress.archivers.tar.TarArchiveEntry), FileInputStream (java.io.FileInputStream), MutableObjectId (org.eclipse.jgit.lib.MutableObjectId), CompressorException (org.apache.commons.compress.compressors.CompressorException), ObjectLoader (org.eclipse.jgit.lib.ObjectLoader), TreeWalk (org.eclipse.jgit.treewalk.TreeWalk), RevCommit (org.eclipse.jgit.revwalk.RevCommit)
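
The Commons Compress call that matters here is createCompressorOutputStream(algorithm, os), which layers the requested compressor under the TarArchiveOutputStream before any entries are written. Below is a much smaller sketch of that wiring, assuming gzip and a single made-up entry; the class name TarGzSketch and the payload are illustrative only.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class TarGzSketch {

    /** Writes a single in-memory entry into a gzip-compressed tar at the given path. */
    static void writeSample(String tarGzPath) throws IOException, CompressorException {
        byte[] payload = "hello archive".getBytes(StandardCharsets.UTF_8);
        try (OutputStream fos = Files.newOutputStream(Paths.get(tarGzPath));
             OutputStream cos = new CompressorStreamFactory()
                     .createCompressorOutputStream(CompressorStreamFactory.GZIP, fos);
             TarArchiveOutputStream tos = new TarArchiveOutputStream(cos)) {
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            TarArchiveEntry entry = new TarArchiveEntry("hello.txt");
            entry.setSize(payload.length);
            tos.putArchiveEntry(entry);
            tos.write(payload);
            tos.closeArchiveEntry();
            tos.finish();
        }
    }
}

The try-with-resources order closes the TarArchiveOutputStream first, so the tar trailer is flushed before the compressor and the underlying file stream are closed.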

Example 3 with CompressorException

Use of org.apache.commons.compress.compressors.CompressorException in project cloudstack by apache.

The class VhdProcessor, method getTemplateVirtualSize.

protected long getTemplateVirtualSize(File file) throws IOException {
    byte[] currentSize = new byte[8];
    byte[] cookie = new byte[8];
    byte[] creatorApp = new byte[4];
    BufferedInputStream fileStream = new BufferedInputStream(new FileInputStream(file));
    InputStream strm = fileStream;
    boolean isCompressed = checkCompressed(file.getAbsolutePath());
    if (isCompressed) {
        try {
            strm = new CompressorStreamFactory().createCompressorInputStream(fileStream);
        } catch (CompressorException e) {
            s_logger.info("error opening compressed VHD file " + file.getName());
            return file.length();
        }
    }
    try {
        //read the backup footer present at the top of the VHD file
        strm.read(cookie);
        if (!new String(cookie).equals(vhdIdentifierCookie)) {
            strm.close();
            return file.length();
        }
        long skipped = strm.skip(vhdFooterCreatorAppOffset - vhdCookieOffset);
        if (skipped == -1) {
            throw new IOException("Unexpected end-of-file");
        }
        long read = strm.read(creatorApp);
        if (read == -1) {
            throw new IOException("Unexpected end-of-file");
        }
        skipped = strm.skip(vhdFooterCurrentSizeOffset - vhdFooterCreatorVerOffset - vhdCookieOffset);
        if (skipped == -1) {
            throw new IOException("Unexpected end-of-file");
        }
        read = strm.read(currentSize);
        if (read == -1) {
            throw new IOException("Unexpected end-of-file");
        }
    } catch (IOException e) {
        s_logger.warn("Error reading virtual size from VHD file " + e.getMessage() + " VHD: " + file.getName());
        return file.length();
    } finally {
        if (strm != null) {
            strm.close();
        }
    }
    return NumbersUtil.bytesToLong(currentSize);
}
Also used: BufferedInputStream (java.io.BufferedInputStream), CompressorInputStream (org.apache.commons.compress.compressors.CompressorInputStream), FileInputStream (java.io.FileInputStream), InputStream (java.io.InputStream), CompressorException (org.apache.commons.compress.compressors.CompressorException), CompressorStreamFactory (org.apache.commons.compress.compressors.CompressorStreamFactory), IOException (java.io.IOException)
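
One detail worth noting: a single InputStream.read(byte[]) call is not guaranteed to fill the buffer, which is why fixed-size header reads are often done with org.apache.commons.compress.utils.IOUtils.readFully. The sketch below shows that pattern for the 8-byte VHD cookie; the method name readCookie and the use of java.nio.file are illustrative, and the comparison against the VHD cookie value is left out.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.compress.utils.IOUtils;

public class VhdHeaderSketch {

    /** Reads the 8-byte cookie from a possibly compressed VHD file; returns null if too short. */
    static String readCookie(String path) throws IOException {
        try (InputStream raw = new BufferedInputStream(Files.newInputStream(Paths.get(path)))) {
            InputStream in = raw;
            try {
                // Transparently unwrap a compression layer when one is detected.
                in = new CompressorStreamFactory().createCompressorInputStream(raw);
            } catch (CompressorException e) {
                // Not compressed (or unknown format): read the raw bytes instead.
            }
            byte[] cookie = new byte[8];
            if (IOUtils.readFully(in, cookie) != cookie.length) {
                return null; // Shorter than a VHD cookie.
            }
            return new String(cookie, StandardCharsets.US_ASCII);
        }
    }
}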

Example 4 with CompressorException

Use of org.apache.commons.compress.compressors.CompressorException in project logging-log4j2 by apache.

The class RollingAppenderSizeTest, method testAppender.

@Test
public void testAppender() throws Exception {
    final Path path = Paths.get(DIR, "rollingtest.log");
    if (Files.exists(path) && createOnDemand) {
        Assert.fail(String.format("Unexpected file: %s (%s bytes)", path, Files.getAttribute(path, "size")));
    }
    for (int i = 0; i < 500; ++i) {
        logger.debug("This is test message number " + i);
    }
    try {
        Thread.sleep(100);
    } catch (final InterruptedException ie) {
    // Ignore the error.
    }
    final File dir = new File(DIR);
    assertTrue("Directory not created", dir.exists() && dir.listFiles().length > 0);
    final File[] files = dir.listFiles();
    assertNotNull(files);
    assertThat(files, hasItemInArray(that(hasName(that(endsWith(fileExtension))))));
    final FileExtension ext = FileExtension.lookup(fileExtension);
    if (ext == null || FileExtension.ZIP == ext || FileExtension.PACK200 == ext) {
        // Apache Commons Compress cannot deflate zip? TODO test decompressing these formats
        return;
    }
    // Stop the context to make sure all files are compressed and closed. Trying to remedy failures in CI builds.
    if (!loggerContextRule.getLoggerContext().stop(30, TimeUnit.SECONDS)) {
        System.err.println("Could not stop cleanly " + loggerContextRule + " for " + this);
    }
    for (final File file : files) {
        if (file.getName().endsWith(fileExtension)) {
            CompressorInputStream in = null;
            try (FileInputStream fis = new FileInputStream(file)) {
                try {
                    in = new CompressorStreamFactory().createCompressorInputStream(ext.name().toLowerCase(), fis);
                } catch (final CompressorException ce) {
                    ce.printStackTrace();
                    fail("Error creating intput stream from " + file.toString() + ": " + ce.getMessage());
                }
                final ByteArrayOutputStream baos = new ByteArrayOutputStream();
                assertNotNull("No input stream for " + file.getName(), in);
                try {
                    IOUtils.copy(in, baos);
                } catch (final Exception ex) {
                    ex.printStackTrace();
                    fail("Unable to decompress " + file.getAbsolutePath());
                }
                final String text = new String(baos.toByteArray(), Charset.defaultCharset());
                final String[] lines = text.split("[\\r\\n]+");
                for (final String line : lines) {
                    assertTrue(line.contains("DEBUG o.a.l.l.c.a.r.RollingAppenderSizeTest [main] This is test message number"));
                }
            } finally {
                Closer.close(in);
            }
        }
    }
}
Also used: Path (java.nio.file.Path), CompressorStreamFactory (org.apache.commons.compress.compressors.CompressorStreamFactory), CompressorInputStream (org.apache.commons.compress.compressors.CompressorInputStream), ByteArrayOutputStream (java.io.ByteArrayOutputStream), FileInputStream (java.io.FileInputStream), CompressorException (org.apache.commons.compress.compressors.CompressorException), IOException (java.io.IOException), File (java.io.File), Test (org.junit.Test)
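
The test resolves the compressor by name (ext.name().toLowerCase()) instead of relying on auto-detection, which also works when the underlying FileInputStream does not support mark/reset. A minimal sketch of the same named-format lookup for a gzip file follows; it is illustrative only and uses the CompressorStreamFactory.GZIP constant plus Commons Compress's own IOUtils rather than the test's helpers.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;
import org.apache.commons.compress.utils.IOUtils;

public class GzToString {

    /** Decompresses a gzip file by explicit format name and returns its contents as UTF-8 text. */
    static String read(String gzPath) throws IOException, CompressorException {
        try (InputStream fis = Files.newInputStream(Paths.get(gzPath));
             InputStream in = new CompressorStreamFactory()
                     .createCompressorInputStream(CompressorStreamFactory.GZIP, fis)) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            // Copy the decompressed bytes into memory.
            IOUtils.copy(in, baos);
            return new String(baos.toByteArray(), StandardCharsets.UTF_8);
        }
    }
}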

Example 5 with CompressorException

Use of org.apache.commons.compress.compressors.CompressorException in project stanbol by apache.

The class ManagementUtils, method getArchiveInputStream.

/**
     * Tries to create an {@link ArchiveInputStream} based on the parsed {@link InputStream}.
     * First the provided resource name is used to detect the type of the archive.
     * If that does not work, or the parsed resource name is <code>null</code>, the
     * stream is created using auto-detection of the archive type.
     * @param resourceName the name of the resource or <code>null</code>
     * @param is the {@link InputStream}
     * @return the {@link ArchiveInputStream}
     * @throws ArchiveException if the {@link InputStream} does not represent any
     * supported archive type
     */
public static ArchiveInputStream getArchiveInputStream(String resourceName, InputStream is) throws ArchiveException {
    if (is == null) {
        return null;
    }
    String extension = resourceName == null ? null : FilenameUtils.getExtension(resourceName);
    if (!is.markSupported()) {
        is = new BufferedInputStream(is);
    }
    InputStream as;
    if (!"zip".equalsIgnoreCase(extension)) {
        //we need to first check if this is a compressed stream
        try {
            as = compressorStreamFactory.createCompressorInputStream(extension, is);
            // assume tar archives
            extension = "tar";
        } catch (CompressorException e) {
            try {
                as = compressorStreamFactory.createCompressorInputStream(is);
                // assume tar archives
                extension = "tar";
            } catch (CompressorException e1) {
                //not a compression stream?
                as = is;
            }
        }
    } else {
        //zip ... this is already an archive stream
        as = is;
    }
    if (extension != null) {
        try {
            return archiveStreamFactory.createArchiveInputStream(extension, as);
        } catch (ArchiveException e) {
        //ignore
        }
    }
    //try to detect
    return archiveStreamFactory.createArchiveInputStream(is);
}
Also used: BufferedInputStream (java.io.BufferedInputStream), ArchiveInputStream (org.apache.commons.compress.archivers.ArchiveInputStream), InputStream (java.io.InputStream), CompressorException (org.apache.commons.compress.compressors.CompressorException), ArchiveException (org.apache.commons.compress.archivers.ArchiveException)
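
Seen from the consumer side, the compressor-then-archiver layering above amounts to: peel off a compression layer if one is detected, re-buffer (compressor streams do not support mark/reset), and let ArchiveStreamFactory detect the archive format. The sketch below lists the entries of a possibly compressed archive along those lines; the class name ListTarGzEntries is made up, and unlike the Stanbol code it creates fresh factory instances instead of reusing fields.

import java.io.BufferedInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.compressors.CompressorException;
import org.apache.commons.compress.compressors.CompressorStreamFactory;

public class ListTarGzEntries {

    /** Prints entry names of a (possibly compressed) archive, detecting both layers from the stream. */
    static void list(String path) throws IOException, ArchiveException {
        InputStream in = new BufferedInputStream(Files.newInputStream(Paths.get(path)));
        try {
            try {
                // Peel off a compression layer if one is detected; re-buffer for archive detection.
                in = new BufferedInputStream(new CompressorStreamFactory().createCompressorInputStream(in));
            } catch (CompressorException e) {
                // Not compressed; fall through to archive detection on the raw stream.
            }
            try (ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(in)) {
                ArchiveEntry entry;
                while ((entry = ais.getNextEntry()) != null) {
                    System.out.println(entry.getName());
                }
            }
        } finally {
            in.close();
        }
    }
}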

Aggregations

CompressorException (org.apache.commons.compress.compressors.CompressorException): 8
CompressorStreamFactory (org.apache.commons.compress.compressors.CompressorStreamFactory): 7
BufferedInputStream (java.io.BufferedInputStream): 5
FileInputStream (java.io.FileInputStream): 5
IOException (java.io.IOException): 5
CompressorInputStream (org.apache.commons.compress.compressors.CompressorInputStream): 4
ByteArrayOutputStream (java.io.ByteArrayOutputStream): 2
InputStream (java.io.InputStream): 2
ArchiveException (org.apache.commons.compress.archivers.ArchiveException): 2
FilestoreModel (com.gitblit.models.FilestoreModel): 1
BufferedOutputStream (java.io.BufferedOutputStream): 1
File (java.io.File): 1
FileNotFoundException (java.io.FileNotFoundException): 1
FileOutputStream (java.io.FileOutputStream): 1
OutputStream (java.io.OutputStream): 1
Path (java.nio.file.Path): 1
MemoryLimitException (org.apache.commons.compress.MemoryLimitException): 1
ArchiveInputStream (org.apache.commons.compress.archivers.ArchiveInputStream): 1
ArchiveStreamFactory (org.apache.commons.compress.archivers.ArchiveStreamFactory): 1
TarArchiveEntry (org.apache.commons.compress.archivers.tar.TarArchiveEntry): 1