Search in sources :

Example 16 with GzipCompressorInputStream

use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project tycho by eclipse.

The class TarGzArchiverTest defines the method getTarEntries.

/**
 * Reads every entry of the tar.gz archive under test and indexes it by entry name.
 *
 * @return a map from tar entry name to its {@link TarArchiveEntry} metadata
 * @throws FileNotFoundException if {@code tarGzArchive} does not exist
 * @throws IOException if the archive cannot be read or is not valid gzip/tar
 */
private Map<String, TarArchiveEntry> getTarEntries() throws IOException, FileNotFoundException {
    Map<String, TarArchiveEntry> entries = new HashMap<>();
    // try-with-resources closes the whole stream chain (tar -> gzip -> file)
    // even when getNextTarEntry() throws, replacing the manual try/finally.
    try (TarArchiveInputStream tarStream = new TarArchiveInputStream(
            new GzipCompressorInputStream(new FileInputStream(tarGzArchive)))) {
        TarArchiveEntry tarEntry;
        while ((tarEntry = tarStream.getNextTarEntry()) != null) {
            entries.put(tarEntry.getName(), tarEntry);
        }
    }
    return entries;
}
Also used : TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) HashMap(java.util.HashMap) FileInputStream(java.io.FileInputStream) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry)

Example 17 with GzipCompressorInputStream

use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project jbosstools-openshift by jbosstools.

The class FileHelper defines the method extractTarGz.

/**
 * Extracts a tar.gz archive into the given output directory.
 *
 * <p>Each extracted file is marked executable/readable/writable. Entry names are
 * validated against the destination directory to block "zip slip" path traversal.
 *
 * @param archive the tar.gz file to extract
 * @param outputDirectory the directory to extract into (must already exist or be creatable)
 * @throws OpenShiftToolsException if the archive cannot be opened or processed
 */
public static void extractTarGz(File archive, File outputDirectory) {
    InputStream inputStream = null;
    try {
        logger.info("Opening stream to gzip archive");
        inputStream = new GzipCompressorInputStream(new FileInputStream(archive));
    } catch (IOException ex) {
        throw new OpenShiftToolsException("Exception occured while processing tar.gz file.\n" + ex.getMessage());
    }
    logger.info("Opening stream to tar archive");
    // try-with-resources closes the tar stream (and the wrapped gzip/file streams)
    // on every exit path, replacing the old finally block with its empty catches.
    try (TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(inputStream)) {
        String canonicalDest = outputDirectory.getCanonicalPath();
        TarArchiveEntry currentEntry;
        while ((currentEntry = tarArchiveInputStream.getNextTarEntry()) != null) {
            File outputFile = new File(outputDirectory, currentEntry.getName());
            // Zip-slip guard: reject entries (e.g. "../../evil") that would
            // resolve outside the destination directory.
            String canonicalEntry = outputFile.getCanonicalPath();
            if (!canonicalEntry.equals(canonicalDest)
                    && !canonicalEntry.startsWith(canonicalDest + File.separator)) {
                throw new OpenShiftToolsException(
                        "Tar entry escapes output directory: " + currentEntry.getName());
            }
            if (currentEntry.isDirectory()) {
                logger.info("Creating directory: " + currentEntry.getName());
                createDirectory(outputFile);
            } else {
                if (!outputFile.getParentFile().exists()) {
                    logger.info("Creating directory: " + outputFile.getParentFile());
                    createDirectory(outputFile.getParentFile());
                }
                // Per-file try-with-resources fixes the leak in the original, where a
                // failure inside copy() skipped close() and the finally block only
                // closed whichever stream happened to be assigned last.
                try (BufferedOutputStream outputStream =
                        new BufferedOutputStream(new FileOutputStream(outputFile))) {
                    logger.info("Extracting file: " + currentEntry.getName());
                    // NOTE(review): the int cast truncates sizes > 2 GiB — confirm
                    // copy() semantics if large entries are possible.
                    copy(tarArchiveInputStream, outputStream, (int) currentEntry.getSize());
                }
                outputFile.setExecutable(true);
                outputFile.setReadable(true);
                outputFile.setWritable(true);
            }
        }
    } catch (IOException e) {
        throw new OpenShiftToolsException("Exception occured while processing tar.gz file.\n" + e.getMessage());
    }
}
Also used : GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) BufferedInputStream(java.io.BufferedInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) FileInputStream(java.io.FileInputStream) GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) InputStream(java.io.InputStream) FileOutputStream(java.io.FileOutputStream) IOException(java.io.IOException) BufferedOutputStream(java.io.BufferedOutputStream) File(java.io.File) ZipFile(java.util.zip.ZipFile) FileInputStream(java.io.FileInputStream) OpenShiftToolsException(org.jboss.tools.openshift.reddeer.exception.OpenShiftToolsException) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry) OpenShiftToolsException(org.jboss.tools.openshift.reddeer.exception.OpenShiftToolsException) IOException(java.io.IOException)

Example 18 with GzipCompressorInputStream

use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project incubator-heron by apache.

The class Extractor defines the method extract.

/**
 * Extracts a gzipped tar archive read from {@code in} into {@code destination}.
 *
 * <p>Entry paths are normalized and checked against the destination root to
 * block "zip slip" path traversal. Parent directories are created for file
 * entries so archives that list files before their directories still extract.
 *
 * @param in the stream containing the tar.gz data; closed by this method
 * @param destination the directory to extract into
 * @throws IOException if reading, writing, or path validation fails
 */
static void extract(InputStream in, Path destination) throws IOException {
    try (final BufferedInputStream bufferedInputStream = new BufferedInputStream(in);
        final GzipCompressorInputStream gzipInputStream = new GzipCompressorInputStream(bufferedInputStream);
        final TarArchiveInputStream tarInputStream = new TarArchiveInputStream(gzipInputStream)) {
        final Path destinationRoot = destination.toAbsolutePath().normalize();
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) {
            // Resolve against the root and normalize away any ".." segments,
            // then verify the result is still inside the destination.
            final Path target = destinationRoot.resolve(entry.getName()).normalize();
            if (!target.startsWith(destinationRoot)) {
                throw new IOException("Tar entry escapes destination directory: " + entry.getName());
            }
            if (entry.isDirectory()) {
                Files.createDirectories(target);
            } else {
                // Ensure the parent exists; the original relied on directory
                // entries preceding their files, which tar does not guarantee.
                Files.createDirectories(target.getParent());
                Files.copy(tarInputStream, target, StandardCopyOption.REPLACE_EXISTING);
            }
        }
    }
}
Also used : GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) Path(java.nio.file.Path) BufferedInputStream(java.io.BufferedInputStream) File(java.io.File) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry)

Example 19 with GzipCompressorInputStream

use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project alluxio by Alluxio.

The class BackupManager defines the method initFromBackup.

/**
 * Restores master state from the specified backup.
 *
 * <p>Runs a two-task pipeline on a completion service: a reader task decodes
 * journal entries from the gzipped backup stream into a bounded queue, and an
 * applier task drains the queue and applies each entry to the owning master.
 * A sentinel entry with sequence number {@code TERMINATION_SEQ} signals the
 * applier that reading has finished.
 *
 * @param is an input stream to read from the backup
 * @throws IOException if the backup stream cannot be read or closed
 */
public void initFromBackup(InputStream is) throws IOException {
    try (GzipCompressorInputStream gzIn = new GzipCompressorInputStream(is);
        JournalEntryStreamReader reader = new JournalEntryStreamReader(gzIn)) {
        List<Master> masters = mRegistry.getServers();
        // Executor for applying backup.
        // NOTE(review): the underlying fixed thread pool is not shut down anywhere
        // visible in this method — confirm whether its non-daemon/daemon threads
        // (ThreadFactoryUtils.build(..., true)) make this acceptable.
        CompletionService<Boolean> completionService = new ExecutorCompletionService<>(Executors.newFixedThreadPool(2, ThreadFactoryUtils.build("master-backup-%d", true)));
        // List of active tasks.
        Set<Future<?>> activeTasks = new HashSet<>();
        // Entry queue will be used as a buffer and synchronization between readers and appliers.
        // The queue is bounded, so a slow applier back-pressures the reader via put().
        LinkedBlockingQueue<JournalEntry> journalEntryQueue = new LinkedBlockingQueue<>(ServerConfiguration.getInt(PropertyKey.MASTER_BACKUP_ENTRY_BUFFER_COUNT));
        // Whether still reading from backup.
        AtomicBoolean readingActive = new AtomicBoolean(true);
        // Index masters by name.
        Map<String, Master> mastersByName = Maps.uniqueIndex(masters, Master::getName);
        // Shows how many entries have been applied.
        AtomicLong appliedEntryCount = new AtomicLong(0);
        // Progress executor: logs applied-entry progress every 30s; shut down in the
        // finally block below once the restore completes or fails.
        ScheduledExecutorService traceExecutor = Executors.newScheduledThreadPool(1, ThreadFactoryUtils.build("master-backup-tracer-%d", true));
        traceExecutor.scheduleAtFixedRate(() -> {
            LOG.info("{} entries from backup applied so far...", appliedEntryCount.get());
        }, 30, 30, TimeUnit.SECONDS);
        // Start the timer for backup metrics.
        long startRestoreTime = System.currentTimeMillis();
        // Create backup reader task.
        activeTasks.add(completionService.submit(() -> {
            try {
                JournalEntry entry;
                while ((entry = reader.readEntry()) != null) {
                    journalEntryQueue.put(entry);
                }
                // Put termination entry for signaling the applier.
                journalEntryQueue.put(JournalEntry.newBuilder().setSequenceNumber(TERMINATION_SEQ).build());
                return true;
            } catch (InterruptedException ie) {
                // Continue interrupt chain.
                Thread.currentThread().interrupt();
                throw new RuntimeException("Thread interrupted while reading from backup stream.", ie);
            } finally {
                // Flip the flag even on failure so the applier's loop condition can exit
                // once the queue drains.
                readingActive.set(false);
            }
        }));
        // Create applier task.
        activeTasks.add(completionService.submit(() -> {
            try {
                // Read entries from backup.
                // Keep looping while the reader is active OR entries remain buffered.
                while (readingActive.get() || journalEntryQueue.size() > 0) {
                    // Drain current elements.
                    // Draining entries makes it possible to allow writes while current ones are
                    // being applied.
                    List<JournalEntry> drainedEntries = new LinkedList<>();
                    if (0 == journalEntryQueue.drainTo(drainedEntries)) {
                        // No elements at the moment. Fall back to polling.
                        JournalEntry entry = journalEntryQueue.poll(10, TimeUnit.MILLISECONDS);
                        if (entry == null) {
                            // No entry yet.
                            continue;
                        }
                        drainedEntries.add(entry);
                    }
                    // Apply drained entries.
                    // Map for storing journal contexts.
                    // A fresh journal context per master is opened for each drained batch.
                    Map<Master, JournalContext> masterJCMap = new HashMap<>();
                    try {
                        // They should be closed after applying drained entries.
                        for (Master master : masters) {
                            masterJCMap.put(master, master.createJournalContext());
                        }
                        // Apply entries.
                        for (JournalEntry entry : drainedEntries) {
                            // Check for termination entry.
                            if (entry.getSequenceNumber() == TERMINATION_SEQ) {
                                // Reading finished.
                                // Returning from inside the try still runs the finally,
                                // closing the open journal contexts.
                                return true;
                            }
                            String masterName;
                            try {
                                masterName = JournalEntryAssociation.getMasterForEntry(entry);
                            } catch (IllegalStateException ise) {
                                ProcessUtils.fatalError(LOG, ise, "Unrecognized journal entry: %s", entry);
                                throw ise;
                            }
                            try {
                                Master master = mastersByName.get(masterName);
                                master.applyAndJournal(masterJCMap.get(master), entry);
                                appliedEntryCount.incrementAndGet();
                            } catch (Exception e) {
                                JournalUtils.handleJournalReplayFailure(LOG, e, "Failed to apply " + "journal entry to master %s. Entry: %s", masterName, entry);
                            }
                        }
                    } finally {
                        // before next round.
                        for (JournalContext journalContext : masterJCMap.values()) {
                            journalContext.close();
                        }
                    }
                }
                return true;
            } catch (InterruptedException ie) {
                // Continue interrupt chain.
                Thread.currentThread().interrupt();
                throw new RuntimeException("Thread interrupted while applying backup content.", ie);
            }
        }));
        // Wait until backup tasks are completed and stop metrics timer.
        try {
            safeWaitTasks(activeTasks, completionService);
        } finally {
            // Record metrics and stop the progress logger regardless of outcome.
            mRestoreTimeMs = System.currentTimeMillis() - startRestoreTime;
            mRestoreEntriesCount = appliedEntryCount.get();
            traceExecutor.shutdownNow();
        }
        LOG.info("Restored {} entries from backup", appliedEntryCount.get());
    }
}
Also used : GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) JournalEntryStreamReader(alluxio.master.journal.JournalEntryStreamReader) JournalContext(alluxio.master.journal.JournalContext) ExecutorCompletionService(java.util.concurrent.ExecutorCompletionService) LinkedBlockingQueue(java.util.concurrent.LinkedBlockingQueue) JournalEntry(alluxio.proto.journal.Journal.JournalEntry) LinkedList(java.util.LinkedList) List(java.util.List) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) HashSet(java.util.HashSet) ScheduledExecutorService(java.util.concurrent.ScheduledExecutorService) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) AtomicLong(java.util.concurrent.atomic.AtomicLong) Future(java.util.concurrent.Future) HashMap(java.util.HashMap) Map(java.util.Map)

Example 20 with GzipCompressorInputStream

use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project alluxio by Alluxio.

The class TarUtils defines the method readTarGz.

/**
 * Reads a gzipped tar archive from a stream and writes it to the given path.
 *
 * <p>Entry names are validated against {@code dirPath} to block "zip slip"
 * path traversal. The wrapping gzip/tar streams are intentionally not closed
 * here, since closing them would also close the caller-owned {@code input}
 * stream — NOTE(review): confirm the caller is responsible for closing it.
 *
 * @param dirPath the path to write the archive to
 * @param input the input stream
 * @throws IOException if the archive cannot be read or an entry escapes dirPath
 */
public static void readTarGz(Path dirPath, InputStream input) throws IOException {
    InputStream zipStream = new GzipCompressorInputStream(input);
    TarArchiveInputStream archiveStream = new TarArchiveInputStream(zipStream);
    String canonicalDest = dirPath.toFile().getCanonicalPath();
    TarArchiveEntry entry;
    while ((entry = (TarArchiveEntry) archiveStream.getNextEntry()) != null) {
        File outputFile = new File(dirPath.toFile(), entry.getName());
        // Zip-slip guard: reject entries (e.g. "../../evil") resolving outside dirPath.
        String canonicalEntry = outputFile.getCanonicalPath();
        if (!canonicalEntry.equals(canonicalDest)
                && !canonicalEntry.startsWith(canonicalDest + File.separator)) {
            throw new IOException("Tar entry escapes destination directory: " + entry.getName());
        }
        if (entry.isDirectory()) {
            outputFile.mkdirs();
        } else {
            // Create parents first: tar does not guarantee directory entries
            // precede the files they contain.
            outputFile.getParentFile().mkdirs();
            try (FileOutputStream fileOut = new FileOutputStream(outputFile)) {
                IOUtils.copy(archiveStream, fileOut);
            }
        }
    }
}
Also used : GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) TarArchiveInputStream(org.apache.commons.compress.archivers.tar.TarArchiveInputStream) GzipCompressorInputStream(org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream) InputStream(java.io.InputStream) FileOutputStream(java.io.FileOutputStream) File(java.io.File) TarArchiveEntry(org.apache.commons.compress.archivers.tar.TarArchiveEntry)

Aggregations

GzipCompressorInputStream (org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream)58 TarArchiveInputStream (org.apache.commons.compress.archivers.tar.TarArchiveInputStream)46 TarArchiveEntry (org.apache.commons.compress.archivers.tar.TarArchiveEntry)40 IOException (java.io.IOException)29 FileInputStream (java.io.FileInputStream)26 File (java.io.File)23 BufferedInputStream (java.io.BufferedInputStream)22 FileOutputStream (java.io.FileOutputStream)20 InputStream (java.io.InputStream)16 OutputStream (java.io.OutputStream)10 Path (java.nio.file.Path)9 ArrayList (java.util.ArrayList)8 BufferedOutputStream (java.io.BufferedOutputStream)7 ByteArrayInputStream (java.io.ByteArrayInputStream)7 BZip2CompressorInputStream (org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream)6 BufferedReader (java.io.BufferedReader)4 ByteArrayOutputStream (java.io.ByteArrayOutputStream)4 InputStreamReader (java.io.InputStreamReader)4 URL (java.net.URL)4 ArchiveEntry (org.apache.commons.compress.archivers.ArchiveEntry)4