Use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project tycho by eclipse.
The class TarGzArchiverTest, method getTarEntries:
private Map<String, TarArchiveEntry> getTarEntries() throws IOException, FileNotFoundException {
    TarArchiveInputStream tarStream = new TarArchiveInputStream(
            new GzipCompressorInputStream(new FileInputStream(tarGzArchive)));
    Map<String, TarArchiveEntry> entries = new HashMap<>();
    try {
        TarArchiveEntry tarEntry = null;
        while ((tarEntry = tarStream.getNextTarEntry()) != null) {
            entries.put(tarEntry.getName(), tarEntry);
        }
    } finally {
        tarStream.close();
    }
    return entries;
}
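A minimal sketch of how a test might use this helper; the entry name "dir/file.txt" is a hypothetical example, not taken from the Tycho test itself, and JUnit 4 assertions are assumed:

    // Hypothetical assertion: verify the archive contains an expected entry.
    Map<String, TarArchiveEntry> entries = getTarEntries();
    TarArchiveEntry entry = entries.get("dir/file.txt"); // hypothetical entry name
    assertNotNull("archive should contain the file", entry);
    assertTrue(entry.isFile());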
Use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project jbosstools-openshift by jbosstools.
The class FileHelper, method extractTarGz:
public static void extractTarGz(File archive, File outputDirectory) {
    InputStream inputStream = null;
    try {
        logger.info("Opening stream to gzip archive");
        inputStream = new GzipCompressorInputStream(new FileInputStream(archive));
    } catch (IOException ex) {
        throw new OpenShiftToolsException("Exception occurred while processing tar.gz file.\n" + ex.getMessage());
    }
    logger.info("Opening stream to tar archive");
    BufferedOutputStream outputStream = null;
    TarArchiveInputStream tarArchiveInputStream = new TarArchiveInputStream(inputStream);
    TarArchiveEntry currentEntry = null;
    try {
        while ((currentEntry = tarArchiveInputStream.getNextTarEntry()) != null) {
            if (currentEntry.isDirectory()) {
                logger.info("Creating directory: " + currentEntry.getName());
                createDirectory(new File(outputDirectory, currentEntry.getName()));
            } else {
                File outputFile = new File(outputDirectory, currentEntry.getName());
                if (!outputFile.getParentFile().exists()) {
                    logger.info("Creating directory: " + outputFile.getParentFile());
                    createDirectory(outputFile.getParentFile());
                }
                outputStream = new BufferedOutputStream(new FileOutputStream(outputFile));
                logger.info("Extracting file: " + currentEntry.getName());
                copy(tarArchiveInputStream, outputStream, (int) currentEntry.getSize());
                outputStream.close();
                outputFile.setExecutable(true);
                outputFile.setReadable(true);
                outputFile.setWritable(true);
            }
        }
    } catch (IOException e) {
        throw new OpenShiftToolsException("Exception occurred while processing tar.gz file.\n" + e.getMessage());
    } finally {
        try {
            tarArchiveInputStream.close();
        } catch (Exception ex) {
            // ignore close failures
        }
        try {
            outputStream.close();
        } catch (Exception ex) {
            // ignore close failures; outputStream is null if no file entry was extracted
        }
    }
}
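The finally block above has to guard against a null outputStream and swallows close failures. A minimal sketch of the same extraction written with try-with-resources instead, assuming OpenShiftToolsException and createDirectory are available as above and substituting commons-compress IOUtils.copy for the project's copy helper; this is an illustrative alternative, not the project's actual code:

    // Sketch: same extraction, but streams close automatically even on error.
    public static void extractTarGz(File archive, File outputDirectory) {
        try (TarArchiveInputStream tarIn = new TarArchiveInputStream(
                new GzipCompressorInputStream(new FileInputStream(archive)))) {
            TarArchiveEntry entry;
            while ((entry = tarIn.getNextTarEntry()) != null) {
                File outputFile = new File(outputDirectory, entry.getName());
                if (entry.isDirectory()) {
                    createDirectory(outputFile);
                } else {
                    createDirectory(outputFile.getParentFile());
                    try (OutputStream out = new BufferedOutputStream(new FileOutputStream(outputFile))) {
                        org.apache.commons.compress.utils.IOUtils.copy(tarIn, out);
                    }
                    outputFile.setExecutable(true);
                    outputFile.setReadable(true);
                    outputFile.setWritable(true);
                }
            }
        } catch (IOException e) {
            throw new OpenShiftToolsException("Exception occurred while processing tar.gz file.\n" + e.getMessage());
        }
    }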
Use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project incubator-heron by apache.
The class Extractor, method extract:
static void extract(InputStream in, Path destination) throws IOException {
    try (final BufferedInputStream bufferedInputStream = new BufferedInputStream(in);
            final GzipCompressorInputStream gzipInputStream =
                new GzipCompressorInputStream(bufferedInputStream);
            final TarArchiveInputStream tarInputStream =
                new TarArchiveInputStream(gzipInputStream)) {
        final String destinationAbsolutePath = destination.toFile().getAbsolutePath();
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) tarInputStream.getNextEntry()) != null) {
            if (entry.isDirectory()) {
                File f = Paths.get(destinationAbsolutePath, entry.getName()).toFile();
                f.mkdirs();
            } else {
                Path fileDestinationPath = Paths.get(destinationAbsolutePath, entry.getName());
                Files.copy(tarInputStream, fileDestinationPath, StandardCopyOption.REPLACE_EXISTING);
            }
        }
    }
}
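None of the extraction snippets so far validate entry names, so an archive containing entries like "../../evil" can write outside the destination directory (the "zip slip" issue). A hedged sketch of a guard that could be added before each entry is written; variable names mirror the Heron snippet above:

    // Sketch: resolve the entry against the destination and reject escapes.
    Path destRoot = destination.toAbsolutePath().normalize();
    Path target = destRoot.resolve(entry.getName()).normalize();
    if (!target.startsWith(destRoot)) {
        throw new IOException("Entry is outside of the target directory: " + entry.getName());
    }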
Use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project alluxio by Alluxio.
The class BackupManager, method initFromBackup:
/**
* Restores master state from the specified backup.
*
* @param is an input stream to read from the backup
*/
public void initFromBackup(InputStream is) throws IOException {
    try (GzipCompressorInputStream gzIn = new GzipCompressorInputStream(is);
            JournalEntryStreamReader reader = new JournalEntryStreamReader(gzIn)) {
        List<Master> masters = mRegistry.getServers();
        // Executor for applying backup.
        CompletionService<Boolean> completionService = new ExecutorCompletionService<>(
                Executors.newFixedThreadPool(2, ThreadFactoryUtils.build("master-backup-%d", true)));
        // List of active tasks.
        Set<Future<?>> activeTasks = new HashSet<>();
        // Entry queue will be used as a buffer and synchronization between readers and appliers.
        LinkedBlockingQueue<JournalEntry> journalEntryQueue = new LinkedBlockingQueue<>(
                ServerConfiguration.getInt(PropertyKey.MASTER_BACKUP_ENTRY_BUFFER_COUNT));
        // Whether still reading from backup.
        AtomicBoolean readingActive = new AtomicBoolean(true);
        // Index masters by name.
        Map<String, Master> mastersByName = Maps.uniqueIndex(masters, Master::getName);
        // Shows how many entries have been applied.
        AtomicLong appliedEntryCount = new AtomicLong(0);
        // Progress executor.
        ScheduledExecutorService traceExecutor = Executors.newScheduledThreadPool(1,
                ThreadFactoryUtils.build("master-backup-tracer-%d", true));
        traceExecutor.scheduleAtFixedRate(() -> {
            LOG.info("{} entries from backup applied so far...", appliedEntryCount.get());
        }, 30, 30, TimeUnit.SECONDS);
        // Start the timer for backup metrics.
        long startRestoreTime = System.currentTimeMillis();
        // Create backup reader task.
        activeTasks.add(completionService.submit(() -> {
            try {
                JournalEntry entry;
                while ((entry = reader.readEntry()) != null) {
                    journalEntryQueue.put(entry);
                }
                // Put termination entry for signaling the applier.
                journalEntryQueue.put(
                        JournalEntry.newBuilder().setSequenceNumber(TERMINATION_SEQ).build());
                return true;
            } catch (InterruptedException ie) {
                // Continue interrupt chain.
                Thread.currentThread().interrupt();
                throw new RuntimeException("Thread interrupted while reading from backup stream.", ie);
            } finally {
                readingActive.set(false);
            }
        }));
        // Create applier task.
        activeTasks.add(completionService.submit(() -> {
            try {
                // Read entries from backup.
                while (readingActive.get() || journalEntryQueue.size() > 0) {
                    // Drain current elements.
                    // Draining entries makes it possible to allow writes while current ones are
                    // being applied.
                    List<JournalEntry> drainedEntries = new LinkedList<>();
                    if (0 == journalEntryQueue.drainTo(drainedEntries)) {
                        // No elements at the moment. Fall back to polling.
                        JournalEntry entry = journalEntryQueue.poll(10, TimeUnit.MILLISECONDS);
                        if (entry == null) {
                            // No entry yet.
                            continue;
                        }
                        drainedEntries.add(entry);
                    }
                    // Apply drained entries.
                    // Map for storing journal contexts.
                    Map<Master, JournalContext> masterJCMap = new HashMap<>();
                    try {
                        // They should be closed after applying drained entries.
                        for (Master master : masters) {
                            masterJCMap.put(master, master.createJournalContext());
                        }
                        // Apply entries.
                        for (JournalEntry entry : drainedEntries) {
                            // Check for termination entry.
                            if (entry.getSequenceNumber() == TERMINATION_SEQ) {
                                // Reading finished.
                                return true;
                            }
                            String masterName;
                            try {
                                masterName = JournalEntryAssociation.getMasterForEntry(entry);
                            } catch (IllegalStateException ise) {
                                ProcessUtils.fatalError(LOG, ise, "Unrecognized journal entry: %s", entry);
                                throw ise;
                            }
                            try {
                                Master master = mastersByName.get(masterName);
                                master.applyAndJournal(masterJCMap.get(master), entry);
                                appliedEntryCount.incrementAndGet();
                            } catch (Exception e) {
                                JournalUtils.handleJournalReplayFailure(LOG, e,
                                        "Failed to apply journal entry to master %s. Entry: %s",
                                        masterName, entry);
                            }
                        }
                    } finally {
                        // Close journal contexts so the applied entries are flushed
                        // before the next round.
                        for (JournalContext journalContext : masterJCMap.values()) {
                            journalContext.close();
                        }
                    }
                }
                return true;
            } catch (InterruptedException ie) {
                // Continue interrupt chain.
                Thread.currentThread().interrupt();
                throw new RuntimeException("Thread interrupted while applying backup content.", ie);
            }
        }));
        // Wait until backup tasks are completed and stop metrics timer.
        try {
            safeWaitTasks(activeTasks, completionService);
        } finally {
            mRestoreTimeMs = System.currentTimeMillis() - startRestoreTime;
            mRestoreEntriesCount = appliedEntryCount.get();
            traceExecutor.shutdownNow();
        }
        LOG.info("Restored {} entries from backup", appliedEntryCount.get());
    }
}
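The reader/applier pair above is a bounded producer-consumer with a sentinel entry (TERMINATION_SEQ) to signal end of input. A stripped-down, self-contained sketch of the same pattern; all names here are illustrative, not Alluxio's API:

    import java.util.List;
    import java.util.concurrent.*;

    public class SentinelQueueDemo {
        // Sentinel object marking end of input; compared by reference below.
        private static final String POISON = "<eof>";

        public static void main(String[] args) throws Exception {
            BlockingQueue<String> queue = new LinkedBlockingQueue<>(1024); // bounded buffer
            ExecutorService pool = Executors.newFixedThreadPool(2);
            // Producer: reads "entries" and enqueues them, then the sentinel.
            pool.submit(() -> {
                for (String entry : List.of("a", "b", "c")) {
                    queue.put(entry);
                }
                queue.put(POISON); // signal the consumer that reading finished
                return null;
            });
            // Consumer: drains entries until it sees the sentinel.
            Future<?> consumer = pool.submit(() -> {
                while (true) {
                    String entry = queue.take();
                    if (entry == POISON) { // reference equality is intentional for the sentinel
                        break;
                    }
                    System.out.println("apply " + entry);
                }
                return null;
            });
            consumer.get(); // wait for the consumer to finish
            pool.shutdown();
        }
    }

The bounded queue gives backpressure: if the applier falls behind, the reader blocks on put rather than buffering the whole backup in memory, which is the same reason Alluxio caps the queue with MASTER_BACKUP_ENTRY_BUFFER_COUNT.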
Use of org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream in project alluxio by Alluxio.
The class TarUtils, method readTarGz:
/**
* Reads a gzipped tar archive from a stream and writes it to the given path.
*
* @param dirPath the path to write the archive to
* @param input the input stream
*/
public static void readTarGz(Path dirPath, InputStream input) throws IOException {
    InputStream zipStream = new GzipCompressorInputStream(input);
    TarArchiveInputStream archiveStream = new TarArchiveInputStream(zipStream);
    TarArchiveEntry entry;
    while ((entry = (TarArchiveEntry) archiveStream.getNextEntry()) != null) {
        File outputFile = new File(dirPath.toFile(), entry.getName());
        if (entry.isDirectory()) {
            outputFile.mkdirs();
        } else {
            outputFile.getParentFile().mkdirs();
            try (FileOutputStream fileOut = new FileOutputStream(outputFile)) {
                IOUtils.copy(archiveStream, fileOut);
            }
        }
    }
}
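A hedged usage sketch for readTarGz; the file paths are hypothetical, and the caller remains responsible for closing the stream it passes in (the method does not close the wrappers it creates):

    // Hypothetical caller: extract backup.tar.gz into an output directory.
    Path outDir = Paths.get("/tmp/out");
    try (InputStream in = Files.newInputStream(Paths.get("/tmp/backup.tar.gz"))) {
        TarUtils.readTarGz(outDir, in);
    }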