Use of org.apache.commons.compress.archivers.ArchiveInputStream in project weave by continuuity.
From the class KafkaTest, method extractKafka:
private static File extractKafka() throws IOException, ArchiveException, CompressorException {
    File kafkaExtract = TMP_FOLDER.newFolder();
    InputStream kafkaResource = KafkaTest.class.getClassLoader().getResourceAsStream("kafka-0.7.2.tgz");
    ArchiveInputStream archiveInput = new ArchiveStreamFactory().createArchiveInputStream(
        ArchiveStreamFactory.TAR,
        new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.GZIP, kafkaResource));
    try {
        ArchiveEntry entry = archiveInput.getNextEntry();
        while (entry != null) {
            File file = new File(kafkaExtract, entry.getName());
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                ByteStreams.copy(archiveInput, Files.newOutputStreamSupplier(file));
            }
            entry = archiveInput.getNextEntry();
        }
    } finally {
        archiveInput.close();
    }
    return kafkaExtract;
}
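This test writes each entry directly under entry.getName() and relies on the archive listing directories before their contents. A minimal sketch of a more defensive per-entry write, assuming the same archiveInput and kafkaExtract variables; the path check and parent-directory creation are additions for illustration, not part of the original test:

File file = new File(kafkaExtract, entry.getName());
// Reject entries that would escape the target directory ("zip slip").
if (!file.getCanonicalPath().startsWith(kafkaExtract.getCanonicalPath() + File.separator)) {
    throw new IOException("Entry outside of target directory: " + entry.getName());
}
if (entry.isDirectory()) {
    file.mkdirs();
} else {
    file.getParentFile().mkdirs(); // directories are not guaranteed to precede their files
    try (OutputStream out = java.nio.file.Files.newOutputStream(file.toPath())) {
        ByteStreams.copy(archiveInput, out); // Guava stream-to-stream copy
    }
}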
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project BWAPI4J by OpenBW.
From the class DummyDataUtils, method readMultiLinesAsStringTokensFromArchiveFile:
public static List<List<String>> readMultiLinesAsStringTokensFromArchiveFile(final String archiveFilename, final String mapHash, final String regex) throws IOException {
    final InputStream inputStream = createInputStreamForDummyDataSet(archiveFilename);
    final String mapShortHash = determineMapShortHash(mapHash);
    try (final ArchiveInputStream tarIn = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
            final BufferedReader buffer = new BufferedReader(new InputStreamReader(tarIn))) {
        final ArchiveEntry nextEntry = getArchiveEntry(tarIn, mapShortHash);
        Assert.assertNotNull(nextEntry);
        final List<List<String>> data = new ArrayList<>();
        String line;
        while ((line = buffer.readLine()) != null) {
            if (line.isEmpty()) {
                continue;
            }
            final String[] tokens = line.split(regex);
            final List<String> strTokens = new ArrayList<>();
            for (final String token : tokens) {
                final String tokenTrimmed = token.trim();
                if (tokenTrimmed.isEmpty()) {
                    continue;
                }
                strTokens.add(tokenTrimmed);
            }
            data.add(strTokens);
        }
        int valuesReadCount = 0;
        for (final List<String> list : data) {
            valuesReadCount += list.size();
        }
        logger.debug("Read " + valuesReadCount + " values from " + archiveFilename);
        return data;
    }
}
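A hypothetical call site for this reader; the archive name, map hash, and separator below are placeholders for illustration, not values taken from the BWAPI4J test suite:

// Hypothetical invocation; the file name, hash, and regex are placeholders.
final List<List<String>> lines = DummyDataUtils.readMultiLinesAsStringTokensFromArchiveFile(
        "dummy-data.tar.bz2", "someMapHash", ",");
for (final List<String> tokens : lines) {
    System.out.println(String.join(" | ", tokens));
}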
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project BWAPI4J by OpenBW.
From the class DummyDataUtils, method readMultiLineIntegerArraysFromArchiveFile:
public static List<List<Integer>> readMultiLineIntegerArraysFromArchiveFile(final String archiveFilename, final String mapHash, final String regex) throws IOException {
    final InputStream inputStream = createInputStreamForDummyDataSet(archiveFilename);
    final String mapShortHash = determineMapShortHash(mapHash);
    try (final ArchiveInputStream tarIn = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
            final BufferedReader buffer = new BufferedReader(new InputStreamReader(tarIn))) {
        final ArchiveEntry nextEntry = getArchiveEntry(tarIn, mapShortHash);
        Assert.assertNotNull(nextEntry);
        final List<List<Integer>> data = new ArrayList<>();
        String line;
        while ((line = buffer.readLine()) != null) {
            if (line.isEmpty()) {
                continue;
            }
            final String[] tokens = line.split(regex);
            final List<Integer> intTokens = new ArrayList<>();
            for (final String token : tokens) {
                final String tokenTrimmed = token.trim();
                if (tokenTrimmed.isEmpty()) {
                    continue;
                }
                int intToken = Integer.parseInt(tokenTrimmed);
                intTokens.add(intToken);
            }
            data.add(intTokens);
        }
        int valuesReadCount = 0;
        for (final List<Integer> list : data) {
            valuesReadCount += list.size();
        }
        logger.debug("Read " + valuesReadCount + " values from " + archiveFilename);
        return data;
    }
}
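This method and the String variant above differ only in how a token is parsed, so the duplication could be factored out with a parser function. A sketch of one way to do that, assuming the same private helpers (createInputStreamForDummyDataSet, determineMapShortHash, getArchiveEntry) and an import of java.util.function.Function; this helper is an illustration, not part of the actual DummyDataUtils API:

// Illustrative generic variant; not part of the actual DummyDataUtils API.
private static <T> List<List<T>> readMultiLineTokens(final String archiveFilename,
        final String mapHash, final String regex, final Function<String, T> parser) throws IOException {
    final InputStream inputStream = createInputStreamForDummyDataSet(archiveFilename);
    final String mapShortHash = determineMapShortHash(mapHash);
    try (final ArchiveInputStream tarIn = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
            final BufferedReader buffer = new BufferedReader(new InputStreamReader(tarIn))) {
        Assert.assertNotNull(getArchiveEntry(tarIn, mapShortHash));
        final List<List<T>> data = new ArrayList<>();
        String line;
        while ((line = buffer.readLine()) != null) {
            if (line.isEmpty()) {
                continue;
            }
            final List<T> row = new ArrayList<>();
            for (final String token : line.split(regex)) {
                final String trimmed = token.trim();
                if (!trimmed.isEmpty()) {
                    row.add(parser.apply(trimmed)); // parse each non-empty token
                }
            }
            data.add(row);
        }
        return data;
    }
}

// The two readers above then reduce to:
//   readMultiLineTokens(file, hash, regex, Function.identity())   // String tokens
//   readMultiLineTokens(file, hash, regex, Integer::valueOf)      // Integer tokens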
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project BWAPI4J by OpenBW.
From the class DummyDataUtils, method readIntegerArrayFromArchiveFile:
public static int[] readIntegerArrayFromArchiveFile(final String archiveFilename, final String mapHash, final String regex) throws IOException {
    final InputStream inputStream = createInputStreamForDummyDataSet(archiveFilename);
    try (final ArchiveInputStream tarIn = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
            final BufferedReader buffer = new BufferedReader(new InputStreamReader(tarIn))) {
        final String mapShortHash = determineMapShortHash(mapHash);
        final ArchiveEntry nextEntry = getArchiveEntry(tarIn, mapShortHash);
        Assert.assertNotNull(nextEntry);
        final int[] read = buffer.lines()
                .flatMap(line -> Stream.of(line.split(regex)))
                .map(String::trim)
                .mapToInt(Integer::parseInt)
                .toArray();
        logger.debug("Read " + read.length + " values from " + archiveFilename);
        return read;
    }
}
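Unlike the multi-line readers above, this stream pipeline never skips empty tokens, so Integer.parseInt would throw NumberFormatException on a blank line or a trailing separator. A defensive variant of the pipeline (a sketch; the dummy-data archives presumably never contain such input, or the original would fail):

// Variant that skips empty tokens instead of letting Integer.parseInt
// throw NumberFormatException on blank lines or trailing separators.
final int[] read = buffer.lines()
        .flatMap(line -> Stream.of(line.split(regex)))
        .map(String::trim)
        .filter(token -> !token.isEmpty())
        .mapToInt(Integer::parseInt)
        .toArray();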
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project phoenicis by PhoenicisOrg.
From the class Tar, method uncompress:
/**
 * Uncompresses a tar archive.
 *
 * @param inputStream the archive stream to read entries from
 * @param countingInputStream wrapper used to count the number of bytes extracted, for progress reporting
 * @param outputDir the directory where files should be extracted
 * @param finalSize the expected total number of bytes, used to compute the progress percentage
 * @param stateCallback callback invoked with a ProgressEntity after each extracted entry
 * @return a list of extracted files
 * @throws ArchiveException if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream,
        final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar", inputStream)) {
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to createPrefix output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(), entry.getMode()));
                if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()), Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()),
                                fileUtilities.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            stateCallback.accept(new ProgressEntity.Builder()
                    .withPercent((double) countingInputStream.getCount() / (double) finalSize * 100.0)
                    .withProgressText("Extracting " + outputFile.getName())
                    .build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
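Like the Kafka example, this loop builds outputFile directly from entry.getName(), trusting the archive contents. A minimal guard one could insert at the top of the loop body (an illustration, not part of the phoenicis source; the IOException is wrapped into ArchiveException by the existing catch block):

// Illustrative "zip slip" guard: reject entries whose canonical path
// escapes the output directory before writing anything.
final File outputFile = new File(outputDir, entry.getName());
if (!outputFile.getCanonicalPath().startsWith(outputDir.getCanonicalPath() + File.separator)) {
    throw new IOException("Archive entry escapes the output directory: " + entry.getName());
}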