Use of org.apache.commons.compress.archivers.ArchiveEntry in the project BWAPI4J by OpenBW: class DummyDataUtils, method readMultiLineIntegerArraysFromArchiveFile.
/**
 * Reads rows of integers from the map-specific entry of a bzip2-compressed tar archive.
 * Each non-empty line in the entry becomes one inner list; tokens are split by
 * {@code regex}, trimmed, and empty tokens are skipped.
 *
 * @param archiveFilename name of the dummy-data archive to open
 * @param mapHash full map hash; shortened via {@code determineMapShortHash} to locate the entry
 * @param regex delimiter regex passed to {@link String#split(String)}
 * @return one list of integers per non-empty line, in file order
 * @throws IOException if the archive cannot be read
 */
public static List<List<Integer>> readMultiLineIntegerArraysFromArchiveFile(final String archiveFilename, final String mapHash, final String regex) throws IOException {
    final String mapShortHash = determineMapShortHash(mapHash);
    // All streams participate in try-with-resources so the raw input stream is
    // closed even when one of the decorator constructors throws (the previous
    // code leaked it in that case).
    try (final InputStream inputStream = createInputStreamForDummyDataSet(archiveFilename);
            final ArchiveInputStream tarIn = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
            // Explicit charset: without it, pre-Java-18 JVMs fall back to the platform default.
            final BufferedReader buffer = new BufferedReader(new InputStreamReader(tarIn, java.nio.charset.StandardCharsets.UTF_8))) {
        final ArchiveEntry nextEntry = getArchiveEntry(tarIn, mapShortHash);
        Assert.assertNotNull(nextEntry);
        final List<List<Integer>> data = new ArrayList<>();
        String line;
        while ((line = buffer.readLine()) != null) {
            if (line.isEmpty()) {
                continue; // skip blank separator lines between rows
            }
            final List<Integer> intTokens = new ArrayList<>();
            for (final String token : line.split(regex)) {
                final String tokenTrimmed = token.trim();
                if (!tokenTrimmed.isEmpty()) {
                    intTokens.add(Integer.parseInt(tokenTrimmed));
                }
            }
            data.add(intTokens);
        }
        int valuesReadCount = 0;
        for (final List<Integer> list : data) {
            valuesReadCount += list.size();
        }
        logger.debug("Read " + valuesReadCount + " values from " + archiveFilename);
        return data;
    }
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project BWAPI4J by OpenBW: class DummyDataUtils, method readIntegerArrayFromArchiveFile.
/**
 * Reads all integers from the map-specific entry of a bzip2-compressed tar archive
 * into a flat array. Tokens are split by {@code regex}, trimmed, and empty tokens
 * (including blank lines) are skipped — matching the behavior of
 * {@code readMultiLineIntegerArraysFromArchiveFile}; previously a blank line or
 * trailing delimiter caused a {@link NumberFormatException}.
 *
 * @param archiveFilename name of the dummy-data archive to open
 * @param mapHash full map hash; shortened via {@code determineMapShortHash} to locate the entry
 * @param regex delimiter regex passed to {@link String#split(String)}
 * @return all parsed integers in file order
 * @throws IOException if the archive cannot be read
 */
public static int[] readIntegerArrayFromArchiveFile(final String archiveFilename, final String mapHash, final String regex) throws IOException {
    final String mapShortHash = determineMapShortHash(mapHash);
    // Raw input stream joins the try-with-resources so it is closed even when a
    // decorator constructor throws.
    try (final InputStream inputStream = createInputStreamForDummyDataSet(archiveFilename);
            final ArchiveInputStream tarIn = new TarArchiveInputStream(new BZip2CompressorInputStream(inputStream));
            final BufferedReader buffer = new BufferedReader(new InputStreamReader(tarIn, java.nio.charset.StandardCharsets.UTF_8))) {
        final ArchiveEntry nextEntry = getArchiveEntry(tarIn, mapShortHash);
        Assert.assertNotNull(nextEntry);
        final int[] read = buffer.lines()
                .flatMap(line -> Stream.of(line.split(regex)))
                .map(String::trim)
                .filter(token -> !token.isEmpty()) // skip blanks instead of crashing on parseInt
                .mapToInt(Integer::parseInt)
                .toArray();
        logger.debug("Read " + read.length + " values from " + archiveFilename);
        return read;
    }
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project fabric-sdk-java by Hyperledger: class Utils, method generateTarGz.
/**
 * Compress the contents of given directory using Tar and Gzip to an in-memory byte array.
 *
 * @param sourceDirectory the source directory.
 * @param pathPrefix a path to be prepended to every file name in the .tar.gz output, or {@code null} if no prefix is required.
 * @param chaincodeMetaInf optional directory whose contents are added under {@code META-INF/}, or {@code null}.
 * @return the compressed directory contents.
 * @throws IOException if a file cannot be read or written to the archive.
 */
public static byte[] generateTarGz(File sourceDirectory, String pathPrefix, File chaincodeMetaInf) throws IOException {
    logger.trace(format("generateTarGz: sourceDirectory: %s, pathPrefix: %s, chaincodeMetaInf: %s", sourceDirectory == null ? "null" : sourceDirectory.getAbsolutePath(), pathPrefix, chaincodeMetaInf == null ? "null" : chaincodeMetaInf.getAbsolutePath()));
    ByteArrayOutputStream bos = new ByteArrayOutputStream(500000);
    String sourcePath = sourceDirectory.getAbsolutePath();
    TarArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(new GzipCompressorOutputStream(bos));
    // GNU long-file mode: chaincode paths routinely exceed the 100-char tar limit.
    archiveOutputStream.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
    try {
        Collection<File> childrenFiles = org.apache.commons.io.FileUtils.listFiles(sourceDirectory, null, true);
        for (File childFile : childrenFiles) {
            String childPath = childFile.getAbsolutePath();
            // Path relative to sourceDirectory (skip the separator after the prefix).
            String relativePath = childPath.substring(sourcePath.length() + 1);
            if (pathPrefix != null) {
                relativePath = Utils.combinePaths(pathPrefix, relativePath);
            }
            relativePath = FilenameUtils.separatorsToUnix(relativePath);
            writeTarEntry(archiveOutputStream, childFile, relativePath);
        }
        if (null != chaincodeMetaInf) {
            childrenFiles = org.apache.commons.io.FileUtils.listFiles(chaincodeMetaInf, null, true);
            final URI metabase = chaincodeMetaInf.toURI();
            for (File childFile : childrenFiles) {
                // META-INF entries keep their path relative to the metainf directory.
                final String relativePath = Paths.get("META-INF", metabase.relativize(childFile.toURI()).getPath()).toString();
                writeTarEntry(archiveOutputStream, childFile, relativePath);
            }
        }
    } finally {
        IOUtils.closeQuietly(archiveOutputStream);
    }
    return bos.toByteArray();
}

/**
 * Writes a single file into the tar stream under the given entry name.
 * Shared by both loops of {@link #generateTarGz}; previously the code was
 * duplicated and the {@code FileInputStream} leaked when
 * {@code putArchiveEntry} threw (it was opened before the entry was put and
 * closed only on the copy path).
 *
 * @param archiveOutputStream the destination tar stream.
 * @param childFile the file whose contents are written.
 * @param relativePath the archive entry name.
 * @throws IOException if the file cannot be read or the entry cannot be written.
 */
private static void writeTarEntry(TarArchiveOutputStream archiveOutputStream, File childFile, String relativePath) throws IOException {
    if (TRACE_ENABED) {
        logger.trace(format("generateTarGz: Adding '%s' entry from source '%s' to archive.", relativePath, childFile.getAbsolutePath()));
    }
    archiveOutputStream.putArchiveEntry(new TarArchiveEntry(childFile, relativePath));
    try (FileInputStream fileInputStream = new FileInputStream(childFile)) {
        IOUtils.copy(fileInputStream, archiveOutputStream);
    } finally {
        archiveOutputStream.closeArchiveEntry();
    }
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project fabric-sdk-java by Hyperledger: class Util, method generateTarGzInputStream.
/**
 * Generate a targz inputstream from source folder.
 *
 * @param src Source location
 * @param pathPrefix prefix to add to the all files found.
 * @return return inputstream.
 * @throws IOException if a file cannot be read or written to the archive.
 */
public static InputStream generateTarGzInputStream(File src, String pathPrefix) throws IOException {
    File sourceDirectory = src;
    ByteArrayOutputStream bos = new ByteArrayOutputStream(500000);
    String sourcePath = sourceDirectory.getAbsolutePath();
    TarArchiveOutputStream archiveOutputStream = new TarArchiveOutputStream(new GzipCompressorOutputStream(new BufferedOutputStream(bos)));
    // GNU long-file mode allows entry names longer than the classic 100-char tar limit.
    archiveOutputStream.setLongFileMode(TarArchiveOutputStream.LONGFILE_GNU);
    try {
        Collection<File> childrenFiles = org.apache.commons.io.FileUtils.listFiles(sourceDirectory, null, true);
        for (File childFile : childrenFiles) {
            String childPath = childFile.getAbsolutePath();
            // Entry name relative to the source directory (skip the trailing separator).
            String relativePath = childPath.substring(sourcePath.length() + 1);
            if (pathPrefix != null) {
                relativePath = Utils.combinePaths(pathPrefix, relativePath);
            }
            relativePath = FilenameUtils.separatorsToUnix(relativePath);
            archiveOutputStream.putArchiveEntry(new TarArchiveEntry(childFile, relativePath));
            // try-with-resources: the previous code leaked the FileInputStream
            // when putArchiveEntry or the copy threw.
            try (FileInputStream fileInputStream = new FileInputStream(childFile)) {
                IOUtils.copy(fileInputStream, archiveOutputStream);
            } finally {
                archiveOutputStream.closeArchiveEntry();
            }
        }
    } finally {
        IOUtils.closeQuietly(archiveOutputStream);
    }
    return new ByteArrayInputStream(bos.toByteArray());
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project Vespa by vespa-engine: class CompressedApplicationInputStream, method decompressInto.
/**
 * Unpacks every entry of the archive stream {@code ais} into the given application directory.
 * Directory entries are created with {@code mkdirs}; file entries are copied byte-for-byte.
 * Entries whose resolved path would escape the target directory ("zip slip") are rejected,
 * since the archive contents are untrusted input.
 *
 * @param application target directory to unpack into
 * @throws IOException if the archive cannot be read, a file cannot be written,
 *         or an entry attempts path traversal outside {@code application}
 */
private void decompressInto(File application) throws IOException {
    log.log(LogLevel.DEBUG, "Application is in " + application.getAbsolutePath());
    final String canonicalTarget = application.getCanonicalPath();
    int entries = 0;
    ArchiveEntry entry;
    while ((entry = ais.getNextEntry()) != null) {
        log.log(LogLevel.DEBUG, "Unpacking " + entry.getName());
        File outFile = new File(application, entry.getName());
        // Zip-slip guard: entry names like "../../etc/passwd" must not escape the target dir.
        final String canonicalOut = outFile.getCanonicalPath();
        if (!canonicalOut.equals(canonicalTarget) && !canonicalOut.startsWith(canonicalTarget + File.separator)) {
            throw new IOException("Archive entry escapes target directory: " + entry.getName());
        }
        // FIXME/TODO: write more tests that break this logic. I have a feeling it is not very robust.
        if (entry.isDirectory()) {
            if (!(outFile.exists() && outFile.isDirectory())) {
                log.log(LogLevel.DEBUG, "Creating dir: " + outFile.getAbsolutePath());
                boolean res = outFile.mkdirs();
                if (!res) {
                    log.log(LogLevel.WARNING, "Could not create dir " + entry.getName());
                }
            }
        } else {
            log.log(LogLevel.DEBUG, "Creating output file: " + outFile.getAbsolutePath());
            // Create parent dir if necessary
            String parent = outFile.getParent();
            new File(parent).mkdirs();
            // try-with-resources: the previous code leaked fos when the copy threw.
            try (FileOutputStream fos = new FileOutputStream(outFile)) {
                ByteStreams.copy(ais, fos);
            }
        }
        entries++;
    }
    if (entries == 0) {
        log.log(LogLevel.WARNING, "Not able to read any entries from " + application.getName());
    }
}
Aggregations