Use of org.apache.commons.compress.archivers.ArchiveEntry in project vespa by vespa-engine.
The class CompressedFileReference, method decompress.
private static void decompress(ArchiveInputStream archiveInputStream, File outputFile) throws IOException {
    int entries = 0;
    ArchiveEntry entry;
    while ((entry = archiveInputStream.getNextEntry()) != null) {
        log.log(LogLevel.DEBUG, "Unpacking " + entry.getName());
        File outFile = new File(outputFile, entry.getName());
        if (entry.isDirectory()) {
            if (!(outFile.exists() && outFile.isDirectory())) {
                log.log(LogLevel.DEBUG, () -> "Creating dir: " + outFile.getAbsolutePath());
                if (!outFile.mkdirs()) {
                    log.log(LogLevel.WARNING, "Could not create dir " + entry.getName());
                }
            }
        } else {
            // Create parent dir if necessary
            File parent = new File(outFile.getParent());
            if (!parent.exists() && !parent.mkdirs()) {
                log.log(LogLevel.WARNING, "Could not create dir " + parent.getAbsolutePath());
            }
            FileOutputStream fos = new FileOutputStream(outFile);
            ByteStreams.copy(archiveInputStream, fos);
            fos.close();
        }
        entries++;
    }
    if (entries == 0) {
        throw new IllegalArgumentException("Not able to read any entries from stream (" + archiveInputStream.getBytesRead() + " bytes read from stream)");
    }
}
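The snippet never shows how the ArchiveInputStream is constructed. A minimal sketch of a companion method that could sit next to decompress and feed it a .tar.gz file follows; the method name is hypothetical and not part of the Vespa class, and it assumes Commons Compress's TarArchiveInputStream and GzipCompressorInputStream are available on the classpath.

// Hypothetical companion method (not in the Vespa class): wraps a .tar.gz file
// in the stream types that decompress(...) above expects.
// Needs org.apache.commons.compress.archivers.tar.TarArchiveInputStream and
// org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream.
private static void decompressTarGz(File tarGzFile, File targetDir) throws IOException {
    try (TarArchiveInputStream tar = new TarArchiveInputStream(
            new GzipCompressorInputStream(
                    new BufferedInputStream(new FileInputStream(tarGzFile))))) {
        decompress(tar, targetDir);
    }
}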
Use of org.apache.commons.compress.archivers.ArchiveEntry in project stanbol by apache.
The class MultiThreadedTestBase, method initTestData.
/**
 * Helper method that initialises the test data based on the provided settings.
 * @param settings the settings of the test
 * @return an Iterator over the contents of the test data
 * @throws IOException on any error while accessing the test data
 */
private Iterator<String> initTestData(TestSettings settings) throws IOException {
    log.info("Read Testdata from '{}'", settings.getTestData());
    File testFile = new File(settings.getTestData());
    InputStream is = null;
    if (testFile.isFile()) {
        log.info(" ... init from File");
        is = new FileInputStream(testFile);
    }
    if (is == null) {
        is = MultiThreadedTest.class.getClassLoader().getResourceAsStream(settings.getTestData());
    }
    if (is == null) {
        is = ClassLoader.getSystemResourceAsStream(settings.getTestData());
    }
    if (is == null) {
        try {
            is = new URL(settings.getTestData()).openStream();
            log.info(" ... init from URL");
        } catch (MalformedURLException e) {
            // not a URL
        }
    } else {
        log.info(" ... init via Classpath");
    }
    Assert.assertNotNull("Unable to load the parsed TestData '" + settings.getTestData() + "'!", is);
    log.info(" - InputStream: {}", is == null ? null : is.getClass().getSimpleName());
    String name = FilenameUtils.getName(settings.getTestData());
    if ("gz".equalsIgnoreCase(FilenameUtils.getExtension(name))) {
        is = new GZIPInputStream(is);
        name = FilenameUtils.removeExtension(name);
        log.debug(" - from GZIP Archive");
    } else if ("bz2".equalsIgnoreCase(FilenameUtils.getExtension(name))) {
        is = new BZip2CompressorInputStream(is);
        name = FilenameUtils.removeExtension(name);
        log.debug(" - from BZip2 Archive");
    } else if ("zip".equalsIgnoreCase(FilenameUtils.getExtension(name))) {
        ZipArchiveInputStream zipin = new ZipArchiveInputStream(is);
        ArchiveEntry entry = zipin.getNextEntry();
        log.info("For ZIP archives only the 1st Entry will be processed!");
        name = FilenameUtils.getName(entry.getName());
        log.info(" - processed Entry: {}", entry.getName());
        is = zipin; // continue reading from the first ZIP entry
    } else {
        // else uncompressed data ...
        log.info(" - uncompressed source: {}", name);
    }
    String mediaType;
    if (settings.getTestDataMediaType() != null) {
        mediaType = settings.getTestDataMediaType();
    } else {
        // parse based on extension
        String ext = FilenameUtils.getExtension(name);
        if ("txt".equalsIgnoreCase(ext)) {
            mediaType = TEXT_PLAIN;
        } else if ("rdf".equalsIgnoreCase(ext)) {
            mediaType = SupportedFormat.RDF_XML;
        } else if ("xml".equalsIgnoreCase(ext)) {
            mediaType = SupportedFormat.RDF_XML;
        } else if ("ttl".equalsIgnoreCase(ext)) {
            mediaType = SupportedFormat.TURTLE;
        } else if ("n3".equalsIgnoreCase(ext)) {
            mediaType = SupportedFormat.N3;
        } else if ("nt".equalsIgnoreCase(ext)) {
            mediaType = SupportedFormat.N_TRIPLE;
        } else if ("json".equalsIgnoreCase(ext)) {
            mediaType = SupportedFormat.RDF_JSON;
        } else if (name.indexOf('.') < 0) {
            // no extension: try plain text
            mediaType = TEXT_PLAIN;
        } else {
            log.info("Unknown File Extension {} for resource name {}", ext, name);
            mediaType = null;
        }
    }
    Assert.assertNotNull("Unable to detect MediaType for RDFTerm '" + name + "'. Please use the property '" + PROPERTY_TEST_DATA_TYPE + "' to manually parse the MediaType!", mediaType);
    log.info(" - Media-Type: {}", mediaType);
    // now init the iterator for the test data
    return TEXT_PLAIN.equalsIgnoreCase(mediaType) ? createTextDataIterator(is, mediaType) : createRdfDataIterator(is, mediaType, settings.getContentProperty());
}
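The ZIP branch above deliberately processes only the first entry of the archive. For contrast, a minimal sketch of iterating every entry of a ZIP stream is shown below; it is illustrative only and not part of the Stanbol test base.

// Illustrative sketch, not part of the Stanbol code: list every entry of a ZIP
// stream instead of only the first one. Uses ZipArchiveInputStream and
// ArchiveEntry from org.apache.commons.compress.
private static void listZipEntries(InputStream rawZipStream) throws IOException {
    try (ZipArchiveInputStream zipin = new ZipArchiveInputStream(rawZipStream)) {
        ArchiveEntry entry;
        while ((entry = zipin.getNextEntry()) != null) {
            // getSize() may return -1 when the size is not known from the stream headers
            log.info(" - {} {} ({} bytes)", entry.isDirectory() ? "dir" : "file", entry.getName(), entry.getSize());
        }
    }
}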
Use of org.apache.commons.compress.archivers.ArchiveEntry in project baker-android by bakerframework.
The class UnzipperTask, method extract.
private void extract(final String inputFile, final String outputDir) throws IOException {
    FileInputStream fileInputStream = null;
    ZipArchiveInputStream zipArchiveInputStream = null;
    FileOutputStream fileOutputStream = null;
    try {
        Log.d(this.getClass().getName(), "Will extract " + inputFile + " to " + outputDir);
        byte[] buffer = new byte[8192];
        fileInputStream = new FileInputStream(inputFile);
        // Pass null as the entry-name encoding (falls back to the platform default); 'true' enables Unicode extra fields.
        zipArchiveInputStream = new ZipArchiveInputStream(fileInputStream, null, true);
        ArchiveEntry entry;
        while ((entry = zipArchiveInputStream.getNextEntry()) != null) {
            Log.d(this.getClass().getName(), "Extracting entry " + entry.getName());
            File file = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                file.mkdirs();
            } else {
                file.getParentFile().mkdirs();
                fileOutputStream = new FileOutputStream(file);
                int bytesRead;
                while ((bytesRead = zipArchiveInputStream.read(buffer, 0, buffer.length)) != -1) {
                    fileOutputStream.write(buffer, 0, bytesRead);
                }
                fileOutputStream.close();
                fileOutputStream = null;
            }
        }
        if (this.isDeleteZipFile()) {
            // Delete the zip file if requested
            File zipFile = new File(inputFile);
            zipFile.delete();
        }
    } finally {
        try {
            zipArchiveInputStream.close();
            fileInputStream.close();
            if (fileOutputStream != null) {
                fileOutputStream.close();
            }
        } catch (NullPointerException ex) {
            Log.e(this.getClass().getName(), "Error closing the file streams.", ex);
        } catch (IOException ex) {
            Log.e(this.getClass().getName(), "Error closing the file streams.", ex);
        }
    }
}
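Note that extract writes each entry to new File(outputDir, entry.getName()) without validating the name. A minimal sketch of the usual "zip slip" guard that could be applied before opening the FileOutputStream is shown below; the helper is illustrative and not part of the baker-android code.

// Illustrative helper, not part of the baker-android code: rejects entry names
// that would escape the target directory via ".." segments ("zip slip").
private static File resolveSafely(File outputDir, String entryName) throws IOException {
    File target = new File(outputDir, entryName);
    String dirPath = outputDir.getCanonicalPath() + File.separator;
    if (!target.getCanonicalPath().startsWith(dirPath)) {
        throw new IOException("Entry is outside of the target dir: " + entryName);
    }
    return target;
}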
Use of org.apache.commons.compress.archivers.ArchiveEntry in project android by cSploit.
The class UpdateService, method verifyArchiveIntegrity.
/**
 * Check if an archive is valid by reading it.
 * @throws RuntimeException if there is no archive to test
 * @throws KeyException if the archive is corrupted or empty
 */
private void verifyArchiveIntegrity() throws RuntimeException, KeyException {
    File f;
    long total;
    short old_percentage, percentage;
    CountingInputStream counter;
    ArchiveInputStream is;
    byte[] buffer;
    String rootDirectory;
    Logger.info("verifying archive integrity");
    if (mCurrentTask == null || mCurrentTask.path == null)
        throw new RuntimeException("no archive to test");
    mBuilder.setContentTitle(getString(R.string.checking))
            .setSmallIcon(android.R.drawable.ic_popup_sync)
            .setContentText("")
            .setContentInfo("")
            .setProgress(100, 0, true);
    mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
    f = new File(mCurrentTask.path);
    try {
        counter = new CountingInputStream(new FileInputStream(f));
    } catch (FileNotFoundException e) {
        throw new RuntimeException(String.format("archive '%s' does not exist", mCurrentTask.path));
    }
    try {
        is = openArchiveStream(counter);
        ArchiveEntry entry;
        buffer = new byte[2048];
        total = f.length();
        old_percentage = -1;
        rootDirectory = null;
        // consume the archive
        while (mRunning && (entry = is.getNextEntry()) != null) {
            if (!mCurrentTask.skipRoot)
                continue;
            String name = entry.getName();
            if (rootDirectory == null) {
                if (name.contains("/")) {
                    rootDirectory = name.substring(0, name.indexOf('/'));
                } else if (entry.isDirectory()) {
                    rootDirectory = name;
                } else {
                    throw new IOException(String.format("archive '%s' contains files under its root", mCurrentTask.path));
                }
            } else {
                if (!name.startsWith(rootDirectory)) {
                    throw new IOException("multiple directories found in the archive root");
                }
            }
        }
        while (mRunning && is.read(buffer) > 0) {
            percentage = (short) (((double) counter.getBytesRead() / total) * 100);
            if (percentage != old_percentage) {
                mBuilder.setProgress(100, percentage, false).setContentInfo(percentage + "%");
                mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
                old_percentage = percentage;
            }
        }
    } catch (IOException e) {
        throw new KeyException("corrupted archive: " + e.getMessage());
    } finally {
        try {
            counter.close();
        } catch (IOException ignore) {
        }
    }
    if (!mRunning)
        throw new CancellationException("archive integrity check cancelled");
    if (mCurrentTask.skipRoot && rootDirectory == null)
        throw new KeyException(String.format("archive '%s' is empty", mCurrentTask.path));
}
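openArchiveStream(counter) is not shown in this snippet. A plausible minimal implementation based on Commons Compress's format auto-detection could look like the following; the signature and behaviour are assumptions, and the real cSploit method may differ.

// Illustrative sketch only; the real UpdateService#openArchiveStream is not shown
// above and may be implemented differently. Uses ArchiveStreamFactory from
// org.apache.commons.compress.archivers.
private ArchiveInputStream openArchiveStream(InputStream in) throws IOException {
    try {
        // Format auto-detection requires a stream that supports mark/reset.
        return new ArchiveStreamFactory().createArchiveInputStream(new BufferedInputStream(in));
    } catch (ArchiveException e) {
        throw new IOException("unsupported or corrupted archive", e);
    }
}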
Use of org.apache.commons.compress.archivers.ArchiveEntry in project packr by libgdx.
The class ArchiveUtils, method extractGenericArchive.
/**
 * Extracts an archive using {@link ArchiveStreamFactory#createArchiveInputStream(InputStream)} with no special handling of symbolic links or file
 * permissions.
 *
 * @param inputStream the archive input stream
 * @param extractToDirectory the directory to extract the archive into
 * @throws ArchiveException if an archive error occurs
 * @throws IOException if an IO error occurs
 */
private static void extractGenericArchive(InputStream inputStream, Path extractToDirectory) throws ArchiveException, IOException {
    final ArchiveInputStream archiveInputStream = new ArchiveStreamFactory().createArchiveInputStream(inputStream);
    ArchiveEntry entry;
    while ((entry = archiveInputStream.getNextEntry()) != null) {
        if (!archiveInputStream.canReadEntryData(entry)) {
            LOG.error("Failed to read archive entry " + entry);
            continue;
        }
        Path entryExtractPath = extractToDirectory.resolve(getEntryAsPath(entry));
        if (entry.isDirectory()) {
            Files.createDirectories(entryExtractPath);
        } else {
            Files.createDirectories(entryExtractPath.getParent());
            Files.copy(archiveInputStream, entryExtractPath, StandardCopyOption.REPLACE_EXISTING);
        }
        Files.setLastModifiedTime(entryExtractPath, FileTime.fromMillis(entry.getLastModifiedDate().getTime()));
    }
}
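getEntryAsPath(entry) is another ArchiveUtils helper that is not shown here. A plausible sketch follows, under the assumption that it only turns the entry name into a relative Path; the real packr implementation may normalise names differently.

// Illustrative sketch only; the real ArchiveUtils#getEntryAsPath in packr is not
// shown above. Uses java.nio.file.Paths.
private static Path getEntryAsPath(ArchiveEntry entry) {
    String name = entry.getName();
    // Strip a leading slash so resolve(...) keeps the result inside the target directory.
    if (name.startsWith("/")) {
        name = name.substring(1);
    }
    return Paths.get(name);
}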