Use of org.apache.commons.compress.archivers.ArchiveInputStream in project weave (by continuuity) — class KafkaTest, method extractKafka.
/**
 * Extracts the bundled {@code kafka-0.7.2.tgz} classpath resource into a fresh
 * temporary folder and returns that folder.
 *
 * <p>Fixes over the previous version: the raw resource stream is now always
 * closed (it leaked whenever the archive/compressor factory threw before the
 * archive stream existed), and parent directories are created for file entries
 * in case the tar lists a file before its directory entry.
 *
 * @return the directory the archive was extracted into
 * @throws IOException         on any I/O failure while extracting
 * @throws ArchiveException    if the tar stream cannot be created
 * @throws CompressorException if the gzip stream cannot be created
 */
private static File extractKafka() throws IOException, ArchiveException, CompressorException {
  File kafkaExtract = TMP_FOLDER.newFolder();
  InputStream kafkaResource = KafkaTest.class.getClassLoader().getResourceAsStream("kafka-0.7.2.tgz");
  try (ArchiveInputStream archiveInput = new ArchiveStreamFactory().createArchiveInputStream(
      ArchiveStreamFactory.TAR,
      new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.GZIP, kafkaResource))) {
    ArchiveEntry entry = archiveInput.getNextEntry();
    while (entry != null) {
      File file = new File(kafkaExtract, entry.getName());
      if (entry.isDirectory()) {
        file.mkdirs();
      } else {
        // Ensure the parent exists: some tars emit file entries before
        // (or without) the corresponding directory entries.
        File parent = file.getParentFile();
        if (parent != null) {
          parent.mkdirs();
        }
        ByteStreams.copy(archiveInput, Files.newOutputStreamSupplier(file));
      }
      entry = archiveInput.getNextEntry();
    }
  } finally {
    // Close the underlying resource stream even when the factory calls in the
    // resource specification throw — try-with-resources alone would not cover
    // that, because archiveInput never came into existence.
    if (kafkaResource != null) {
      kafkaResource.close();
    }
  }
  return kafkaExtract;
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project neo4j (by neo4j) — class Loader, method load.
/**
 * Restores a database from the given archive into the directories described by
 * {@code databaseLayout}. Both target directories are validated and created up
 * front, and a presence check guards against overwriting an existing database.
 *
 * @param archive        path of the dump archive to read
 * @param databaseLayout layout providing the database and transaction-log destinations
 * @throws IOException      on any I/O failure while reading or writing
 * @throws IncorrectFormat  if the archive is not in the expected dump format
 */
public void load(Path archive, DatabaseLayout databaseLayout) throws IOException, IncorrectFormat {
    Path dataDestination = databaseLayout.databaseDirectory();
    Path txLogDestination = databaseLayout.getTransactionLogsDirectory();
    validatePath(dataDestination, false);
    validatePath(txLogDestination, true);
    createDestination(dataDestination);
    createDestination(txLogDestination);
    checkDatabasePresence(databaseLayout);
    try (ArchiveInputStream stream = openArchiveIn(archive);
            Resource ignore = progressPrinter.startPrinting()) {
        // Route each entry to either the data directory or the tx-log directory.
        for (ArchiveEntry entry = nextEntry(stream, archive); entry != null; entry = nextEntry(stream, archive)) {
            loadEntry(determineEntryDestination(entry, dataDestination, txLogDestination), stream, entry);
        }
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project android (by cSploit) — class UpdateService, method extract.
/**
 * Extracts the archive at {@code mCurrentTask.path} into
 * {@code mCurrentTask.outputDir}, showing extraction progress in the ongoing
 * notification. For tar archives the stored permission bits are restored; for
 * other formats only ELF binaries and scripts are made rwx. When
 * {@code mCurrentTask.fixShebang} is set, {@code #!/usr/bin/env} shebang lines
 * are rewritten to the device's real {@code env} path.
 *
 * @throws IOException if some I/O error occurs
 * @throws java.util.concurrent.CancellationException if the task is cancelled by the user
 * @throws java.lang.InterruptedException when the running thread gets cancelled.
 */
private void extract() throws RuntimeException, IOException, InterruptedException, ChildManager.ChildNotStartedException {
  ArchiveInputStream is = null;
  ArchiveEntry entry;
  CountingInputStream counter;
  OutputStream outputStream = null;
  File f, inFile;
  File[] list;
  String name;
  String envPath;
  final StringBuffer sb = new StringBuffer();
  int mode;
  int count;
  long total;
  boolean isTar, r, w, x, isElf, isScript;
  short percentage, old_percentage;
  Child which;
  if (mCurrentTask.path == null || mCurrentTask.outputDir == null)
    return;
  mBuilder.setContentTitle(getString(R.string.extracting)).setContentText("").setContentInfo("").setSmallIcon(android.R.drawable.ic_popup_sync).setProgress(100, 0, false);
  mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
  Logger.info(String.format("extracting '%s' to '%s'", mCurrentTask.path, mCurrentTask.outputDir));
  envPath = null;
  which = null;
  try {
    if (mCurrentTask.fixShebang) {
      // Resolve the real path of `env` asynchronously; the last line of
      // output (captured into sb) is taken as the answer below.
      which = System.getTools().raw.async("which env", new Raw.RawReceiver() {

        @Override
        public void onNewLine(String line) {
          sb.delete(0, sb.length());
          sb.append(line);
        }
      });
    }
    inFile = new File(mCurrentTask.path);
    total = inFile.length();
    // CountingInputStream tracks compressed bytes read, for the progress bar.
    counter = new CountingInputStream(new FileInputStream(inFile));
    is = openArchiveStream(counter);
    isTar = mCurrentTask.archiver.equals(archiveAlgorithm.tar);
    old_percentage = -1;
    f = new File(mCurrentTask.outputDir);
    // An output dir with more than two entries is considered a stale install.
    if (f.exists() && f.isDirectory() && (list = f.listFiles()) != null && list.length > 2)
      wipe();
    if (mCurrentTask.fixShebang) {
      if (execShell(which, "cancelled while retrieving env path") != 0) {
        throw new RuntimeException("cannot find 'env' executable");
      }
      envPath = sb.toString();
    }
    while (mRunning && (entry = is.getNextEntry()) != null) {
      // Strip a leading "./" so entries resolve directly under outputDir.
      name = entry.getName().replaceFirst("^\\./?", "");
      if (mCurrentTask.skipRoot) {
        // Drop the archive's single top-level directory component.
        if (name.contains("/"))
          name = name.substring(name.indexOf('/') + 1);
        else if (entry.isDirectory())
          continue;
      }
      f = new File(mCurrentTask.outputDir, name);
      isElf = isScript = false;
      if (entry.isDirectory()) {
        if (!f.exists()) {
          if (!f.mkdirs()) {
            throw new IOException(String.format("Couldn't create directory '%s'.", f.getAbsolutePath()));
          }
        }
      } else {
        byte[] buffer = null;
        byte[] writeMe = null;
        // Robustness: create missing parent directories — some archives list
        // files before (or without) their directory entries.
        File parentDir = f.getParentFile();
        if (parentDir != null && !parentDir.exists() && !parentDir.mkdirs()) {
          throw new IOException(String.format("Couldn't create directory '%s'.", parentDir.getAbsolutePath()));
        }
        outputStream = new FileOutputStream(f);
        // check if file is an ELF or a script by sniffing its first 4 bytes
        if ((!isTar || mCurrentTask.fixShebang) && entry.getSize() > 4) {
          writeMe = buffer = new byte[4];
          IOUtils.readFully(is, buffer);
          if (buffer[0] == 0x7F && buffer[1] == 0x45 && buffer[2] == 0x4C && buffer[3] == 0x46) {
            isElf = true;
          } else if (buffer[0] == '#' && buffer[1] == '!') {
            // NOTE(review): this shebang rewrite also runs when fixShebang is
            // false (non-tar archives); envPath is null then and replace()
            // would NPE — confirm whether scripts can appear in that path.
            isScript = true;
            ByteArrayOutputStream firstLine = new ByteArrayOutputStream();
            int newline = -1;
            // assume that '\n' is further away than 4 chars.
            firstLine.write(buffer);
            buffer = new byte[1024];
            count = 0;
            // BUGFIX: the previous code used Arrays.binarySearch to find the
            // newline, but binarySearch requires a sorted array and returns
            // meaningless results on raw file data. Use a linear scan instead.
            while (mRunning && (count = is.read(buffer)) >= 0 && (newline = indexOfByte(buffer, 0, count, (byte) 0x0A)) < 0) {
              firstLine.write(buffer, 0, count);
            }
            if (!mRunning) {
              throw new CancellationException("cancelled while searching for newline.");
            } else if (count < 0) {
              // EOF before any newline: nothing buffered beyond the first line.
              newline = count = 0;
            } else if (newline < 0) {
              newline = count;
            }
            firstLine.write(buffer, 0, newline);
            firstLine.close();
            byte[] newFirstLine = new String(firstLine.toByteArray()).replace("/usr/bin/env", envPath).getBytes();
            // Rewritten first line + whatever was read past the newline.
            writeMe = new byte[newFirstLine.length + (count - newline)];
            java.lang.System.arraycopy(newFirstLine, 0, writeMe, 0, newFirstLine.length);
            java.lang.System.arraycopy(buffer, newline, writeMe, newFirstLine.length, count - newline);
          }
        }
        if (writeMe != null) {
          outputStream.write(writeMe);
        }
        IOUtils.copy(is, outputStream);
        outputStream.close();
        outputStream = null;
        percentage = (short) (((double) counter.getBytesRead() / total) * 100);
        if (percentage != old_percentage) {
          mBuilder.setProgress(100, percentage, false).setContentInfo(percentage + "%");
          mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
          old_percentage = percentage;
        }
      }
      // Zip does not store file permissions.
      if (isTar) {
        mode = ((TarArchiveEntry) entry).getMode();
        r = (mode & 0400) > 0;
        w = (mode & 0200) > 0;
        x = (mode & 0100) > 0;
      } else if (isElf || isScript) {
        r = w = x = true;
      } else {
        continue;
      }
      if (!f.setExecutable(x, true)) {
        Logger.warning(String.format("cannot set executable permission of '%s'", name));
      }
      if (!f.setWritable(w, true)) {
        Logger.warning(String.format("cannot set writable permission of '%s'", name));
      }
      if (!f.setReadable(r, true)) {
        Logger.warning(String.format("cannot set readable permission of '%s'", name));
      }
    }
    if (!mRunning)
      throw new CancellationException("extraction cancelled.");
    Logger.info("extraction completed");
    // .nomedia keeps Android's media scanner out of the extracted tree.
    f = new File(mCurrentTask.outputDir, ".nomedia");
    if (f.createNewFile())
      Logger.info(".nomedia created");
    mBuilder.setContentInfo("").setProgress(100, 100, true);
    mNotificationManager.notify(NOTIFICATION_ID, mBuilder.build());
  } finally {
    if (is != null)
      is.close();
    if (outputStream != null)
      outputStream.close();
  }
}

/**
 * Returns the index of the first occurrence of {@code value} in
 * {@code buffer[from..to)}, or -1 if it is absent.
 */
private static int indexOfByte(byte[] buffer, int from, int to, byte value) {
  for (int i = from; i < to; i++) {
    if (buffer[i] == value) {
      return i;
    }
  }
  return -1;
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project stanbol (by apache) — class ManagementUtils, method getArchiveInputStream.
/**
 * Tries to create an {@link ArchiveInputStream} based on the parsed {@link InputStream}.
 * First the provided resource name is used to detect the type of the archive.
 * If that does not work, or the parsed resource name is <code>null</code>, the
 * stream is created by using auto-detection of the archive type.
 * @param resourceName the name of the resource or <code>null</code>
 * @param is the {@link InputStream}
 * @return the {@link ArchiveInputStream}
 * @throws ArchiveException if the {@link InputStream} does not represent any
 * supported archive type
 */
public static ArchiveInputStream getArchiveInputStream(String resourceName, InputStream is) throws ArchiveException {
    if (is == null) {
        return null;
    }
    // Auto-detection further down needs mark/reset support.
    if (!is.markSupported()) {
        is = new BufferedInputStream(is);
    }
    String extension = (resourceName == null) ? null : FilenameUtils.getExtension(resourceName);
    InputStream archiveCandidate;
    if ("zip".equalsIgnoreCase(extension)) {
        // zip is itself an archive format — no compressor layer expected
        archiveCandidate = is;
    } else {
        // Non-zip: the stream may be compressed (e.g. .tar.gz); try to peel the
        // compression layer, first by extension, then by content sniffing.
        InputStream decompressed = null;
        try {
            decompressed = compressorStreamFactory.createCompressorInputStream(extension, is);
        } catch (CompressorException byExtension) {
            try {
                decompressed = compressorStreamFactory.createCompressorInputStream(is);
            } catch (CompressorException byContent) {
                // apparently not a compression stream — use the raw stream
            }
        }
        if (decompressed == null) {
            archiveCandidate = is;
        } else {
            archiveCandidate = decompressed;
            // compressed payloads are assumed to be tar archives
            extension = "tar";
        }
    }
    if (extension != null) {
        try {
            return archiveStreamFactory.createArchiveInputStream(extension, archiveCandidate);
        } catch (ArchiveException byExtension) {
            // fall through to auto-detection
        }
    }
    // try to detect
    // NOTE(review): detection runs on the raw (possibly still compressed)
    // stream rather than the decompressed one — confirm this is intended.
    return archiveStreamFactory.createArchiveInputStream(is);
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project stanbol (by apache) — class ConfigUtils, method getArchiveInputStream.
/**
 * Opens an {@link ArchiveInputStream} for a Solr index archive, choosing the
 * format from the file extension of {@code solrArchiveName} (or treating the
 * whole name as a format identifier when it has no extension).
 *
 * @param solrArchiveName archive file name, or a bare format name
 * @param is the raw input stream of the archive
 * @return the archive stream (zip, or tar wrapped in gz/bz2 decompression)
 * @throws IOException if the decompression stream cannot be created
 */
public static ArchiveInputStream getArchiveInputStream(String solrArchiveName, InputStream is) throws IOException {
    String extension = FilenameUtils.getExtension(solrArchiveName);
    // No extension means the caller passed the format name directly.
    String format = (extension == null || extension.isEmpty())
            ? solrArchiveName
            : SUPPORTED_SOLR_ARCHIVE_FORMAT.get(extension);
    if ("zip".equals(format)) {
        return new ZipArchiveInputStream(is);
    }
    // Everything else is expected to be a compressed tar.
    InputStream uncompressed;
    if ("gz".equals(format)) {
        uncompressed = new GZIPInputStream(is);
    } else if ("bz2".equals(format)) {
        uncompressed = new BZip2CompressorInputStream(is);
    } else {
        throw new IllegalStateException("Unsupported compression format " + format + "!. " + "Please report this to stanbol-dev mailing list!");
    }
    return new TarArchiveInputStream(uncompressed);
}
Aggregations