use of org.apache.commons.compress.archivers.zip.ZipFile in project atlas by alibaba.
the class ZipUtils method extractZipFileToFolder.
/**
 * Extracts a single entry of a zip file to the given folder.
 *
 * @param zipFile the zip archive to read from
 * @param path the path of the entry inside the archive
 * @param destFolder the folder the extracted file is written to
 * @return the extracted file, or null if the entry does not exist or extraction failed
 */
public static File extractZipFileToFolder(File zipFile, String path, File destFolder) {
    File destFile = null;
    ZipFile zip = null;
    try {
        zip = new ZipFile(zipFile);
        ZipArchiveEntry zipArchiveEntry = zip.getEntry(path);
        if (null != zipArchiveEntry) {
            // use only the file name of the entry, not its full path inside the archive
            String name = FilenameUtils.getName(zipArchiveEntry.getName());
            destFile = new File(destFolder, name);
            destFolder.mkdirs();
            // copy the entry content, closing both streams even if the copy fails
            try (InputStream is = zip.getInputStream(zipArchiveEntry);
                 FileOutputStream fos = new FileOutputStream(destFile)) {
                byte[] b = new byte[1024];
                int length;
                while ((length = is.read(b)) != -1) {
                    fos.write(b, 0, length);
                }
            }
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        // release the archive even if the entry could not be extracted
        ZipFile.closeQuietly(zip);
    }
    return destFile;
}
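A minimal usage sketch (not taken from the atlas sources): the archive path, entry name and target folder below are hypothetical, and the method returns null when the entry is missing or extraction fails.
// hypothetical inputs; extractZipFileToFolder returns null on a missing entry or an IOException
File bundle = new File("build/outputs/app-bundle.zip");
File extracted = ZipUtils.extractZipFileToFolder(bundle, "assets/config.json", new File("build/tmp/unpacked"));
if (extracted == null) {
    System.err.println("entry not found or extraction failed");
}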
use of org.apache.commons.compress.archivers.zip.ZipFile in project tika by apache.
the class CXFTestBase method readZipArchive.
protected Map<String, String> readZipArchive(InputStream inputStream) throws IOException {
    Map<String, String> data = new HashMap<String, String>();
    Path tempFile = writeTemporaryArchiveFile(inputStream, "zip");
    // open the temporary archive and hash every entry, releasing the archive,
    // the entry streams and the temporary file even if reading fails
    try (ZipFile zip = new ZipFile(tempFile.toFile())) {
        Enumeration<ZipArchiveEntry> entries = zip.getEntries();
        while (entries.hasMoreElements()) {
            ZipArchiveEntry entry = entries.nextElement();
            ByteArrayOutputStream bos = new ByteArrayOutputStream();
            try (InputStream is = zip.getInputStream(entry)) {
                IOUtils.copy(is, bos);
            }
            // map the entry name to the hex MD5 digest of its content
            data.put(entry.getName(), DigestUtils.md5Hex(bos.toByteArray()));
        }
    } finally {
        Files.delete(tempFile);
    }
    return data;
}
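A hedged sketch of how a subclass test might use this helper; the archive location and the assertion are assumptions, not taken from the Tika test suite.
// hypothetical JUnit usage in a CXFTestBase subclass; the archive path is illustrative only
@Test
public void testZipEntriesAreDigested() throws Exception {
    try (InputStream in = new FileInputStream("target/test-classes/test-documents.zip")) {
        Map<String, String> digests = readZipArchive(in);
        for (String md5 : digests.values()) {
            // readZipArchive stores the hex MD5 of each entry, which is always 32 characters
            assertEquals(32, md5.length());
        }
    }
}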
use of org.apache.commons.compress.archivers.zip.ZipFile in project stanbol by apache.
the class ConfigUtils method copyResource.
/**
 * Variant of the copyResource method that uses an entry of an archive as source.
 *
 * @param rootDir
 *            the directory used as target
 * @param archive
 *            the archive containing the parsed entry
 * @param entry
 *            the entry to copy to the target directory
 * @param context
 *            the context used to calculate the relative path of the resource within the target directory
 * @param override
 *            whether an existing resource within the target directory should be deleted
 * @throws IOException
 *             in case of an error while reading or writing the resource
 */
private static void copyResource(File rootDir, ZipFile archive, ZipArchiveEntry entry, String context, boolean override) throws IOException {
    File file = prepairCopy(entry.getName(), rootDir, context);
    if (file != null) {
        boolean overrideState = false;
        if (file.exists() && override) {
            FileUtils.deleteQuietly(file);
            overrideState = true;
        }
        if (!file.exists()) {
            OutputStream os = null;
            InputStream is = null;
            try {
                os = FileUtils.openOutputStream(file);
                is = archive.getInputStream(entry);
                IOUtils.copy(is, os);
                log.debug(String.format(" > %s %s", overrideState ? "override" : "copy", file));
            } finally {
                IOUtils.closeQuietly(is);
                IOUtils.closeQuietly(os);
            }
        }
    }
    // else: cannot copy; logging is already provided by prepairCopy
}
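A sketch of a driver loop for this method; since copyResource is private it would live inside ConfigUtils itself, and the archive path, target directory, context and override flag below are assumptions.
// illustrative only: archive, target directory, context and override flag are made up
ZipFile archive = new ZipFile(new File("config/default-config.zip"));
try {
    Enumeration<ZipArchiveEntry> entries = archive.getEntries();
    while (entries.hasMoreElements()) {
        ZipArchiveEntry entry = entries.nextElement();
        if (!entry.isDirectory()) {
            copyResource(new File("stanbol/datafiles"), archive, entry, "config/", false);
        }
    }
} finally {
    ZipFile.closeQuietly(archive);
}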
use of org.apache.commons.compress.archivers.zip.ZipFile in project stanbol by apache.
the class ResourceLoader method loadResource.
/**
* Loads a resource from a file
* @param file the file resource
*/
private void loadResource(String file) {
    synchronized (files) {
        // sync on files to avoid two threads loading the same file
        ResourceState state = files.get(file);
        if (state == null || state != ResourceState.REGISTERED) {
            log.info("Do not load File {} because of its state {} (null means removed from list)", file, state);
            // someone removed it in between
            return;
        } else {
            // set to loading
            setResourceState(file, ResourceState.LOADING, null);
        }
    }
    long startFile = System.currentTimeMillis();
    log.info(" > loading '{}' ...", file);
    String extension = FilenameUtils.getExtension(file);
    if (loadEntriesWithinZipArchives && ("zip".equalsIgnoreCase(extension) || "jar".equalsIgnoreCase(extension))) {
        log.info(" - processing {}-archive entries:", extension);
        ZipFile zipArchive;
        try {
            zipArchive = new ZipFile(file);
        } catch (IOException e) {
            zipArchive = null;
            setResourceState(file, ResourceState.ERROR, e);
        }
        if (zipArchive != null) {
            boolean isError = false;
            Enumeration<ZipArchiveEntry> entries = zipArchive.getEntries();
            while (entries.hasMoreElements()) {
                ZipArchiveEntry entry = entries.nextElement();
                if (!entry.isDirectory()) {
                    String entryName = entry.getName();
                    log.info(" o loading entry '{}'", entryName);
                    try {
                        ResourceState state = resourceImporter.importResource(zipArchive.getInputStream(entry), FilenameUtils.getName(entryName));
                        if (state == ResourceState.ERROR) {
                            isError = true;
                        }
                    } catch (IOException e) {
                        // a single failed entry marks the whole archive as failed
                        isError = true;
                    }
                }
            }
            // set the state for the archive as a whole
            setResourceState(file, isError ? ResourceState.ERROR : ResourceState.LOADED, null);
        }
    } else {
        InputStream is;
        try {
            is = new FileInputStream(file);
            ResourceState state = resourceImporter.importResource(is, FilenameUtils.getName(file));
            setResourceState(file, state, null);
        } catch (FileNotFoundException e) {
            // during init it is checked that files exist, are files and there is read
            // access, so this can only happen if someone deletes the file in between
            log.warn("Unable to load resource " + file, e);
            setResourceState(file, ResourceState.ERROR, e);
        } catch (IOException e) {
            log.error("Unable to load resource " + file, e);
            setResourceState(file, ResourceState.ERROR, e);
        } catch (Exception e) {
            log.error("Unable to load resource " + file, e);
            setResourceState(file, ResourceState.ERROR, e);
        }
    }
    log.info(" - completed in {} seconds", (System.currentTimeMillis() - startFile) / 1000);
}
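Stripped of the state bookkeeping, the archive branch above is the standard commons-compress enumeration pattern; a minimal standalone sketch of that pattern follows, where the file name and the printout are placeholders.
// minimal sketch of the entry-enumeration pattern used above; "archive.zip" and the printout are placeholders
try (ZipFile zip = new ZipFile(new File("archive.zip"))) {
    Enumeration<ZipArchiveEntry> entries = zip.getEntries();
    while (entries.hasMoreElements()) {
        ZipArchiveEntry entry = entries.nextElement();
        if (!entry.isDirectory()) {
            try (InputStream in = zip.getInputStream(entry)) {
                byte[] buffer = new byte[4096];
                long total = 0;
                for (int n; (n = in.read(buffer)) != -1; ) {
                    total += n;
                }
                System.out.println(entry.getName() + ": " + total + " bytes");
            }
        }
    }
}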
use of org.apache.commons.compress.archivers.zip.ZipFile in project vcell by virtualcell.
the class DataSet method readChomboExtrapolatedValues.
static double[] readChomboExtrapolatedValues(String varName, File pdeFile, File zipFile) throws IOException {
    double[] data = null;
    if (zipFile != null && isChombo(zipFile)) {
        File tempFile = null;
        FileFormat solFile = null;
        try {
            tempFile = createTempHdf5File(zipFile, pdeFile.getName());
            FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
            solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
            solFile.open();
            data = readChomboExtrapolatedValues(varName, solFile);
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        } finally {
            try {
                if (solFile != null) {
                    solFile.close();
                }
                if (tempFile != null) {
                    if (!tempFile.delete()) {
                        System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
                    }
                }
            } catch (Exception e) {
                // ignore
            }
        }
    }
    return data;
}
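A hedged example of calling this helper from the same package; the variable name and simulation file names are invented for illustration.
// hypothetical caller; the variable name and file names are illustrative only
File pdeFile = new File("SimID_123_0_.log");
File zipFile = new File("SimID_123_0_.hdf5.zip");
double[] extrapolated = DataSet.readChomboExtrapolatedValues("Voltage", pdeFile, zipFile);
if (extrapolated != null) {
    System.out.println("read " + extrapolated.length + " extrapolated values");
}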