Example usage of org.apache.commons.compress.archivers.zip.ZipFile in the Apache Stanbol project:
the copyDefaultConfig method of the ConfigUtils class.
/**
 * Initialises the default configuration for the SolrYard based on data in the parsed bundle. The
 * configuration will be copied to the parsed root directory.
 *
 * @param clazzInArchive
 *            This class is used to identify the archive containing the default configuration. Parsing
 *            <code>null</code> causes this class to be used and therefore initialises the default
 *            configuration contained by the SolrYard bundle.
 * @param rootDir
 *            the target directory for the configuration.
 * @param override
 *            if true existing configurations are overridden.
 * @return the root directory of the solr configuration (same as parsed as rootDir)
 * @throws IOException
 *             On any IO error while copying the configuration
 * @throws IllegalStateException
 *             If the parsed rootDir does exist but is not a directory.
 * @throws IllegalArgumentException
 *             If <code>null</code> is parsed as rootDir or if the parsed bundle does not contain the
 *             required information to set up a configuration
 */
public static File copyDefaultConfig(Class<?> clazzInArchive, File rootDir, boolean override) throws IOException, IllegalStateException, IllegalArgumentException {
    if (rootDir == null) {
        throw new IllegalArgumentException("The parsed root directory MUST NOT be NULL!");
    }
    if (rootDir.exists() && !rootDir.isDirectory()) {
        // fixed typo in the exception message ("extists" -> "exists")
        throw new IllegalStateException("The parsed root directory " + rootDir.getAbsolutePath() + " exists but is not a directory!");
    }
    // the archive (or directory) the default configuration is loaded from is located via the
    // class's own code source; fall back to this class when none is given
    File sourceRoot = getSource(clazzInArchive != null ? clazzInArchive : ConfigUtils.class);
    // fixed typo in the log message ("form" -> "from")
    log.info("Init Solr Managed Directory from {} to {} (override={})", new Object[] { sourceRoot, rootDir, override });
    if (sourceRoot.isFile()) {
        // the code source is a jar: copy every entry below CONFIG_DIR out of the archive
        ZipFile archive = new ZipFile(sourceRoot);
        log.info(" - read from jar-file");
        try {
            for (@SuppressWarnings("unchecked") Enumeration<ZipArchiveEntry> entries = archive.getEntries(); entries.hasMoreElements(); ) {
                ZipArchiveEntry entry = entries.nextElement();
                if (!entry.isDirectory() && entry.getName().startsWith(CONFIG_DIR)) {
                    copyResource(rootDir, archive, entry, CONFIG_DIR, override);
                }
            }
        } finally {
            // regardless what happens we need to close the archive!
            ZipFile.closeQuietly(archive);
        }
    } else {
        // the code source is an exploded directory: copy the config directory recursively
        log.info(" - read from directory");
        File source = new File(sourceRoot, CONFIG_DIR);
        if (source.exists() && source.isDirectory()) {
            FileUtils.copyDirectory(source, rootDir);
        } else {
            throw new FileNotFoundException("The SolrIndex default config was not found in directory " + source.getAbsolutePath());
        }
    }
    return rootDir;
}
Example usage of org.apache.commons.compress.archivers.zip.ZipFile in the vcell (Virtual Cell) project:
the read method of the DataSet class.
/**
 * Reads the file header and data-block headers of a simulation data file, either directly
 * from disk or from an entry (named after the file) inside the given zip archive.
 * <p>
 * For Chombo results (detected via {@code isChombo(zipFile)}) only the HDF5 solution meta
 * data is read; otherwise the binary {@code fileHeader} and the {@code DataBlock} headers
 * are parsed and cached in {@code dataBlockList}.
 *
 * @param file    the data file to read (its simple name is also the zip entry name)
 * @param zipFile the zip archive containing the data file, or {@code null} to read from disk
 * @throws IOException      on any read error or if the (un)compressed file cannot be found
 * @throws OutOfMemoryError if the data is too large to hold in memory
 */
public void read(File file, File zipFile) throws IOException, OutOfMemoryError {
    ZipFile zipZipFile = null;
    DataInputStream dataInputStream = null;
    try {
        this.fileName = file.getPath();
        InputStream is = null;
        if (zipFile != null) {
            System.out.println("DataSet.read() open " + zipFile + " for " + file.getName());
            zipZipFile = openZipFile(zipFile);
            // getEntry() already returns a ZipArchiveEntry; no ZipEntry declaration + downcast needed
            ZipArchiveEntry dataEntry = zipZipFile.getEntry(file.getName());
            if (dataEntry == null) {
                // fail with a clear message instead of a NullPointerException
                throw new FileNotFoundException("entry " + file.getName() + " not found in " + zipFile);
            }
            is = zipZipFile.getInputStream(dataEntry);
        } else {
            if (!file.exists()) {
                File compressedFile = new File(fileName + ".Z");
                if (compressedFile.exists()) {
                    // NOTE(review): builds a command line from the file name; safe only while
                    // fileName never contains untrusted input -- consider ProcessBuilder.
                    Runtime.getRuntime().exec("uncompress " + fileName + ".Z");
                    file = new File(fileName);
                    if (!file.exists()) {
                        throw new IOException("file " + fileName + ".Z could not be uncompressed");
                    }
                } else {
                    throw new FileNotFoundException("file " + fileName + " does not exist");
                }
            }
            System.out.println("DataSet.read() open '" + fileName + "'");
            is = new FileInputStream(file);
        }
        if (is != null && zipFile != null && isChombo(zipFile)) {
            try {
                readHdf5SolutionMetaData(is);
            } catch (Exception e) {
                e.printStackTrace();
                throw new IOException(e.getMessage(), e);
            }
        } else {
            BufferedInputStream bis = new BufferedInputStream(is);
            dataInputStream = new DataInputStream(bis);
            fileHeader.read(dataInputStream);
            // cache one DataBlock header per block; block payloads are read lazily later
            for (int i = 0; i < fileHeader.numBlocks; i++) {
                DataBlock dataBlock = new DataBlock();
                dataBlock.readBlockHeader(dataInputStream);
                dataBlockList.addElement(dataBlock);
            }
        }
    } finally {
        // closing dataInputStream also closes the wrapped entry/file stream; closing the
        // archive releases any stream obtained from it
        if (dataInputStream != null) {
            try {
                dataInputStream.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        if (zipZipFile != null) {
            try {
                zipZipFile.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
Example usage of org.apache.commons.compress.archivers.zip.ZipFile in the vcell (Virtual Cell) project:
the getData(String, File) method of the DataSet class.
/**
 * Reads the stored values of one simulation variable, either directly from the data file
 * on disk or from the corresponding entry in the given zip archive.
 *
 * @param varName the name of the variable to read (compared after trimming)
 * @param zipFile the zip archive containing the data file, or {@code null} to read from disk
 * @return the variable's values; never {@code null}
 * @throws IOException if the data cannot be read or the variable is not found
 */
public double[] getData(String varName, File zipFile) throws IOException {
    double[] data = null;
    if (zipFile != null && isChombo(zipFile)) {
        throw new RuntimeException("chombo files not read with " + getClass().getName() + ", should use Chombo utilities");
    } else {
        for (int i = 0; i < dataBlockList.size(); i++) {
            DataBlock dataBlock = (DataBlock) dataBlockList.elementAt(i);
            if (varName.trim().equals(dataBlock.getVarName().trim())) {
                File pdeFile = new File(fileName);
                if (zipFile == null && !pdeFile.exists()) {
                    throw new FileNotFoundException("file " + fileName + " does not exist");
                }
                InputStream is = null;
                ZipFile zipZipFile = null;
                DataInputStream dis = null;
                // open/read inside one try so the archive is closed even when
                // getInputStream() throws (the original leaked it in that case)
                try {
                    if (zipFile != null) {
                        zipZipFile = openZipFile(zipFile);
                        ZipArchiveEntry dataEntry = zipZipFile.getEntry(pdeFile.getName());
                        if (dataEntry == null) {
                            // fail with a clear message instead of a NullPointerException
                            throw new FileNotFoundException("entry " + pdeFile.getName() + " not found in " + zipFile);
                        }
                        is = zipZipFile.getInputStream(dataEntry);
                    } else {
                        is = new FileInputStream(pdeFile);
                    }
                    dis = new DataInputStream(new BufferedInputStream(is));
                    // InputStream.skip() may skip fewer bytes than requested; loop until the
                    // full data offset is consumed so we never read doubles from the wrong spot
                    long toSkip = dataBlock.getDataOffset();
                    while (toSkip > 0) {
                        long skipped = dis.skip(toSkip);
                        if (skipped <= 0) {
                            throw new IOException("unexpected end of data while seeking to variable '" + varName + "'");
                        }
                        toSkip -= skipped;
                    }
                    int size = dataBlock.getSize();
                    data = new double[size];
                    for (int j = 0; j < size; j++) {
                        data[j] = dis.readDouble();
                    }
                } finally {
                    try {
                        if (dis != null) {
                            dis.close();
                        } else if (is != null) {
                            is.close();
                        }
                        if (zipZipFile != null) {
                            zipZipFile.close();
                        }
                    } catch (Exception ex) {
                        // ignore -- best-effort cleanup must not mask the primary exception
                    }
                }
                break;
            }
        }
    }
    if (data == null) {
        throw new IOException("DataSet.getData(), data not found for variable '" + varName + "'");
    }
    return data;
}
Example usage of org.apache.commons.compress.archivers.zip.ZipFile in the vcell (Virtual Cell) project:
the getData(String, File, Double, SolverDataType) method of the DataSet class.
/**
 * Reads the stored values of one simulation variable, dispatching on the solver data type:
 * moving-boundary (MBS) results, Chombo HDF5 results inside the zip, or the default binary
 * data-file format (read directly from disk or from the zip entry named after the file).
 *
 * @param varName        the name of the variable to read (compared after trimming)
 * @param zipFile        the zip archive containing the data file, or {@code null} to read from disk
 * @param time           the time point, used only for {@code SolverDataType.MBSData}
 * @param solverDataType the solver data type, or {@code null} for the default format
 * @return the variable's values; never {@code null}
 * @throws IOException if the data cannot be read or the variable is not found
 */
double[] getData(String varName, File zipFile, Double time, SolverDataType solverDataType) throws IOException {
    double[] data = null;
    if (solverDataType == SolverDataType.MBSData) {
        try {
            data = readMBSData(varName, time);
        } catch (Exception e) {
            throw new IOException(e.getMessage(), e);
        }
    } else {
        if (zipFile != null && isChombo(zipFile)) {
            try {
                data = readHdf5VariableSolution(zipFile, new File(fileName).getName(), varName);
            } catch (Exception e) {
                throw new IOException(e.getMessage(), e);
            }
        } else {
            for (int i = 0; i < dataBlockList.size(); i++) {
                DataBlock dataBlock = (DataBlock) dataBlockList.elementAt(i);
                if (varName.trim().equals(dataBlock.getVarName().trim())) {
                    File pdeFile = new File(fileName);
                    if (zipFile == null && !pdeFile.exists()) {
                        throw new FileNotFoundException("file " + fileName + " does not exist");
                    }
                    InputStream is = null;
                    org.apache.commons.compress.archivers.zip.ZipFile zipZipFile = null;
                    DataInputStream dis = null;
                    // open/read inside one try so the archive is closed even when
                    // getInputStream() throws (the original leaked it in that case)
                    try {
                        if (zipFile != null) {
                            zipZipFile = openZipFile(zipFile);
                            ZipArchiveEntry dataEntry = zipZipFile.getEntry(pdeFile.getName());
                            if (dataEntry == null) {
                                // fail with a clear message instead of a NullPointerException
                                throw new FileNotFoundException("entry " + pdeFile.getName() + " not found in " + zipFile);
                            }
                            is = zipZipFile.getInputStream(dataEntry);
                        } else {
                            is = new FileInputStream(pdeFile);
                        }
                        dis = new DataInputStream(new BufferedInputStream(is));
                        // InputStream.skip() may skip fewer bytes than requested; loop until the
                        // full data offset is consumed so we never read doubles from the wrong spot
                        long toSkip = dataBlock.getDataOffset();
                        while (toSkip > 0) {
                            long skipped = dis.skip(toSkip);
                            if (skipped <= 0) {
                                throw new IOException("unexpected end of data while seeking to variable '" + varName + "'");
                            }
                            toSkip -= skipped;
                        }
                        int size = dataBlock.getSize();
                        data = new double[size];
                        for (int j = 0; j < size; j++) {
                            data[j] = dis.readDouble();
                        }
                    } finally {
                        try {
                            if (dis != null) {
                                dis.close();
                            } else if (is != null) {
                                is.close();
                            }
                            if (zipZipFile != null) {
                                zipZipFile.close();
                            }
                        } catch (Exception ex) {
                            // ignore -- best-effort cleanup must not mask the primary exception
                        }
                    }
                    break;
                }
            }
        }
    }
    if (data == null) {
        throw new IOException("DataSet.getData(), data not found for variable '" + varName + "'");
    }
    return data;
}
Example usage of org.apache.commons.compress.archivers.zip.ZipFile in the vcell (Virtual Cell) project:
the readHdf5VariableSolution method of the DataSet class.
/**
 * Reads the solution values of a single variable from a Chombo HDF5 result that is stored
 * as an entry inside a zip archive. The entry is extracted to a temporary HDF5 file, which
 * is opened, queried for the variable's solution dataset, and deleted afterwards.
 *
 * @param zipfile  the zip archive containing the HDF5 result
 * @param fileName the name of the entry inside the zip archive to extract
 * @param varName  the variable to look up; may be null
 * @return the variable's solution values, or null if varName is null or no matching
 *         Dataset is found at the variable's solution path
 * @throws Exception on any extraction or HDF5 read error
 */
static double[] readHdf5VariableSolution(File zipfile, String fileName, String varName) throws Exception {
File tempFile = null;
FileFormat solFile = null;
try {
// extract the named zip entry to a temporary HDF5 file on disk
tempFile = createTempHdf5File(zipfile, fileName);
FileFormat fileFormat = FileFormat.getFileFormat(FileFormat.FILE_TYPE_HDF5);
solFile = fileFormat.createInstance(tempFile.getAbsolutePath(), FileFormat.READ);
solFile.open();
if (varName != null) {
// NOTE(review): presumably the HDF5 group path of the variable's solution dataset
String varPath = Hdf5Utils.getVarSolutionPath(varName);
HObject solObj = FileFormat.findObject(solFile, varPath);
if (solObj instanceof Dataset) {
Dataset dataset = (Dataset) solObj;
// NOTE(review): assumes the dataset's native buffer is double[] -- confirm with writer
return (double[]) dataset.read();
}
}
} finally {
// best-effort cleanup: close the HDF5 file and remove the temporary copy
try {
if (solFile != null) {
solFile.close();
}
if (tempFile != null) {
if (!tempFile.delete()) {
System.err.println("couldn't delete temp file " + tempFile.getAbsolutePath());
}
}
} catch (Exception e) {
// ignore -- cleanup failure should not mask the result or a primary exception
}
}
// varName was null, or no matching Dataset was found at the solution path
return null;
}
Aggregations