Use of org.apache.commons.compress.archivers.ArchiveInputStream in project ongdb by graphfoundation:
class Loader, method openArchiveIn.
/**
 * Opens the given dump file for reading as a gzip-compressed tar archive.
 *
 * @param archive path to the compressed dump file
 * @return a tar archive stream positioned before the first entry
 * @throws IOException if the file cannot be opened
 * @throws IncorrectFormat if the file is not valid gzip data
 */
private static ArchiveInputStream openArchiveIn(Path archive) throws IOException, IncorrectFormat {
    InputStream source = Files.newInputStream(archive);
    final GzipCompressorInputStream gzip;
    try {
        gzip = new GzipCompressorInputStream(source);
    } catch (IOException notGzip) {
        // close the raw stream ourselves: the decorator was never constructed
        source.close();
        throw new IncorrectFormat(archive, notGzip);
    }
    return new TarArchiveInputStream(gzip);
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project ongdb by graphfoundation:
class Loader, method load.
/**
 * Extracts the dump archive, routing each entry either into the database
 * directory or into the transaction-log directory.
 *
 * @param archive path to the dump archive to read
 * @param databaseDestination directory receiving database files
 * @param transactionLogsDirectory directory receiving transaction-log files
 * @throws IOException on any I/O failure while reading or writing
 * @throws IncorrectFormat if the archive is not in the expected format
 */
public void load(Path archive, Path databaseDestination, Path transactionLogsDirectory) throws IOException, IncorrectFormat {
    validatePath(databaseDestination);
    validatePath(transactionLogsDirectory);

    createDestination(databaseDestination);
    createDestination(transactionLogsDirectory);

    try (ArchiveInputStream stream = openArchiveIn(archive)) {
        for (ArchiveEntry entry = nextEntry(stream, archive); entry != null; entry = nextEntry(stream, archive)) {
            loadEntry(determineEntryDestination(entry, databaseDestination, transactionLogsDirectory), stream, entry);
        }
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project layrry by moditect:
class FilesHelper, method unpack.
/**
 * Unpacks the archive at {@code src} into {@code dest}, rooting all entries
 * under a directory named after the archive file with its extension stripped.
 *
 * @param src  path to the archive (e.g. a .zip or .tar file)
 * @param dest directory the archive is extracted into
 * @throws RuntimeException wrapping any ArchiveException or IOException
 */
public static void unpack(Path src, Path dest) {
    File destinationDir = dest.toFile();
    try (InputStream fi = Files.newInputStream(src);
         InputStream bi = new BufferedInputStream(fi);
         ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(bi)) {
        String filename = src.getFileName().toString();
        // Strip the extension (.zip, .tar, ...). The previous hard-coded
        // "length() - 4" only handled 4-character suffixes and threw
        // StringIndexOutOfBoundsException for names shorter than 4 chars.
        int dot = filename.lastIndexOf('.');
        if (dot > 0) {
            filename = filename.substring(0, dot);
        }
        unpack(filename + "/", destinationDir, in);
    } catch (ArchiveException | IOException e) {
        throw new RuntimeException(e.getMessage(), e);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project hutool by looly:
class StreamExtractor, method extractInternal.
/**
 * 释放(解压)到指定目录
 *
 * @param targetDir 目标目录
 * @param filter 解压文件过滤器,用于指定需要释放的文件,null表示不过滤。当{@link Filter#accept(Object)}为true时释放。
 * @throws IOException IO异常,包括检测到条目路径逃逸出目标目录("Zip Slip"攻击)的情况
 */
private void extractInternal(File targetDir, Filter<ArchiveEntry> filter) throws IOException {
	Assert.isTrue(null != targetDir && ((false == targetDir.exists()) || targetDir.isDirectory()), "target must be dir.");
	// canonical form of the target dir, used to detect path-traversal entries
	final String targetCanonical = targetDir.getCanonicalPath();
	final ArchiveInputStream in = this.in;
	ArchiveEntry entry;
	File outItemFile;
	while (null != (entry = in.getNextEntry())) {
		if (null != filter && false == filter.accept(entry)) {
			continue;
		}
		if (false == in.canReadEntryData(entry)) {
			// 无法读取的文件直接跳过
			continue;
		}
		outItemFile = FileUtil.file(targetDir, entry.getName());
		// "Zip Slip" guard: entry names containing ".." must not escape targetDir
		final String entryCanonical = outItemFile.getCanonicalPath();
		if (false == entryCanonical.equals(targetCanonical)
				&& false == entryCanonical.startsWith(targetCanonical + File.separator)) {
			throw new IOException("Entry is outside of the target dir: " + entry.getName());
		}
		if (entry.isDirectory()) {
			// 创建对应目录
			// noinspection ResultOfMethodCallIgnored
			outItemFile.mkdirs();
		} else {
			FileUtil.writeFromStream(in, outItemFile, false);
		}
	}
}
Use of org.apache.commons.compress.archivers.ArchiveInputStream in project structr by structr:
class FileHelper, method unarchive.
/**
 * Unarchives a file into the (optional) folder with the given folder id.
 *
 * The archive format is detected from the stream: 7-Zip archives are read via
 * SevenZFile (the format does not support streaming), ZIP archives via
 * ZipArchiveInputStream (to tolerate the "data descriptor" feature), and all
 * other formats through ArchiveStreamFactory.
 *
 * @param securityContext security context used for all Structr transactions
 * @param file the archive file to unpack; null is logged and ignored
 * @param parentFolderId id of the target parent folder, or null to unpack
 *        into the archive file's own parent folder
 * @throws ArchiveException if the archive format cannot be detected or read
 * @throws IOException on any I/O failure while reading the archive
 * @throws FrameworkException on Structr transaction errors
 */
public static void unarchive(final SecurityContext securityContext, final File file, final String parentFolderId) throws ArchiveException, IOException, FrameworkException {
	if (file == null) {
		logger.error("Unable to unarchive file (file parameter was null).");
		return;
	}
	Folder existingParentFolder = null;
	final App app = StructrApp.getInstance(securityContext);
	final String fileName = file.getName();
	if (parentFolderId != null) {
		try (final Tx tx = app.tx(true, true, true)) {
			// search for existing parent folder
			existingParentFolder = app.get(Folder.class, parentFolderId);
			String parentFolderName = null;
			String msgString = "Unarchiving file {}";
			if (existingParentFolder != null) {
				parentFolderName = existingParentFolder.getName();
				msgString += " into existing folder {}.";
			}
			logger.info(msgString, new Object[] { fileName, parentFolderName });
			tx.success();
		}
	} else {
		// no folder id given: unpack next to the archive file itself
		existingParentFolder = file.getParent();
	}
	BufferedInputStream bufferedIs = null;
	try (final Tx tx = app.tx()) {
		// NOTE(review): bufferedIs is only closed by the try-with-resources in the
		// ZIP and default branches below; the 7z branch never closes it — potential leak.
		bufferedIs = new BufferedInputStream(file.getInputStream());
		tx.success();
	}
	switch(ArchiveStreamFactory.detect(bufferedIs)) {
		// 7z doesn't support streaming
		case ArchiveStreamFactory.SEVEN_Z:
		{
			int overallCount = 0;
			logger.info("7-Zip archive format detected");
			try (final Tx outertx = app.tx()) {
				// NOTE(review): sevenZFile is never closed — resource leak; consider
				// wrapping it in try-with-resources.
				SevenZFile sevenZFile = new SevenZFile(file.getFileOnDisk());
				SevenZArchiveEntry sevenZEntry = sevenZFile.getNextEntry();
				// process entries in inner transactions of up to 50 entries each,
				// committing periodically to bound transaction size
				while (sevenZEntry != null) {
					try (final Tx tx = app.tx(true, true, false)) {
						int count = 0;
						while (sevenZEntry != null && count++ < 50) {
							final String entryPath = "/" + PathHelper.clean(sevenZEntry.getName());
							logger.info("Entry path: {}", entryPath);
							if (sevenZEntry.isDirectory()) {
								handleDirectory(securityContext, existingParentFolder, entryPath);
							} else {
								// NOTE(review): a single read() is not guaranteed to fill the
								// buffer, and casting getSize() to int truncates entries > 2 GiB
								// — confirm archives are always small enough for this to hold.
								byte[] buf = new byte[(int) sevenZEntry.getSize()];
								sevenZFile.read(buf, 0, buf.length);
								try (final ByteArrayInputStream in = new ByteArrayInputStream(buf)) {
									handleFile(securityContext, in, existingParentFolder, entryPath);
								}
							}
							sevenZEntry = sevenZFile.getNextEntry();
							overallCount++;
						}
						logger.info("Committing transaction after {} entries.", overallCount);
						tx.success();
					}
				}
				logger.info("Unarchived {} files.", overallCount);
				outertx.success();
			}
			break;
		}
		// ZIP needs special treatment to support "unsupported feature data descriptor"
		case ArchiveStreamFactory.ZIP:
		{
			logger.info("Zip archive format detected");
			try (final ZipArchiveInputStream in = new ZipArchiveInputStream(bufferedIs, null, false, true)) {
				handleArchiveInputStream(in, app, securityContext, existingParentFolder);
			}
			break;
		}
		default:
		{
			logger.info("Default archive format detected");
			try (final ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(bufferedIs)) {
				handleArchiveInputStream(in, app, securityContext, existingParentFolder);
			}
		}
	}
}
Aggregations