Use of org.apache.commons.compress.archivers.ArchiveEntry in the project Tika by Apache:
the class PackageParser, method parse.
/**
 * Parses an archive stream, passing each contained (non-directory) entry to the
 * delegate parser. Streaming formats are read directly; 7z, which needs random
 * access, is spooled to a temporary file first.
 *
 * @param stream archive content; wrapped in a BufferedInputStream if it does
 *               not support mark/reset
 * @param handler receives the XHTML representation of the archive contents
 * @param metadata metadata of the archive itself; updated with the detected
 *                 archive media type
 * @param context parse context; may supply a TikaConfig, an
 *                ArchiveStreamFactory and a PasswordProvider
 * @throws TikaException if the stream is not a recognised/streamable archive
 * @throws EncryptedDocumentException if the archive is password protected and
 *                 no (valid) password was supplied
 */
public void parse(InputStream stream, ContentHandler handler, Metadata metadata, ParseContext context) throws IOException, SAXException, TikaException {
    // Lazily resolve the MediaTypeRegistry at parse time: we only want to call
    // getDefaultConfig() once, and it can't be loaded statically because of
    // the ForkParser.
    TikaConfig config = context.get(TikaConfig.class);
    MediaTypeRegistry mediaTypeRegistry;
    if (config != null) {
        mediaTypeRegistry = config.getMediaTypeRegistry();
    } else {
        if (bufferedMediaTypeRegistry == null) {
            // Cache the default registry for subsequent parses;
            // double-checked under the lock.
            synchronized (lock) {
                if (bufferedMediaTypeRegistry == null) {
                    bufferedMediaTypeRegistry = TikaConfig.getDefaultConfig().getMediaTypeRegistry();
                }
            }
        }
        mediaTypeRegistry = bufferedMediaTypeRegistry;
    }

    // Ensure that the stream supports the mark feature, so it can be reset
    // below if a non-streaming format (7z) is detected.
    if (!stream.markSupported()) {
        stream = new BufferedInputStream(stream);
    }

    TemporaryResources tmp = new TemporaryResources();
    ArchiveInputStream ais = null;
    try {
        ArchiveStreamFactory factory = context.get(ArchiveStreamFactory.class, new ArchiveStreamFactory());
        // At the end we want to close the archive stream to release
        // any associated resources, but the underlying document stream
        // should not be closed
        ais = factory.createArchiveInputStream(new CloseShieldInputStream(stream));
    } catch (StreamingNotSupportedException sne) {
        // Most archive formats work on streams, but a few need files
        if (sne.getFormat().equals(ArchiveStreamFactory.SEVEN_Z)) {
            try {
                // Rework as a file, and wrap
                stream.reset();
                TikaInputStream tstream = TikaInputStream.get(stream, tmp);
                // Seven Zip supports passwords, was one given?
                String password = null;
                PasswordProvider provider = context.get(PasswordProvider.class);
                if (provider != null) {
                    password = provider.getPassword(metadata);
                }
                SevenZFile sevenz;
                if (password == null) {
                    sevenz = new SevenZFile(tstream.getFile());
                } else {
                    // 7z expects the password as UTF-16LE ("UnicodeLittleUnmarked") bytes
                    sevenz = new SevenZFile(tstream.getFile(), password.getBytes("UnicodeLittleUnmarked"));
                }
                // Pending a fix for COMPRESS-269 / TIKA-1525, this bit is a little nasty
                ais = new SevenZWrapper(sevenz);
            } catch (IOException | RuntimeException e) {
                // Don't leak the spooled temporary file if reset/spooling/
                // opening the 7z archive failed
                tmp.close();
                throw e;
            }
        } else {
            tmp.close();
            throw new TikaException("Unknown non-streaming format " + sne.getFormat(), sne);
        }
    } catch (ArchiveException e) {
        tmp.close();
        throw new TikaException("Unable to unpack document stream", e);
    }

    // From here on, ais (and tmp) must be released on every exit path
    XHTMLContentHandler xhtml;
    try {
        updateMediaType(ais, mediaTypeRegistry, metadata);
        // Use the delegate parser to parse the contained document
        EmbeddedDocumentExtractor extractor = EmbeddedDocumentUtil.getEmbeddedDocumentExtractor(context);
        xhtml = new XHTMLContentHandler(handler, metadata);
        xhtml.startDocument();
        try {
            ArchiveEntry entry = ais.getNextEntry();
            while (entry != null) {
                if (!entry.isDirectory()) {
                    parseEntry(ais, entry, extractor, metadata, xhtml);
                }
                entry = ais.getNextEntry();
            }
        } catch (UnsupportedZipFeatureException zfe) {
            // If it's an encrypted document of unknown password, report as such
            if (zfe.getFeature() == Feature.ENCRYPTION) {
                throw new EncryptedDocumentException(zfe);
            }
            // Otherwise throw the exception
            throw new TikaException("UnsupportedZipFeature", zfe);
        } catch (PasswordRequiredException pre) {
            throw new EncryptedDocumentException(pre);
        }
    } finally {
        ais.close();
        tmp.close();
    }
    xhtml.endDocument();
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project OAP by oaplatform:
the class Archiver, method unpack.
/**
 * Unpacks the given archive into the {@code dest} directory.
 * Currently only {@code TAR_GZ} archives are supported.
 *
 * @param archive path of the archive to unpack
 * @param dest destination directory; created entries are resolved against it
 * @param type archive format of {@code archive}
 * @throws IllegalArgumentException if {@code type} is not supported
 */
@SneakyThrows
public void unpack(Path archive, Path dest, ArchiveType type) {
    switch(type) {
        case TAR_GZ:
            // try-with-resources closes the tar stream (and the underlying
            // gzip stream) on every exit path
            try (TarArchiveInputStream tar = new TarArchiveInputStream(IoStreams.in(archive, GZIP))) {
                Path base = dest.normalize();
                ArchiveEntry entry;
                while ((entry = tar.getNextEntry()) != null) {
                    Path path = dest.resolve(entry.getName()).normalize();
                    // Guard against "zip slip": reject entries whose name
                    // (e.g. containing "..") escapes the destination directory
                    if (!path.startsWith(base)) {
                        throw new IOException("archive entry escapes destination: " + entry.getName());
                    }
                    if (entry.isDirectory()) {
                        path.toFile().mkdirs();
                    } else {
                        // Some archives carry no explicit directory entries,
                        // so make sure the parent directory exists
                        Path parent = path.getParent();
                        if (parent != null) {
                            parent.toFile().mkdirs();
                        }
                        IoStreams.write(path, PLAIN, tar);
                    }
                }
            }
            break;
        default:
            // Previously an unhandled type was silently ignored
            throw new IllegalArgumentException("unsupported archive type: " + type);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project CUBA by cuba-platform:
the class FoldersServiceBean, method exportFolder.
/**
 * Serializes the given folder to XML and returns it packaged as a ZIP
 * archive containing a single {@code folder.xml} entry.
 *
 * @param folder the folder to export
 * @return the bytes of the ZIP archive
 * @throws IOException if writing the archive fails
 */
@Override
public byte[] exportFolder(Folder folder) throws IOException {
    ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
    // try-with-resources guarantees the ZIP stream is closed (and the
    // central directory is written) even if serialization fails midway
    try (ZipArchiveOutputStream zipOutputStream = new ZipArchiveOutputStream(byteArrayOutputStream)) {
        zipOutputStream.setMethod(ZipArchiveOutputStream.STORED);
        zipOutputStream.setEncoding(StandardCharsets.UTF_8.name());
        String xml = createXStream().toXML(folder);
        byte[] xmlBytes = xml.getBytes(StandardCharsets.UTF_8);
        ArchiveEntry zipEntryDesign = newStoredEntry("folder.xml", xmlBytes);
        zipOutputStream.putArchiveEntry(zipEntryDesign);
        zipOutputStream.write(xmlBytes);
        try {
            zipOutputStream.closeArchiveEntry();
        } catch (Exception ex) {
            // Keep the original exception as the cause instead of discarding it
            throw new RuntimeException(
                    String.format("Exception occurred while exporting folder %s.", folder.getName()), ex);
        }
    }
    return byteArrayOutputStream.toByteArray();
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project Winery by Eclipse:
the class CsarExporter, method addFileToZipArchive.
/**
 * Adds a file to an archive. Errors are logged rather than propagated, so a
 * single unreadable file does not abort the whole export.
 *
 * @param zos Output stream of the archive
 * @param repository repository the file content is read from
 * @param ref Reference to the file that should be added to the archive
 * @param archivePath Path inside the archive to the file
 */
private void addFileToZipArchive(ArchiveOutputStream zos, IGenericRepository repository, RepositoryFileReference ref, String archivePath) {
    try (InputStream is = repository.newInputStream(ref)) {
        ArchiveEntry archiveEntry = new ZipArchiveEntry(archivePath);
        zos.putArchiveEntry(archiveEntry);
        try {
            IOUtils.copy(is, zos);
        } finally {
            // Each putArchiveEntry() must be paired with closeArchiveEntry(),
            // otherwise the entry (and the resulting archive) is incomplete
            zos.closeArchiveEntry();
        }
    } catch (Exception e) {
        CsarExporter.LOGGER.error("Could not copy file content to ZIP outputstream", e);
    }
}
Use of org.apache.commons.compress.archivers.ArchiveEntry in the project XWiki Platform by XWiki:
the class Packager, method importXARToWiki.
/**
 * Imports the documents of a XAR package stream into the given wiki.
 * <p>
 * The current wiki id on the XWikiContext is temporarily switched to the
 * target wiki for the duration of the import and restored afterwards; the
 * XARImportingEvent/XARImportedEvent notifications bracket the import.
 *
 * @param comment the save comment used for each imported document
 * @param xarInputStream the XAR (ZIP) package content; NOTE(review): the
 *        wrapping ZipArchiveInputStream is never closed here — closing it
 *        would also close this caller-supplied stream, so confirm stream
 *        ownership before adding a close
 * @param wikiReference the wiki to import into
 * @param configuration controls which entries are imported
 * @return the accumulated merge results of all imported documents
 */
private XarMergeResult importXARToWiki(String comment, InputStream xarInputStream, WikiReference wikiReference, PackageConfiguration configuration) throws IOException, XarException, XWikiException {
XarMergeResult mergeResult = new XarMergeResult();
ZipArchiveInputStream zis = new ZipArchiveInputStream(xarInputStream);
XWikiContext xcontext = this.xcontextProvider.get();
// Remember the current wiki so it can be restored in the finally block
String currentWiki = xcontext.getWikiId();
try {
xcontext.setWikiId(wikiReference.getName());
this.observation.notify(new XARImportingEvent(), null, xcontext);
for (ArchiveEntry entry = zis.getNextEntry(); entry != null; entry = zis.getNextEntry()) {
if (!entry.isDirectory()) {
// Only import what should be imported: skip the package descriptor,
// and honour the configured entry whitelist when one is present
if (!entry.getName().equals(XarModel.PATH_PACKAGE) && (configuration.getEntriesToImport() == null || configuration.getEntriesToImport().contains(entry.getName()))) {
XarEntryMergeResult entityMergeResult = importDocumentToWiki(comment, wikiReference, zis, configuration);
if (entityMergeResult != null) {
mergeResult.addMergeResult(entityMergeResult);
}
}
}
}
} finally {
// Always signal completion and restore the original wiki id,
// even when the import fails partway through
this.observation.notify(new XARImportedEvent(), null, xcontext);
xcontext.setWikiId(currentWiki);
}
return mergeResult;
}
Aggregations