use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project tika by apache.
the class ZipParserTest method testCustomEncoding.
// TIKA-936: entry names in the embedded test archive are encoded in Shift_JIS
@Test
public void testCustomEncoding() throws Exception {
    ArchiveStreamFactory factory = new ArchiveStreamFactory();
    factory.setEntryEncoding("SJIS");
    trackingContext.set(ArchiveStreamFactory.class, factory);
    try (InputStream stream = TikaInputStream.get(Base64.decodeBase64(
            "UEsDBBQAAAAIAI+CvUCDo3+zIgAAACgAAAAOAAAAk/qWe4zqg4GDgi50" +
            "eHRr2tj0qulsc2pzRHN609Gm7Y1OvFxNYLHJv6ZV97yCiQEAUEsBAh" +
            "QLFAAAAAgAj4K9QIOjf7MiAAAAKAAAAA4AAAAAAAAAAAAgAAAAAAAA" +
            "AJP6lnuM6oOBg4IudHh0UEsFBgAAAAABAAEAPAAAAE4AAAAAAA=="))) {
        autoDetectParser.parse(stream, new DefaultHandler(), new Metadata(), trackingContext);
    }
    assertEquals(1, tracker.filenames.size());
    assertEquals("日本語メモ.txt", tracker.filenames.get(0));
}
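For reference, the same entry-encoding switch works with plain commons-compress outside Tika. A minimal sketch, assuming a local file sjis-names.zip whose entry names are Shift_JIS-encoded (the file name is hypothetical; the ArchiveStreamFactory calls are the published API):

import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class SjisZipListing {
    public static void main(String[] args) throws Exception {
        ArchiveStreamFactory factory = new ArchiveStreamFactory();
        factory.setEntryEncoding("SJIS"); // decode entry names as Shift_JIS
        try (InputStream in = Files.newInputStream(Paths.get("sjis-names.zip")); // hypothetical sample file
             ArchiveInputStream ais = factory.createArchiveInputStream(ArchiveStreamFactory.ZIP, in)) {
            ArchiveEntry entry;
            while ((entry = ais.getNextEntry()) != null) {
                System.out.println(entry.getName()); // names such as 日本語メモ.txt come out correctly decoded
            }
        }
    }
}

In commons-compress 1.10 and later the encoding can also be passed to the constructor, new ArchiveStreamFactory("SJIS"), which is the non-deprecated route.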
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project winery by eclipse.
the class FilebasedRepository method getZippedContents.
@Override
public void getZippedContents(final GenericId id, OutputStream out) throws WineryRepositoryException {
    Objects.requireNonNull(id);
    Objects.requireNonNull(out);
    SortedSet<RepositoryFileReference> containedFiles = this.getContainedFiles(id);
    try (final ArchiveOutputStream zos = new ArchiveStreamFactory().createArchiveOutputStream("zip", out)) {
        for (RepositoryFileReference ref : containedFiles) {
            ZipArchiveEntry zipArchiveEntry;
            final Optional<Path> subDirectory = ref.getSubDirectory();
            if (subDirectory.isPresent()) {
                zipArchiveEntry = new ZipArchiveEntry(subDirectory.get().resolve(ref.getFileName()).toString());
            } else {
                zipArchiveEntry = new ZipArchiveEntry(ref.getFileName());
            }
            zos.putArchiveEntry(zipArchiveEntry);
            try (InputStream is = RepositoryFactory.getRepository().newInputStream(ref)) {
                IOUtils.copy(is, zos);
            }
            zos.closeArchiveEntry();
        }
    } catch (ArchiveException e) {
        throw new WineryRepositoryException("Internal error while generating archive", e);
    } catch (IOException e) {
        throw new WineryRepositoryException("I/O exception during export", e);
    }
}
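The put-entry / copy / close-entry cycle above is the general commons-compress write pattern. A minimal standalone sketch of the same pattern, assuming a hypothetical output file contents.zip:

import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.zip.ZipArchiveEntry;

public class ZipWriteSketch {
    public static void main(String[] args) throws Exception {
        try (OutputStream out = Files.newOutputStream(Paths.get("contents.zip"));
             ArchiveOutputStream zos = new ArchiveStreamFactory()
                 .createArchiveOutputStream(ArchiveStreamFactory.ZIP, out)) {
            ZipArchiveEntry entry = new ZipArchiveEntry("dir/hello.txt");
            zos.putArchiveEntry(entry);                           // open one entry
            zos.write("hello".getBytes(StandardCharsets.UTF_8));  // entry payload
            zos.closeArchiveEntry();                              // must be called once per entry
        } // closing the stream writes the ZIP central directory
    }
}

The factory also exposes the constant ArchiveStreamFactory.ZIP, which could replace the "zip" string literal used in the Winery code.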
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project winery by eclipse.
the class CsarExporter method writeCsar.
/**
 * Writes a complete CSAR containing all necessary things reachable from the given service template.
 *
 * @param repository the repository to export from
 * @param entryId    the id of the service template to export
 * @param out        the output stream to write to
 */
public void writeCsar(IRepository repository, DefinitionsChildId entryId, OutputStream out) throws ArchiveException, IOException, JAXBException, RepositoryCorruptException {
    CsarExporter.LOGGER.trace("Starting CSAR export with {}", entryId.toString());
    Map<RepositoryFileReference, String> refMap = new HashMap<>();
    Collection<String> definitionNames = new ArrayList<>();
    try (final ArchiveOutputStream zos = new ArchiveStreamFactory().createArchiveOutputStream("zip", out)) {
        ToscaExportUtil exporter = new ToscaExportUtil();
        Map<String, Object> conf = new HashMap<>();
        ExportedState exportedState = new ExportedState();
        DefinitionsChildId currentId = entryId;
        do {
            String defName = CsarExporter.getDefinitionsPathInsideCSAR(repository, currentId);
            definitionNames.add(defName);
            zos.putArchiveEntry(new ZipArchiveEntry(defName));
            Collection<DefinitionsChildId> referencedIds = exporter.exportTOSCA(repository, currentId, zos, refMap, conf);
            zos.closeArchiveEntry();
            exportedState.flagAsExported(currentId);
            exportedState.flagAsExportRequired(referencedIds);
            currentId = exportedState.pop();
        } while (currentId != null);
        // if we export a ServiceTemplate, data for the self-service portal might exist
        if (entryId instanceof ServiceTemplateId) {
            ServiceTemplateId serviceTemplateId = (ServiceTemplateId) entryId;
            this.addSelfServiceMetaData(repository, serviceTemplateId, refMap);
            this.addSelfServiceFiles(repository, serviceTemplateId, refMap, zos);
        }
        // now, refMap contains all files to be added to the CSAR
        // write the manifest directly after the definitions so that it ends up near the beginning of the ZIP rather than at the very end
        this.addManifest(repository, entryId, definitionNames, refMap, zos);
        // used for generated XSD schemas
        TransformerFactory tFactory = TransformerFactory.newInstance();
        Transformer transformer;
        try {
            transformer = tFactory.newTransformer();
        } catch (TransformerConfigurationException e1) {
            CsarExporter.LOGGER.debug(e1.getMessage(), e1);
            throw new IllegalStateException("Could not instantiate transformer", e1);
        }
        // write all referenced files
        for (RepositoryFileReference ref : refMap.keySet()) {
            String archivePath = refMap.get(ref);
            CsarExporter.LOGGER.trace("Creating {}", archivePath);
            if (ref instanceof DummyRepositoryFileReferenceForGeneratedXSD) {
                addDummyRepositoryFileReferenceForGeneratedXSD(zos, transformer, (DummyRepositoryFileReferenceForGeneratedXSD) ref, archivePath);
            } else {
                if (ref.getParent() instanceof DirectoryId) {
                    // special handling for the artifact template directories "source" and "files"
                    addArtifactTemplateToZipFile(zos, repository, ref, archivePath);
                } else {
                    addFileToZipArchive(zos, repository, ref, archivePath);
                    zos.closeArchiveEntry();
                }
            }
        }
        this.addNamespacePrefixes(zos, repository);
    }
}
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project POL-POM-5 by PhoenicisOrg.
the class Tar method uncompress.
/**
 * Uncompresses a tar archive.
 *
 * @param inputStream the stream to read the archive from
 * @param countingInputStream counts the number of bytes extracted, for progress reporting
 * @param outputDir the directory where files should be extracted
 * @param finalSize the expected total size in bytes, used to compute the progress percentage
 * @param stateCallback consumer that receives progress updates
 * @return a list of extracted files
 * @throws ArchiveException if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream, final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("tar", inputStream)) {
        TarArchiveEntry entry;
        while ((entry = (TarArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to create output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s (%s).", outputFile.getAbsolutePath(), entry.getMode()));
                if (entry.isSymbolicLink()) {
                    Files.createSymbolicLink(Paths.get(outputFile.getAbsolutePath()), Paths.get(entry.getLinkName()));
                } else {
                    try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                        IOUtils.copy(debInputStream, outputFileStream);
                        Files.setPosixFilePermissions(Paths.get(outputFile.getPath()), fileUtilities.octToPosixFilePermission(entry.getMode()));
                    }
                }
            }
            uncompressedFiles.add(outputFile);
            stateCallback.accept(new ProgressEntity.Builder()
                .withPercent((double) countingInputStream.getCount() / (double) finalSize * 100.0)
                .withProgressText("Extracting " + outputFile.getName())
                .build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
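One caveat worth noting: the target path is built as new File(outputDir, entry.getName()) without validation, so a crafted entry name such as ../../x can escape outputDir (the "zip slip" problem); the Zip variant below has the same shape. A common hardening step, sketched here as a hypothetical helper rather than anything present in POL-POM-5:

import java.io.File;
import java.io.IOException;

class SafePaths {
    // Hypothetical helper: resolve an entry name under outputDir and reject
    // names whose canonical path escapes it (e.g. "../../etc/passwd").
    static File safeResolve(File outputDir, String entryName) throws IOException {
        File target = new File(outputDir, entryName);
        String dirPrefix = outputDir.getCanonicalPath() + File.separator;
        if (!target.getCanonicalPath().startsWith(dirPrefix)) {
            throw new IOException("Entry is outside of the target directory: " + entryName);
        }
        return target;
    }
}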
use of org.apache.commons.compress.archivers.ArchiveStreamFactory in project POL-POM-5 by PhoenicisOrg.
the class Zip method uncompress.
/**
 * Uncompresses a zip archive.
 *
 * @param inputStream the stream to read the archive from
 * @param countingInputStream counts the number of bytes extracted, for progress reporting
 * @param outputDir the directory where files should be extracted
 * @param finalSize the expected total size in bytes, used to compute the progress percentage
 * @param stateCallback consumer that receives progress updates
 * @return a list of extracted files
 * @throws ArchiveException if the process fails
 */
private List<File> uncompress(final InputStream inputStream, CountingInputStream countingInputStream, final File outputDir, long finalSize, Consumer<ProgressEntity> stateCallback) {
    final List<File> uncompressedFiles = new LinkedList<>();
    try (ArchiveInputStream debInputStream = new ArchiveStreamFactory().createArchiveInputStream("zip", inputStream)) {
        ZipArchiveEntry entry;
        while ((entry = (ZipArchiveEntry) debInputStream.getNextEntry()) != null) {
            final File outputFile = new File(outputDir, entry.getName());
            if (entry.isDirectory()) {
                LOGGER.info(String.format("Attempting to write output directory %s.", outputFile.getAbsolutePath()));
                if (!outputFile.exists()) {
                    LOGGER.info(String.format("Attempting to create output directory %s.", outputFile.getAbsolutePath()));
                    Files.createDirectories(outputFile.toPath());
                }
            } else {
                LOGGER.info(String.format("Creating output file %s.", outputFile.getAbsolutePath()));
                outputFile.getParentFile().mkdirs();
                try (final OutputStream outputFileStream = new FileOutputStream(outputFile)) {
                    IOUtils.copy(debInputStream, outputFileStream);
                }
            }
            uncompressedFiles.add(outputFile);
            stateCallback.accept(new ProgressEntity.Builder()
                .withPercent((double) countingInputStream.getCount() / (double) finalSize * 100.0)
                .withProgressText("Extracting " + outputFile.getName())
                .build());
        }
        return uncompressedFiles;
    } catch (IOException | org.apache.commons.compress.archivers.ArchiveException e) {
        throw new ArchiveException("Unable to extract the file", e);
    }
}
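Aside from the archiver name and the POSIX-permission handling, this method mirrors the Tar variant above. commons-compress can also detect the format by itself via the single-argument createArchiveInputStream overload, which requires a mark-supported stream. A minimal sketch with a hypothetical input file:

import java.io.BufferedInputStream;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;

public class AutoDetectListing {
    public static void main(String[] args) throws Exception {
        // Format auto-detection peeks at the stream header, so the stream must support mark/reset.
        try (InputStream in = new BufferedInputStream(Files.newInputStream(Paths.get("archive.bin")));
             ArchiveInputStream ais = new ArchiveStreamFactory().createArchiveInputStream(in)) {
            ArchiveEntry entry;
            while ((entry = ais.getNextEntry()) != null) {
                System.out.println(entry.getName());
            }
        }
    }
}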