Use of org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream in project gitblit by gitblit.
From the class CompressionUtils, the zip method:
/**
 * Zips the contents of the tree at the (optionally) specified revision and
 * the (optionally) specified basepath to the supplied outputstream.
 *
 * @param repository
 * @param basePath
 *            if unspecified, entire repository is assumed.
 * @param objectId
 *            if unspecified, HEAD is assumed.
 * @param os
 * @return true if repository was successfully zipped to supplied output
 *         stream
 */
public static boolean zip(Repository repository, IFilestoreManager filestoreManager, String basePath, String objectId, OutputStream os) {
    RevCommit commit = JGitUtils.getCommit(repository, objectId);
    if (commit == null) {
        return false;
    }
    boolean success = false;
    RevWalk rw = new RevWalk(repository);
    TreeWalk tw = new TreeWalk(repository);
    try {
        tw.reset();
        tw.addTree(commit.getTree());
        ZipArchiveOutputStream zos = new ZipArchiveOutputStream(os);
        zos.setComment("Generated by Gitblit");
        if (!StringUtils.isEmpty(basePath)) {
            PathFilter f = PathFilter.create(basePath);
            tw.setFilter(f);
        }
        tw.setRecursive(true);
        MutableObjectId id = new MutableObjectId();
        ObjectReader reader = tw.getObjectReader();
        long modified = commit.getAuthorIdent().getWhen().getTime();
        while (tw.next()) {
            FileMode mode = tw.getFileMode(0);
            if (mode == FileMode.GITLINK || mode == FileMode.TREE) {
                continue;
            }
            tw.getObjectId(id, 0);
            ObjectLoader loader = repository.open(id);
            ZipArchiveEntry entry = new ZipArchiveEntry(tw.getPathString());
            FilestoreModel filestoreItem = null;
            if (JGitUtils.isPossibleFilestoreItem(loader.getSize())) {
                filestoreItem = JGitUtils.getFilestoreItem(tw.getObjectReader().open(id));
            }
            final long size = (filestoreItem == null) ? loader.getSize() : filestoreItem.getSize();
            entry.setSize(size);
            entry.setComment(commit.getName());
            entry.setUnixMode(mode.getBits());
            entry.setTime(modified);
            zos.putArchiveEntry(entry);
            if (filestoreItem == null) {
                // Copy repository stored file
                loader.copyTo(zos);
            } else {
                // Copy filestore file
                try (FileInputStream streamIn = new FileInputStream(filestoreManager.getStoragePath(filestoreItem.oid))) {
                    IOUtils.copyLarge(streamIn, zos);
                } catch (Throwable e) {
                    LOGGER.error(MessageFormat.format("Failed to archive filestore item {0}", filestoreItem.oid), e);
                    // Handle as per other errors
                    throw e;
                }
            }
            zos.closeArchiveEntry();
        }
        zos.finish();
        success = true;
    } catch (IOException e) {
        error(e, repository, "{0} failed to zip files from commit {1}", commit.getName());
    } finally {
        tw.close();
        rw.dispose();
    }
    return success;
}
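For orientation, a caller could use this roughly as follows. This is a minimal sketch, assuming an already-opened JGit Repository and a Gitblit IFilestoreManager; the wrapper method and output file name are hypothetical.

// Hypothetical caller sketch: zip the whole tree at HEAD (null basePath, null objectId)
// to a local file. Assumes `repository` and `filestoreManager` come from the application.
public static void exportSnapshot(Repository repository, IFilestoreManager filestoreManager) throws IOException {
    try (OutputStream out = new FileOutputStream("repo-snapshot.zip")) {
        if (!CompressionUtils.zip(repository, filestoreManager, null, null, out)) {
            throw new IOException("zip export failed");
        }
    }
}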
Use of org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream in project gitblit by gitblit.
From the class PtServlet, the doGet method:
@Override
protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
    try {
        response.setContentType("application/octet-stream");
        response.setDateHeader("Last-Modified", lastModified);
        response.setHeader("Cache-Control", "none");
        response.setHeader("Pragma", "no-cache");
        response.setDateHeader("Expires", 0);
        boolean windows = false;
        try {
            String useragent = request.getHeader("user-agent").toString();
            windows = useragent.toLowerCase().contains("windows");
        } catch (Exception e) {
        }
        byte[] pyBytes;
        File file = runtimeManager.getFileOrFolder("tickets.pt", "${baseFolder}/pt.py");
        if (file.exists()) {
            // custom script
            pyBytes = readAll(new FileInputStream(file));
        } else {
            // default script
            pyBytes = readAll(getClass().getResourceAsStream("/pt.py"));
        }
        if (windows) {
            // windows: download zip file with pt.py and pt.cmd
            response.setHeader("Content-Disposition", "attachment; filename=\"pt.zip\"");
            OutputStream os = response.getOutputStream();
            ZipArchiveOutputStream zos = new ZipArchiveOutputStream(os);
            // add the Python script
            ZipArchiveEntry pyEntry = new ZipArchiveEntry("pt.py");
            pyEntry.setSize(pyBytes.length);
            pyEntry.setUnixMode(FileMode.EXECUTABLE_FILE.getBits());
            pyEntry.setTime(lastModified);
            zos.putArchiveEntry(pyEntry);
            zos.write(pyBytes);
            zos.closeArchiveEntry();
            // add a Python launch cmd file
            byte[] cmdBytes = readAll(getClass().getResourceAsStream("/pt.cmd"));
            ZipArchiveEntry cmdEntry = new ZipArchiveEntry("pt.cmd");
            cmdEntry.setSize(cmdBytes.length);
            cmdEntry.setUnixMode(FileMode.REGULAR_FILE.getBits());
            cmdEntry.setTime(lastModified);
            zos.putArchiveEntry(cmdEntry);
            zos.write(cmdBytes);
            zos.closeArchiveEntry();
            // add a brief readme
            byte[] txtBytes = readAll(getClass().getResourceAsStream("/pt.txt"));
            ZipArchiveEntry txtEntry = new ZipArchiveEntry("readme.txt");
            txtEntry.setSize(txtBytes.length);
            txtEntry.setUnixMode(FileMode.REGULAR_FILE.getBits());
            txtEntry.setTime(lastModified);
            zos.putArchiveEntry(txtEntry);
            zos.write(txtBytes);
            zos.closeArchiveEntry();
            // cleanup
            zos.finish();
            zos.close();
            os.flush();
        } else {
            // unix: download a tar.gz file with pt.py set with execute permissions
            response.setHeader("Content-Disposition", "attachment; filename=\"pt.tar.gz\"");
            OutputStream os = response.getOutputStream();
            CompressorOutputStream cos = new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.GZIP, os);
            TarArchiveOutputStream tos = new TarArchiveOutputStream(cos);
            tos.setAddPaxHeadersForNonAsciiNames(true);
            tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            // add the Python script
            TarArchiveEntry pyEntry = new TarArchiveEntry("pt");
            pyEntry.setMode(FileMode.EXECUTABLE_FILE.getBits());
            pyEntry.setModTime(lastModified);
            pyEntry.setSize(pyBytes.length);
            tos.putArchiveEntry(pyEntry);
            tos.write(pyBytes);
            tos.closeArchiveEntry();
            // add a brief readme
            byte[] txtBytes = readAll(getClass().getResourceAsStream("/pt.txt"));
            TarArchiveEntry txtEntry = new TarArchiveEntry("README");
            txtEntry.setMode(FileMode.REGULAR_FILE.getBits());
            txtEntry.setModTime(lastModified);
            txtEntry.setSize(txtBytes.length);
            tos.putArchiveEntry(txtEntry);
            tos.write(txtBytes);
            tos.closeArchiveEntry();
            // cleanup
            tos.finish();
            tos.close();
            cos.close();
            os.flush();
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
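The readAll helper used above is not part of this snippet; a straightforward implementation would look roughly like the sketch below, which is not necessarily Gitblit's exact code.

// Sketch of readAll(InputStream): drain the stream into a byte array and close it.
private byte[] readAll(InputStream is) throws IOException {
    ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    try {
        byte[] chunk = new byte[4096];
        int read;
        while ((read = is.read(chunk)) > -1) {
            buffer.write(chunk, 0, read);
        }
        return buffer.toByteArray();
    } finally {
        is.close();
    }
}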
Use of org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream in project buck by facebook.
From the class UnzipTest, the testExtractZipFilePreservesExecutePermissionsAndModificationTime method:
@Test
public void testExtractZipFilePreservesExecutePermissionsAndModificationTime() throws IOException {
    // getFakeTime returns time with some non-zero millis. By doing division and multiplication by
    // 1000 we get rid of that.
    final long time = ZipConstants.getFakeTime() / 1000 * 1000;
    // Create a simple zip archive using apache's commons-compress to store executable info.
    try (ZipArchiveOutputStream zip = new ZipArchiveOutputStream(zipFile.toFile())) {
        ZipArchiveEntry entry = new ZipArchiveEntry("test.exe");
        entry.setUnixMode((int) MorePosixFilePermissions.toMode(PosixFilePermissions.fromString("r-x------")));
        entry.setSize(DUMMY_FILE_CONTENTS.length);
        entry.setMethod(ZipEntry.STORED);
        entry.setTime(time);
        zip.putArchiveEntry(entry);
        zip.write(DUMMY_FILE_CONTENTS);
        zip.closeArchiveEntry();
    }
    // Now run `Unzip.extractZipFile` on our test zip and verify that the file is executable.
    Path extractFolder = tmpFolder.newFolder();
    ImmutableList<Path> result = Unzip.extractZipFile(zipFile.toAbsolutePath(), extractFolder.toAbsolutePath(), Unzip.ExistingFileMode.OVERWRITE);
    Path exe = extractFolder.toAbsolutePath().resolve("test.exe");
    assertTrue(Files.exists(exe));
    assertThat(Files.getLastModifiedTime(exe).toMillis(), Matchers.equalTo(time));
    assertTrue(Files.isExecutable(exe));
    assertEquals(ImmutableList.of(extractFolder.resolve("test.exe")), result);
}
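For reference, the permission bits written by setUnixMode can also be read back without extracting. The small sketch below uses commons-compress's ZipFile; it reuses the test's zipFile and entry name and assumes the 0500 mode produced by "r-x------".

// Sketch: inspect the stored unix mode directly with commons-compress.
// 0500 (octal) corresponds to the "r-x------" permissions set in the test above.
try (org.apache.commons.compress.archivers.zip.ZipFile zf =
        new org.apache.commons.compress.archivers.zip.ZipFile(zipFile.toFile())) {
    ZipArchiveEntry stored = zf.getEntry("test.exe");
    assertEquals(0500, stored.getUnixMode());
}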
Use of org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream in project karaf by apache.
From the class ArchiveMojo, the archive method:
public File archive(File source, File dest, Artifact artifact) throws /*ArchiverException,*/ IOException {
    String serverName = null;
    if (targetFile != null) {
        serverName = targetFile.getName();
    } else {
        serverName = artifact.getArtifactId() + "-" + artifact.getVersion();
    }
    dest = new File(dest, serverName + "." + artifact.getType());
    String prefix = "";
    if (usePathPrefix) {
        prefix = pathPrefix.trim();
        if (prefix.length() > 0 && !prefix.endsWith("/")) {
            prefix += "/";
        }
    }
    if ("tar.gz".equals(artifact.getType())) {
        try (OutputStream fOut = Files.newOutputStream(dest.toPath());
             OutputStream bOut = new BufferedOutputStream(fOut);
             OutputStream gzOut = new GzipCompressorOutputStream(bOut);
             TarArchiveOutputStream tOut = new TarArchiveOutputStream(gzOut);
             DirectoryStream<Path> children = Files.newDirectoryStream(source.toPath())) {
            tOut.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
            tOut.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
            for (Path child : children) {
                addFileToTarGz(tOut, child, prefix);
            }
        }
    } else if ("zip".equals(artifact.getType())) {
        try (OutputStream fOut = Files.newOutputStream(dest.toPath());
             OutputStream bOut = new BufferedOutputStream(fOut);
             ZipArchiveOutputStream tOut = new ZipArchiveOutputStream(bOut);
             DirectoryStream<Path> children = Files.newDirectoryStream(source.toPath())) {
            for (Path child : children) {
                addFileToZip(tOut, child, prefix);
            }
        }
    } else {
        throw new IllegalArgumentException("Unknown target type: " + artifact.getType());
    }
    return dest;
}
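The addFileToTarGz and addFileToZip helpers are referenced but not shown here; a simplified recursive addFileToZip might look like the sketch below. Karaf's real helper also handles symlinks and unix modes.

// Simplified sketch of a recursive addFileToZip using the commons-compress API.
// Only regular files are copied; directories are walked recursively.
private void addFileToZip(ZipArchiveOutputStream zOut, Path f, String prefix) throws IOException {
    String entryName = prefix + f.getFileName().toString();
    if (Files.isDirectory(f)) {
        try (DirectoryStream<Path> children = Files.newDirectoryStream(f)) {
            for (Path child : children) {
                addFileToZip(zOut, child, entryName + "/");
            }
        }
    } else {
        ZipArchiveEntry entry = new ZipArchiveEntry(f.toFile(), entryName);
        zOut.putArchiveEntry(entry);
        Files.copy(f, zOut);
        zOut.closeArchiveEntry();
    }
}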
Use of org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream in project tika by apache.
From the class FSOutputStreamFactory, the getOutputStream method:
/**
 * This tries to create a file based on the {@link org.apache.tika.batch.fs.FSUtil.HANDLE_EXISTING}
 * value that was passed in during initialization.
 * <p>
 * If {@link #handleExisting} is set to "SKIP" and the output file already exists,
 * this will return null.
 * <p>
 * If an output file can be found, this will try to mkdirs for that output file.
 * If mkdirs() fails, this will throw an IOException.
 * <p>
 * Finally, this will open an output stream for the appropriate output file.
 * @param metadata must have a value set for FSMetadataProperties.FS_ABSOLUTE_PATH or
 *                 else NullPointerException will be thrown!
 * @return OutputStream
 * @throws java.io.IOException, NullPointerException
 */
@Override
public OutputStream getOutputStream(Metadata metadata) throws IOException {
    String initialRelativePath = metadata.get(FSProperties.FS_REL_PATH);
    Path outputPath = FSUtil.getOutputPath(outputRoot, initialRelativePath, handleExisting, suffix);
    if (outputPath == null) {
        return null;
    }
    if (!Files.isDirectory(outputPath.getParent())) {
        Files.createDirectories(outputPath.getParent());
        // TODO: shouldn't need this any more in java 7, right?
        if (!Files.isDirectory(outputPath.getParent())) {
            throw new IOException("Couldn't create parent directory for:" + outputPath.toAbsolutePath());
        }
    }
    OutputStream os = Files.newOutputStream(outputPath);
    switch (compression) {
        case BZIP2:
            os = new BZip2CompressorOutputStream(os);
            break;
        case GZIP:
            os = new GZIPOutputStream(os);
            break;
        case ZIP:
            os = new ZipArchiveOutputStream(os);
            break;
    }
    return new BufferedOutputStream(os);
}
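A hypothetical caller might look like the sketch below. It assumes the factory is configured for GZIP or BZIP2 (the ZIP branch returns a ZipArchiveOutputStream, which still expects putArchiveEntry calls before data is written), and the relative path and payload are made up for illustration.

// Hypothetical usage sketch; `factory` is an FSOutputStreamFactory and `xhtml` is
// assumed to hold the extract to write. Closing the stream flushes compressor trailers.
Metadata metadata = new Metadata();
metadata.set(FSProperties.FS_REL_PATH, "reports/doc1.xml");
try (OutputStream out = factory.getOutputStream(metadata)) {
    if (out != null) { // null means handleExisting == SKIP and the target already exists
        out.write(xhtml.getBytes(StandardCharsets.UTF_8));
    }
}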