Use of org.apache.commons.compress.archivers.tar.TarArchiveInputStream in project camel by apache.
From the class TarAggregationStrategyTest, method testSplitter:
@Test
public void testSplitter() throws Exception {
MockEndpoint mock = getMockEndpoint("mock:aggregateToTarEntry");
mock.expectedMessageCount(1);
mock.expectedHeaderReceived("foo", "bar");
assertMockEndpointsSatisfied();
Thread.sleep(500);
File[] files = new File("target/out").listFiles();
assertNotNull(files);
assertTrue("Should be a file in target/out directory", files.length > 0);
File resultFile = files[0];
TarArchiveInputStream tin = new TarArchiveInputStream(new FileInputStream(resultFile));
try {
int fileCount = 0;
for (TarArchiveEntry te = tin.getNextTarEntry(); te != null; te = tin.getNextTarEntry()) {
fileCount = fileCount + 1;
}
assertEquals("Tar file should contains " + TarAggregationStrategyTest.EXPECTED_NO_FILES + " files", TarAggregationStrategyTest.EXPECTED_NO_FILES, fileCount);
} finally {
IOHelper.close(tin);
}
}
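For comparison, the same entry-count check can be written with try-with-resources so the archive stream is closed even when an assertion fails. This is only a sketch, assuming JUnit 4 asserts, the same target/out layout, and the EXPECTED_NO_FILES constant from the test above; imports from java.io, org.apache.commons.compress.archivers.tar and org.junit.Assert are needed.
// Sketch: count tar entries with try-with-resources (same assumptions as the test above).
File[] files = new File("target/out").listFiles();
assertNotNull(files);
assertTrue("Should be a file in target/out directory", files.length > 0);
int fileCount = 0;
try (TarArchiveInputStream tin = new TarArchiveInputStream(new FileInputStream(files[0]))) {
    while (tin.getNextTarEntry() != null) {
        fileCount++;
    }
}
assertEquals("Tar file should contain " + TarAggregationStrategyTest.EXPECTED_NO_FILES + " files", TarAggregationStrategyTest.EXPECTED_NO_FILES, fileCount);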
Use of org.apache.commons.compress.archivers.tar.TarArchiveInputStream in project camel by apache.
From the class TarAggregationStrategy, method addFileToTar:
private void addFileToTar(File source, File file, String fileName) throws IOException, ArchiveException {
File tmpTar = File.createTempFile(source.getName(), null, parentDir);
tmpTar.delete();
if (!source.renameTo(tmpTar)) {
throw new IOException("Could not make temp file (" + source.getName() + ")");
}
FileInputStream fis = new FileInputStream(tmpTar);
TarArchiveInputStream tin = (TarArchiveInputStream) new ArchiveStreamFactory().createArchiveInputStream(ArchiveStreamFactory.TAR, fis);
TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(source));
tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
tos.setBigNumberMode(TarArchiveOutputStream.BIGNUMBER_POSIX);
InputStream in = new FileInputStream(file);
// copy the existing entries
ArchiveEntry nextEntry;
while ((nextEntry = tin.getNextEntry()) != null) {
tos.putArchiveEntry(nextEntry);
IOUtils.copy(tin, tos);
tos.closeArchiveEntry();
}
// Add the new entry
TarArchiveEntry entry = new TarArchiveEntry(fileName == null ? file.getName() : fileName);
entry.setSize(file.length());
tos.putArchiveEntry(entry);
IOUtils.copy(in, tos);
tos.closeArchiveEntry();
IOHelper.close(fis, in, tin, tos);
LOG.trace("Deleting temporary file: {}", tmpTar);
FileUtil.deleteFile(tmpTar);
}
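Tar archives cannot be appended to in place through this API, so the method above rewrites the archive: it moves the original aside, streams the existing entries into a new TarArchiveOutputStream, then appends the new entry. A minimal sketch of the same copy-then-append pattern with try-with-resources follows; the method and variable names are illustrative, not the Camel implementation.
// Sketch only: append one file to an existing tar by rewriting the archive (names are illustrative).
// Needs org.apache.commons.compress.archivers.tar.* and org.apache.commons.compress.utils.IOUtils.
void appendToTar(File tarFile, File newFile, String entryName) throws IOException {
    File tmp = File.createTempFile(tarFile.getName(), null);
    tmp.delete();
    if (!tarFile.renameTo(tmp)) {
        throw new IOException("Could not move " + tarFile + " aside");
    }
    try (TarArchiveInputStream tin = new TarArchiveInputStream(new FileInputStream(tmp));
         TarArchiveOutputStream tos = new TarArchiveOutputStream(new FileOutputStream(tarFile));
         InputStream in = new FileInputStream(newFile)) {
        tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
        // copy the existing entries unchanged
        TarArchiveEntry existing;
        while ((existing = tin.getNextTarEntry()) != null) {
            tos.putArchiveEntry(existing);
            IOUtils.copy(tin, tos);
            tos.closeArchiveEntry();
        }
        // append the new entry
        TarArchiveEntry entry = new TarArchiveEntry(entryName == null ? newFile.getName() : entryName);
        entry.setSize(newFile.length());
        tos.putArchiveEntry(entry);
        IOUtils.copy(in, tos);
        tos.closeArchiveEntry();
    }
    tmp.delete();
}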
Use of org.apache.commons.compress.archivers.tar.TarArchiveInputStream in project nifi by apache.
From the class TestMergeContent, method testTar:
@Test
public void testTar() throws IOException {
final TestRunner runner = TestRunners.newTestRunner(new MergeContent());
runner.setProperty(MergeContent.MAX_BIN_AGE, "1 sec");
runner.setProperty(MergeContent.MERGE_FORMAT, MergeContent.MERGE_FORMAT_TAR);
final Map<String, String> attributes = new HashMap<>();
attributes.put(CoreAttributes.MIME_TYPE.key(), "application/plain-text");
attributes.put(CoreAttributes.FILENAME.key(), "AShortFileName");
runner.enqueue("Hello".getBytes("UTF-8"), attributes);
attributes.put(CoreAttributes.FILENAME.key(), "ALongerrrFileName");
runner.enqueue(", ".getBytes("UTF-8"), attributes);
attributes.put(CoreAttributes.FILENAME.key(), "AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName");
runner.enqueue("World!".getBytes("UTF-8"), attributes);
runner.run();
runner.assertQueueEmpty();
runner.assertTransferCount(MergeContent.REL_MERGED, 1);
runner.assertTransferCount(MergeContent.REL_FAILURE, 0);
runner.assertTransferCount(MergeContent.REL_ORIGINAL, 3);
final MockFlowFile bundle = runner.getFlowFilesForRelationship(MergeContent.REL_MERGED).get(0);
try (final InputStream rawIn = new ByteArrayInputStream(runner.getContentAsByteArray(bundle));
final TarArchiveInputStream in = new TarArchiveInputStream(rawIn)) {
ArchiveEntry entry = in.getNextEntry();
Assert.assertNotNull(entry);
assertEquals("AShortFileName", entry.getName());
final byte[] part1 = IOUtils.toByteArray(in);
Assert.assertTrue(Arrays.equals("Hello".getBytes("UTF-8"), part1));
entry = in.getNextEntry();
assertEquals("ALongerrrFileName", entry.getName());
final byte[] part2 = IOUtils.toByteArray(in);
Assert.assertTrue(Arrays.equals(", ".getBytes("UTF-8"), part2));
entry = in.getNextEntry();
assertEquals("AReallyLongggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggggFileName", entry.getName());
final byte[] part3 = IOUtils.toByteArray(in);
Assert.assertTrue(Arrays.equals("World!".getBytes("UTF-8"), part3));
}
bundle.assertAttributeEquals(CoreAttributes.MIME_TYPE.key(), "application/tar");
}
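Note that the last file name in this test is longer than the 100-character limit of a plain ustar header, so the writer has to use a long-name extension (as the Camel snippet above does with LONGFILE_POSIX). A small hedged sketch of such a round trip with Commons Compress; the entry name and the Java 11 String.repeat call are only for illustration, and imports from java.io, java.nio.charset and org.junit.Assert are assumed.
// Sketch: write and read back an entry whose name exceeds the 100-character ustar limit.
String longName = "a-very-long-entry-name-segment-".repeat(5) + "file.txt";
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
try (TarArchiveOutputStream tos = new TarArchiveOutputStream(bytes)) {
    tos.setLongFileMode(TarArchiveOutputStream.LONGFILE_POSIX);
    byte[] body = "Hello, World!".getBytes(StandardCharsets.UTF_8);
    TarArchiveEntry entry = new TarArchiveEntry(longName);
    entry.setSize(body.length);
    tos.putArchiveEntry(entry);
    tos.write(body);
    tos.closeArchiveEntry();
}
try (TarArchiveInputStream tin = new TarArchiveInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
    assertEquals(longName, tin.getNextTarEntry().getName());
}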
Use of org.apache.commons.compress.archivers.tar.TarArchiveInputStream in project nifi by apache.
From the class FlowFileUnpackagerV1, method unpackageFlowFile:
@Override
public Map<String, String> unpackageFlowFile(final InputStream in, final OutputStream out) throws IOException {
flowFilesRead++;
final TarArchiveInputStream tarIn = new TarArchiveInputStream(in);
final TarArchiveEntry attribEntry = tarIn.getNextTarEntry();
if (attribEntry == null) {
return null;
}
final Map<String, String> attributes;
if (attribEntry.getName().equals(FlowFilePackagerV1.FILENAME_ATTRIBUTES)) {
attributes = getAttributes(tarIn);
} else {
throw new IOException("Expected two tar entries: " + FlowFilePackagerV1.FILENAME_CONTENT + " and " + FlowFilePackagerV1.FILENAME_ATTRIBUTES);
}
final TarArchiveEntry contentEntry = tarIn.getNextTarEntry();
if (contentEntry != null && contentEntry.getName().equals(FlowFilePackagerV1.FILENAME_CONTENT)) {
// 512KB
final byte[] buffer = new byte[512 << 10];
int bytesRead = 0;
while ((bytesRead = tarIn.read(buffer)) != -1) {
// write only when data was actually read in this pass (0 means nothing this round)
if (bytesRead > 0) {
out.write(buffer, 0, bytesRead);
}
}
out.flush();
} else {
throw new IOException("Expected two tar entries: " + FlowFilePackagerV1.FILENAME_CONTENT + " and " + FlowFilePackagerV1.FILENAME_ATTRIBUTES);
}
return attributes;
}
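The unpackager expects exactly two entries, attributes first and then content, and streams the content entry straight to the caller's OutputStream. The writing side can be sketched the same way; the entry names below are placeholders, since the real values come from the FlowFilePackagerV1 constants referenced above, and this is not NiFi's actual packager.
// Hypothetical sketch of the packaging side: two tar entries, attributes first, then content.
// "flowfile-attributes" and "flowfile-content" are placeholder names, not the real constants.
static void packageFlowFile(byte[] attribBytes, byte[] contentBytes, OutputStream out) throws IOException {
    TarArchiveOutputStream tos = new TarArchiveOutputStream(out);
    writeEntry(tos, "flowfile-attributes", attribBytes);
    writeEntry(tos, "flowfile-content", contentBytes);
    // finish() writes the tar trailer without closing the caller's stream
    tos.finish();
}

static void writeEntry(TarArchiveOutputStream tos, String name, byte[] data) throws IOException {
    TarArchiveEntry entry = new TarArchiveEntry(name);
    entry.setSize(data.length);
    tos.putArchiveEntry(entry);
    tos.write(data);
    tos.closeArchiveEntry();
}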
Use of org.apache.commons.compress.archivers.tar.TarArchiveInputStream in project incubator-systemml by apache.
From the class ValidateLicAndNotice, method extractFileFromTGZ:
/**
* Extracts the given file from a tgz archive and stores it at the specified location.
*
* @param tgzFileName the name of the tgz file from which the file is to be extracted.
* @param fileName the name of the file to be extracted.
* @param strDestLoc the location where the extracted file will be stored.
* @param bFirstDirLevel whether to take the file only from the first directory level.
* @return Success or Failure
*/
public static boolean extractFileFromTGZ(String tgzFileName, String fileName, String strDestLoc, boolean bFirstDirLevel) {
boolean bRetCode = Constants.bFAILURE;
TarArchiveInputStream tarIn = null;
try {
tarIn = new TarArchiveInputStream(new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tgzFileName))));
} catch (Exception e) {
Utility.debugPrint(Constants.DEBUG_ERROR, "Exception in unzipping tar file: " + e);
return bRetCode;
}
try {
BufferedOutputStream bufOut = null;
BufferedInputStream bufIn = null;
TarArchiveEntry tarEntry = null;
while ((tarEntry = tarIn.getNextTarEntry()) != null) {
if (!tarEntry.getName().endsWith(fileName))
continue;
// Get file at root (in single directory) level. This is for License in root location.
if (bFirstDirLevel && (tarEntry.getName().indexOf('/') != tarEntry.getName().lastIndexOf('/')))
continue;
bufIn = new BufferedInputStream(tarIn);
int count;
byte[] data = new byte[Constants.BUFFER];
String strOutFileName = strDestLoc == null ? tarEntry.getName() : strDestLoc + "/" + fileName;
FileOutputStream fos = new FileOutputStream(strOutFileName);
bufOut = new BufferedOutputStream(fos, Constants.BUFFER);
while ((count = bufIn.read(data, 0, Constants.BUFFER)) != -1) {
bufOut.write(data, 0, count);
}
bufOut.flush();
bufOut.close();
bufIn.close();
bRetCode = Constants.bSUCCESS;
break;
}
} catch (Exception e) {
e.printStackTrace();
}
return bRetCode;
}
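A more compact variant of the same extraction uses try-with-resources and Files.copy so every stream is closed even on failure. This is a sketch under the same assumptions (a gzip-compressed tar, destination path built from the caller's arguments), not the project's own utility.
// Sketch: extract the first entry whose name ends with fileName from a .tgz archive.
// Needs Commons Compress plus java.nio.file (Files, Path, Paths, StandardCopyOption).
static boolean extractFromTgz(String tgzFileName, String fileName, String destDir) throws IOException {
    try (TarArchiveInputStream tarIn = new TarArchiveInputStream(
            new GzipCompressorInputStream(new BufferedInputStream(new FileInputStream(tgzFileName))))) {
        TarArchiveEntry entry;
        while ((entry = tarIn.getNextTarEntry()) != null) {
            if (!entry.getName().endsWith(fileName)) {
                continue;
            }
            Path target = destDir == null ? Paths.get(entry.getName()) : Paths.get(destDir, fileName);
            // TarArchiveInputStream limits reads to the current entry, so Files.copy writes exactly this entry
            Files.copy(tarIn, target, StandardCopyOption.REPLACE_EXISTING);
            return true;
        }
    }
    return false;
}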