Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class AssemblyFileUtils, method unpack.
/**
 * Unpacks the archive file.
 *
 * @param source File to be unpacked.
 * @param destDir Directory where the unpacked files are placed.
 * @param archiverManager ArchiverManager used to look up an UnArchiver matching the source file.
 * @throws ArchiveExpansionException if the archive cannot be extracted.
 * @throws NoSuchArchiverException if no UnArchiver is available for the source file's type.
 */
public static void unpack(File source, File destDir, ArchiverManager archiverManager)
    throws ArchiveExpansionException, NoSuchArchiverException {
    try {
        UnArchiver unArchiver = archiverManager.getUnArchiver(source);
        unArchiver.setSourceFile(source);
        unArchiver.setDestDirectory(destDir);
        unArchiver.extract();
    } catch (ArchiverException e) {
        throw new ArchiveExpansionException("Error unpacking file: " + source + " to: " + destDir, e);
    }
}
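A brief caller sketch follows to show how this helper might be used. It is illustrative only: the surrounding class, the constructor wiring, and the package names in the plugin imports are assumptions (the plugin's own packages have moved between maven-assembly-plugin versions), not code from the project.

import java.io.File;

// NOTE: the packages of the plugin's own classes are assumptions and may differ by plugin version.
import org.apache.maven.plugin.assembly.archive.ArchiveExpansionException;
import org.apache.maven.plugin.assembly.utils.AssemblyFileUtils;
import org.codehaus.plexus.archiver.manager.ArchiverManager;
import org.codehaus.plexus.archiver.manager.NoSuchArchiverException;

// Hypothetical caller: an ArchiverManager would normally be injected as a Plexus component.
public class UnpackCaller {

    private final ArchiverManager archiverManager;

    public UnpackCaller(ArchiverManager archiverManager) {
        this.archiverManager = archiverManager;
    }

    public void unpackDistribution(File archive, File workDir) throws NoSuchArchiverException {
        try {
            // The helper picks a matching UnArchiver (zip, tar.gz, jar, ...) from the manager
            // based on the source file, then extracts into workDir.
            AssemblyFileUtils.unpack(archive, workDir, archiverManager);
        } catch (ArchiveExpansionException e) {
            // The underlying ArchiverException has already been wrapped by the helper.
            throw new IllegalStateException("Could not unpack " + archive + " to " + workDir, e);
        }
    }
}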
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class AddArtifactTaskTest, method testShouldAddArchiveFileWithUnpackAndModes.
public void testShouldAddArchiveFileWithUnpackAndModes() throws ArchiveCreationException, AssemblyFormattingException, IOException {
    int directoryMode = TypeConversionUtils.modeToInt("777", new ConsoleLogger(Logger.LEVEL_DEBUG, "test"));
    int fileMode = TypeConversionUtils.modeToInt("777", new ConsoleLogger(Logger.LEVEL_DEBUG, "test"));
    mac.expectModeChange(-1, -1, directoryMode, fileMode, 2);
    mac.expectInterpolators();
    ArtifactMock artifactMock = new ArtifactMock(mockManager, "group", "artifact", "version", "jar", false);
    artifactMock.setNewFile();
    mac.expectGetDestFile(new File("junk"));
    try {
        mac.archiver.addArchivedFileSet((ArchivedFileSet) anyObject(), (Charset) anyObject());
    } catch (ArchiverException e) {
        fail("Should never happen.");
    }
    mockManager.replayAll();
    AddArtifactTask task = createTask(artifactMock.getArtifact());
    task.setUnpack(true);
    task.setDirectoryMode(directoryMode);
    task.setFileMode(fileMode);
    task.execute(mac.archiver, mac.configSource);
    mockManager.verifyAll();
}
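The "777" strings passed to TypeConversionUtils.modeToInt above are octal permission masks. As a minimal standalone sketch of the conversion the test relies on (using plain Integer.parseInt rather than the plugin's helper, which is not reproduced here):

// Illustration only, not the plugin's TypeConversionUtils implementation:
// an octal mode string becomes an int by parsing it with radix 8.
public class ModeConversionSketch {
    public static void main(String[] args) {
        int mode = Integer.parseInt("777", 8); // 511 decimal, i.e. rwxrwxrwx
        System.out.println(mode);
    }
}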
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class EjbMojo, method generateEjbClient.
private File generateEjbClient() throws MojoExecutionException {
    File clientJarFile = EjbHelper.getJarFile(outputDirectory, jarName, getClientClassifier());
    getLog().info("Building EJB client " + clientJarFile.getPath());
    MavenArchiver clientArchiver = new MavenArchiver();
    clientArchiver.setArchiver(clientJarArchiver);
    clientArchiver.setOutputFile(clientJarFile);
    try {
        List<String> defaultExcludes = DEFAULT_CLIENT_EXCLUDES_LIST;
        List<String> defaultIncludes = DEFAULT_INCLUDES_LIST;
        IncludesExcludes ie = new IncludesExcludes(clientIncludes, clientExcludes, defaultIncludes, defaultExcludes);
        clientArchiver.getArchiver().addDirectory(sourceDirectory, ie.resultingIncludes(), ie.resultingExcludes());
        clientArchiver.createArchive(session, project, archive);
    } catch (ArchiverException e) {
        throw new MojoExecutionException("There was a problem creating the EJB client archive: " + e.getMessage(), e);
    } catch (ManifestException e) {
        throw new MojoExecutionException("There was a problem creating the EJB client archive: " + e.getMessage(), e);
    } catch (IOException e) {
        throw new MojoExecutionException("There was a problem creating the EJB client archive: " + e.getMessage(), e);
    } catch (DependencyResolutionRequiredException e) {
        throw new MojoExecutionException("There was a problem creating the EJB client archive: " + e.getMessage(), e);
    }
    return clientJarFile;
}
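The IncludesExcludes helper merges the user-configured clientIncludes/clientExcludes with the plugin defaults before the directory is added to the archiver. A minimal sketch of the fallback logic it presumably encapsulates follows; the precedence rule (user patterns win, otherwise defaults) and the example patterns are assumptions for illustration, not copied from the plugin.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

// Hypothetical, simplified stand-in for the plugin's IncludesExcludes helper.
public class IncludesExcludesSketch {

    static String[] resulting(List<String> userPatterns, List<String> defaultPatterns) {
        // Assumed rule: use the user-supplied patterns when present, otherwise fall back to defaults.
        List<String> effective = (userPatterns != null && !userPatterns.isEmpty()) ? userPatterns : defaultPatterns;
        return effective.toArray(new String[effective.size()]);
    }

    public static void main(String[] args) {
        List<String> clientExcludes = Collections.emptyList(); // nothing configured by the user
        List<String> defaults = Arrays.asList("**/*Bean.class", "**/package.html"); // illustrative defaults only
        // Falls back to the defaults because the user list is empty.
        System.out.println(Arrays.toString(resulting(clientExcludes, defaults)));
    }
}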
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class AbstractJavadocMojo, method copyAdditionalJavadocResources.
/**
 * Method that copies additional Javadoc resources from the given artifacts.
 *
 * @param anOutputDirectory the output directory
 * @throws MavenReportException if any error occurs
 * @see #resourcesArtifacts
 */
private void copyAdditionalJavadocResources(File anOutputDirectory) throws MavenReportException {
    Set<ResourcesArtifact> resourcesArtifacts = collectResourcesArtifacts();
    if (isEmpty(resourcesArtifacts)) {
        return;
    }
    UnArchiver unArchiver;
    try {
        unArchiver = archiverManager.getUnArchiver("jar");
    } catch (NoSuchArchiverException e) {
        throw new MavenReportException("Unable to extract resources artifact. No archiver for 'jar' available.", e);
    }
    for (ResourcesArtifact item : resourcesArtifacts) {
        Artifact artifact;
        try {
            artifact = createAndResolveArtifact(item);
        } catch (ArtifactResolverException e) {
            throw new MavenReportException("Unable to resolve artifact: " + item, e);
        }
        unArchiver.setSourceFile(artifact.getFile());
        unArchiver.setDestDirectory(anOutputDirectory);
        // remove the META-INF directory from the resource artifact
        IncludeExcludeFileSelector[] selectors = new IncludeExcludeFileSelector[] { new IncludeExcludeFileSelector() };
        selectors[0].setExcludes(new String[] { "META-INF/**" });
        unArchiver.setFileSelectors(selectors);
        getLog().info("Extracting contents of resources artifact: " + artifact.getArtifactId());
        try {
            unArchiver.extract();
        } catch (ArchiverException e) {
            throw new MavenReportException("Extraction of resources failed. Artifact that failed was: " + artifact.getArtifactId(), e);
        }
    }
}
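The selector setup above is the standard way to filter what an UnArchiver extracts. As a hedged sketch of the same mechanism used as a whitelist as well as a blacklist (the setIncludes call, the helper name, and the include patterns are illustrative assumptions):

import java.io.File;

import org.codehaus.plexus.archiver.ArchiverException;
import org.codehaus.plexus.archiver.UnArchiver;
import org.codehaus.plexus.components.io.fileselectors.IncludeExcludeFileSelector;

// Hypothetical helper: extracts only resource-like entries and still drops META-INF,
// reusing the selector API shown above. Assumes an UnArchiver has already been obtained
// from an ArchiverManager, as in the surrounding examples.
public class SelectiveUnpack {

    static void extractResourcesOnly(UnArchiver unArchiver, File archive, File destDir) {
        IncludeExcludeFileSelector selector = new IncludeExcludeFileSelector();
        selector.setIncludes(new String[] { "**/*.css", "**/*.png" }); // illustrative include patterns
        selector.setExcludes(new String[] { "META-INF/**" });
        unArchiver.setSourceFile(archive);
        unArchiver.setDestDirectory(destDir);
        unArchiver.setFileSelectors(new IncludeExcludeFileSelector[] { selector });
        try {
            unArchiver.extract();
        } catch (ArchiverException e) {
            throw new IllegalStateException("Extraction of " + archive + " failed", e);
        }
    }
}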
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class ResourceResolver, method resolveAndUnpack.
private List<String> resolveAndUnpack(final List<Artifact> artifacts, final SourceResolverConfig config, final List<String> validClassifiers, final boolean propagateErrors) throws ArtifactResolutionException, ArtifactNotFoundException {
    // NOTE: Since these are '-sources' and '-test-sources' artifacts, they won't actually
    // resolve transitively...this is just used to aggregate resolution failures into a single
    // exception.
    final Set<Artifact> artifactSet = new LinkedHashSet<Artifact>(artifacts);
    final Artifact pomArtifact = config.project().getArtifact();
    final ArtifactRepository localRepo = config.localRepository();
    final List<ArtifactRepository> remoteRepos = config.project().getRemoteArtifactRepositories();
    final ArtifactFilter filter;
    if (config.filter() != null) {
        filter = new ArtifactIncludeFilterTransformer().transform(config.filter());
    } else {
        filter = null;
    }
    ArtifactFilter resolutionFilter = null;
    if (filter != null) {
        // Wrap the filter in a ProjectArtifactFilter in order to always include the pomArtifact for resolution.
        // NOTE that this is necessary, b/c the -sources artifacts are added dynamically to the pomArtifact
        // and the resolver also checks the dependency trail with the given filter, thus the pomArtifact has
        // to be explicitly included by the filter, otherwise the -sources artifacts won't be resolved.
        resolutionFilter = new ProjectArtifactFilter(pomArtifact, filter);
    }
    Map<String, Artifact> managed = config.project().getManagedVersionMap();
    final ArtifactResolutionResult resolutionResult = resolver.resolveTransitively(artifactSet, pomArtifact, managed, localRepo, remoteRepos, artifactMetadataSource, resolutionFilter);
    final List<String> result = new ArrayList<String>(artifacts.size());
    for (final Artifact a : (Collection<Artifact>) resolutionResult.getArtifacts()) {
        if (!validClassifiers.contains(a.getClassifier()) || (filter != null && !filter.include(a))) {
            continue;
        }
        final File d = new File(config.outputBasedir(), a.getArtifactId() + "-" + a.getVersion() + "-" + a.getClassifier());
        if (!d.exists()) {
            d.mkdirs();
        }
        try {
            final UnArchiver unArchiver = archiverManager.getUnArchiver(a.getType());
            unArchiver.setDestDirectory(d);
            unArchiver.setSourceFile(a.getFile());
            unArchiver.extract();
            result.add(d.getAbsolutePath());
        } catch (final NoSuchArchiverException e) {
            if (propagateErrors) {
                throw new ArtifactResolutionException("Failed to retrieve valid un-archiver component: " + a.getType(), a, e);
            }
        } catch (final ArchiverException e) {
            if (propagateErrors) {
                throw new ArtifactResolutionException("Failed to unpack: " + a.getId(), a, e);
            }
        }
    }
    return result;
}
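The propagateErrors flag above lets callers choose between strict (fail fast) and best-effort unpacking. A minimal standalone sketch of that pattern follows; the TolerantUnpacker class, its naming scheme for destination directories, and the wrapping exception are illustrative assumptions, only the ArchiverManager/UnArchiver calls mirror the API used above.

import java.io.File;
import java.util.ArrayList;
import java.util.List;

import org.codehaus.plexus.archiver.ArchiverException;
import org.codehaus.plexus.archiver.UnArchiver;
import org.codehaus.plexus.archiver.manager.ArchiverManager;
import org.codehaus.plexus.archiver.manager.NoSuchArchiverException;

// Hypothetical helper reusing the propagateErrors pattern from resolveAndUnpack:
// strict mode rethrows on the first failure, tolerant mode skips it and continues.
public class TolerantUnpacker {

    private final ArchiverManager archiverManager;

    public TolerantUnpacker(ArchiverManager archiverManager) {
        this.archiverManager = archiverManager;
    }

    public List<File> unpackAll(List<File> archives, File baseDir, boolean propagateErrors)
        throws NoSuchArchiverException {
        List<File> unpacked = new ArrayList<File>();
        for (File archive : archives) {
            File dest = new File(baseDir, archive.getName() + ".contents"); // illustrative naming scheme
            dest.mkdirs();
            try {
                UnArchiver unArchiver = archiverManager.getUnArchiver(archive);
                unArchiver.setSourceFile(archive);
                unArchiver.setDestDirectory(dest);
                unArchiver.extract();
                unpacked.add(dest);
            } catch (NoSuchArchiverException e) {
                if (propagateErrors) {
                    throw e; // strict: no archiver for this file type aborts the run
                }
            } catch (ArchiverException e) {
                if (propagateErrors) {
                    throw new IllegalStateException("Failed to unpack: " + archive, e);
                }
                // tolerant: skip this archive and continue with the rest
            }
        }
        return unpacked;
    }
}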