Use of org.codehaus.plexus.archiver.ArchiverException in project midpoint by Evolveum.
The class SchemaDocMojo, method execute.
public void execute() throws MojoExecutionException, MojoFailureException {
    getLog().info("SchemaDoc plugin started");

    PrismContext prismContext = createInitializedPrismContext();
    File outDir = initializeOutDir();
    PathGenerator pathGenerator = new PathGenerator(outDir);
    VelocityEngine velocityEngine = createVelocityEngine();
    SchemaRegistry schemaRegistry = prismContext.getSchemaRegistry();

    try {
        renderSchemaIndex(schemaRegistry, prismContext, velocityEngine, pathGenerator);
    } catch (IOException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }

    for (PrismSchema schema : schemaRegistry.getSchemas()) {
        try {
            renderSchema(schema, prismContext, velocityEngine, pathGenerator);
        } catch (IOException e) {
            throw new MojoExecutionException(e.getMessage(), e);
        }
    }

    try {
        copyResources(outDir);
    } catch (IOException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }

    File archiveFile = null;
    try {
        archiveFile = generateArchive(outDir, finalName + "-schemadoc.zip");
    } catch (IOException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    } catch (ArchiverException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
    projectHelper.attachArtifact(project, "zip", "schemadoc", archiveFile);

    getLog().info("SchemaDoc plugin finished");
}
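The generateArchive helper called above is not part of this excerpt. Below is a minimal sketch of what such a helper might look like with a Plexus ZipArchiver; the method body, the placement of the zip next to the output directory, and the declared exceptions are assumptions chosen to mirror the caller's two catch blocks, not the actual midPoint implementation. ArchiverException is checked in older plexus-archiver releases and unchecked in newer ones, so catching it alongside IOException works in either case.

// Hypothetical sketch, not the midPoint code: pack the rendered schemadoc
// directory into <finalName>-schemadoc.zip using a Plexus ZipArchiver.
private File generateArchive(File outDir, String zipFileName) throws IOException, ArchiverException {
    ZipArchiver zipArchiver = new ZipArchiver();                       // org.codehaus.plexus.archiver.zip.ZipArchiver
    File archiveFile = new File(outDir.getParentFile(), zipFileName);  // assumed location next to outDir
    zipArchiver.setDestFile(archiveFile);
    zipArchiver.addDirectory(outDir);                                  // include the generated documentation tree
    zipArchiver.createArchive();                                       // may fail with ArchiverException / IOException
    return archiveFile;
}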
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class AbstractJavadocMojo, method copyAdditionalJavadocResources.
/**
 * Method that copies additional Javadoc resources from the given artifacts.
 *
 * @param anOutputDirectory the output directory
 * @throws MavenReportException if obtaining an unarchiver, resolving an artifact, or extracting it fails
 * @see #resourcesArtifacts
 */
private void copyAdditionalJavadocResources(File anOutputDirectory) throws MavenReportException {
    Set<ResourcesArtifact> resourcesArtifacts = collectResourcesArtifacts();
    if (isEmpty(resourcesArtifacts)) {
        return;
    }

    UnArchiver unArchiver;
    try {
        unArchiver = archiverManager.getUnArchiver("jar");
    } catch (NoSuchArchiverException e) {
        throw new MavenReportException("Unable to extract resources artifact. " + "No archiver for 'jar' available.", e);
    }

    for (ResourcesArtifact item : resourcesArtifacts) {
        Artifact artifact;
        try {
            artifact = createAndResolveArtifact(item);
        } catch (ArtifactResolverException e) {
            throw new MavenReportException("Unable to resolve artifact:" + item, e);
        }

        unArchiver.setSourceFile(artifact.getFile());
        unArchiver.setDestDirectory(anOutputDirectory);
        // remove the META-INF directory from resource artifact
        IncludeExcludeFileSelector[] selectors = new IncludeExcludeFileSelector[] { new IncludeExcludeFileSelector() };
        selectors[0].setExcludes(new String[] { "META-INF/**" });
        unArchiver.setFileSelectors(selectors);

        getLog().info("Extracting contents of resources artifact: " + artifact.getArtifactId());
        try {
            unArchiver.extract();
        } catch (ArchiverException e) {
            throw new MavenReportException("Extraction of resources failed. Artifact that failed was: " + artifact.getArtifactId(), e);
        }
    }
}
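The same unpack pattern works outside a mojo as well. The sketch below is a minimal standalone illustration of the UnArchiver calls used above (source file, destination directory, a selector that drops META-INF/**, then extract); the file paths are made up, and an injected Plexus ArchiverManager named archiverManager is assumed to be available.

// Minimal standalone sketch with assumed paths; the destination directory must already exist.
UnArchiver unArchiver = archiverManager.getUnArchiver("jar");          // throws NoSuchArchiverException
unArchiver.setSourceFile(new File("/tmp/resources-1.0.jar"));          // hypothetical resources artifact
unArchiver.setDestDirectory(new File("/tmp/javadoc-output"));          // hypothetical, pre-created directory
IncludeExcludeFileSelector selector = new IncludeExcludeFileSelector();
selector.setExcludes(new String[] { "META-INF/**" });                  // skip the artifact's META-INF
unArchiver.setFileSelectors(new IncludeExcludeFileSelector[] { selector });
unArchiver.extract();                                                  // throws ArchiverException on failure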
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class ResourceResolver, method resolveAndUnpack.
private List<String> resolveAndUnpack(final List<Artifact> artifacts, final SourceResolverConfig config,
                                      final List<String> validClassifiers, final boolean propagateErrors)
        throws ArtifactResolutionException, ArtifactNotFoundException {
    // NOTE: Since these are '-sources' and '-test-sources' artifacts, they won't actually
    // resolve transitively...this is just used to aggregate resolution failures into a single
    // exception.
    final Set<Artifact> artifactSet = new LinkedHashSet<Artifact>(artifacts);
    final Artifact pomArtifact = config.project().getArtifact();
    final ArtifactRepository localRepo = config.localRepository();
    final List<ArtifactRepository> remoteRepos = config.project().getRemoteArtifactRepositories();

    final ArtifactFilter filter;
    if (config.filter() != null) {
        filter = new ArtifactIncludeFilterTransformer().transform(config.filter());
    } else {
        filter = null;
    }

    ArtifactFilter resolutionFilter = null;
    if (filter != null) {
        // Wrap the filter in a ProjectArtifactFilter in order to always include the pomArtifact for resolution.
        // NOTE that this is necessary, b/c the -sources artifacts are added dynamically to the pomArtifact
        // and the resolver also checks the dependency trail with the given filter, thus the pomArtifact has
        // to be explicitly included by the filter, otherwise the -sources artifacts won't be resolved.
        resolutionFilter = new ProjectArtifactFilter(pomArtifact, filter);
    }

    Map<String, Artifact> managed = config.project().getManagedVersionMap();
    final ArtifactResolutionResult resolutionResult = resolver.resolveTransitively(artifactSet, pomArtifact,
            managed, localRepo, remoteRepos, artifactMetadataSource, resolutionFilter);

    final List<String> result = new ArrayList<String>(artifacts.size());
    for (final Artifact a : (Collection<Artifact>) resolutionResult.getArtifacts()) {
        if (!validClassifiers.contains(a.getClassifier()) || (filter != null && !filter.include(a))) {
            continue;
        }

        final File d = new File(config.outputBasedir(), a.getArtifactId() + "-" + a.getVersion() + "-" + a.getClassifier());
        if (!d.exists()) {
            d.mkdirs();
        }

        try {
            final UnArchiver unArchiver = archiverManager.getUnArchiver(a.getType());
            unArchiver.setDestDirectory(d);
            unArchiver.setSourceFile(a.getFile());
            unArchiver.extract();
            result.add(d.getAbsolutePath());
        } catch (final NoSuchArchiverException e) {
            if (propagateErrors) {
                throw new ArtifactResolutionException("Failed to retrieve valid un-archiver component: " + a.getType(), a, e);
            }
        } catch (final ArchiverException e) {
            if (propagateErrors) {
                throw new ArtifactResolutionException("Failed to unpack: " + a.getId(), a, e);
            }
        }
    }
    return result;
}
Use of org.codehaus.plexus.archiver.ArchiverException in project maven-plugins by apache.
The class JavadocJar, method generateArchive.
// ----------------------------------------------------------------------
// private methods
// ----------------------------------------------------------------------
/**
 * Method that creates the jar file.
 *
 * @param javadocFiles the directory containing the generated javadoc files to be archived
 * @param jarFileName the filename of the generated jar file
 * @return a File object pointing to the generated jar file
 * @throws ArchiverException {@link ArchiverException}
 * @throws IOException {@link IOException}
 */
private File generateArchive(File javadocFiles, String jarFileName) throws ArchiverException, IOException {
    File javadocJar = new File(jarOutputDirectory, jarFileName);
    if (javadocJar.exists()) {
        javadocJar.delete();
    }

    MavenArchiver archiver = new MavenArchiver();
    archiver.setArchiver(jarArchiver);
    archiver.setOutputFile(javadocJar);

    File contentDirectory = javadocFiles;
    if (!contentDirectory.exists()) {
        getLog().warn("JAR will be empty - no content was marked for inclusion!");
    } else {
        archiver.getArchiver().addDirectory(contentDirectory, DEFAULT_INCLUDES, DEFAULT_EXCLUDES);
    }

    List<Resource> resources = project.getBuild().getResources();
    for (Resource r : resources) {
        if (r.getDirectory().endsWith("maven-shared-archive-resources")) {
            archiver.getArchiver().addDirectory(new File(r.getDirectory()));
        }
    }

    if (useDefaultManifestFile && defaultManifestFile.exists() && archive.getManifestFile() == null) {
        getLog().info("Adding existing MANIFEST to archive. Found under: " + defaultManifestFile.getPath());
        archive.setManifestFile(defaultManifestFile);
    }

    try {
        archiver.createArchive(session, project, archive);
    } catch (ManifestException e) {
        throw new ArchiverException("ManifestException: " + e.getMessage(), e);
    } catch (DependencyResolutionRequiredException e) {
        throw new ArchiverException("DependencyResolutionRequiredException: " + e.getMessage(), e);
    }

    return javadocJar;
}
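A hypothetical caller of this helper (not the plugin's actual mojo code) would typically wrap both declared exceptions into a MojoExecutionException and attach the resulting jar; the field names in the sketch below are assumptions standing in for the mojo's own fields.

// Hypothetical caller sketch; javadocOutputDirectory, finalName, projectHelper and project
// stand for mojo fields and are assumptions, not the plugin's actual names.
try {
    File javadocJar = generateArchive(javadocOutputDirectory, finalName + "-javadoc.jar");
    projectHelper.attachArtifact(project, "javadoc", "javadoc", javadocJar);  // attach with classifier "javadoc"
} catch (ArchiverException | IOException e) {
    throw new MojoExecutionException("Error while creating archive: " + e.getMessage(), e);
}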
Use of org.codehaus.plexus.archiver.ArchiverException in project sofa-ark by alipay.
The class ArkPluginMojo, method execute.
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    Archiver archiver;
    try {
        archiver = getArchiver();
    } catch (NoSuchArchiverException e) {
        throw new MojoExecutionException(e.getMessage());
    }

    if (!outputDirectory.exists()) {
        outputDirectory.mkdirs();
    }

    String fileName = getFileName();
    File destination = new File(outputDirectory, fileName);
    if (destination.exists()) {
        destination.delete();
    }
    archiver.setDestFile(destination);

    Set<Artifact> artifacts = project.getArtifacts();
    artifacts = filterExcludeArtifacts(artifacts);
    Set<Artifact> conflictArtifacts = filterConflictArtifacts(artifacts);

    addArkPluginArtifact(archiver, artifacts, conflictArtifacts);
    addArkPluginConfig(archiver);

    try {
        archiver.createArchive();
    } catch (ArchiverException e) {
        throw new MojoExecutionException(e.getMessage());
    } catch (IOException e) {
        throw new MojoExecutionException(e.getMessage());
    }

    if (isAttach()) {
        projectHelper.attachArtifact(project, destination, getClassifier());
    }
}
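One detail worth noting: new MojoExecutionException(e.getMessage()) discards the original exception, so the archiver's stack trace is lost from Maven's error output. A small variant sketch (not the sofa-ark code) that preserves the cause and collapses the identical handlers with multi-catch could look like this:

// Variant sketch, not the sofa-ark implementation: keep the cause so the archiver
// stack trace survives into the Maven error report.
try {
    archiver.createArchive();
} catch (ArchiverException | IOException e) {
    throw new MojoExecutionException("Failed to create ark plugin archive: " + e.getMessage(), e);
}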