Use of org.jboss.shrinkwrap.api.ArchivePath in project wildfly-swarm by wildfly-swarm: class OpenApiAnnotationScanner, method indexArchive.
/**
 * Indexes the given archive, recursing into any nested JARs that are accepted for scanning.
 *
 * @param config  the OpenAPI configuration that decides which classes and JARs are scanned
 * @param indexer the Jandex indexer to populate
 * @param archive the archive whose contents should be indexed
 */
@SuppressWarnings("unchecked")
private static void indexArchive(OpenApiConfig config, Indexer indexer, Archive archive) {
    Map<ArchivePath, Node> c = archive.getContent();
    try {
        for (Map.Entry<ArchivePath, Node> each : c.entrySet()) {
            ArchivePath archivePath = each.getKey();
            if (archivePath.get().endsWith(OpenApiConstants.CLASS_SUFFIX) && acceptClassForScanning(config, archivePath.get())) {
                try (InputStream contentStream = each.getValue().getAsset().openStream()) {
                    LOG.debugv("Indexing asset: {0} from archive: {1}", archivePath.get(), archive.getName());
                    indexer.index(contentStream);
                }
                continue;
            }
            if (archivePath.get().endsWith(OpenApiConstants.JAR_SUFFIX) && acceptJarForScanning(config, archivePath.get())) {
                try (InputStream contentStream = each.getValue().getAsset().openStream()) {
                    JARArchive jarArchive = ShrinkWrap.create(JARArchive.class, archivePath.get())
                            .as(ZipImporter.class)
                            .importFrom(contentStream)
                            .as(JARArchive.class);
                    indexArchive(config, indexer, jarArchive);
                }
                continue;
            }
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
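The pattern above, filtering archive entries by ArchivePath suffix before feeding each asset to a Jandex Indexer, also works outside the OpenAPI scanner. Below is a minimal sketch assuming only the standard ShrinkWrap and Jandex APIs; the SimpleArchiveIndexer class, the indexClasses helper, and the literal ".class" check (standing in for OpenApiConstants.CLASS_SUFFIX) are illustrative, not part of wildfly-swarm.

import java.io.IOException;
import java.io.InputStream;
import java.util.Map;

import org.jboss.jandex.Index;
import org.jboss.jandex.Indexer;
import org.jboss.shrinkwrap.api.ArchivePath;
import org.jboss.shrinkwrap.api.Node;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.JavaArchive;

public class SimpleArchiveIndexer {

    // Index every ".class" entry of a ShrinkWrap archive with Jandex.
    public static Index indexClasses(JavaArchive archive) throws IOException {
        Indexer indexer = new Indexer();
        for (Map.Entry<ArchivePath, Node> entry : archive.getContent().entrySet()) {
            ArchivePath path = entry.getKey();
            if (!path.get().endsWith(".class")) {
                continue; // skip directories and non-class resources
            }
            try (InputStream in = entry.getValue().getAsset().openStream()) {
                indexer.index(in);
            }
        }
        return indexer.complete();
    }

    public static void main(String[] args) throws IOException {
        // Hypothetical archive containing this class itself, purely for illustration.
        JavaArchive archive = ShrinkWrap.create(JavaArchive.class, "example.jar")
                .addClass(SimpleArchiveIndexer.class);
        Index index = indexClasses(archive);
        System.out.println("Indexed classes: " + index.getKnownClasses().size());
    }
}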
Use of org.jboss.shrinkwrap.api.ArchivePath in project wildfly-swarm by wildfly-swarm: class WebInfLibFilteringArchive, method filter.
protected void filter(ResolvedDependencies resolvedDependencies) {
    Set<ArchivePath> remove = new HashSet<>();
    filter(remove, getArchive().get(ArchivePaths.root()), resolvedDependencies);
    for (ArchivePath each : remove) {
        getArchive().delete(each);
    }
}
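The method first collects the paths to remove into a Set and only then deletes them, so the archive is not mutated while its node tree is being walked. The same collect-then-delete idiom can be reproduced with only the public ShrinkWrap API; the sketch below assumes that, and the SnapshotPruner class, the removeSnapshots helper, and the "-SNAPSHOT.jar" rule are illustrative.

import java.util.HashSet;
import java.util.Set;

import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ArchivePath;
import org.jboss.shrinkwrap.api.ArchivePaths;
import org.jboss.shrinkwrap.api.Node;

public class SnapshotPruner {

    // Remove every entry whose path ends with "-SNAPSHOT.jar".
    public static void removeSnapshots(Archive<?> archive) {
        Set<ArchivePath> remove = new HashSet<>();
        collect(remove, archive.get(ArchivePaths.root()));
        for (ArchivePath each : remove) {
            archive.delete(each);
        }
    }

    private static void collect(Set<ArchivePath> remove, Node node) {
        if (node.getPath().get().endsWith("-SNAPSHOT.jar")) {
            remove.add(node.getPath());
            return; // deleting the node removes its children as well
        }
        for (Node child : node.getChildren()) {
            collect(remove, child);
        }
    }
}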
Use of org.jboss.shrinkwrap.api.ArchivePath in project wildfly-swarm by wildfly-swarm: class StaticContentContainer, method mergeIgnoringDuplicates.
@SuppressWarnings("unchecked")
default T mergeIgnoringDuplicates(Archive<?> source, String base, Filter<ArchivePath> filter) {
    if (!base.startsWith("/")) {
        base = "/" + base;
    }
    // Get existing contents from source archive
    final Map<ArchivePath, Node> sourceContent = source.getContent();
    // Add each asset from the source archive
    for (final Map.Entry<ArchivePath, Node> contentEntry : sourceContent.entrySet()) {
        final Node node = contentEntry.getValue();
        ArchivePath nodePath = contentEntry.getKey();
        if (!nodePath.get().startsWith(base)) {
            continue;
        }
        if (!filter.include(nodePath)) {
            continue;
        }
        if (contains(nodePath)) {
            continue;
        }
        nodePath = new BasicPath(nodePath.get().replaceFirst(base, ""));
        // Delegate
        if (node.getAsset() == null) {
            addAsDirectory(nodePath);
        } else {
            add(node.getAsset(), nodePath);
        }
    }
    return (T) this;
}
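The interesting ArchivePath detail here is the re-basing step: each source path is filtered by base prefix and by the supplied Filter<ArchivePath>, entries already present are skipped so the target's content wins, and the remaining paths are stripped of the base prefix and wrapped in a new BasicPath before being added. A simplified standalone sketch of that idea follows; the ArchiveMerging class and the copyMissing helper are hypothetical, directory nodes are simply skipped rather than created, and BasicPath lives in the shrinkwrap-impl-base module.

import java.util.Map;

import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ArchivePath;
import org.jboss.shrinkwrap.api.Filter;
import org.jboss.shrinkwrap.api.Node;
import org.jboss.shrinkwrap.impl.base.path.BasicPath;

public class ArchiveMerging {

    // Copy entries under 'base' from source into target, skipping any path
    // the target already contains, and re-basing paths to the target root.
    public static void copyMissing(Archive<?> target, Archive<?> source,
                                   String base, Filter<ArchivePath> filter) {
        if (!base.startsWith("/")) {
            base = "/" + base;
        }
        for (Map.Entry<ArchivePath, Node> entry : source.getContent().entrySet()) {
            ArchivePath sourcePath = entry.getKey();
            if (!sourcePath.get().startsWith(base) || !filter.include(sourcePath)) {
                continue;
            }
            ArchivePath rebased = new BasicPath(sourcePath.get().substring(base.length()));
            if (target.contains(rebased) || entry.getValue().getAsset() == null) {
                continue; // keep existing content; skip directory nodes
            }
            target.add(entry.getValue().getAsset(), rebased);
        }
    }
}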
Use of org.jboss.shrinkwrap.api.ArchivePath in project wildfly-swarm by wildfly-swarm: class JAXRSArchiveImpl, method isJAXRS.
public static boolean isJAXRS(Archive<?> archive) {
    Map<ArchivePath, Node> content = archive.getContent();
    for (Map.Entry<ArchivePath, Node> entry : content.entrySet()) {
        Node node = entry.getValue();
        Asset asset = node.getAsset();
        if (isJAXRS(node.getPath(), asset)) {
            return true;
        }
    }
    return false;
}
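The same iterate-and-test pattern generalises to any per-entry check over an archive. Below is a small sketch assuming only the ShrinkWrap API; the ArchiveScanning class and the containsMatching helper are hypothetical and not part of JAXRSArchiveImpl. Note that the Asset passed to the predicate is null for directory nodes.

import java.util.Map;
import java.util.function.BiPredicate;

import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ArchivePath;
import org.jboss.shrinkwrap.api.Node;
import org.jboss.shrinkwrap.api.asset.Asset;

public class ArchiveScanning {

    // Return true if any entry of the archive satisfies the given predicate.
    public static boolean containsMatching(Archive<?> archive,
                                           BiPredicate<ArchivePath, Asset> predicate) {
        Map<ArchivePath, Node> content = archive.getContent();
        for (Map.Entry<ArchivePath, Node> entry : content.entrySet()) {
            Node node = entry.getValue();
            if (predicate.test(node.getPath(), node.getAsset())) {
                return true;
            }
        }
        return false;
    }
}

For example, containsMatching(archive, (path, asset) -> path.get().endsWith(".class")) would report whether the archive contains any compiled classes at all.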
Use of org.jboss.shrinkwrap.api.ArchivePath in project wildfly-swarm by wildfly-swarm: class RuntimeDeployer, method deploy.
public void deploy(Archive<?> deployment, String asName) throws DeploymentException {
    if (deployment.getName().endsWith(".rar")) {
        // Track any .rar deployments
        this.rarDeploymentNames.add(deployment.getName());
    } else if (!this.rarDeploymentNames.isEmpty()) {
        // Add any previous .rar deployments as dependencies
        // of any non-.rar deployments.
        JARArchive mutable = deployment.as(JARArchive.class);
        this.rarDeploymentNames.forEach(e -> {
            mutable.addModule("deployment." + e);
        });
    }
    try (AutoCloseable deploymentTimer = Performance.time("deployment: " + deployment.getName())) {
        // see DependenciesContainer#addAllDependencies()
        if (deployment instanceof DependenciesContainer) {
            DependenciesContainer<?> depContainer = (DependenciesContainer) deployment;
            if (depContainer.hasMarker(DependenciesContainer.ALL_DEPENDENCIES_MARKER)) {
                if (!depContainer.hasMarker(ALL_DEPENDENCIES_ADDED_MARKER)) {
                    ApplicationEnvironment appEnv = ApplicationEnvironment.get();
                    if (ApplicationEnvironment.Mode.UBERJAR == appEnv.getMode()) {
                        ArtifactLookup artifactLookup = ArtifactLookup.get();
                        for (String gav : appEnv.getDependencies()) {
                            depContainer.addAsLibrary(artifactLookup.artifact(gav));
                        }
                    } else {
                        Set<String> paths = appEnv.resolveDependencies(Collections.emptyList());
                        for (String path : paths) {
                            final File pathFile = new File(path);
                            if (path.endsWith(".jar")) {
                                depContainer.addAsLibrary(pathFile);
                            } else if (pathFile.isDirectory()) {
                                depContainer.merge(ShrinkWrap.create(GenericArchive.class)
                                                .as(ExplodedImporter.class)
                                                .importDirectory(pathFile)
                                                .as(GenericArchive.class),
                                        "/WEB-INF/classes",
                                        Filters.includeAll());
                            }
                        }
                    }
                    depContainer.addMarker(ALL_DEPENDENCIES_ADDED_MARKER);
                }
            }
        }
        this.deploymentContext.activate(deployment, asName, !this.implicitDeploymentsComplete);
        // 2. give fractions a chance to handle the deployment
        for (DeploymentProcessor processor : this.deploymentProcessors) {
            processor.process();
        }
        this.deploymentContext.deactivate();
        if (DeployerMessages.MESSAGES.isDebugEnabled()) {
            DeployerMessages.MESSAGES.deploying(deployment.getName());
            Map<ArchivePath, Node> ctx = deployment.getContent();
            for (Map.Entry<ArchivePath, Node> each : ctx.entrySet()) {
                DeployerMessages.MESSAGES.deploymentContent(each.getKey().toString());
            }
        }
        if (BootstrapProperties.flagIsSet(SwarmProperties.EXPORT_DEPLOYMENT)) {
            String exportLocation = System.getProperty(SwarmProperties.EXPORT_DEPLOYMENT);
            if (exportLocation != null) {
                Path archivePath = null;
                if (exportLocation.toLowerCase().equals("true")) {
                    archivePath = Paths.get(deployment.getName());
                } else {
                    Path exportDir = Paths.get(exportLocation);
                    Files.createDirectories(exportDir);
                    archivePath = exportDir.resolve(deployment.getName());
                }
                final File out = archivePath.toFile();
                DeployerMessages.MESSAGES.exportingDeployment(out.getAbsolutePath());
                deployment.as(ZipExporter.class).exportTo(out, true);
            }
        }
        byte[] hash = this.contentRepository.addContent(deployment);
        final ModelNode deploymentAdd = new ModelNode();
        deploymentAdd.get(OP).set(ADD);
        deploymentAdd.get(OP_ADDR).set("deployment", deployment.getName());
        deploymentAdd.get(RUNTIME_NAME).set(deployment.getName());
        deploymentAdd.get(ENABLED).set(true);
        deploymentAdd.get(PERSISTENT).set(true);
        ModelNode content = deploymentAdd.get(CONTENT).add();
        content.get(HASH).set(hash);
        int deploymentTimeout = Integer.getInteger(SwarmProperties.DEPLOYMENT_TIMEOUT, 300);
        final ModelNode opHeaders = new ModelNode();
        opHeaders.get(BLOCKING_TIMEOUT).set(deploymentTimeout);
        deploymentAdd.get(OPERATION_HEADERS).set(opHeaders);
        BootstrapLogger.logger("org.wildfly.swarm.runtime.deployer").info("deploying " + deployment.getName());
        System.setProperty(SwarmInternalProperties.CURRENT_DEPLOYMENT, deployment.getName());
        try {
            ModelNode result = client.execute(deploymentAdd);
            ModelNode outcome = result.get("outcome");
            if (outcome.asString().equals("success")) {
                return;
            }
            ModelNode description = result.get("failure-description");
            throw new DeploymentException(deployment, SwarmMessages.MESSAGES.deploymentFailed(description.asString()));
        } catch (IOException e) {
            throw SwarmMessages.MESSAGES.deploymentFailed(e, deployment);
        }
    } catch (Exception e) {
        throw new DeploymentException(deployment, e);
    }
}
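Two ArchivePath-related pieces of deploy are easy to reuse in isolation: listing every entry of a deployment for debugging, and exporting the archive to disk with ZipExporter when an export location is configured. The sketch below assumes only ShrinkWrap and java.nio; the DeploymentExport class and the exportAndList helper are illustrative, not part of RuntimeDeployer.

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Map;

import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ArchivePath;
import org.jboss.shrinkwrap.api.Node;
import org.jboss.shrinkwrap.api.exporter.ZipExporter;

public class DeploymentExport {

    // Print every ArchivePath in the deployment, then export it as a zip file.
    public static File exportAndList(Archive<?> deployment, String exportLocation) throws IOException {
        for (Map.Entry<ArchivePath, Node> each : deployment.getContent().entrySet()) {
            System.out.println("deployment content: " + each.getKey().get());
        }
        Path exportDir = Paths.get(exportLocation);
        Files.createDirectories(exportDir);
        File out = exportDir.resolve(deployment.getName()).toFile();
        deployment.as(ZipExporter.class).exportTo(out, true); // overwrite if present
        return out;
    }
}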