Use of org.glassfish.api.deployment.archive.ReadableArchive in project Payara by payara.
The class ApplicationLifecycle, method getDeployableParser.
public Parser getDeployableParser(ReadableArchive source, boolean skipScanExternalLibProp, boolean modelUnAnnotatedMembers, StructuredDeploymentTracing tracing, Logger logger) throws IOException {
    try {
        ResourceLocator locator = determineLocator();
        // scan the jar and store the result in the deployment context.
        ParsingContext.Builder parsingContextBuilder = new ParsingContext.Builder()
                .logger(logger)
                .executorService(executorService.getUnderlyingExecutorService())
                .config(new ParsingConfig() {

                    @Override
                    public Set<String> getAnnotationsOfInterest() {
                        return Collections.emptySet();
                    }

                    @Override
                    public Set<String> getTypesOfInterest() {
                        return Collections.emptySet();
                    }

                    @Override
                    public boolean modelUnAnnotatedMembers() {
                        return modelUnAnnotatedMembers;
                    }
                });
        // workaround bug in Builder
        parsingContextBuilder.locator(locator);
        ParsingContext parsingContext = parsingContextBuilder.build();
        Parser parser = new Parser(parsingContext);
        ReadableArchiveScannerAdapter scannerAdapter = new ReadableArchiveScannerAdapter(parser, source);
        DeploymentSpan mainScanSpan = tracing.startSpan(DeploymentTracing.AppStage.CLASS_SCANNING, source.getName());
        parser.parse(scannerAdapter, () -> mainScanSpan.close());
        for (ReadableArchive externalLibArchive : getExternalLibraries(source, skipScanExternalLibProp)) {
            ReadableArchiveScannerAdapter libAdapter = null;
            try {
                DeploymentSpan span = tracing.startSpan(DeploymentTracing.AppStage.CLASS_SCANNING, externalLibArchive.getName());
                libAdapter = new ReadableArchiveScannerAdapter(parser, externalLibArchive);
                parser.parse(libAdapter, () -> span.close());
            } finally {
                if (libAdapter != null) {
                    libAdapter.close();
                }
            }
        }
        parser.awaitTermination();
        scannerAdapter.close();
        return parser;
    } catch (InterruptedException e) {
        throw new IOException(e);
    }
}
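A caller might drive this scanner roughly as follows. This is a minimal sketch: the archive path, the archiveFactory helper, and the Parser.getContext() accessor are assumptions for illustration and do not appear in the method above; StructuredDeploymentTracing.createDisabled is taken from the loader service shown further below.
    // Hypothetical caller sketch: scan a deployable archive, then inspect the resulting class model.
    ReadableArchive source = archiveFactory.openArchive(new File("/tmp/myapp.war"));   // assumed ArchiveFactory helper
    StructuredDeploymentTracing tracing = StructuredDeploymentTracing.createDisabled("myapp");
    Parser parser = getDeployableParser(source, false /* skipScanExternalLibProp */, true /* modelUnAnnotatedMembers */, tracing, logger);
    Types types = parser.getContext().getTypes();   // assumes Parser exposes its ParsingContext via getContext(), as in HK2 class-model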
Use of org.glassfish.api.deployment.archive.ReadableArchive in project Payara by payara.
The class ApplicationLifecycle, method getContext.
private ExtendedDeploymentContext getContext(ExtendedDeploymentContext initial, DeploymentContextBuilder builder) throws IOException {
    DeploymentContextBuilder copy = new DeploymentContextBuidlerImpl(builder);
    ReadableArchive archive = getArchive(copy);
    copy.source(archive);
    if (initial == null) {
        initial = new DeploymentContextImpl(copy, env);
    }
    ArchiveHandler archiveHandler = copy.archiveHandler();
    if (archiveHandler == null) {
        String type = null;
        OpsParams params = builder.params();
        if (params != null) {
            if (params instanceof DeployCommandParameters) {
                type = ((DeployCommandParameters) params).type;
            } else if (params instanceof UndeployCommandParameters) {
                type = ((UndeployCommandParameters) params)._type;
            }
        }
        archiveHandler = getArchiveHandler(archive, type);
    }
    // this is needed for autoundeploy to find the application
    // with the archive name
    File sourceFile = new File(archive.getURI().getSchemeSpecificPart());
    initial.getAppProps().put(ServerTags.DEFAULT_APP_NAME, DeploymentUtils.getDefaultEEName(sourceFile.getName()));
    if (!(sourceFile.isDirectory())) {
        String repositoryBitName = copy.params().name();
        try {
            repositoryBitName = VersioningUtils.getRepositoryName(repositoryBitName);
        } catch (VersioningSyntaxException e) {
            ActionReport report = copy.report();
            report.setMessage(e.getMessage());
            report.setActionExitCode(ActionReport.ExitCode.FAILURE);
        }
        // create a temporary deployment context
        File expansionDir = new File(domain.getApplicationRoot(), repositoryBitName);
        if (!expansionDir.mkdirs()) {
            /*
             * On Windows especially, a previous directory might have
             * remained after an earlier undeployment, for example if
             * a JAR file in the earlier deployment had been locked.
             * Warn but do not fail in such a case.
             */
            logger.fine(localStrings.getLocalString("deploy.cannotcreateexpansiondir", "Error while creating directory for jar expansion: {0}", expansionDir));
        }
        try {
            Long start = System.currentTimeMillis();
            final WritableArchive expandedArchive = archiveFactory.createArchive(expansionDir);
            archiveHandler.expand(archive, expandedArchive, initial);
            if (logger.isLoggable(Level.FINE)) {
                logger.log(FINE, "Deployment expansion took {0}", System.currentTimeMillis() - start);
            }
            // Close the JAR archive before losing the reference to it or else the JAR remains locked.
            try {
                archive.close();
            } catch (IOException e) {
                logger.log(SEVERE, KernelLoggerInfo.errorClosingArtifact, new Object[] { archive.getURI().getSchemeSpecificPart(), e });
                throw e;
            }
            archive = (FileArchive) expandedArchive;
            initial.setSource(archive);
        } catch (IOException e) {
            logger.log(SEVERE, KernelLoggerInfo.errorExpandingFile, e);
            throw e;
        }
    }
    initial.setArchiveHandler(archiveHandler);
    return initial;
}
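This private method is normally reached through the public Deployment.getBuilder(...) entry point, as the loader service further below also does. A minimal caller sketch follows; the archive, the archiveFactory helper, and the single-File DeployCommandParameters constructor are illustrative assumptions.
    // Hypothetical sketch: build a deployment context for a packed archive.
    File war = new File("/tmp/myapp.war");
    ReadableArchive archive = archiveFactory.openArchive(war);                  // assumed ArchiveFactory helper
    DeployCommandParameters params = new DeployCommandParameters(war);          // assumed convenience constructor
    ActionReport report = new HTMLActionReporter();
    ExtendedDeploymentContext context = deployment.getBuilder(logger, params, report)
            .source(archive)
            .build();   // routes through getContext(...), which expands the archive and selects the ArchiveHandler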
Use of org.glassfish.api.deployment.archive.ReadableArchive in project Payara by payara.
The class ClassLoaderHierarchyImpl, method createApplicationParentCL.
/**
 * Sets up the parent class loader for the application class loader.
 * Application class loaders are under the control of the ArchiveHandler, since
 * a special archive file format will require a specific class loader.
 *
 * However, GlassFish needs to be able to add capabilities to the application,
 * such as making public APIs accessible; this is done through the parent class
 * loader, which we create and maintain.
 *
 * @param parent the parent class loader
 * @param context deployment context
 * @return class loader capable of loading public APIs identified by the deployers
 * @throws ResolveError if one of the deployer's public API modules is not found.
 */
@Override
public ClassLoader createApplicationParentCL(ClassLoader parent, DeploymentContext context) throws ResolveError {
    final ReadableArchive source = context.getSource();
    List<ModuleDefinition> defs = new ArrayList<ModuleDefinition>();
    // now let's see if the application is requesting any module imports
    Manifest m = null;
    try {
        m = source.getManifest();
    } catch (IOException e) {
        logger.log(Level.SEVERE, "Cannot load application's manifest file: {0}", e.getMessage());
        if (logger.isLoggable(Level.FINE)) {
            logger.log(Level.FINE, e.getMessage(), e);
        }
    }
    if (m != null) {
        String importedBundles = m.getMainAttributes().getValue(ManifestConstants.BUNDLE_IMPORT_NAME);
        if (importedBundles != null) {
            for (String token : new Tokenizer(importedBundles, ",")) {
                Collection<HK2Module> modules = modulesRegistry.getModules(token);
                if (modules.size() == 1) {
                    defs.add(modules.iterator().next().getModuleDefinition());
                } else {
                    throw new ResolveError("Not able to locate a unique module by name " + token);
                }
            }
        }
        // Applications can also add additional OSGi repositories...
        String additionalRepo = m.getMainAttributes().getValue(org.glassfish.api.ManifestConstants.GLASSFISH_REQUIRE_REPOSITORY);
        if (additionalRepo != null) {
            for (String token : new Tokenizer(additionalRepo, ",")) {
                // Each entry should be name=path
                int equals = token.indexOf('=');
                if (equals == -1) {
                    // Missing '='...
                    throw new IllegalArgumentException("\"" + org.glassfish.api.ManifestConstants.GLASSFISH_REQUIRE_REPOSITORY + ": " + additionalRepo + "\" is missing an '='. " + "It must be in the format: name=path[,name=path]...");
                }
                String name = token.substring(0, equals);
                String path = token.substring(++equals);
                addRepository(name, resolver.translate(path));
            }
        }
        // Applications can also request to be wired to implementors of certain services.
        // That means that any module implementing the requested service will be accessible
        // by the parent class loader of the application.
        String requestedWiring = m.getMainAttributes().getValue(org.glassfish.api.ManifestConstants.GLASSFISH_REQUIRE_SERVICES);
        if (requestedWiring != null) {
            for (String token : new Tokenizer(requestedWiring, ",")) {
                for (Object impl : habitat.getAllServices(BuilderHelper.createContractFilter(token))) {
                    HK2Module wiredBundle = modulesRegistry.find(impl.getClass());
                    if (wiredBundle != null) {
                        defs.add(wiredBundle.getModuleDefinition());
                    }
                }
            }
        }
    }
    if (defs.isEmpty()) {
        return parent;
    } else {
        return modulesRegistry.getModulesClassLoader(parent, defs);
    }
}
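A sketch of how a deployment backend might consume this hook when assembling an application's class loader chain. The clh, context, and archiveHandler names are assumptions for illustration; only the createApplicationParentCL signature comes from the method above.
    // Hypothetical sketch: build the application's parent loader, then let the ArchiveHandler
    // create the application class loader underneath it.
    ClassLoader commonCL = clh.getCommonClassLoader();                        // assumed ClassLoaderHierarchy accessor
    ClassLoader appParentCL = clh.createApplicationParentCL(commonCL, context);
    ClassLoader appCL = archiveHandler.getClassLoader(appParentCL, context);  // assumed ArchiveHandler call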
Use of org.glassfish.api.deployment.archive.ReadableArchive in project Payara by payara.
The class ApplicationLoaderService, method processApplication.
public List<Deployment.ApplicationDeployment> processApplication(Application app, ApplicationRef appRef) {
    long operationStartTime = Calendar.getInstance().getTimeInMillis();
    initializeRuntimeDependencies();
    String source = app.getLocation();
    final String appName = app.getName();
    // lifecycle modules are loaded separately
    if (Boolean.valueOf(app.getDeployProperties().getProperty(ServerTags.IS_LIFECYCLE))) {
        return Collections.emptyList();
    }
    URI uri;
    try {
        uri = new URI(source);
    } catch (URISyntaxException e) {
        logger.log(Level.SEVERE, KernelLoggerInfo.cantDetermineLocation, e.getLocalizedMessage());
        return Collections.emptyList();
    }
    List<Deployment.ApplicationDeployment> appDeployments = new ArrayList<>();
    File sourceFile = new File(uri);
    if (sourceFile.exists()) {
        try {
            ReadableArchive archive = null;
            try {
                StructuredDeploymentTracing structuredTracing = deploymentTracingEnabled != null ? StructuredDeploymentTracing.create(app.getName()) : StructuredDeploymentTracing.createDisabled(app.getName());
                DeploymentTracing tracing = null;
                DeployCommandParameters deploymentParams = app.getDeployParameters(appRef);
                deploymentParams.target = server.getName();
                deploymentParams.origin = DeployCommandParameters.Origin.load;
                deploymentParams.command = DeployCommandParameters.Command.startup_server;
                if (domain.isAppReferencedByPaaSTarget(appName)) {
                    if (server.isDas()) {
                        // for loading PaaS application on DAS
                        // we set it to the real PaaS target
                        deploymentParams.target = deployment.getDefaultTarget(appName, deploymentParams.origin, deploymentParams._classicstyle);
                    }
                }
                archive = archiveFactoryProvider.get().openArchive(sourceFile, deploymentParams);
                ActionReport report = new HTMLActionReporter();
                ExtendedDeploymentContext depContext = deployment.getBuilder(logger, deploymentParams, report).source(archive).build();
                tracing = structuredTracing.register(depContext);
                depContext.getAppProps().putAll(app.getDeployProperties());
                depContext.setModulePropsMap(app.getModulePropertiesMap());
                new ApplicationConfigInfo(app).store(depContext.getAppProps());
                appDeployments.add(deployment.prepare(deployment.getSniffersFromApp(app), depContext));
                appDeployments.addAll(loadApplicationForTenants(app, appRef, report));
                if (report.getActionExitCode().equals(ActionReport.ExitCode.SUCCESS)) {
                    if (tracing != null) {
                        tracing.print(System.out);
                    }
                    logger.log(Level.INFO, KernelLoggerInfo.loadingApplicationTime, new Object[] { appName, (Calendar.getInstance().getTimeInMillis() - operationStartTime) });
                } else {
                    logger.log(Level.SEVERE, KernelLoggerInfo.deployFail, report.getMessage());
                }
            } finally {
                if (archive != null) {
                    try {
                        archive.close();
                    } catch (IOException e) {
                        logger.log(Level.FINE, KernelLoggerInfo.deployException, e);
                    }
                }
            }
        } catch (IOException e) {
            logger.log(Level.SEVERE, KernelLoggerInfo.exceptionOpenArtifact, e);
        }
    } else {
        logger.log(Level.SEVERE, KernelLoggerInfo.notFoundInOriginalLocation, source);
    }
    appDeployments.removeIf(t -> t == null);
    return appDeployments;
}
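At server startup this method is invoked once per registered application that is referenced by the server. A minimal driver sketch follows; the applications and server config beans and the getApplicationRef lookup are assumptions for illustration and are not shown in the method above.
    // Hypothetical driver sketch: prepare every registered application referenced by this server.
    List<Deployment.ApplicationDeployment> pending = new ArrayList<>();
    for (Application app : applications.getApplications()) {                 // assumed Applications config bean
        ApplicationRef ref = server.getApplicationRef(app.getName());        // assumed lookup on the Server bean
        pending.addAll(processApplication(app, ref));
    }
    // each returned list already has null entries removed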
Use of org.glassfish.api.deployment.archive.ReadableArchive in project Payara by payara.
The class ReadableArchiveScannerAdapter, method handleJar.
protected Future handleJar(final String name, final Logger logger) throws IOException {
    // we need to check that there is no exploded directory by this name.
    String explodedName = name.replaceAll("[/ ]", "__").replace(".jar", "_jar");
    if (!archive.exists(explodedName)) {
        final ReadableArchive subArchive = archive.getSubArchive(name);
        if (subArchive == null) {
            logger.log(Level.SEVERE, KernelLoggerInfo.cantOpenSubArchive, new Object[] { name, archive.getURI() });
            return null;
        }
        if (logger.isLoggable(level)) {
            logger.log(level, "Spawning sub parsing " + subArchive.getURI());
        }
        final ReadableArchiveScannerAdapter adapter = new InternalJarAdapter(this, subArchive, subArchive.getURI());
        // we increment our release count; this tells us when we can safely close the parent archive.
        releaseCount.incrementAndGet();
        return parser.parse(adapter, new Runnable() {

            @Override
            public void run() {
                try {
                    if (logger.isLoggable(level)) {
                        logger.log(level, "Closing sub archive " + subArchive.getURI());
                    }
                    adapter.close();
                } catch (IOException e) {
                    logger.log(Level.SEVERE, KernelLoggerInfo.exceptionWhileClosing, new Object[] { name, e });
                }
            }
        });
    }
    return null;
}
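The releaseCount increment above pairs with the adapter's close handling: the parent archive may only be closed once its own scan and every spawned sub-archive scan have finished. A sketch of the reference-counting idea follows; it is illustrative only, with assumed field and method names, and is not the adapter's actual close() implementation.
    // Illustrative reference-counting sketch (assumed names, not the real adapter code):
    private final AtomicInteger releaseCount = new AtomicInteger(1);  // 1 for the adapter's own scan
    private void release() throws IOException {
        if (releaseCount.decrementAndGet() <= 0) {
            archive.close();   // safe now: no nested JAR scan still reads from the parent archive
        }
    }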