Use of io.cdap.cdap.common.InvalidArtifactException in project cdap by caskdata.
The class DefaultArtifactRepository, method validateParentSet.
/**
* Validates the parents of an artifact. Checks that each artifact only appears with a single version range.
*
* @param artifactId the id of the artifact whose parents are being validated
* @param parents the set of parent ranges to validate
* @throws InvalidArtifactException if there is more than one version range for an artifact
*/
@VisibleForTesting
static void validateParentSet(Id.Artifact artifactId, Set<ArtifactRange> parents) throws InvalidArtifactException {
  boolean isInvalid = false;
  StringBuilder errMsg = new StringBuilder("Invalid parents field.");

  // check for multiple version ranges for the same artifact.
  // ex: "parents": [ "etlbatch[1.0.0,2.0.0)", "etlbatch[3.0.0,4.0.0)" ]
  Set<String> parentNames = new HashSet<>();
  // keep track of dupes so that we don't have repeat error messages if there are more than 2 ranges for a name
  Set<String> dupes = new HashSet<>();
  for (ArtifactRange parent : parents) {
    String parentName = parent.getName();
    if (!parentNames.add(parentName) && !dupes.contains(parentName)) {
      errMsg.append(" Only one version range for parent '");
      errMsg.append(parentName);
      errMsg.append("' can be present.");
      dupes.add(parentName);
      isInvalid = true;
    }
    if (artifactId.getName().equals(parentName)
        && artifactId.getNamespace().toEntityId().getNamespace().equals(parent.getNamespace())) {
      throw new InvalidArtifactException(String.format(
        "Invalid parent '%s' for artifact '%s'. An artifact cannot extend itself.", parent, artifactId));
    }
  }

  // resulting message, for example:
  // "Invalid parents field. Only one version range for parent 'etlbatch' can be present."
  if (isInvalid) {
    throw new InvalidArtifactException(errMsg.toString());
  }
}
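The duplicate detection above is just two sets: one tracking every parent name seen, one tracking names already reported so that a parent with three or more ranges produces a single error message. A minimal, standalone sketch of that pattern on plain strings (hypothetical names, none of the CDAP types) could look like this:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class DuplicateParentCheck {
  public static void main(String[] args) {
    // hypothetical parent names, one per declared version range
    List<String> parentNames = Arrays.asList("etlbatch", "core-plugins", "etlbatch");

    Set<String> seen = new HashSet<>();
    Set<String> reported = new HashSet<>();
    StringBuilder errMsg = new StringBuilder("Invalid parents field.");
    boolean isInvalid = false;

    for (String name : parentNames) {
      // Set.add returns false when the element is already present,
      // so a false here means this parent name has appeared before
      if (!seen.add(name) && reported.add(name)) {
        errMsg.append(" Only one version range for parent '").append(name).append("' can be present.");
        isInvalid = true;
      }
    }

    if (isInvalid) {
      System.out.println(errMsg); // prints the combined error message
    }
  }
}

Using Set.add as the membership test keeps the whole check to a single pass over the parents.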
Use of io.cdap.cdap.common.InvalidArtifactException in project cdap by caskdata.
The class DefaultArtifactRepository, method getParentArtifactDescriptors.
/**
* Get {@link ArtifactDescriptor} of parent and grandparent (if any) artifacts for the given artifact.
*
* @param artifactId the id of the artifact for which to find its parent and grandparent {@link ArtifactDescriptor}
* @param parentArtifacts the ranges of parents to find
* @return {@link ArtifactDescriptor} of parent and grandparent (if any) artifacts, in that specific order
* @throws ArtifactRangeNotFoundException if none of the parents could be found
* @throws InvalidArtifactException if there is a cyclic dependency, or if a grandparent artifact itself has parents
*/
private List<ArtifactDescriptor> getParentArtifactDescriptors(Id.Artifact artifactId, Set<ArtifactRange> parentArtifacts)
  throws ArtifactRangeNotFoundException, InvalidArtifactException {
  List<ArtifactDetail> parents = new ArrayList<>();
  for (ArtifactRange parentRange : parentArtifacts) {
    parents.addAll(artifactStore.getArtifacts(parentRange, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED));
  }
  if (parents.isEmpty()) {
    throw new ArtifactRangeNotFoundException(String.format(
      "Artifact %s extends artifacts '%s' that do not exist", artifactId, Joiner.on('/').join(parentArtifacts)));
  }

  ArtifactDescriptor parentArtifact = null;
  ArtifactDescriptor grandparentArtifact = null;
  // the hierarchy is limited to a parent and a grandparent to avoid complicated dependency trees that are hard to manage.
  for (ArtifactDetail parent : parents) {
    Set<ArtifactRange> grandparentRanges = parent.getMeta().getUsableBy();
    for (ArtifactRange grandparentRange : grandparentRanges) {
      // if the parent has the child as a parent (cyclic dependency)
      if (grandparentRange.getNamespace().equals(artifactId.getNamespace().getId())
          && grandparentRange.getName().equals(artifactId.getName())
          && grandparentRange.versionIsInRange(artifactId.getVersion())) {
        throw new InvalidArtifactException(String.format(
          "Invalid artifact '%s': cyclic dependency. Parent '%s' has artifact '%s' as a parent.",
          artifactId, parent.getDescriptor().getArtifactId(), artifactId));
      }

      List<ArtifactDetail> grandparents = artifactStore.getArtifacts(grandparentRange, Integer.MAX_VALUE, ArtifactSortOrder.UNORDERED);
      // check that no grandparent has parents
      for (ArtifactDetail grandparent : grandparents) {
        Set<ArtifactRange> greatGrandparents = grandparent.getMeta().getUsableBy();
        if (!greatGrandparents.isEmpty()) {
          throw new InvalidArtifactException(String.format(
            "Invalid artifact '%s'. Grandparents of artifacts cannot have parents. Grandparent '%s' has parents.",
            artifactId, grandparent.getDescriptor().getArtifactId()));
        }
        // assumes any grandparent will do
        if (parentArtifact == null && grandparentArtifact == null) {
          grandparentArtifact = grandparent.getDescriptor();
        }
      }
    }
    // assumes any parent will do
    if (parentArtifact == null) {
      parentArtifact = parent.getDescriptor();
    }
  }

  List<ArtifactDescriptor> parentArtifactList = new ArrayList<>();
  parentArtifactList.add(parentArtifact);
  if (grandparentArtifact != null) {
    parentArtifactList.add(grandparentArtifact);
  }
  return parentArtifactList;
}
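The hierarchy rules above are easier to see on a plain map of artifact name to declared parents: walking one level down from each parent, a cycle is flagged when the child reappears as a grandparent, and any grandparent that itself declares parents is rejected. A standalone sketch under those assumptions (a hypothetical Map-based model instead of the real ArtifactStore) might look like this:

import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

public class ParentHierarchyCheck {

  // hypothetical view of "usableBy": artifact name -> names of its declared parents
  static final Map<String, Set<String>> PARENTS = new HashMap<>();
  static {
    PARENTS.put("my-app", Set.of("etlbatch"));
    PARENTS.put("etlbatch", Set.of("cdap-api"));
    PARENTS.put("cdap-api", Collections.emptySet());
  }

  static void validateHierarchy(String child) {
    for (String parent : PARENTS.getOrDefault(child, Collections.emptySet())) {
      for (String grandparent : PARENTS.getOrDefault(parent, Collections.emptySet())) {
        if (grandparent.equals(child)) {
          // the parent has the child as its own parent
          throw new IllegalArgumentException("cyclic dependency: " + parent + " has " + child + " as a parent");
        }
        if (!PARENTS.getOrDefault(grandparent, Collections.emptySet()).isEmpty()) {
          // the hierarchy is limited to two levels above the child
          throw new IllegalArgumentException("grandparent " + grandparent + " cannot itself have parents");
        }
      }
    }
  }

  public static void main(String[] args) {
    validateHierarchy("my-app"); // passes; if cdap-api declared a parent, the check would reject the hierarchy
    System.out.println("hierarchy OK");
  }
}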
Use of io.cdap.cdap.common.InvalidArtifactException in project cdap by caskdata.
The class DefaultArtifactRepository, method addSystemArtifacts.
@Override
public void addSystemArtifacts() throws Exception {
  // scan the directory for artifact .jar files and config files for those artifacts
  Map<Id.Artifact, SystemArtifactInfo> systemArtifacts = new HashMap<>();
  for (File systemArtifactDir : systemArtifactDirs) {
    for (File jarFile : DirUtils.listFiles(systemArtifactDir, "jar")) {
      // parse id from filename
      Id.Artifact artifactId;
      try {
        artifactId = Id.Artifact.parse(Id.Namespace.SYSTEM, jarFile.getName());
      } catch (IllegalArgumentException e) {
        LOG.warn(String.format("Skipping system artifact '%s' because the name is invalid: %s",
                               jarFile.getName(), e.getMessage()));
        continue;
      }

      // check for a corresponding .json config file
      String artifactFileName = jarFile.getName();
      String configFileName = artifactFileName.substring(0, artifactFileName.length() - ".jar".length()) + ".json";
      File configFile = new File(systemArtifactDir, configFileName);
      try {
        // read and parse the config file if it exists. Otherwise use an empty config with the artifact filename
        ArtifactConfig artifactConfig = configFile.isFile()
          ? configReader.read(artifactId.getNamespace(), configFile)
          : new ArtifactConfig();
        validateParentSet(artifactId, artifactConfig.getParents());
        validatePluginSet(artifactConfig.getPlugins());
        systemArtifacts.put(artifactId, new SystemArtifactInfo(artifactId, jarFile, artifactConfig));
      } catch (InvalidArtifactException e) {
        LOG.warn(String.format("Could not add system artifact '%s' because it is invalid.", artifactFileName), e);
      }
    }
  }

  // child -> parents
  Multimap<Id.Artifact, Id.Artifact> childToParents = HashMultimap.create();
  // parent -> children
  Multimap<Id.Artifact, Id.Artifact> parentToChildren = HashMultimap.create();
  Set<Id.Artifact> remainingArtifacts = new HashSet<>();
  // build mapping from child to parents and from parents to children
  for (SystemArtifactInfo child : systemArtifacts.values()) {
    Id.Artifact childId = child.getArtifactId();
    remainingArtifacts.add(childId);
    for (SystemArtifactInfo potentialParent : systemArtifacts.values()) {
      Id.Artifact potentialParentId = potentialParent.getArtifactId();
      // skip if we're looking at ourselves
      if (childId.equals(potentialParentId)) {
        continue;
      }
      if (child.getConfig().hasParent(potentialParentId)) {
        childToParents.put(childId, potentialParentId);
        parentToChildren.put(potentialParentId, childId);
      }
    }
  }

  if (!remainingArtifacts.isEmpty()) {
    ExecutorService executorService = Executors.newFixedThreadPool(
      Math.min(maxArtifactLoadParallelism, remainingArtifacts.size()),
      Threads.createDaemonThreadFactory("system-artifact-loader-%d"));
    try {
      // loop until there is no change
      boolean artifactsAdded = true;
      while (!remainingArtifacts.isEmpty() && artifactsAdded) {
        artifactsAdded = loadSystemArtifacts(executorService, systemArtifacts, remainingArtifacts,
                                             parentToChildren, childToParents);
      }
    } finally {
      executorService.shutdownNow();
    }

    if (!remainingArtifacts.isEmpty()) {
      LOG.warn("Unable to add system artifacts {} due to cyclic dependencies", Joiner.on(",").join(remainingArtifacts));
    }
  }
}
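The load loop above amounts to a level-by-level topological pass: each iteration adds every artifact whose parents have all been added, and the loop stops when a pass makes no progress, which is exactly what happens on a cyclic dependency. A minimal single-threaded sketch of that idea, assuming a plain child-to-parents map in place of the multimaps and executor, might look like this:

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class DependencyOrderedLoad {
  public static void main(String[] args) {
    // hypothetical system artifacts and their declared parents
    Map<String, Set<String>> childToParents = Map.of(
      "cdap-data-pipeline", Set.of(),
      "core-plugins", Set.of("cdap-data-pipeline"),
      "custom-plugins", Set.of("core-plugins"));

    Set<String> remaining = new HashSet<>(childToParents.keySet());
    List<String> loaded = new ArrayList<>();

    boolean added = true;
    while (!remaining.isEmpty() && added) {
      added = false;
      for (String artifact : new HashSet<>(remaining)) {
        // an artifact can be loaded only once every one of its parents has been loaded
        if (loaded.containsAll(childToParents.get(artifact))) {
          loaded.add(artifact);      // stand-in for the real "add artifact" call
          remaining.remove(artifact);
          added = true;
        }
      }
    }

    if (!remaining.isEmpty()) {
      System.out.println("Unable to add " + remaining + " due to cyclic dependencies");
    }
    System.out.println("Load order: " + loaded);
  }
}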
Use of io.cdap.cdap.common.InvalidArtifactException in project cdap by caskdata.
The class InMemoryConfigurator, method createResponse.
private <T extends Config> ConfigResponse createResponse(Application<T> app, ClassLoader artifactClassLoader) throws Exception {
  // This Gson cannot be static since it is used to deserialize user class.
  // Gson will keep a static map to class, hence will leak the classloader
  Gson gson = new GsonBuilder().registerTypeAdapterFactory(new CaseInsensitiveEnumTypeAdapterFactory()).create();

  // Now, we call configure, which returns application specification.
  DefaultAppConfigurer configurer;
  File tempDir = DirUtils.createTempDir(baseUnpackDir);
  try (PluginInstantiator pluginInstantiator = new PluginInstantiator(cConf, app.getClass().getClassLoader(), tempDir)) {
    RuntimeConfigurer runtimeConfigurer = runtimeInfo != null
      ? new DefaultAppRuntimeConfigurer(appNamespace.getId(), remoteClientFactory, runtimeInfo.getUserArguments(),
                                        runtimeInfo.getExistingAppSpec())
      : null;
    configurer = new DefaultAppConfigurer(appNamespace, artifactId, app, configString, pluginFinder,
                                          pluginInstantiator, runtimeConfigurer, runtimeInfo, featureFlagsProvider);

    T appConfig;
    Type configType = Artifacts.getConfigType(app.getClass());
    if (configString.isEmpty()) {
      // noinspection unchecked
      appConfig = ((Class<T>) configType).newInstance();
    } else {
      try {
        appConfig = gson.fromJson(configString, configType);
      } catch (JsonSyntaxException e) {
        throw new IllegalArgumentException("Invalid JSON configuration was provided. Please check the syntax.", e);
      }
    }

    try {
      ClassLoader oldClassLoader = ClassLoaders.setContextClassLoader(
        new CombineClassLoader(null, app.getClass().getClassLoader(), getClass().getClassLoader()));
      try {
        app.configure(configurer, new DefaultApplicationContext<>(appConfig));
      } finally {
        ClassLoaders.setContextClassLoader(oldClassLoader);
      }
    } catch (Throwable t) {
      Throwable rootCause = Throwables.getRootCause(t);
      if (rootCause instanceof ClassNotFoundException) {
        // Heuristic to provide better error message
        String missingClass = rootCause.getMessage();

        // If the missing class has "spark" in the name, try to see if Spark is available
        if (missingClass.startsWith("org.apache.spark.") || missingClass.startsWith("io.cdap.cdap.api.spark.")) {
          // Try to load the SparkContext class, which should be available if Spark is available in the platform
          try {
            artifactClassLoader.loadClass("org.apache.spark.SparkContext");
          } catch (ClassNotFoundException e) {
            // Spark is not available, it is most likely caused by missing Spark in the platform
            throw new IllegalStateException("Missing Spark related class " + missingClass
                                              + ". It may be caused by unavailability of Spark. "
                                              + "Please verify environment variable " + Constants.SPARK_HOME
                                              + " is set correctly", t);
          }

          // Spark is available, so the failure can be caused by an incompatible Spark version
          throw new InvalidArtifactException("Missing Spark related class " + missingClass
                                               + ". Configured to use Spark located at " + System.getenv(Constants.SPARK_HOME)
                                               + ", which may be incompatible with the one required by the application", t);
        }

        // Otherwise, the missing class is most likely due to some missing library in the artifact jar
        throw new InvalidArtifactException("Missing class " + missingClass
                                             + ". It may be caused by missing dependency jar(s) in the artifact jar.", t);
      }
      throw t;
    }
  } finally {
    try {
      DirUtils.deleteDirectoryContents(tempDir);
    } catch (IOException e) {
      LOG.warn("Exception raised when deleting directory {}", tempDir, e);
    }
  }

  ApplicationSpecification specification = configurer.createSpecification(applicationName, applicationVersion);
  AppSpecInfo appSpecInfo = new AppSpecInfo(specification, configurer.getSystemTables(), configurer.getMetadata());
  return new DefaultConfigResponse(0, appSpecInfo);
}
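The config handling in createResponse follows a common pattern: when no JSON is supplied, fall back to a default-constructed config object; otherwise parse with Gson and translate a JsonSyntaxException into a caller-friendly error. A self-contained sketch of that pattern with a hypothetical MyConfig class (plain Gson, none of the CDAP wiring) might look like this:

import com.google.gson.Gson;
import com.google.gson.JsonSyntaxException;

public class ConfigParsing {

  // hypothetical application config class with defaults
  static class MyConfig {
    String tableName = "default_table";
    int batchSize = 100;
  }

  static MyConfig parseConfig(String configString) throws Exception {
    if (configString == null || configString.isEmpty()) {
      // no config supplied: use a default-constructed instance, as createResponse does via reflection
      return MyConfig.class.getDeclaredConstructor().newInstance();
    }
    try {
      return new Gson().fromJson(configString, MyConfig.class);
    } catch (JsonSyntaxException e) {
      throw new IllegalArgumentException("Invalid JSON configuration was provided. Please check the syntax.", e);
    }
  }

  public static void main(String[] args) throws Exception {
    MyConfig defaults = parseConfig("");
    MyConfig fromJson = parseConfig("{\"tableName\":\"users\",\"batchSize\":500}");
    System.out.println(defaults.tableName + " / " + fromJson.tableName);
  }
}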
Use of io.cdap.cdap.common.InvalidArtifactException in project cdap by caskdata.
The class AppLifecycleHttpHandler, method upgradeApplications.
/**
* Upgrades a list of existing applications to use the latest version of the application artifact and plugin artifacts.
*
* <pre>
* {@code
* [
* {"name":"XYZ"},
* {"name":"ABC"},
* {"name":"FOO"},
* ]
* }
* </pre>
* The response will be an array of {@link ApplicationUpdateDetail} objects, each indicating success (200) or
* failure for the requested applications, in the same order as the request. A failure also indicates the reason
* for the error. The response is sent via ChunkResponder to continuously stream upgrade results per application.
*/
@POST
@Path("/upgrade")
@AuditPolicy(AuditDetail.REQUEST_BODY)
public void upgradeApplications(FullHttpRequest request, HttpResponder responder,
                                @PathParam("namespace-id") String namespaceId,
                                @QueryParam("artifactScope") Set<String> artifactScopes,
                                @QueryParam("allowSnapshot") boolean allowSnapshot) throws Exception {
  // TODO: (CDAP-16910) Improve batch API performance as each application upgrade is an event independent of each
  // other.
  List<ApplicationId> appIds = decodeAndValidateBatchApplicationRecord(validateNamespace(namespaceId), request);
  Set<ArtifactScope> allowedArtifactScopes = getArtifactScopes(artifactScopes);
  try (ChunkResponder chunkResponder = responder.sendChunkStart(HttpResponseStatus.OK)) {
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try (JsonWriter jsonWriter = new JsonWriter(new OutputStreamWriter(outputStream, StandardCharsets.UTF_8))) {
      jsonWriter.beginArray();
      for (ApplicationId appId : appIds) {
        ApplicationUpdateDetail updateDetail;
        try {
          applicationLifecycleService.upgradeApplication(appId, allowedArtifactScopes, allowSnapshot);
          updateDetail = new ApplicationUpdateDetail(appId);
        } catch (UnsupportedOperationException e) {
          String errorMessage = String.format("Application %s does not support upgrade.", appId);
          updateDetail = new ApplicationUpdateDetail(appId, new NotImplementedException(errorMessage));
        } catch (InvalidArtifactException | NotFoundException e) {
          updateDetail = new ApplicationUpdateDetail(appId, e);
        } catch (Exception e) {
          updateDetail = new ApplicationUpdateDetail(
            appId, new ServiceException("Upgrade failed due to internal error.", e, HttpResponseStatus.INTERNAL_SERVER_ERROR));
          LOG.error("Application upgrade failed with exception", e);
        }
        GSON.toJson(updateDetail, ApplicationUpdateDetail.class, jsonWriter);
        jsonWriter.flush();
        chunkResponder.sendChunk(Unpooled.wrappedBuffer(outputStream.toByteArray()));
        outputStream.reset();
        chunkResponder.flush();
      }
      jsonWriter.endArray();
    }
    chunkResponder.sendChunk(Unpooled.wrappedBuffer(outputStream.toByteArray()));
  }
}
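The streaming in this handler is Gson's JsonWriter emitting one array element at a time; after each element the writer is flushed so the bytes buffered so far can be sent as an HTTP chunk and the buffer reset. A standalone sketch of that flow, printing each "chunk" instead of sending it through a ChunkResponder (UpdateResult is a hypothetical stand-in for ApplicationUpdateDetail), could look like this:

import com.google.gson.Gson;
import com.google.gson.stream.JsonWriter;
import java.io.ByteArrayOutputStream;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

public class StreamedJsonArray {

  // hypothetical per-application result
  static class UpdateResult {
    String name;
    String status;
    UpdateResult(String name, String status) {
      this.name = name;
      this.status = status;
    }
  }

  public static void main(String[] args) throws Exception {
    Gson gson = new Gson();
    List<String> apps = Arrays.asList("XYZ", "ABC", "FOO");

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    try (JsonWriter jsonWriter = new JsonWriter(new OutputStreamWriter(outputStream, StandardCharsets.UTF_8))) {
      jsonWriter.beginArray();
      for (String app : apps) {
        gson.toJson(new UpdateResult(app, "SUCCESS"), UpdateResult.class, jsonWriter);
        jsonWriter.flush();
        // each flush makes the bytes written so far available; send them and reset the buffer
        System.out.println("chunk: " + outputStream.toString(StandardCharsets.UTF_8.name()));
        outputStream.reset();
      }
      jsonWriter.endArray();
    }
    // the final chunk carries the closing bracket written when the array and writer are closed
    System.out.println("chunk: " + outputStream.toString(StandardCharsets.UTF_8.name()));
  }
}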