Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class ArtifactRepository, method addSystemArtifacts.
/**
 * Scan all files in the local system artifact directory, looking for jar files and adding them as system artifacts.
 * If the artifact already exists it will not be added again unless it is a snapshot version.
 *
 * @throws IOException if there was some IO error adding the system artifacts
 */
public void addSystemArtifacts() throws Exception {
  // to add system artifacts, users should have write privileges on the system namespace
  Principal principal = authenticationContext.getPrincipal();
  authorizationEnforcer.enforce(NamespaceId.SYSTEM, principal, Action.WRITE);

  // scan the directory for artifact .jar files and config files for those artifacts
  List<SystemArtifactInfo> systemArtifacts = new ArrayList<>();
  for (File systemArtifactDir : systemArtifactDirs) {
    for (File jarFile : DirUtils.listFiles(systemArtifactDir, "jar")) {
      // parse id from filename
      Id.Artifact artifactId;
      try {
        artifactId = Id.Artifact.parse(Id.Namespace.SYSTEM, jarFile.getName());
      } catch (IllegalArgumentException e) {
        LOG.warn(String.format("Skipping system artifact '%s' because the name is invalid.", jarFile.getName()), e);
        continue;
      }

      // first revoke any orphaned privileges
      co.cask.cdap.proto.id.ArtifactId artifact = artifactId.toEntityId();
      privilegesManager.revoke(artifact);
      // then grant all on the artifact
      privilegesManager.grant(artifact, principal, EnumSet.allOf(Action.class));

      // check for a corresponding .json config file
      String artifactFileName = jarFile.getName();
      String configFileName = artifactFileName.substring(0, artifactFileName.length() - ".jar".length()) + ".json";
      File configFile = new File(systemArtifactDir, configFileName);
      try {
        // read and parse the config file if it exists. Otherwise use an empty config with the artifact filename
        ArtifactConfig artifactConfig = configFile.isFile()
          ? configReader.read(artifactId.getNamespace(), configFile)
          : new ArtifactConfig();

        validateParentSet(artifactId, artifactConfig.getParents());
        validatePluginSet(artifactConfig.getPlugins());
        systemArtifacts.add(new SystemArtifactInfo(artifactId, jarFile, artifactConfig));
      } catch (InvalidArtifactException e) {
        LOG.warn(String.format("Could not add system artifact '%s' because it is invalid.", artifactFileName), e);
        // since adding the artifact failed, revoke the privileges again, since they may now be orphaned
        privilegesManager.revoke(artifact);
      }
    }
  }

  // taking advantage of the fact that we only have 1 level of dependencies,
  // so we can add all the parents first, then we know it's safe to add everything else
  // add all parents
  Set<Id.Artifact> parents = new HashSet<>();
  for (SystemArtifactInfo child : systemArtifacts) {
    Id.Artifact childId = child.getArtifactId();
    for (SystemArtifactInfo potentialParent : systemArtifacts) {
      Id.Artifact potentialParentId = potentialParent.getArtifactId();
      // skip if we're looking at ourselves
      if (childId.equals(potentialParentId)) {
        continue;
      }
      if (child.getConfig().hasParent(potentialParentId)) {
        parents.add(potentialParentId);
      }
    }
  }

  // add all parents first
  for (SystemArtifactInfo systemArtifact : systemArtifacts) {
    if (parents.contains(systemArtifact.getArtifactId())) {
      addSystemArtifact(systemArtifact);
    }
  }

  // add children next
  for (SystemArtifactInfo systemArtifact : systemArtifacts) {
    if (!parents.contains(systemArtifact.getArtifactId())) {
      addSystemArtifact(systemArtifact);
    }
  }
}
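The scan expects each system artifact jar to be named so that Id.Artifact.parse can recover its name and version, with an optional config file of the same base name alongside it. A minimal sketch of that convention (the file names are hypothetical, assuming the usual <name>-<version>.jar form):

// Hypothetical names for illustration; assumes the usual <name>-<version>.jar naming convention.
Id.Artifact parsed = Id.Artifact.parse(Id.Namespace.SYSTEM, "my-plugins-1.0.0.jar");
// parsed.getName()    -> "my-plugins"
// parsed.getVersion() -> 1.0.0
// an optional "my-plugins-1.0.0.json" next to the jar supplies the ArtifactConfig
// (the parents and plugins checked by validateParentSet/validatePluginSet above)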
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class ArtifactRepository, method getArtifactDetails.
/**
 * Get all artifact details that match artifacts in the given ranges.
 *
 * @param range the range to match artifacts in
 * @param limit the maximum number of results to return
 * @param order the sort order of the results
 * @return an unmodifiable list of all artifacts that match the given ranges. If none exist, an empty list is returned
 */
public List<ArtifactDetail> getArtifactDetails(final ArtifactRange range, int limit,
                                               ArtifactSortOrder order) throws Exception {
  List<ArtifactDetail> artifacts = artifactStore.getArtifacts(range, limit, order);

  // No authorization for system artifacts
  if (NamespaceId.SYSTEM.getNamespace().equals(range.getNamespace())) {
    return artifacts;
  }

  Principal principal = authenticationContext.getPrincipal();
  final Predicate<EntityId> filter = authorizationEnforcer.createFilter(principal);
  return Lists.newArrayList(Iterables.filter(artifacts, new com.google.common.base.Predicate<ArtifactDetail>() {

    @Override
    public boolean apply(ArtifactDetail artifactDetail) {
      ArtifactId artifactId = artifactDetail.getDescriptor().getArtifactId();
      return filter.apply(new NamespaceId(range.getNamespace())
                            .artifact(artifactId.getName(), artifactId.getVersion().getVersion()));
    }
  }));
}
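Since com.google.common.base.Predicate has a single abstract method, the anonymous class above could equally be written as a lambda on Java 8+. A sketch of the same filter, behavior unchanged:

return Lists.newArrayList(Iterables.filter(artifacts, artifactDetail -> {
  // keep only artifacts the principal is allowed to see in this namespace
  ArtifactId artifactId = artifactDetail.getDescriptor().getArtifactId();
  return filter.apply(new NamespaceId(range.getNamespace())
                        .artifact(artifactId.getName(), artifactId.getVersion().getVersion()));
}));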
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class ArtifactSelector, method select.
@Override
public Map.Entry<ArtifactId, PluginClass> select(SortedMap<ArtifactId, PluginClass> plugins) {
  NavigableMap<ArtifactId, PluginClass> pluginMap;
  if (plugins instanceof NavigableMap) {
    pluginMap = (NavigableMap<ArtifactId, PluginClass>) plugins;
  } else {
    pluginMap = new TreeMap<>();
    pluginMap.putAll(plugins);
  }

  for (Map.Entry<ArtifactId, PluginClass> entry : pluginMap.descendingMap().entrySet()) {
    ArtifactId artifactId = entry.getKey();
    if ((scope == null || artifactId.getScope().equals(scope))
      && (name == null || artifactId.getName().equals(name))
      && (range == null || range.versionIsInRange(artifactId.getVersion()))) {
      return entry;
    }
  }
  throw new IllegalArgumentException(errMsg);
}
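Because select iterates descendingMap(), it walks from the highest ArtifactId key downward, so the first entry passing the scope/name/range checks is also the newest matching artifact. A minimal sketch of that ordering (String values stand in for PluginClass to keep it self-contained, and it assumes ArtifactId's natural ordering sorts same-named artifacts by version):

// String values stand in for PluginClass; the point is only the iteration order.
NavigableMap<ArtifactId, String> byArtifact = new TreeMap<>();
byArtifact.put(new ArtifactId("my-plugins", new ArtifactVersion("1.0.0"), ArtifactScope.USER), "v1 plugin");
byArtifact.put(new ArtifactId("my-plugins", new ArtifactVersion("2.0.0"), ArtifactScope.USER), "v2 plugin");

// descending iteration sees 2.0.0 before 1.0.0, so the newest matching version is picked first
for (Map.Entry<ArtifactId, String> entry : byArtifact.descendingMap().entrySet()) {
  System.out.println(entry.getKey().getVersion().getVersion() + " -> " + entry.getValue());
}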
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class PluginService, method getPluginEndpoint.
private PluginEndpoint getPluginEndpoint(NamespaceId namespace, ArtifactDetail artifactDetail, String pluginType,
                                         String pluginName, ArtifactDescriptor parentArtifactDescriptor,
                                         Set<ArtifactRange> parentArtifactRanges, String methodName)
  throws NotFoundException, IOException, ClassNotFoundException {
  Id.Artifact artifactId = Id.Artifact.from(namespace.toId(), artifactDetail.getDescriptor().getArtifactId());
  Set<PluginClass> pluginClasses = artifactDetail.getMeta().getClasses().getPlugins();
  PluginClass pluginClass = null;

  for (PluginClass plugin : pluginClasses) {
    if (plugin.getName().equals(pluginName) && plugin.getType().equals(pluginType)) {
      // plugin type and name matched, next check for endpoint method presence
      if (plugin.getEndpoints() == null || !plugin.getEndpoints().contains(methodName)) {
        throw new NotFoundException(String.format("Plugin with type: %s name: %s found, " +
                                                     "but Endpoint %s was not found", pluginType, pluginName, methodName));
      }
      pluginClass = plugin;
    }
  }

  if (pluginClass == null) {
    throw new NotFoundException(String.format("No Plugin with type : %s, name: %s was found", pluginType, pluginName));
  }

  // initialize parent classloader and plugin instantiator
  Instantiators instantiators = this.instantiators.getUnchecked(parentArtifactDescriptor);
  PluginInstantiator pluginInstantiator = instantiators.getPluginInstantiator(artifactDetail, artifactId.toArtifactId());

  // we pass the parent artifact to endpoint plugin context,
  // as plugin method will use this context to load other plugins.
  DefaultEndpointPluginContext defaultEndpointPluginContext =
    new DefaultEndpointPluginContext(namespace, artifactRepository, pluginInstantiator, parentArtifactRanges);

  return getPluginEndpoint(pluginInstantiator, artifactId, pluginClass, methodName, defaultEndpointPluginContext);
}
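On Java 8 the same lookup could be sketched with streams. Note this is not exactly equivalent to the loop above: the endpoint check here is applied only to the plugin that ends up selected, rather than to every type/name match.

// A sketch only; the endpoint check is deferred to the finally-selected plugin.
PluginClass selected = pluginClasses.stream()
  .filter(p -> p.getName().equals(pluginName) && p.getType().equals(pluginType))
  .reduce((first, second) -> second)   // last match wins, as in the loop above
  .orElseThrow(() -> new NotFoundException(
    String.format("No Plugin with type : %s, name: %s was found", pluginType, pluginName)));

if (selected.getEndpoints() == null || !selected.getEndpoints().contains(methodName)) {
  throw new NotFoundException(String.format("Plugin with type: %s name: %s found, " +
                                               "but Endpoint %s was not found", pluginType, pluginName, methodName));
}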
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
The class MapReduceContextConfigTest, method testManyMacrosInAppSpec.
@Test
public void testManyMacrosInAppSpec() {
  Configuration hConf = new Configuration();
  MapReduceContextConfig cfg = new MapReduceContextConfig(hConf);

  StringBuilder appCfg = new StringBuilder();
  for (int i = 0; i < 100; i++) {
    appCfg.append("${").append(i).append("}");
    hConf.setInt(String.valueOf(i), i);
  }

  ApplicationSpecification appSpec = new DefaultApplicationSpecification(
    "name", "desc", appCfg.toString(),
    new ArtifactId("artifact", new ArtifactVersion("1.0.0"), ArtifactScope.USER),
    Collections.<String, StreamSpecification>emptyMap(),
    Collections.<String, String>emptyMap(),
    Collections.<String, DatasetCreationSpec>emptyMap(),
    Collections.<String, FlowSpecification>emptyMap(),
    Collections.<String, MapReduceSpecification>emptyMap(),
    Collections.<String, SparkSpecification>emptyMap(),
    Collections.<String, WorkflowSpecification>emptyMap(),
    Collections.<String, ServiceSpecification>emptyMap(),
    Collections.<String, ScheduleSpecification>emptyMap(),
    Collections.<String, ScheduleCreationSpec>emptyMap(),
    Collections.<String, WorkerSpecification>emptyMap(),
    Collections.<String, Plugin>emptyMap());
  cfg.setApplicationSpecification(appSpec);

  Assert.assertEquals(appSpec.getConfiguration(), cfg.getApplicationSpecification().getConfiguration());
}
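The test stores macros such as "${0}" in the application specification while also defining keys "0" through "99" in the Hadoop Configuration, then asserts the macros come back untouched. This matters because Configuration.get() expands ${...} references against other keys, while getRaw() returns the stored text verbatim, which is presumably what MapReduceContextConfig relies on when reading the spec back. A minimal sketch of that Configuration behavior:

// Hadoop Configuration variable expansion vs. raw access.
Configuration conf = new Configuration();
conf.setInt("0", 0);
conf.set("spec", "${0}");

System.out.println(conf.get("spec"));    // prints "0"    -- ${0} is expanded against the key "0"
System.out.println(conf.getRaw("spec")); // prints "${0}" -- the raw value is preserved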