Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class DefaultStoreTest, method testRunningInRangeSimple:
@Test
public void testRunningInRangeSimple() {
  NamespaceId ns = new NamespaceId("d");
  // each generated run id embeds its start time in milliseconds (run2 starts at 10s, run1 at 20s, ...)
  ProgramRunId run1 = ns.app("a1").program(ProgramType.SERVICE, "f1").run(RunIds.generate(20000).getId());
  ProgramRunId run2 = ns.app("a2").program(ProgramType.MAPREDUCE, "f2").run(RunIds.generate(10000).getId());
  ProgramRunId run3 = ns.app("a3").program(ProgramType.WORKER, "f3").run(RunIds.generate(40000).getId());
  ProgramRunId run4 = ns.app("a4").program(ProgramType.SERVICE, "f4").run(RunIds.generate(70000).getId());
  ProgramRunId run5 = ns.app("a5").program(ProgramType.SPARK, "f5").run(RunIds.generate(30000).getId());
  ProgramRunId run6 = ns.app("a6").program(ProgramType.WORKFLOW, "f6").run(RunIds.generate(60000).getId());
  ArtifactId artifactId = ns.artifact("testArtifact", "1.0").toApiArtifactId();
  writeStartRecord(run1, artifactId);
  writeStartRecord(run2, artifactId);
  writeStartRecord(run3, artifactId);
  writeStartRecord(run4, artifactId);
  writeStartRecord(run5, artifactId);
  writeStartRecord(run6, artifactId);
  // getRunningInRange takes its time window in seconds
  Assert.assertEquals(runsToTime(run1, run2), runIdsToTime(store.getRunningInRange(1, 30)));
  Assert.assertEquals(runsToTime(run1, run2, run5, run3), runIdsToTime(store.getRunningInRange(30, 50)));
  Assert.assertEquals(runsToTime(run1, run2, run3, run4, run5, run6), runIdsToTime(store.getRunningInRange(1, 71)));
  Assert.assertEquals(runsToTime(run1, run2, run3, run4, run5, run6), runIdsToTime(store.getRunningInRange(50, 71)));
  Assert.assertEquals(ImmutableSet.of(), runIdsToTime(store.getRunningInRange(1, 10)));
  writeStopRecord(run1, 45000);
  writeStopRecord(run3, 55000);
  writeSuspendedRecord(run5);
  Assert.assertEquals(runsToTime(run2, run3, run4, run5, run6), runIdsToTime(store.getRunningInRange(50, 71)));
}
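The helpers runsToTime and runIdsToTime are elided here, but the assertions pin down the contract of getRunningInRange: a run counts as running in a window [start, end) when it started before the window closed and had not stopped before it opened (note run2, which starts exactly at 10s, falls outside the window (1, 10)). A minimal, self-contained sketch of that predicate in plain Java; the Run record and method names are illustrative stand-ins, not CDAP's API:

import java.util.*;
import java.util.stream.*;

/** Sketch of the window predicate behind getRunningInRange; not the CDAP implementation. */
class RunningInRangeSketch {
  record Run(String id, long startSecs, Long stopSecs) {}

  // a run is "running in [startSecs, endSecs)" if it started strictly before the window
  // closed and had not stopped before the window opened (semantics inferred from the
  // assertions in the test above)
  static Set<String> runningInRange(Collection<Run> runs, long startSecs, long endSecs) {
    return runs.stream()
        .filter(r -> r.startSecs() < endSecs)
        .filter(r -> r.stopSecs() == null || r.stopSecs() >= startSecs)
        .map(Run::id)
        .collect(Collectors.toSet());
  }

  public static void main(String[] args) {
    List<Run> runs = List.of(
        new Run("run1", 20, 45L),   // stopped at 45s, before the window opens
        new Run("run2", 10, null),  // still running
        new Run("run3", 40, 55L));  // stopped at 55s, inside the window
    System.out.println(runningInRange(runs, 50, 71)); // [run2, run3]
  }
}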
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class DefaultArtifactRepository, method addSystemArtifacts:
@Override
public void addSystemArtifacts() throws Exception {
  // scan the directory for artifact .jar files and config files for those artifacts
  Map<Id.Artifact, SystemArtifactInfo> systemArtifacts = new HashMap<>();
  for (File systemArtifactDir : systemArtifactDirs) {
    for (File jarFile : DirUtils.listFiles(systemArtifactDir, "jar")) {
      // parse id from filename
      Id.Artifact artifactId;
      try {
        artifactId = Id.Artifact.parse(Id.Namespace.SYSTEM, jarFile.getName());
      } catch (IllegalArgumentException e) {
        LOG.warn(String.format("Skipping system artifact '%s' because the name is invalid: %s",
                               jarFile.getName(), e.getMessage()));
        continue;
      }
      // check for a corresponding .json config file
      String artifactFileName = jarFile.getName();
      String configFileName = artifactFileName.substring(0, artifactFileName.length() - ".jar".length()) + ".json";
      File configFile = new File(systemArtifactDir, configFileName);
      try {
        // read and parse the config file if it exists. Otherwise use an empty config with the artifact filename
        ArtifactConfig artifactConfig = configFile.isFile()
          ? configReader.read(artifactId.getNamespace(), configFile)
          : new ArtifactConfig();
        validateParentSet(artifactId, artifactConfig.getParents());
        validatePluginSet(artifactConfig.getPlugins());
        systemArtifacts.put(artifactId, new SystemArtifactInfo(artifactId, jarFile, artifactConfig));
      } catch (InvalidArtifactException e) {
        LOG.warn(String.format("Could not add system artifact '%s' because it is invalid.", artifactFileName), e);
      }
    }
  }
  // child -> parents
  Multimap<Id.Artifact, Id.Artifact> childToParents = HashMultimap.create();
  // parent -> children
  Multimap<Id.Artifact, Id.Artifact> parentToChildren = HashMultimap.create();
  Set<Id.Artifact> remainingArtifacts = new HashSet<>();
  // build mapping from child to parents and from parents to children
  for (SystemArtifactInfo child : systemArtifacts.values()) {
    Id.Artifact childId = child.getArtifactId();
    remainingArtifacts.add(childId);
    for (SystemArtifactInfo potentialParent : systemArtifacts.values()) {
      Id.Artifact potentialParentId = potentialParent.getArtifactId();
      // skip if we're looking at ourselves
      if (childId.equals(potentialParentId)) {
        continue;
      }
      if (child.getConfig().hasParent(potentialParentId)) {
        childToParents.put(childId, potentialParentId);
        parentToChildren.put(potentialParentId, childId);
      }
    }
  }
  if (!remainingArtifacts.isEmpty()) {
    ExecutorService executorService = Executors.newFixedThreadPool(
      Math.min(maxArtifactLoadParallelism, remainingArtifacts.size()),
      Threads.createDaemonThreadFactory("system-artifact-loader-%d"));
    try {
      // loop until there is no change
      boolean artifactsAdded = true;
      while (!remainingArtifacts.isEmpty() && artifactsAdded) {
        artifactsAdded = loadSystemArtifacts(executorService, systemArtifacts, remainingArtifacts,
                                             parentToChildren, childToParents);
      }
    } finally {
      executorService.shutdownNow();
    }
    if (!remainingArtifacts.isEmpty()) {
      LOG.warn("Unable to add system artifacts {} due to cyclic dependencies", Joiner.on(",").join(remainingArtifacts));
    }
  }
}
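loadSystemArtifacts is not shown, but the surrounding loop makes the strategy clear: each pass adds every artifact whose parents have all been added, and the loop runs until a pass makes no progress; anything left over must sit on a dependency cycle. A minimal single-threaded sketch of that fixed-point approach, using plain strings in place of Id.Artifact (the names and types here are illustrative, not the CDAP API):

import java.util.*;

/** Fixed-point, parent-first loading; a simplified sketch of the loop above. */
class ParentFirstLoader {
  static List<String> loadOrder(Map<String, Set<String>> childToParents) {
    Set<String> remaining = new HashSet<>(childToParents.keySet());
    Set<String> loaded = new HashSet<>();
    List<String> order = new ArrayList<>();
    boolean progress = true;
    // loop until there is no change; leftovers indicate a cycle
    while (!remaining.isEmpty() && progress) {
      progress = false;
      for (Iterator<String> it = remaining.iterator(); it.hasNext(); ) {
        String artifact = it.next();
        if (loaded.containsAll(childToParents.get(artifact))) {
          order.add(artifact);  // all parents loaded, safe to load now
          loaded.add(artifact);
          it.remove();
          progress = true;
        }
      }
    }
    if (!remaining.isEmpty()) {
      System.err.println("Cyclic dependencies: " + remaining);
    }
    return order;
  }

  public static void main(String[] args) {
    Map<String, Set<String>> deps = Map.of(
        "core", Set.of(),
        "pluginA", Set.of("core"),
        "pluginB", Set.of("core", "pluginA"));
    System.out.println(loadOrder(deps)); // [core, pluginA, pluginB]
  }
}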
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class ArtifactSelector, method select:
@Nullable
@Override
public Map.Entry<ArtifactId, PluginClass> select(SortedMap<ArtifactId, PluginClass> plugins) {
  NavigableMap<ArtifactId, PluginClass> pluginMap;
  if (plugins instanceof NavigableMap) {
    pluginMap = (NavigableMap<ArtifactId, PluginClass>) plugins;
  } else {
    pluginMap = new TreeMap<>();
    pluginMap.putAll(plugins);
  }
  // iterate from the highest artifact down, returning the first plugin whose artifact
  // matches the configured scope, name, and version range
  for (Map.Entry<ArtifactId, PluginClass> entry : pluginMap.descendingMap().entrySet()) {
    ArtifactId artifactId = entry.getKey();
    if ((scope == null || artifactId.getScope().equals(scope))
        && (name == null || artifactId.getName().equals(name))
        && (range == null || range.versionIsInRange(artifactId.getVersion()))) {
      return entry;
    }
  }
  return null;
}
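Because the map is sorted by ArtifactId, walking descendingMap() means the first entry that passes the filters is also the highest-sorting match, so "newest match wins" falls out of the iteration order. The same idiom in isolation, with plain strings standing in for ArtifactId, PluginClass, and the version-range check (all stand-ins, not CDAP types):

import java.util.*;

/** The "newest match wins" idiom from select(), reduced to plain types. */
class NewestMatchSketch {
  public static void main(String[] args) {
    NavigableMap<String, String> plugins = new TreeMap<>();
    plugins.put("mysql-1.0.0", "PluginClass@v1");
    plugins.put("mysql-1.2.0", "PluginClass@v2");
    plugins.put("mysql-2.0.0", "PluginClass@v3");

    // walk from the highest key down and take the first entry passing the filter,
    // mirroring the descendingMap() loop above; note the real code compares
    // ArtifactVersion objects semantically, not strings lexicographically
    Map.Entry<String, String> selected = plugins.descendingMap().entrySet().stream()
        .filter(e -> e.getKey().startsWith("mysql-1."))  // stand-in for the range check
        .findFirst()
        .orElse(null);
    System.out.println(selected); // mysql-1.2.0=PluginClass@v2
  }
}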
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class BatchPhaseSpecTest, method testDescription:
@Test
public void testDescription() throws Exception {
  /*
   * source1 --|
   *           |--> sink.connector
   * source2 --|
   */
  Map<String, String> props = new HashMap<>();
  PluginSpec connectorSpec =
    new PluginSpec(Constants.Connector.PLUGIN_TYPE, "connector", ImmutableMap.<String, String>of(), null);
  ArtifactId artifactId = new ArtifactId("art", new ArtifactVersion("1.0.0"), ArtifactScope.USER);
  PipelinePhase.Builder builder =
    PipelinePhase.builder(ImmutableSet.of(BatchSource.PLUGIN_TYPE, Constants.Connector.PLUGIN_TYPE))
      .addStage(StageSpec.builder("source1",
                                  new PluginSpec(BatchSource.PLUGIN_TYPE, "src", props, artifactId)).build())
      .addStage(StageSpec.builder("source2",
                                  new PluginSpec(BatchSource.PLUGIN_TYPE, "src", props, artifactId))
                  .addInputSchema("a", Schema.recordOf("stuff", Schema.Field.of("x", Schema.of(Schema.Type.INT))))
                  .build())
      .addStage(StageSpec.builder("sink.connector", connectorSpec).build())
      .addConnection("source1", "sink.connector")
      .addConnection("source2", "sink.connector");
  BatchPhaseSpec phaseSpec =
    new BatchPhaseSpec("phase-1", builder.build(), new Resources(), new Resources(), new Resources(),
                       false, false, Collections.<String, String>emptyMap(), 0,
                       Collections.<String, String>emptyMap(), false, null);
  Assert.assertEquals("Sources 'source1', 'source2' to sinks 'sink.connector'.", phaseSpec.getDescription());
}
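The asserted string suggests getDescription() quotes and joins the phase's source names and sink names. A hypothetical reconstruction of that formatting, for illustration only (not CDAP's actual implementation):

import java.util.*;
import java.util.stream.*;

/** Hypothetical formatting that would produce the asserted description. */
class PhaseDescriptionSketch {
  static String describe(Collection<String> sources, Collection<String> sinks) {
    return String.format("Sources %s to sinks %s.", quoteAndJoin(sources), quoteAndJoin(sinks));
  }

  private static String quoteAndJoin(Collection<String> names) {
    return names.stream().map(n -> "'" + n + "'").collect(Collectors.joining(", "));
  }

  public static void main(String[] args) {
    System.out.println(describe(List.of("source1", "source2"), List.of("sink.connector")));
    // Sources 'source1', 'source2' to sinks 'sink.connector'.
  }
}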
Use of io.cdap.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class PipelineSpecGeneratorTest, method testConflictingPipelineProperties:
@Test(expected = IllegalArgumentException.class)
public void testConflictingPipelineProperties() throws ValidationException {
  // populate some mock plugins
  MockPluginConfigurer pluginConfigurer = new MockPluginConfigurer();
  Set<ArtifactId> artifactIds = ImmutableSet.of(ARTIFACT_ID);
  pluginConfigurer.addMockPlugin(Action.PLUGIN_TYPE, "action1",
                                 MockPlugin.builder().putPipelineProperty("prop1", "val1").build(), artifactIds);
  pluginConfigurer.addMockPlugin(Action.PLUGIN_TYPE, "action2",
                                 MockPlugin.builder().putPipelineProperty("prop1", "val2").build(), artifactIds);
  Map<String, String> empty = ImmutableMap.of();
  ETLBatchConfig config = ETLBatchConfig.builder()
    .setTimeSchedule("* * * * *")
    .addStage(new ETLStage("a1", new ETLPlugin("action1", Action.PLUGIN_TYPE, empty)))
    .addStage(new ETLStage("a2", new ETLPlugin("action2", Action.PLUGIN_TYPE, empty)))
    .addConnection("a1", "a2")
    .setEngine(Engine.MAPREDUCE)
    .build();
  // the two actions set pipeline property 'prop1' to different values, so spec generation must fail
  new BatchPipelineSpecGenerator(NamespaceId.DEFAULT.getNamespace(), pluginConfigurer, null,
                                 ImmutableSet.of(BatchSource.PLUGIN_TYPE), ImmutableSet.of(BatchSink.PLUGIN_TYPE),
                                 Engine.MAPREDUCE, MOCK_FEATURE_FLAGS_PROVIDER).generateSpec(config);
}
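The expected IllegalArgumentException arises because spec generation folds each stage's pipeline properties into one map and two stages disagree on prop1. A minimal sketch of that merge-with-conflict-check, under the assumption that the generator merges per-stage property maps this way (illustrative, not the generator's actual code):

import java.util.*;

/** Merging stage pipeline properties, failing on conflicting values. */
class PipelinePropertyMerge {
  static Map<String, String> merge(Map<String, Map<String, String>> perStage) {
    Map<String, String> merged = new HashMap<>();
    perStage.forEach((stage, props) -> props.forEach((key, value) -> {
      String existing = merged.putIfAbsent(key, value);
      if (existing != null && !existing.equals(value)) {
        throw new IllegalArgumentException(String.format(
            "Stage '%s' sets pipeline property '%s' to '%s', conflicting with earlier value '%s'.",
            stage, key, value, existing));
      }
    }));
    return merged;
  }

  public static void main(String[] args) {
    merge(Map.of("a1", Map.of("prop1", "val1"),
                 "a2", Map.of("prop1", "val2"))); // throws IllegalArgumentException
  }
}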