Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class PipelineSpecGeneratorTest, method testPipelineProperties:
@Test
public void testPipelineProperties() {
  // populate some mock plugins that each contribute pipeline properties
  MockPluginConfigurer pluginConfigurer = new MockPluginConfigurer();
  Set<ArtifactId> artifactIds = ImmutableSet.of(ARTIFACT_ID);
  pluginConfigurer.addMockPlugin(
    Action.PLUGIN_TYPE, "action1",
    MockPlugin.builder()
      .putPipelineProperty("prop1", "val1")
      .putPipelineProperty("prop2", "val2")
      .build(),
    artifactIds);
  pluginConfigurer.addMockPlugin(
    Action.PLUGIN_TYPE, "action2",
    MockPlugin.builder()
      .putPipelineProperty("prop2", "val2")
      .build(),
    artifactIds);
  PipelineSpecGenerator specGenerator =
    new BatchPipelineSpecGenerator(pluginConfigurer,
                                   ImmutableSet.of(BatchSource.PLUGIN_TYPE),
                                   ImmutableSet.of(BatchSink.PLUGIN_TYPE),
                                   FileSet.class, DatasetProperties.EMPTY, Engine.MAPREDUCE);
  Map<String, String> empty = ImmutableMap.of();
  ETLConfig config = ETLBatchConfig.builder("* * * * *")
    .setProperties(ImmutableMap.of("system.spark.spark.test", "abc",
                                   "system.mapreduce.prop3", "val3"))
    .addStage(new ETLStage("a1", new ETLPlugin("action1", Action.PLUGIN_TYPE, empty)))
    .addStage(new ETLStage("a2", new ETLPlugin("action2", Action.PLUGIN_TYPE, empty)))
    .addConnection("a1", "a2")
    .setEngine(Engine.MAPREDUCE)
    .build();
  PipelineSpec actual = specGenerator.generateSpec(config);
  // properties from both plugins plus the MapReduce-scoped config property
  // should be merged; the Spark-scoped property should be dropped
  PipelineSpec expected = BatchPipelineSpec.builder()
    .addConnection("a1", "a2")
    .setProperties(ImmutableMap.of("prop1", "val1", "prop2", "val2", "prop3", "val3"))
    .addStage(StageSpec.builder("a1", new PluginSpec(Action.PLUGIN_TYPE, "action1", empty, ARTIFACT_ID))
                .addOutputs("a2").build())
    .addStage(StageSpec.builder("a2", new PluginSpec(Action.PLUGIN_TYPE, "action2", empty, ARTIFACT_ID))
                .addInputs("a1").build())
    .setResources(new Resources(1024))
    .setDriverResources(new Resources(1024))
    .setClientResources(new Resources(1024))
    .build();
  Assert.assertEquals(expected, actual);
}
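The interesting behavior this test exercises is the handling of the "system.<engine>." prefix: the MapReduce-scoped "system.mapreduce.prop3" is unwrapped to "prop3", while the Spark-scoped "system.spark.spark.test" is dropped entirely. Below is a minimal sketch of that resolution step, assuming a hypothetical EnginePropertyResolver helper; this is not the actual PipelineSpecGenerator code, only an illustration of the behavior the assertions imply.

import java.util.HashMap;
import java.util.Map;

public class EnginePropertyResolver {

  /**
   * Sketch: unwrap properties scoped to the chosen engine and drop
   * properties scoped to other engines. engineName is assumed lowercase,
   * e.g. "mapreduce".
   */
  public static Map<String, String> resolve(Map<String, String> rawProperties, String engineName) {
    String enginePrefix = "system." + engineName + ".";  // e.g. "system.mapreduce."
    Map<String, String> resolved = new HashMap<>();
    for (Map.Entry<String, String> entry : rawProperties.entrySet()) {
      String key = entry.getKey();
      if (key.startsWith(enginePrefix)) {
        // "system.mapreduce.prop3" -> "prop3" when the engine is MapReduce
        resolved.put(key.substring(enginePrefix.length()), entry.getValue());
      } else if (!key.startsWith("system.")) {
        // unscoped properties pass through untouched (an assumption;
        // the test above only covers prefixed keys)
        resolved.put(key, entry.getValue());
      }
      // keys scoped to other engines, e.g. "system.spark.*", are ignored
    }
    return resolved;
  }
}

With the test's config, resolve(properties, "mapreduce") would yield {"prop3": "val3"}, matching the expected spec.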
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class ArtifactStore, method addArtifactsToList:
private void addArtifactsToList(List<ArtifactDetail> artifactDetails, Row row, int limit,
                                @Nullable ArtifactRange range) throws IOException {
  // the row key is the artifact name; each column holds one version of it
  ArtifactKey artifactKey = ArtifactKey.parse(row.getRow());
  for (Map.Entry<byte[], byte[]> columnVal : row.getColumns().entrySet()) {
    // stop once the requested number of artifacts has been collected
    if (limit != Integer.MAX_VALUE && artifactDetails.size() == limit) {
      break;
    }
    String version = Bytes.toString(columnVal.getKey());
    // skip versions outside the requested range, if one was given
    if (range != null && !range.versionIsInRange(new ArtifactVersion(version))) {
      continue;
    }
    ArtifactData data = GSON.fromJson(Bytes.toString(columnVal.getValue()), ArtifactData.class);
    Id.Artifact artifactId = new NamespaceId(artifactKey.namespace)
      .artifact(artifactKey.name, version)
      .toId();
    artifactDetails.add(new ArtifactDetail(
      new ArtifactDescriptor(artifactId.toArtifactId(),
                             Locations.getLocationFromAbsolutePath(locationFactory, data.getLocationPath())),
      data.meta));
  }
}
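Since each row covers one artifact name and the method appends at most limit entries, a caller presumably drives it from a table scan, one row at a time. Here is a hedged sketch of such a caller; the scanning loop, the scanArtifacts name, and its placement in the same class are assumptions, not the actual ArtifactStore code.

// Hypothetical caller (an assumption, not ArtifactStore source): drives
// addArtifactsToList from a range scan over the artifacts table.
private List<ArtifactDetail> scanArtifacts(Scanner rowScanner, int limit,
                                           @Nullable ArtifactRange range) throws IOException {
  List<ArtifactDetail> artifactDetails = new ArrayList<>();
  try {
    Row row;
    // stop early once the limit is reached; addArtifactsToList also
    // enforces the limit within a single row
    while ((row = rowScanner.next()) != null && artifactDetails.size() < limit) {
      addArtifactsToList(artifactDetails, row, limit, range);
    }
  } finally {
    rowScanner.close();
  }
  return artifactDetails;
}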
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class ArtifactStore, method addAndSortArtifacts:
private void addAndSortArtifacts(List<ArtifactDetail> artifacts, Row row, int limit,
                                 final ArtifactSortOrder order, @Nullable ArtifactRange range) {
  ArtifactKey artifactKey = ArtifactKey.parse(row.getRow());
  // bounded heap, ordered so that its head is the first element to evict
  PriorityQueue<ArtifactDetail> queue = getPriorityQueue(limit, order);
  for (Map.Entry<byte[], byte[]> columnEntry : row.getColumns().entrySet()) {
    String version = Bytes.toString(columnEntry.getKey());
    if (range != null && !range.versionIsInRange(new ArtifactVersion(version))) {
      continue;
    }
    ArtifactData data = GSON.fromJson(Bytes.toString(columnEntry.getValue()), ArtifactData.class);
    ArtifactId artifactId = new ArtifactId(
      artifactKey.name, new ArtifactVersion(version),
      artifactKey.namespace.equals(NamespaceId.SYSTEM.getNamespace())
        ? ArtifactScope.SYSTEM : ArtifactScope.USER);
    queue.add(new ArtifactDetail(
      new ArtifactDescriptor(artifactId,
                             Locations.getLocationFromAbsolutePath(locationFactory, data.getLocationPath())),
      data.meta));
    // keep at most 'limit' entries by evicting the head of the heap
    if (limit != Integer.MAX_VALUE && queue.size() > limit) {
      queue.poll();
    }
  }
  // drain the heap (ascending heap order), then reverse into output order;
  // the original reversed artifacts.subList(0, artifacts.size()), which is
  // the whole list, so this is equivalent
  while (!queue.isEmpty()) {
    artifacts.add(queue.poll());
  }
  Collections.reverse(artifacts);
}
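The pattern here is a bounded heap: let the queue grow one past the limit, evict the head, and the queue always holds the best limit entries seen so far; draining then yields them in the reverse of the desired order, hence the final reverse. A standalone, generic sketch of the same technique follows; the names are illustrative, not CDAP code.

import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

public class BoundedTopN {

  /** Keeps the n largest elements seen, returned in descending order. */
  public static <T> List<T> topN(Iterable<T> items, int n, Comparator<T> comparator) {
    // Min-heap on the given order: the head is always the weakest of the
    // retained elements, so exceeding the bound evicts exactly that one.
    PriorityQueue<T> queue = new PriorityQueue<>(comparator);
    for (T item : items) {
      queue.add(item);
      if (queue.size() > n) {
        queue.poll(); // evict the smallest, mirroring addAndSortArtifacts
      }
    }
    List<T> result = new ArrayList<>(queue.size());
    while (!queue.isEmpty()) {
      result.add(queue.poll()); // drains in ascending order
    }
    Collections.reverse(result); // flip to descending, as the store does
    return result;
  }
}

This keeps memory at O(n) regardless of how many versions a row holds, which is why the store uses a heap rather than sorting the full column set.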
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class ArtifactRepository, method getArtifactsInfo:
/**
 * Returns the list of {@link ArtifactInfo} in the given namespace.
 *
 * @param namespace the namespace to list artifacts for
 * @return an unmodifiable list of {@link ArtifactInfo}
 * @throws Exception if there was an error reading artifacts from the store
 */
public List<ArtifactInfo> getArtifactsInfo(NamespaceId namespace) throws Exception {
  final List<ArtifactDetail> artifactDetails = artifactStore.getArtifacts(namespace);
  List<ArtifactInfo> artifactInfoList =
    Lists.transform(artifactDetails, new Function<ArtifactDetail, ArtifactInfo>() {
      @Nullable
      @Override
      public ArtifactInfo apply(@Nullable ArtifactDetail input) {
        // transform each ArtifactDetail into an ArtifactInfo
        ArtifactId artifactId = input.getDescriptor().getArtifactId();
        return new ArtifactInfo(artifactId.getName(),
                                artifactId.getVersion().getVersion(),
                                artifactId.getScope(),
                                input.getMeta().getClasses(),
                                input.getMeta().getProperties(),
                                input.getMeta().getUsableBy());
      }
    });
  // TODO: CDAP-11560 - filtering should happen in the artifact store
  return Collections.unmodifiableList(filterAuthorizedArtifactInfos(artifactInfoList, namespace));
}
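One design point worth noting: Guava's Lists.transform returns a lazy view, so the conversion function runs again on every element access, and repeated iteration repeats the work. If callers iterate the result more than once, an eager copy avoids that. A sketch of an eager, stream-based alternative is below; the rewrite is an assumption, not the CDAP code, though the ArtifactInfo constructor arguments mirror the original exactly.

// Eager, stream-based alternative to the lazy Lists.transform view.
// Requires java.util.stream.Collectors.
List<ArtifactInfo> artifactInfoList = artifactDetails.stream()
    .map(detail -> {
      ArtifactId artifactId = detail.getDescriptor().getArtifactId();
      return new ArtifactInfo(artifactId.getName(),
                              artifactId.getVersion().getVersion(),
                              artifactId.getScope(),
                              detail.getMeta().getClasses(),
                              detail.getMeta().getProperties(),
                              detail.getMeta().getUsableBy());
    })
    .collect(Collectors.toList());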
Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.
From the class PipelineSpecGeneratorTest, method testConflictingPipelineProperties:
@Test(expected = IllegalArgumentException.class)
public void testConflictingPipelineProperties() {
  // populate two mock plugins that set the same pipeline property to different values
  MockPluginConfigurer pluginConfigurer = new MockPluginConfigurer();
  Set<ArtifactId> artifactIds = ImmutableSet.of(ARTIFACT_ID);
  pluginConfigurer.addMockPlugin(
    Action.PLUGIN_TYPE, "action1",
    MockPlugin.builder().putPipelineProperty("prop1", "val1").build(),
    artifactIds);
  pluginConfigurer.addMockPlugin(
    Action.PLUGIN_TYPE, "action2",
    MockPlugin.builder().putPipelineProperty("prop1", "val2").build(),
    artifactIds);
  PipelineSpecGenerator specGenerator =
    new BatchPipelineSpecGenerator(pluginConfigurer,
                                   ImmutableSet.of(BatchSource.PLUGIN_TYPE),
                                   ImmutableSet.of(BatchSink.PLUGIN_TYPE),
                                   FileSet.class, DatasetProperties.EMPTY, Engine.MAPREDUCE);
  Map<String, String> empty = ImmutableMap.of();
  ETLConfig config = ETLBatchConfig.builder("* * * * *")
    .addStage(new ETLStage("a1", new ETLPlugin("action1", Action.PLUGIN_TYPE, empty)))
    .addStage(new ETLStage("a2", new ETLPlugin("action2", Action.PLUGIN_TYPE, empty)))
    .addConnection("a1", "a2")
    .setEngine(Engine.MAPREDUCE)
    .build();
  // should throw: prop1 is set to both "val1" and "val2"
  specGenerator.generateSpec(config);
}
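This is the failure mode of the property merge exercised in testPipelineProperties above: two stages may agree on a value (both set prop2 to "val2" there), but disagreement must abort spec generation. A minimal sketch of such merge-with-conflict-detection logic follows, assuming a hypothetical PipelinePropertyMerger; it is not the actual generator code.

import java.util.HashMap;
import java.util.Map;

public class PipelinePropertyMerger {

  /** Sketch: merge per-stage properties, rejecting conflicting values. */
  public static Map<String, String> merge(Iterable<Map<String, String>> stageProperties) {
    Map<String, String> merged = new HashMap<>();
    for (Map<String, String> props : stageProperties) {
      for (Map.Entry<String, String> entry : props.entrySet()) {
        // put returns the previous value, so a non-null, unequal previous
        // value means two stages disagree on the same property
        String previous = merged.put(entry.getKey(), entry.getValue());
        if (previous != null && !previous.equals(entry.getValue())) {
          throw new IllegalArgumentException(String.format(
            "Conflicting values for pipeline property '%s': '%s' vs '%s'",
            entry.getKey(), previous, entry.getValue()));
        }
      }
    }
    return merged;
  }
}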