
Example 11 with ArtifactId

Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.

Class PipelineSpecGeneratorTest, method testPipelineProperties.

@Test
public void testPipelineProperties() {
    // populate some mock plugins
    MockPluginConfigurer pluginConfigurer = new MockPluginConfigurer();
    Set<ArtifactId> artifactIds = ImmutableSet.of(ARTIFACT_ID);
    pluginConfigurer.addMockPlugin(
        Action.PLUGIN_TYPE, "action1",
        MockPlugin.builder().putPipelineProperty("prop1", "val1").putPipelineProperty("prop2", "val2").build(),
        artifactIds);
    pluginConfigurer.addMockPlugin(
        Action.PLUGIN_TYPE, "action2",
        MockPlugin.builder().putPipelineProperty("prop2", "val2").build(),
        artifactIds);
    PipelineSpecGenerator specGenerator = new BatchPipelineSpecGenerator(
        pluginConfigurer,
        ImmutableSet.of(BatchSource.PLUGIN_TYPE), ImmutableSet.of(BatchSink.PLUGIN_TYPE),
        FileSet.class, DatasetProperties.EMPTY, Engine.MAPREDUCE);
    Map<String, String> empty = ImmutableMap.of();
    ETLConfig config = ETLBatchConfig.builder("* * * * *")
        .setProperties(ImmutableMap.of("system.spark.spark.test", "abc", "system.mapreduce.prop3", "val3"))
        .addStage(new ETLStage("a1", new ETLPlugin("action1", Action.PLUGIN_TYPE, empty)))
        .addStage(new ETLStage("a2", new ETLPlugin("action2", Action.PLUGIN_TYPE, empty)))
        .addConnection("a1", "a2")
        .setEngine(Engine.MAPREDUCE)
        .build();
    PipelineSpec actual = specGenerator.generateSpec(config);
    PipelineSpec expected = BatchPipelineSpec.builder()
        .addConnection("a1", "a2")
        .setProperties(ImmutableMap.of("prop1", "val1", "prop2", "val2", "prop3", "val3"))
        .addStage(StageSpec.builder("a1", new PluginSpec(Action.PLUGIN_TYPE, "action1", empty, ARTIFACT_ID))
                    .addOutputs("a2").build())
        .addStage(StageSpec.builder("a2", new PluginSpec(Action.PLUGIN_TYPE, "action2", empty, ARTIFACT_ID))
                    .addInputs("a1").build())
        .setResources(new Resources(1024))
        .setDriverResources(new Resources(1024))
        .setClientResources(new Resources(1024))
        .build();
    Assert.assertEquals(expected, actual);
}
Also used: ArtifactId (co.cask.cdap.api.artifact.ArtifactId), BatchPipelineSpecGenerator (co.cask.cdap.etl.batch.BatchPipelineSpecGenerator), ETLPlugin (co.cask.cdap.etl.proto.v2.ETLPlugin), MockPluginConfigurer (co.cask.cdap.etl.common.MockPluginConfigurer), ETLConfig (co.cask.cdap.etl.proto.v2.ETLConfig), ETLStage (co.cask.cdap.etl.proto.v2.ETLStage), BatchPipelineSpec (co.cask.cdap.etl.batch.BatchPipelineSpec), Resources (co.cask.cdap.api.Resources), Test (org.junit.Test)
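The behavior this test pins down is how pipeline properties are assembled: properties declared by the plugins themselves are merged with config properties whose "system.<engine>." prefix matches the selected engine (the prefix is stripped), while properties scoped to another engine are dropped. Below is a minimal plain-Java sketch of that merging as implied by the expected spec above; it is illustrative only, not CDAP's actual PipelineSpecGenerator code, and the class and method names are made up.

import java.util.Map;
import java.util.TreeMap;

public class PipelinePropertyMergeSketch {

    // merges plugin-declared pipeline properties with engine-scoped config properties
    public static Map<String, String> merge(Map<String, String> pluginProps,
                                            Map<String, String> configProps,
                                            String engineName) {
        Map<String, String> result = new TreeMap<>(pluginProps);
        String enginePrefix = "system." + engineName + ".";
        for (Map.Entry<String, String> entry : configProps.entrySet()) {
            String key = entry.getKey();
            if (key.startsWith(enginePrefix)) {
                // "system.mapreduce.prop3" becomes "prop3" when the engine is mapreduce
                result.put(key.substring(enginePrefix.length()), entry.getValue());
            }
            // properties scoped to another engine, e.g. "system.spark.*", are ignored
        }
        return result;
    }

    public static void main(String[] args) {
        Map<String, String> pluginProps = new TreeMap<>();
        pluginProps.put("prop1", "val1");
        pluginProps.put("prop2", "val2");
        Map<String, String> configProps = new TreeMap<>();
        configProps.put("system.spark.spark.test", "abc");
        configProps.put("system.mapreduce.prop3", "val3");
        // prints {prop1=val1, prop2=val2, prop3=val3}, matching the expected spec above
        System.out.println(merge(pluginProps, configProps, "mapreduce"));
    }
}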

Example 12 with ArtifactId

Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.

Class ArtifactStore, method addArtifactsToList.

private void addArtifactsToList(List<ArtifactDetail> artifactDetails, Row row, int limit, @Nullable ArtifactRange range) throws IOException {
    ArtifactKey artifactKey = ArtifactKey.parse(row.getRow());
    for (Map.Entry<byte[], byte[]> columnVal : row.getColumns().entrySet()) {
        if (limit != Integer.MAX_VALUE && artifactDetails.size() == limit) {
            break;
        }
        String version = Bytes.toString(columnVal.getKey());
        if (range != null && !range.versionIsInRange(new ArtifactVersion(version))) {
            continue;
        }
        ArtifactData data = GSON.fromJson(Bytes.toString(columnVal.getValue()), ArtifactData.class);
        Id.Artifact artifactId = new NamespaceId(artifactKey.namespace)
            .artifact(artifactKey.name, version)
            .toId();
        artifactDetails.add(new ArtifactDetail(
            new ArtifactDescriptor(artifactId.toArtifactId(),
                                   Locations.getLocationFromAbsolutePath(locationFactory, data.getLocationPath())),
            data.meta));
    }
}
Also used: ArtifactVersion (co.cask.cdap.api.artifact.ArtifactVersion), ArtifactId (co.cask.cdap.api.artifact.ArtifactId), Id (co.cask.cdap.proto.Id), NamespaceId (co.cask.cdap.proto.id.NamespaceId), DatasetId (co.cask.cdap.proto.id.DatasetId), Map (java.util.Map), SortedMap (java.util.SortedMap), TreeMap (java.util.TreeMap)
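The line worth noticing here is the id conversion chain: the row key only yields a namespace, name and version, and the api-level ArtifactId that ArtifactDescriptor expects is obtained by going through NamespaceId and the Id.Artifact form. A minimal sketch of just that chain, reusing only the calls shown above (the wrapper class and method name are illustrative):

import co.cask.cdap.api.artifact.ArtifactId;
import co.cask.cdap.proto.Id;
import co.cask.cdap.proto.id.NamespaceId;

public class ArtifactIdConversionSketch {

    // namespace + name + version -> api-level ArtifactId, via the proto-level ids
    public static ArtifactId toApiArtifactId(String namespace, String name, String version) {
        Id.Artifact idArtifact = new NamespaceId(namespace).artifact(name, version).toId();
        return idArtifact.toArtifactId();
    }
}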

Example 13 with ArtifactId

Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.

Class ArtifactStore, method addAndSortArtifacts.

private void addAndSortArtifacts(List<ArtifactDetail> artifacts, Row row, int limit, final ArtifactSortOrder order, @Nullable ArtifactRange range) {
    ArtifactKey artifactKey = ArtifactKey.parse(row.getRow());
    PriorityQueue<ArtifactDetail> queue = getPriorityQueue(limit, order);
    for (Map.Entry<byte[], byte[]> columnEntry : row.getColumns().entrySet()) {
        String version = Bytes.toString(columnEntry.getKey());
        if (range != null && !range.versionIsInRange(new ArtifactVersion(version))) {
            continue;
        }
        ArtifactData data = GSON.fromJson(Bytes.toString(columnEntry.getValue()), ArtifactData.class);
        ArtifactId artifactId = new ArtifactId(artifactKey.name, new ArtifactVersion(version),
            artifactKey.namespace.equals(NamespaceId.SYSTEM.getNamespace()) ? ArtifactScope.SYSTEM : ArtifactScope.USER);
        queue.add(new ArtifactDetail(
            new ArtifactDescriptor(artifactId, Locations.getLocationFromAbsolutePath(locationFactory, data.getLocationPath())),
            data.meta));
        if (limit != Integer.MAX_VALUE && queue.size() > limit) {
            queue.poll();
        }
    }
    while (!queue.isEmpty()) {
        artifacts.add(queue.poll());
    }
    Collections.reverse(artifacts.subList(0, artifacts.size()));
}
Also used: ArtifactVersion (co.cask.cdap.api.artifact.ArtifactVersion), ArtifactId (co.cask.cdap.api.artifact.ArtifactId), Map (java.util.Map), SortedMap (java.util.SortedMap), TreeMap (java.util.TreeMap)
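In contrast to Example 12, this method builds the api-level ArtifactId directly and derives the scope from the namespace: artifacts in the system namespace are SYSTEM-scoped, everything else is USER-scoped. A minimal sketch of that construction on its own, reusing only the calls shown above (the factory class and method name are illustrative):

import co.cask.cdap.api.artifact.ArtifactId;
import co.cask.cdap.api.artifact.ArtifactScope;
import co.cask.cdap.api.artifact.ArtifactVersion;
import co.cask.cdap.proto.id.NamespaceId;

public class ArtifactIdFactorySketch {

    // the scope is SYSTEM only for artifacts that live in the system namespace
    public static ArtifactId create(String namespace, String name, String version) {
        ArtifactScope scope = namespace.equals(NamespaceId.SYSTEM.getNamespace())
            ? ArtifactScope.SYSTEM
            : ArtifactScope.USER;
        return new ArtifactId(name, new ArtifactVersion(version), scope);
    }
}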

Example 14 with ArtifactId

Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.

Class ArtifactRepository, method getArtifactsInfo.

/**
 * Returns the list of {@link ArtifactInfo} in the given namespace.
 *
 * @param namespace the namespace to list artifacts for
 * @return an unmodifiable list of {@link ArtifactInfo}, filtered by authorization
 * @throws Exception if the artifacts cannot be read from the artifact store or authorization filtering fails
 */
public List<ArtifactInfo> getArtifactsInfo(NamespaceId namespace) throws Exception {
    final List<ArtifactDetail> artifactDetails = artifactStore.getArtifacts(namespace);
    List<ArtifactInfo> artifactInfoList = Lists.transform(artifactDetails, new Function<ArtifactDetail, ArtifactInfo>() {

        @Nullable
        @Override
        public ArtifactInfo apply(@Nullable ArtifactDetail input) {
            // transform artifactDetail to artifactInfo
            ArtifactId artifactId = input.getDescriptor().getArtifactId();
            return new ArtifactInfo(artifactId.getName(), artifactId.getVersion().getVersion(),
                                    artifactId.getScope(), input.getMeta().getClasses(),
                                    input.getMeta().getProperties(), input.getMeta().getUsableBy());
        }
    });
    // todo - CDAP-11560 should filter in artifact store
    return Collections.unmodifiableList(filterAuthorizedArtifactInfos(artifactInfoList, namespace));
}
Also used: ArtifactInfo (co.cask.cdap.api.artifact.ArtifactInfo), ArtifactId (co.cask.cdap.api.artifact.ArtifactId), Nullable (javax.annotation.Nullable)
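Reading the ArtifactId back out is the mirror image of the previous examples: getName(), getVersion().getVersion() and getScope() supply the first three fields of ArtifactInfo. As a side note, Lists.transform produces a lazy Guava view; the same mapping written eagerly with Java 8 streams would look roughly like the sketch below. The wrapper class is illustrative, and the ArtifactDetail import is assumed to share the package shown for ArtifactDescriptor in the aggregations list.

import java.util.List;
import java.util.stream.Collectors;

import co.cask.cdap.api.artifact.ArtifactId;
import co.cask.cdap.api.artifact.ArtifactInfo;
import co.cask.cdap.internal.app.runtime.artifact.ArtifactDetail;

public class ArtifactInfoMappingSketch {

    // eager equivalent of the Lists.transform call in getArtifactsInfo
    static List<ArtifactInfo> toArtifactInfos(List<ArtifactDetail> artifactDetails) {
        return artifactDetails.stream()
            .map(input -> {
                ArtifactId artifactId = input.getDescriptor().getArtifactId();
                return new ArtifactInfo(artifactId.getName(), artifactId.getVersion().getVersion(),
                                        artifactId.getScope(), input.getMeta().getClasses(),
                                        input.getMeta().getProperties(), input.getMeta().getUsableBy());
            })
            .collect(Collectors.toList());
    }
}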

Example 15 with ArtifactId

Use of co.cask.cdap.api.artifact.ArtifactId in project cdap by caskdata.

Class PipelineSpecGeneratorTest, method testConflictingPipelineProperties.

@Test(expected = IllegalArgumentException.class)
public void testConflictingPipelineProperties() {
    // populate some mock plugins; both set "prop1", but to different values
    MockPluginConfigurer pluginConfigurer = new MockPluginConfigurer();
    Set<ArtifactId> artifactIds = ImmutableSet.of(ARTIFACT_ID);
    pluginConfigurer.addMockPlugin(
        Action.PLUGIN_TYPE, "action1",
        MockPlugin.builder().putPipelineProperty("prop1", "val1").build(), artifactIds);
    pluginConfigurer.addMockPlugin(
        Action.PLUGIN_TYPE, "action2",
        MockPlugin.builder().putPipelineProperty("prop1", "val2").build(), artifactIds);
    PipelineSpecGenerator specGenerator = new BatchPipelineSpecGenerator(
        pluginConfigurer,
        ImmutableSet.of(BatchSource.PLUGIN_TYPE), ImmutableSet.of(BatchSink.PLUGIN_TYPE),
        FileSet.class, DatasetProperties.EMPTY, Engine.MAPREDUCE);
    Map<String, String> empty = ImmutableMap.of();
    ETLConfig config = ETLBatchConfig.builder("* * * * *")
        .addStage(new ETLStage("a1", new ETLPlugin("action1", Action.PLUGIN_TYPE, empty)))
        .addStage(new ETLStage("a2", new ETLPlugin("action2", Action.PLUGIN_TYPE, empty)))
        .addConnection("a1", "a2")
        .setEngine(Engine.MAPREDUCE)
        .build();
    // the conflicting "prop1" values make spec generation throw IllegalArgumentException
    specGenerator.generateSpec(config);
}
Also used: ArtifactId (co.cask.cdap.api.artifact.ArtifactId), BatchPipelineSpecGenerator (co.cask.cdap.etl.batch.BatchPipelineSpecGenerator), ETLStage (co.cask.cdap.etl.proto.v2.ETLStage), ETLPlugin (co.cask.cdap.etl.proto.v2.ETLPlugin), MockPluginConfigurer (co.cask.cdap.etl.common.MockPluginConfigurer), ETLConfig (co.cask.cdap.etl.proto.v2.ETLConfig), Test (org.junit.Test)
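Together with Example 11, this test fixes the merge contract for pipeline properties: two plugins may declare the same property with the same value, but setting the same key to different values (prop1=val1 versus prop1=val2 here) must fail with an IllegalArgumentException. An illustrative plain-Java sketch of that check, not CDAP's actual implementation; the class and method names are made up.

import java.util.HashMap;
import java.util.Map;

public class PipelinePropertyConflictSketch {

    // folds one stage's pipeline properties into the running merge, rejecting conflicting values
    public static Map<String, String> mergeStageProperties(Map<String, String> mergedSoFar,
                                                           Map<String, String> stageProps,
                                                           String stageName) {
        Map<String, String> result = new HashMap<>(mergedSoFar);
        for (Map.Entry<String, String> entry : stageProps.entrySet()) {
            String existing = result.get(entry.getKey());
            if (existing != null && !existing.equals(entry.getValue())) {
                throw new IllegalArgumentException(String.format(
                    "Stage '%s' sets pipeline property '%s' to '%s', but it is already set to '%s'.",
                    stageName, entry.getKey(), entry.getValue(), existing));
            }
            result.put(entry.getKey(), entry.getValue());
        }
        return result;
    }
}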

Aggregations

ArtifactId (co.cask.cdap.api.artifact.ArtifactId): 25 usages
ArtifactVersion (co.cask.cdap.api.artifact.ArtifactVersion): 13 usages
Test (org.junit.Test): 10 usages
NamespaceId (co.cask.cdap.proto.id.NamespaceId): 7 usages
Id (co.cask.cdap.proto.Id): 6 usages
AppDeploymentInfo (co.cask.cdap.internal.app.deploy.pipeline.AppDeploymentInfo): 5 usages
File (java.io.File): 5 usages
PluginClass (co.cask.cdap.api.plugin.PluginClass): 4 usages
ArtifactDescriptor (co.cask.cdap.internal.app.runtime.artifact.ArtifactDescriptor): 4 usages
Map (java.util.Map): 4 usages
SortedMap (java.util.SortedMap): 4 usages
Location (org.apache.twill.filesystem.Location): 4 usages
ApplicationSpecification (co.cask.cdap.api.app.ApplicationSpecification): 3 usages
BatchPipelineSpecGenerator (co.cask.cdap.etl.batch.BatchPipelineSpecGenerator): 3 usages
MockPluginConfigurer (co.cask.cdap.etl.common.MockPluginConfigurer): 3 usages
EntityId (co.cask.cdap.proto.id.EntityId): 3 usages
Principal (co.cask.cdap.proto.security.Principal): 3 usages
TreeMap (java.util.TreeMap): 3 usages
ConfigTestApp (co.cask.cdap.ConfigTestApp): 2 usages
Predicate (co.cask.cdap.api.Predicate): 2 usages