
Example 11 with Processor

use of org.talend.sdk.component.runtime.output.Processor in project component-runtime by Talend.

the class BeamExecutor method run.

@Override
public void run() {
    try {
        final Map<String, Mapper> mappers = delegate
                .getLevels()
                .values()
                .stream()
                .flatMap(Collection::stream)
                .filter(Job.Component::isSource)
                .collect(toMap(Job.Component::getId, e -> delegate
                        .getManager()
                        .findMapper(e.getNode().getFamily(), e.getNode().getComponent(), e.getNode().getVersion(),
                                e.getNode().getConfiguration())
                        .orElseThrow(() -> new IllegalStateException("No mapper found for: " + e.getNode()))));
        final Map<String, Processor> processors = delegate
                .getLevels()
                .values()
                .stream()
                .flatMap(Collection::stream)
                .filter(component -> !component.isSource())
                .collect(toMap(Job.Component::getId, e -> delegate
                        .getManager()
                        .findProcessor(e.getNode().getFamily(), e.getNode().getComponent(), e.getNode().getVersion(),
                                e.getNode().getConfiguration())
                        .orElseThrow(() -> new IllegalStateException("No processor found for: " + e.getNode()))));
        final Pipeline pipeline = Pipeline.create(createPipelineOptions());
        final Map<String, PCollection<JsonObject>> pCollections = new HashMap<>();
        delegate.getLevels().values().stream().flatMap(Collection::stream).forEach(component -> {
            if (component.isSource()) {
                final Mapper mapper = mappers.get(component.getId());
                pCollections.put(component.getId(), pipeline
                        .apply(toName("TalendIO", component), TalendIO.read(mapper))
                        .apply(toName("RecordNormalizer", component), RecordNormalizer.of(mapper.plugin())));
            } else {
                final Processor processor = processors.get(component.getId());
                final List<Job.Edge> joins = getEdges(delegate.getEdges(), component, e -> e.getTo().getNode());
                final Map<String, PCollection<KV<String, JsonObject>>> inputs = joins
                        .stream()
                        .collect(toMap(e -> e.getTo().getBranch(), e -> {
                            final PCollection<JsonObject> pc = pCollections.get(e.getFrom().getNode().getId());
                            final PCollection<JsonObject> filteredInput = pc.apply(
                                    toName("RecordBranchFilter", component, e),
                                    RecordBranchFilter.of(processor.plugin(), e.getFrom().getBranch()));
                            final PCollection<JsonObject> mappedInput;
                            if (e.getFrom().getBranch().equals(e.getTo().getBranch())) {
                                mappedInput = filteredInput;
                            } else {
                                mappedInput = filteredInput.apply(
                                        toName("RecordBranchMapper", component, e),
                                        RecordBranchMapper.of(processor.plugin(), e.getFrom().getBranch(), e.getTo().getBranch()));
                            }
                            return mappedInput
                                    .apply(toName("RecordBranchUnwrapper", component, e),
                                            RecordBranchUnwrapper.of(processor.plugin(), e.getTo().getBranch()))
                                    .apply(toName("AutoKVWrapper", component, e),
                                            AutoKVWrapper.of(processor.plugin(), delegate.getKeyProvider(component.getId()),
                                                    component.getId(), e.getFrom().getBranch()));
                        }));
                KeyedPCollectionTuple<String> join = null;
                for (final Map.Entry<String, PCollection<KV<String, JsonObject>>> entry : inputs.entrySet()) {
                    final TupleTag<JsonObject> branch = new TupleTag<>(entry.getKey());
                    join = join == null ? KeyedPCollectionTuple.of(branch, entry.getValue()) : join.and(branch, entry.getValue());
                }
                final PCollection<JsonObject> preparedInput = join
                        .apply(toName("CoGroupByKey", component), CoGroupByKey.create())
                        .apply(toName("CoGroupByKeyResultMappingTransform", component),
                                new CoGroupByKeyResultMappingTransform<>(processor.plugin(), true));
                if (getEdges(delegate.getEdges(), component, e -> e.getFrom().getNode()).isEmpty()) {
                    final PTransform<PCollection<JsonObject>, PDone> write = TalendIO.write(processor);
                    preparedInput.apply(toName("Output", component), write);
                } else {
                    final PTransform<PCollection<JsonObject>, PCollection<JsonObject>> process = TalendFn.asFn(processor);
                    pCollections.put(component.getId(), preparedInput.apply(toName("Processor", component), process));
                }
            }
        });
        final PipelineResult result = pipeline.run();
        // waitUntilFinish() doesn't actually wait for the job to complete on the direct runner,
        // hence the polling loop below
        result.waitUntilFinish();
        while (PipelineResult.State.RUNNING.equals(result.getState())) {
            try {
                Thread.sleep(100L);
            } catch (final InterruptedException e) {
                throw new IllegalStateException("the job was aborted", e);
            }
        }
    } finally {
        delegate
                .getLevels()
                .values()
                .stream()
                .flatMap(Collection::stream)
                .map(Job.Component::getId)
                .forEach(JobImpl.LocalSequenceHolder::clean);
    }
}
Also used : TalendIO(org.talend.sdk.component.runtime.beam.TalendIO) KV(org.apache.beam.sdk.values.KV) PipelineResult(org.apache.beam.sdk.PipelineResult) RecordBranchFilter(org.talend.sdk.component.runtime.beam.transform.RecordBranchFilter) HashMap(java.util.HashMap) PipelineOptionsFactory(org.apache.beam.sdk.options.PipelineOptionsFactory) Function(java.util.function.Function) PTransform(org.apache.beam.sdk.transforms.PTransform) RecordBranchMapper(org.talend.sdk.component.runtime.beam.transform.RecordBranchMapper) Collectors.toMap(java.util.stream.Collectors.toMap) TupleTag(org.apache.beam.sdk.values.TupleTag) Map(java.util.Map) RecordNormalizer(org.talend.sdk.component.runtime.beam.transform.RecordNormalizer) Pipeline(org.apache.beam.sdk.Pipeline) KeyedPCollectionTuple(org.apache.beam.sdk.transforms.join.KeyedPCollectionTuple) PipelineOptions(org.apache.beam.sdk.options.PipelineOptions) JsonObject(javax.json.JsonObject) PDone(org.apache.beam.sdk.values.PDone) Collection(java.util.Collection) PCollection(org.apache.beam.sdk.values.PCollection) Processor(org.talend.sdk.component.runtime.output.Processor) RecordBranchUnwrapper(org.talend.sdk.component.runtime.beam.transform.RecordBranchUnwrapper) CoGroupByKey(org.apache.beam.sdk.transforms.join.CoGroupByKey) AutoKVWrapper(org.talend.sdk.component.runtime.beam.transform.AutoKVWrapper) Collectors.toList(java.util.stream.Collectors.toList) List(java.util.List) Mapper(org.talend.sdk.component.runtime.input.Mapper) CoGroupByKeyResultMappingTransform(org.talend.sdk.component.runtime.beam.transform.CoGroupByKeyResultMappingTransform) Job(org.talend.sdk.component.runtime.manager.chain.Job) JobImpl(org.talend.sdk.component.runtime.manager.chain.internal.JobImpl) AllArgsConstructor(lombok.AllArgsConstructor) TalendFn(org.talend.sdk.component.runtime.beam.TalendFn) Processor(org.talend.sdk.component.runtime.output.Processor) HashMap(java.util.HashMap) JsonObject(javax.json.JsonObject) TupleTag(org.apache.beam.sdk.values.TupleTag) RecordBranchMapper(org.talend.sdk.component.runtime.beam.transform.RecordBranchMapper) Mapper(org.talend.sdk.component.runtime.input.Mapper) Job(org.talend.sdk.component.runtime.manager.chain.Job) PipelineResult(org.apache.beam.sdk.PipelineResult) Pipeline(org.apache.beam.sdk.Pipeline) PCollection(org.apache.beam.sdk.values.PCollection) PDone(org.apache.beam.sdk.values.PDone) Collection(java.util.Collection) PCollection(org.apache.beam.sdk.values.PCollection) HashMap(java.util.HashMap) Collectors.toMap(java.util.stream.Collectors.toMap) Map(java.util.Map)
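
For context, the run() method above is the Beam-specific executor behind the component-runtime Job DSL: it walks the declared job graph (getLevels()/getEdges()), turns source nodes into Mapper-backed reads and every other node into a Processor-backed transform or write, then runs the resulting Beam pipeline. Below is a minimal sketch of how such a job graph is declared through org.talend.sdk.component.runtime.manager.chain.Job; the component ids, URIs and configuration are hypothetical placeholders, and which executor runs the graph depends on what is on the classpath.

import org.talend.sdk.component.runtime.manager.chain.Job;

public class JobDslSketch {

    public static void main(final String[] args) {
        Job
            .components()
                .component("reader", "myfamily://myinput?__version=1")  // hypothetical source component
                .component("writer", "myfamily://myoutput?__version=1") // hypothetical output component
            .connections()
                .from("reader").to("writer")                            // one edge on the default branch
            .build()
            .run();  // executed by the resolved executor, e.g. a Beam-based one like the class above
    }
}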

Example 12 with Processor

use of org.talend.sdk.component.runtime.output.Processor in project component-runtime by Talend.

the class BeamIOWrappingTest method processor.

@Test
public void processor() {
    MySink.DATA.clear();
    final Object source = newComponent("beamio_output", ComponentManager.ComponentType.PROCESSOR);
    final Processor processor = new BeamProcessorChainImpl((PTransform<PCollection<?>, ?>) source, null,
            getPlugin(), "test", "beamio_output");
    processor.start();
    processor.beforeGroup();
    Stream.of("tsrif", "dnoces").forEach(data -> processor.onNext(name -> {
        assertEquals(Branches.DEFAULT_BRANCH, name);
        return data;
    }, null));
    processor.afterGroup(name -> {
        assertEquals(Branches.DEFAULT_BRANCH, name);
        return value -> MySink.DATA.add(value.toString());
    });
    processor.stop();
    assertEquals(asList("setup", "start-bundle", "first", "second", "finish-out", "finish-bundle", "teardown"), MySink.DATA);
    MySink.DATA.clear();
}
Also used : PartitionMapper(org.talend.sdk.component.api.input.PartitionMapper) PBegin(org.apache.beam.sdk.values.PBegin) PipelineResult(org.apache.beam.sdk.PipelineResult) RequiredArgsConstructor(lombok.RequiredArgsConstructor) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) Assert.assertThat(org.junit.Assert.assertThat) Sample(org.talend.sdk.component.runtime.beam.data.Sample) Create(org.apache.beam.sdk.transforms.Create) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) GlobalWindow(org.apache.beam.sdk.transforms.windowing.GlobalWindow) Assert.fail(org.junit.Assert.fail) ClassRule(org.junit.ClassRule) Collections.emptyList(java.util.Collections.emptyList) Collection(java.util.Collection) Processor(org.talend.sdk.component.runtime.output.Processor) Collectors.joining(java.util.stream.Collectors.joining) StandardCharsets(java.nio.charset.StandardCharsets) Serializable(java.io.Serializable) List(java.util.List) Branches(org.talend.sdk.component.runtime.output.Branches) Stream(java.util.stream.Stream) ParDo(org.apache.beam.sdk.transforms.ParDo) Optional(java.util.Optional) BeamMapperImpl(org.talend.sdk.component.runtime.beam.impl.BeamMapperImpl) BeamProcessorChainImpl(org.talend.sdk.component.runtime.beam.impl.BeamProcessorChainImpl) Getter(lombok.Getter) CapturingPipeline(org.talend.sdk.component.runtime.beam.impl.CapturingPipeline) Coder(org.apache.beam.sdk.coders.Coder) HashMap(java.util.HashMap) Option(org.talend.sdk.component.api.configuration.Option) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) PTransform(org.apache.beam.sdk.transforms.PTransform) FileBasedSink(org.apache.beam.sdk.io.FileBasedSink) TestPipeline(org.apache.beam.sdk.testing.TestPipeline) DelegatingTransform(org.talend.sdk.component.runtime.beam.transform.DelegatingTransform) Input(org.talend.sdk.component.runtime.input.Input) OutputStream(java.io.OutputStream) DoFn(org.apache.beam.sdk.transforms.DoFn) PDone(org.apache.beam.sdk.values.PDone) Files(java.nio.file.Files) PAssert(org.apache.beam.sdk.testing.PAssert) Assert.assertNotNull(org.junit.Assert.assertNotNull) Assert.assertTrue(org.junit.Assert.assertTrue) IOException(java.io.IOException) Test(org.junit.Test) PCollection(org.apache.beam.sdk.values.PCollection) InputStreamReader(java.io.InputStreamReader) File(java.io.File) Collectors.toList(java.util.stream.Collectors.toList) Mapper(org.talend.sdk.component.runtime.input.Mapper) Assert.assertNull(org.junit.Assert.assertNull) Rule(org.junit.Rule) Instant(org.joda.time.Instant) JarLocation.jarLocation(org.apache.ziplock.JarLocation.jarLocation) BufferedReader(java.io.BufferedReader) ComponentManager(org.talend.sdk.component.runtime.manager.ComponentManager) Assert.assertEquals(org.junit.Assert.assertEquals) TextIO(org.apache.beam.sdk.io.TextIO) TemporaryFolder(org.junit.rules.TemporaryFolder) InputStream(java.io.InputStream) PCollection(org.apache.beam.sdk.values.PCollection) Processor(org.talend.sdk.component.runtime.output.Processor) BeamProcessorChainImpl(org.talend.sdk.component.runtime.beam.impl.BeamProcessorChainImpl) Test(org.junit.Test)
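
The test above drives the Processor lifecycle by hand: start(), beforeGroup(), onNext(input, output) per record, afterGroup(output), stop(). Outside of such wrapping tests, the same kind of Processor is normally looked up through the ComponentManager, as Example 11 does with findProcessor(family, name, version, configuration). A minimal sketch follows; the family, name, version and configuration values are hypothetical placeholders.

import static java.util.Collections.emptyMap;

import java.util.ArrayList;
import java.util.List;

import org.talend.sdk.component.runtime.manager.ComponentManager;
import org.talend.sdk.component.runtime.output.Processor;

public class ProcessorLifecycleSketch {

    public static void main(final String[] args) {
        final List<Object> collected = new ArrayList<>();
        final Processor processor = ComponentManager
                .instance()
                .findProcessor("myFamily", "myOutput", 1, emptyMap()) // hypothetical component coordinates
                .orElseThrow(() -> new IllegalStateException("processor not found"));
        processor.start();
        try {
            processor.beforeGroup();
            // first lambda: InputFactory, resolves the record for an input branch name;
            // second lambda: OutputFactory, resolves an emitter for an output branch name
            processor.onNext(name -> "some record", name -> collected::add);
            processor.afterGroup(name -> collected::add);
        } finally {
            processor.stop();
        }
    }
}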

Example 13 with Processor

use of org.talend.sdk.component.runtime.output.Processor in project component-runtime by Talend.

the class BeamIOWrappingTest method outputChain.

@Test
public void outputChain() {
    MySink.DATA.clear();
    final Object source = newComponent("beamio_output_chain", ComponentManager.ComponentType.PROCESSOR);
    final Processor processor = new BeamProcessorChainImpl((PTransform<PCollection<?>, PDone>) source, null,
            getPlugin(), "test", "beamio_output");
    processor.start();
    processor.beforeGroup();
    Stream.of("tsrif", "dnoces").forEach(data -> processor.onNext(name -> {
        assertEquals(Branches.DEFAULT_BRANCH, name);
        return new Sample(data);
    }, name -> value -> MySink.DATA.add(value.toString())));
    processor.afterGroup(name -> {
        assertEquals(Branches.DEFAULT_BRANCH, name);
        return value -> MySink.DATA.add(value.toString());
    });
    processor.stop();
    assertEquals(asList("setup", "start-bundle", "first", "second", "finish-out", "finish-bundle", "teardown"), MySink.DATA);
    MySink.DATA.clear();
}
Also used : PartitionMapper(org.talend.sdk.component.api.input.PartitionMapper) PBegin(org.apache.beam.sdk.values.PBegin) PipelineResult(org.apache.beam.sdk.PipelineResult) RequiredArgsConstructor(lombok.RequiredArgsConstructor) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) Assert.assertThat(org.junit.Assert.assertThat) Sample(org.talend.sdk.component.runtime.beam.data.Sample) Create(org.apache.beam.sdk.transforms.Create) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) GlobalWindow(org.apache.beam.sdk.transforms.windowing.GlobalWindow) Assert.fail(org.junit.Assert.fail) ClassRule(org.junit.ClassRule) Collections.emptyList(java.util.Collections.emptyList) Collection(java.util.Collection) Processor(org.talend.sdk.component.runtime.output.Processor) Collectors.joining(java.util.stream.Collectors.joining) StandardCharsets(java.nio.charset.StandardCharsets) Serializable(java.io.Serializable) List(java.util.List) Branches(org.talend.sdk.component.runtime.output.Branches) Stream(java.util.stream.Stream) ParDo(org.apache.beam.sdk.transforms.ParDo) Optional(java.util.Optional) BeamMapperImpl(org.talend.sdk.component.runtime.beam.impl.BeamMapperImpl) BeamProcessorChainImpl(org.talend.sdk.component.runtime.beam.impl.BeamProcessorChainImpl) Getter(lombok.Getter) CapturingPipeline(org.talend.sdk.component.runtime.beam.impl.CapturingPipeline) Coder(org.apache.beam.sdk.coders.Coder) HashMap(java.util.HashMap) Option(org.talend.sdk.component.api.configuration.Option) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) PTransform(org.apache.beam.sdk.transforms.PTransform) FileBasedSink(org.apache.beam.sdk.io.FileBasedSink) TestPipeline(org.apache.beam.sdk.testing.TestPipeline) DelegatingTransform(org.talend.sdk.component.runtime.beam.transform.DelegatingTransform) Input(org.talend.sdk.component.runtime.input.Input) OutputStream(java.io.OutputStream) DoFn(org.apache.beam.sdk.transforms.DoFn) PDone(org.apache.beam.sdk.values.PDone) Files(java.nio.file.Files) PAssert(org.apache.beam.sdk.testing.PAssert) Assert.assertNotNull(org.junit.Assert.assertNotNull) Assert.assertTrue(org.junit.Assert.assertTrue) IOException(java.io.IOException) Test(org.junit.Test) PCollection(org.apache.beam.sdk.values.PCollection) InputStreamReader(java.io.InputStreamReader) File(java.io.File) Collectors.toList(java.util.stream.Collectors.toList) Mapper(org.talend.sdk.component.runtime.input.Mapper) Assert.assertNull(org.junit.Assert.assertNull) Rule(org.junit.Rule) Instant(org.joda.time.Instant) JarLocation.jarLocation(org.apache.ziplock.JarLocation.jarLocation) BufferedReader(java.io.BufferedReader) ComponentManager(org.talend.sdk.component.runtime.manager.ComponentManager) Assert.assertEquals(org.junit.Assert.assertEquals) TextIO(org.apache.beam.sdk.io.TextIO) TemporaryFolder(org.junit.rules.TemporaryFolder) InputStream(java.io.InputStream) PCollection(org.apache.beam.sdk.values.PCollection) Processor(org.talend.sdk.component.runtime.output.Processor) BeamProcessorChainImpl(org.talend.sdk.component.runtime.beam.impl.BeamProcessorChainImpl) PDone(org.apache.beam.sdk.values.PDone) Sample(org.talend.sdk.component.runtime.beam.data.Sample) Test(org.junit.Test)

Example 14 with Processor

use of org.talend.sdk.component.runtime.output.Processor in project component-runtime by Talend.

the class BeamIOWrappingTest method fileOutput.

@Test
public void fileOutput() throws IOException {
    final Object source = newComponent("beamio_text", ComponentManager.ComponentType.PROCESSOR);
    final Processor processor = new BeamProcessorChainImpl((PTransform<PCollection<?>, PDone>) source, null,
            getPlugin(), "test", "beamio_text");
    processor.start();
    processor.beforeGroup();
    Stream.of("first", "second").forEach(data -> processor.onNext(name -> {
        assertEquals(Branches.DEFAULT_BRANCH, name);
        return data;
    }, name -> value -> fail(name + " >> " + value)));
    final AtomicReference<Object> output = new AtomicReference<>();
    processor.afterGroup(name -> {
        assertEquals(Branches.DEFAULT_BRANCH, name);
        return output::set;
    });
    processor.stop();
    final FileBasedSink.FileResult result = FileBasedSink.FileResult.class.cast(output.get());
    assertNotNull(result);
    final File file = new File(result.getTempFilename().toString());
    assertTrue(file.exists());
    assertEquals(file.getParentFile().getParentFile(), TEMPORARY_FOLDER.getRoot());
    assertEquals("first\nsecond", Files.lines(file.toPath()).collect(joining("\n")));
}
Also used : PartitionMapper(org.talend.sdk.component.api.input.PartitionMapper) PBegin(org.apache.beam.sdk.values.PBegin) PipelineResult(org.apache.beam.sdk.PipelineResult) RequiredArgsConstructor(lombok.RequiredArgsConstructor) CoreMatchers.instanceOf(org.hamcrest.CoreMatchers.instanceOf) Assert.assertThat(org.junit.Assert.assertThat) Sample(org.talend.sdk.component.runtime.beam.data.Sample) Create(org.apache.beam.sdk.transforms.Create) Arrays.asList(java.util.Arrays.asList) Map(java.util.Map) GlobalWindow(org.apache.beam.sdk.transforms.windowing.GlobalWindow) Assert.fail(org.junit.Assert.fail) ClassRule(org.junit.ClassRule) Collections.emptyList(java.util.Collections.emptyList) Collection(java.util.Collection) Processor(org.talend.sdk.component.runtime.output.Processor) Collectors.joining(java.util.stream.Collectors.joining) StandardCharsets(java.nio.charset.StandardCharsets) Serializable(java.io.Serializable) List(java.util.List) Branches(org.talend.sdk.component.runtime.output.Branches) Stream(java.util.stream.Stream) ParDo(org.apache.beam.sdk.transforms.ParDo) Optional(java.util.Optional) BeamMapperImpl(org.talend.sdk.component.runtime.beam.impl.BeamMapperImpl) BeamProcessorChainImpl(org.talend.sdk.component.runtime.beam.impl.BeamProcessorChainImpl) Getter(lombok.Getter) CapturingPipeline(org.talend.sdk.component.runtime.beam.impl.CapturingPipeline) Coder(org.apache.beam.sdk.coders.Coder) HashMap(java.util.HashMap) Option(org.talend.sdk.component.api.configuration.Option) AtomicReference(java.util.concurrent.atomic.AtomicReference) ArrayList(java.util.ArrayList) PTransform(org.apache.beam.sdk.transforms.PTransform) FileBasedSink(org.apache.beam.sdk.io.FileBasedSink) TestPipeline(org.apache.beam.sdk.testing.TestPipeline) DelegatingTransform(org.talend.sdk.component.runtime.beam.transform.DelegatingTransform) Input(org.talend.sdk.component.runtime.input.Input) OutputStream(java.io.OutputStream) DoFn(org.apache.beam.sdk.transforms.DoFn) PDone(org.apache.beam.sdk.values.PDone) Files(java.nio.file.Files) PAssert(org.apache.beam.sdk.testing.PAssert) Assert.assertNotNull(org.junit.Assert.assertNotNull) Assert.assertTrue(org.junit.Assert.assertTrue) IOException(java.io.IOException) Test(org.junit.Test) PCollection(org.apache.beam.sdk.values.PCollection) InputStreamReader(java.io.InputStreamReader) File(java.io.File) Collectors.toList(java.util.stream.Collectors.toList) Mapper(org.talend.sdk.component.runtime.input.Mapper) Assert.assertNull(org.junit.Assert.assertNull) Rule(org.junit.Rule) Instant(org.joda.time.Instant) JarLocation.jarLocation(org.apache.ziplock.JarLocation.jarLocation) BufferedReader(java.io.BufferedReader) ComponentManager(org.talend.sdk.component.runtime.manager.ComponentManager) Assert.assertEquals(org.junit.Assert.assertEquals) TextIO(org.apache.beam.sdk.io.TextIO) TemporaryFolder(org.junit.rules.TemporaryFolder) InputStream(java.io.InputStream) FileBasedSink(org.apache.beam.sdk.io.FileBasedSink) PCollection(org.apache.beam.sdk.values.PCollection) Processor(org.talend.sdk.component.runtime.output.Processor) BeamProcessorChainImpl(org.talend.sdk.component.runtime.beam.impl.BeamProcessorChainImpl) PDone(org.apache.beam.sdk.values.PDone) AtomicReference(java.util.concurrent.atomic.AtomicReference) File(java.io.File) Test(org.junit.Test)
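
For comparison with the wrapped Processor above, the same TextIO sink is normally driven by a regular Beam pipeline rather than through beforeGroup/onNext/afterGroup. Here is a minimal sketch assuming a local (direct-runner style) execution; the output prefix is a hypothetical placeholder.

import java.util.Arrays;

import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.io.TextIO;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.transforms.Create;

public class TextIoSketch {

    public static void main(final String[] args) {
        final Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.create());
        pipeline
                .apply(Create.of(Arrays.asList("first", "second")))
                .apply(TextIO.write().to("/tmp/beamio_text/part")); // writes sharded text files under the prefix
        pipeline.run().waitUntilFinish();
    }
}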

Example 15 with Processor

use of org.talend.sdk.component.runtime.output.Processor in project component-runtime by Talend.

the class DIBatchSimulationTest method doDi.

private void doDi(final ComponentManager manager, final Collection<Object> sourceData,
        final Collection<Object> processorData, final Optional<Processor> proc, final Optional<Mapper> mapper) {
    final Map<String, Object> globalMap = new HashMap<>();
    try {
        final Processor processor = proc.orElseThrow(() -> new IllegalStateException("scanning failed"));
        JobStateAware.init(processor, globalMap);
        final Jsonb jsonbProcessor = Jsonb.class.cast(manager
                .findPlugin(processor.plugin())
                .get()
                .get(ComponentManager.AllServices.class)
                .getServices()
                .get(Jsonb.class));
        final AutoChunkProcessor processorProcessor = new AutoChunkProcessor(100, processor);
        processorProcessor.start();
        globalMap.put("processorProcessor", processorProcessor);
        final InputsHandler inputsHandlerProcessor = new InputsHandler(jsonbProcessor);
        inputsHandlerProcessor.addConnection("FLOW", row1Struct.class);
        final OutputsHandler outputHandlerProcessor = new OutputsHandler(jsonbProcessor);
        final InputFactory inputsProcessor = inputsHandlerProcessor.asInputFactory();
        final OutputFactory outputsProcessor = outputHandlerProcessor.asOutputFactory();
        final Mapper tempMapperMapper = mapper.orElseThrow(() -> new IllegalStateException("scanning failed"));
        JobStateAware.init(tempMapperMapper, globalMap);
        doRun(manager, sourceData, processorData, globalMap, processorProcessor, inputsHandlerProcessor,
                outputHandlerProcessor, inputsProcessor, outputsProcessor, tempMapperMapper);
    } finally {
        doClose(globalMap);
    }
}
Also used : InputFactory(org.talend.sdk.component.runtime.output.InputFactory) AutoChunkProcessor(org.talend.sdk.component.runtime.di.AutoChunkProcessor) Processor(org.talend.sdk.component.runtime.output.Processor) HashMap(java.util.HashMap) OutputsHandler(org.talend.sdk.component.runtime.di.OutputsHandler) ToString(lombok.ToString) PartitionMapper(org.talend.sdk.component.api.input.PartitionMapper) ChainedMapper(org.talend.sdk.component.runtime.manager.chain.ChainedMapper) Mapper(org.talend.sdk.component.runtime.input.Mapper) Jsonb(javax.json.bind.Jsonb) AutoChunkProcessor(org.talend.sdk.component.runtime.di.AutoChunkProcessor) JsonObject(javax.json.JsonObject) OutputFactory(org.talend.sdk.component.runtime.output.OutputFactory) InputsHandler(org.talend.sdk.component.runtime.di.InputsHandler)
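
doDi above only prepares the DI plumbing (AutoChunkProcessor plus the InputsHandler/OutputsHandler pair and their factories) and hands everything to doRun, which is not shown in this example. Below is a hedged sketch of the per-row loop doRun is assumed to perform: the onElement/flush method names on AutoChunkProcessor are taken from the generated DI integration code and should be treated as assumptions, and the InputsHandler mutator that binds each row to the "FLOW" connection is deliberately omitted because it does not appear in the snippet.

import java.util.Collection;

import org.talend.sdk.component.runtime.di.AutoChunkProcessor;
import org.talend.sdk.component.runtime.output.InputFactory;
import org.talend.sdk.component.runtime.output.OutputFactory;

final class DiRowLoopSketch {

    static void pushRows(final Collection<Object> sourceData, final AutoChunkProcessor processorProcessor,
            final InputFactory inputsProcessor, final OutputFactory outputsProcessor) {
        for (final Object row : sourceData) {
            // each `row` would first be bound to the "FLOW" connection through the InputsHandler,
            // then offered to the chunking wrapper, which calls beforeGroup/onNext/afterGroup as needed
            processorProcessor.onElement(inputsProcessor, outputsProcessor);
        }
        processorProcessor.flush(outputsProcessor); // emit the last, possibly partial, chunk
    }
}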

Aggregations

Processor (org.talend.sdk.component.runtime.output.Processor): 20
List (java.util.List): 8
Map (java.util.Map): 8
Collectors.toList (java.util.stream.Collectors.toList): 8
JsonObject (javax.json.JsonObject): 8
Collection (java.util.Collection): 7
Test (org.junit.Test): 7
HashMap (java.util.HashMap): 6
Stream (java.util.stream.Stream): 6
PipelineResult (org.apache.beam.sdk.PipelineResult): 6
PCollection (org.apache.beam.sdk.values.PCollection): 6
IOException (java.io.IOException): 5
Arrays.asList (java.util.Arrays.asList): 5
AtomicReference (java.util.concurrent.atomic.AtomicReference): 5
PAssert (org.apache.beam.sdk.testing.PAssert): 5
TestPipeline (org.apache.beam.sdk.testing.TestPipeline): 5
ClassRule (org.junit.ClassRule): 5
Rule (org.junit.Rule): 5
Mapper (org.talend.sdk.component.runtime.input.Mapper): 5
BufferedReader (java.io.BufferedReader): 4