Use of org.apache.flink.runtime.jobgraph.InputFormatVertex in project flink by apache.
From the class JobGraphGenerator, method createDataSourceVertex:
private InputFormatVertex createDataSourceVertex(SourcePlanNode node) throws CompilerException {
    final InputFormatVertex vertex = new InputFormatVertex(node.getNodeName());
    final TaskConfig config = new TaskConfig(vertex.getConfiguration());

    vertex.setResources(node.getMinResources(), node.getPreferredResources());
    vertex.setInvokableClass(DataSourceTask.class);
    vertex.setFormatDescription(getDescriptionForUserCode(node.getProgramOperator().getUserCodeWrapper()));

    // set user code
    config.setStubWrapper(node.getProgramOperator().getUserCodeWrapper());
    config.setStubParameters(node.getProgramOperator().getParameters());

    config.setOutputSerializer(node.getSerializer());
    return vertex;
}
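For context, an InputFormatVertex can also be wired by hand, following the same pattern as createDataSourceVertex above. The following is a minimal, hypothetical sketch of that wiring; the vertex name, file path, and choice of TextInputFormat are illustrative assumptions, and the output serializer and outgoing edges that a runnable job would need are omitted.

import org.apache.flink.api.common.operators.util.UserCodeObjectWrapper;
import org.apache.flink.api.java.io.TextInputFormat;
import org.apache.flink.core.fs.Path;
import org.apache.flink.runtime.jobgraph.InputFormatVertex;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.operators.DataSourceTask;
import org.apache.flink.runtime.operators.util.TaskConfig;

public class InputFormatVertexSketch {
    public static void main(String[] args) {
        // The vertex name is used for display purposes only.
        InputFormatVertex vertex = new InputFormatVertex("example-text-source");

        // DataSourceTask is the invokable that drives the InputFormat at runtime,
        // mirroring vertex.setInvokableClass(DataSourceTask.class) above.
        vertex.setInvokableClass(DataSourceTask.class);
        vertex.setFormatDescription("TextInputFormat: file:///tmp/input.txt (illustrative path)");
        vertex.setParallelism(1);

        // Ship the InputFormat instance inside the vertex configuration,
        // wrapped in a UserCodeObjectWrapper, just as the generator does.
        TaskConfig config = new TaskConfig(vertex.getConfiguration());
        config.setStubWrapper(new UserCodeObjectWrapper<>(
                new TextInputFormat(new Path("file:///tmp/input.txt"))));

        JobGraph jobGraph = new JobGraph("input-format-vertex-sketch");
        jobGraph.addVertex(vertex);
    }
}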
Another use of org.apache.flink.runtime.jobgraph.InputFormatVertex in project flink by apache.
From the class StreamingJobGraphGenerator, method createJobVertex:
private StreamConfig createJobVertex(Integer streamNodeId, Map<Integer, byte[]> hashes, List<Map<Integer, byte[]>> legacyHashes) {
    JobVertex jobVertex;
    StreamNode streamNode = streamGraph.getStreamNode(streamNodeId);

    byte[] hash = hashes.get(streamNodeId);
    if (hash == null) {
        throw new IllegalStateException("Cannot find node hash. " +
            "Did you generate them before calling this method?");
    }
    JobVertexID jobVertexId = new JobVertexID(hash);

    List<JobVertexID> legacyJobVertexIds = new ArrayList<>(legacyHashes.size());
    for (Map<Integer, byte[]> legacyHash : legacyHashes) {
        hash = legacyHash.get(streamNodeId);
        if (null != hash) {
            legacyJobVertexIds.add(new JobVertexID(hash));
        }
    }

    if (streamNode.getInputFormat() != null) {
        jobVertex = new InputFormatVertex(chainedNames.get(streamNodeId), jobVertexId, legacyJobVertexIds);
        TaskConfig taskConfig = new TaskConfig(jobVertex.getConfiguration());
        taskConfig.setStubWrapper(new UserCodeObjectWrapper<Object>(streamNode.getInputFormat()));
    } else {
        jobVertex = new JobVertex(chainedNames.get(streamNodeId), jobVertexId, legacyJobVertexIds);
    }

    jobVertex.setResources(chainedMinResources.get(streamNodeId), chainedPreferredResources.get(streamNodeId));
    jobVertex.setInvokableClass(streamNode.getJobVertexClass());

    int parallelism = streamNode.getParallelism();
    if (parallelism == ExecutionConfig.PARALLELISM_DEFAULT) {
        parallelism = defaultParallelism;
    }
    jobVertex.setParallelism(parallelism);
    jobVertex.setMaxParallelism(streamNode.getMaxParallelism());

    if (LOG.isDebugEnabled()) {
        LOG.debug("Parallelism set: {} for {}", parallelism, streamNodeId);
    }

    jobVertices.put(streamNodeId, jobVertex);
    builtVertices.add(streamNodeId);

    jobGraph.addVertex(jobVertex);
    return new StreamConfig(jobVertex.getConfiguration());
}
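As a usage note: the InputFormatVertex branch above is taken when the source's StreamNode carries an InputFormat, which StreamExecutionEnvironment.createInput sets up. Below is a hedged, self-contained sketch; RangeInputFormat is a made-up toy format used only for illustration, and a non-file format is chosen deliberately, since file-based formats may be translated to a continuous file source instead, depending on the Flink version.

import java.io.IOException;
import org.apache.flink.api.common.io.GenericInputFormat;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class StreamingInputFormatSketch {

    // A toy non-file InputFormat that emits the numbers 0..9 in each subtask.
    public static class RangeInputFormat extends GenericInputFormat<Long> {
        private long current;

        @Override
        public boolean reachedEnd() throws IOException {
            return current >= 10;
        }

        @Override
        public Long nextRecord(Long reuse) throws IOException {
            return current++;
        }
    }

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // createInput records the format on the source's StreamNode, so
        // createJobVertex above sees getInputFormat() != null and builds an
        // InputFormatVertex instead of a plain JobVertex.
        env.createInput(new RangeInputFormat(), BasicTypeInfo.LONG_TYPE_INFO)
           .print();

        env.execute("streaming-input-format-sketch");
    }
}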