use of org.apache.flink.api.dag.Pipeline in project flink by apache.
the class DefaultPackagedProgramRetrieverTest method retrieveJobGraph.
private JobGraph retrieveJobGraph(
        PackagedProgramRetriever retrieverUnderTest, Configuration configuration)
        throws FlinkException, ProgramInvocationException, MalformedURLException {
    final PackagedProgram packagedProgram = retrieverUnderTest.getPackagedProgram();
    final int defaultParallelism = configuration.getInteger(CoreOptions.DEFAULT_PARALLELISM);
    // Expose the job jar and classpath entries to the pipeline translation.
    ConfigUtils.encodeCollectionToConfig(
            configuration, PipelineOptions.JARS, packagedProgram.getJobJarAndDependencies(), URL::toString);
    ConfigUtils.encodeCollectionToConfig(
            configuration, PipelineOptions.CLASSPATHS, packagedProgram.getClasspaths(), URL::toString);
    final Pipeline pipeline =
            PackagedProgramUtils.getPipelineFromProgram(
                    packagedProgram, configuration, defaultParallelism, false);
    return PipelineExecutorUtils.getJobGraph(pipeline, configuration);
}
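A minimal sketch of how a test might drive this helper. The retriever factory call and the entry-point class name below are assumptions for illustration, not taken from the test above:

    // Hypothetical usage sketch: org.example.MyJob is a placeholder entry point,
    // and the DefaultPackagedProgramRetriever.create(...) arguments are assumed.
    Configuration configuration = new Configuration();
    configuration.setInteger(CoreOptions.DEFAULT_PARALLELISM, 2);
    PackagedProgramRetriever retriever =
            DefaultPackagedProgramRetriever.create(
                    null, "org.example.MyJob", new String[0], configuration);
    JobGraph jobGraph = retrieveJobGraph(retriever, configuration);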
use of org.apache.flink.api.dag.Pipeline in project flink by apache.
the class TableEnvironmentImpl method executeInternal.
private TableResultInternal executeInternal(
        List<Transformation<?>> transformations, List<String> sinkIdentifierNames) {
    final String defaultJobName = "insert-into_" + String.join(",", sinkIdentifierNames);
    Pipeline pipeline =
            execEnv.createPipeline(transformations, tableConfig.getConfiguration(), defaultJobName);
    try {
        JobClient jobClient = execEnv.executeAsync(pipeline);
        final List<Column> columns = new ArrayList<>();
        Long[] affectedRowCounts = new Long[transformations.size()];
        for (int i = 0; i < transformations.size(); ++i) {
            // use sink identifier name as field name
            columns.add(Column.physical(sinkIdentifierNames.get(i), DataTypes.BIGINT()));
            affectedRowCounts[i] = -1L;
        }
        return TableResultImpl.builder()
                .jobClient(jobClient)
                .resultKind(ResultKind.SUCCESS_WITH_CONTENT)
                .schema(ResolvedSchema.of(columns))
                .resultProvider(new InsertResultProvider(affectedRowCounts).setJobClient(jobClient))
                .build();
    } catch (Exception e) {
        throw new TableException("Failed to execute sql", e);
    }
}
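From the user's side, this internal path is what backs INSERT INTO statements submitted through the Table API. A minimal caller-side sketch, assuming placeholder table names:

    // Hypothetical caller-side sketch: source_table and sink_table are placeholders.
    TableEnvironment tEnv = TableEnvironment.create(EnvironmentSettings.inStreamingMode());
    TableResult result = tEnv.executeSql("INSERT INTO sink_table SELECT * FROM source_table");
    result.await(); // blocks until the insert job finishes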
use of org.apache.flink.api.dag.Pipeline in project flink by apache.
the class CliFrontend method info.
/**
 * Executes the info action.
 *
 * @param args Command line arguments for the info action.
 */
protected void info(String[] args) throws Exception {
    LOG.info("Running 'info' command.");
    final Options commandOptions = CliFrontendParser.getInfoCommandOptions();
    final CommandLine commandLine = CliFrontendParser.parse(commandOptions, args, true);
    final ProgramOptions programOptions = ProgramOptions.create(commandLine);
    // evaluate help flag
    if (commandLine.hasOption(HELP_OPTION.getOpt())) {
        CliFrontendParser.printHelpForInfo();
        return;
    }
    // -------- build the packaged program -------------
    LOG.info("Building program from JAR file");
    PackagedProgram program = null;
    try {
        int parallelism = programOptions.getParallelism();
        if (ExecutionConfig.PARALLELISM_DEFAULT == parallelism) {
            parallelism = defaultParallelism;
        }
        LOG.info("Creating program plan dump");
        final CustomCommandLine activeCommandLine =
                validateAndGetActiveCommandLine(checkNotNull(commandLine));
        final Configuration effectiveConfiguration =
                getEffectiveConfiguration(
                        activeCommandLine,
                        commandLine,
                        programOptions,
                        getJobJarAndDependencies(programOptions));
        program = buildProgram(programOptions, effectiveConfiguration);
        Pipeline pipeline =
                PackagedProgramUtils.getPipelineFromProgram(
                        program, effectiveConfiguration, parallelism, true);
        String jsonPlan = FlinkPipelineTranslationUtil.translateToJSONExecutionPlan(pipeline);
        if (jsonPlan != null) {
            System.out.println("----------------------- Execution Plan -----------------------");
            System.out.println(jsonPlan);
            System.out.println("--------------------------------------------------------------");
        } else {
            System.out.println("JSON plan could not be generated.");
        }
        String description = program.getDescription();
        if (description != null) {
            System.out.println();
            System.out.println(description);
        } else {
            System.out.println();
            System.out.println("No description provided.");
        }
    } finally {
        if (program != null) {
            program.close();
        }
    }
}
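Stripped of CLI parsing, the core of the action reduces to compiling the program into a Pipeline without executing it and rendering the JSON plan. A minimal sketch, where the jar path is a placeholder:

    // Hypothetical sketch: build a program from a placeholder jar path, translate it
    // to a Pipeline, and print the JSON execution plan without running the job.
    PackagedProgram program =
            PackagedProgram.newBuilder().setJarFile(new File("/path/to/job.jar")).build();
    Pipeline pipeline =
            PackagedProgramUtils.getPipelineFromProgram(program, new Configuration(), 1, true);
    System.out.println(FlinkPipelineTranslationUtil.translateToJSONExecutionPlan(pipeline));
    program.close();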
use of org.apache.flink.api.dag.Pipeline in project flink by apache.
the class DumpCompiledPlanTest method verifyOptimizedPlan.
private void verifyOptimizedPlan(Class<?> entrypoint, String... args) throws Exception {
    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setEntryPointClassName(entrypoint.getName())
                    .setArguments(args)
                    .build();
    final Pipeline pipeline =
            PackagedProgramUtils.getPipelineFromProgram(program, new Configuration(), 1, true);
    assertTrue(pipeline instanceof Plan);
    final Plan plan = (Plan) pipeline;
    final OptimizedPlan op = compileNoStats(plan);
    final PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
    final String json = dumper.getOptimizerPlanAsJSON(op);
    try (JsonParser parser = new JsonFactory().createParser(json)) {
        // Consume every token; a malformed dump would make the parser throw.
        while (parser.nextToken() != null) {}
    }
}
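A test would invoke the helper with a concrete entry point. A minimal sketch, where WordCount and its arguments are placeholders rather than the actual test inputs:

    // Hypothetical test case: WordCount and its paths are placeholders.
    @Test
    public void testDumpWordCountPlan() throws Exception {
        verifyOptimizedPlan(WordCount.class, "--input", "/tmp/in", "--output", "/tmp/out");
    }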
use of org.apache.flink.api.dag.Pipeline in project flink by apache.
the class PreviewPlanDumpTest method verifyPlanDump.
private static void verifyPlanDump(Class<?> entrypoint, String... args) throws Exception {
    final PackagedProgram program =
            PackagedProgram.newBuilder()
                    .setEntryPointClassName(entrypoint.getName())
                    .setArguments(args)
                    .build();
    final Pipeline pipeline =
            PackagedProgramUtils.getPipelineFromProgram(program, new Configuration(), 1, true);
    assertTrue(pipeline instanceof Plan);
    final Plan plan = (Plan) pipeline;
    // Dump the pre-optimization plan rather than the optimized one.
    final List<DataSinkNode> sinks = Optimizer.createPreOptimizedPlan(plan);
    final PlanJSONDumpGenerator dumper = new PlanJSONDumpGenerator();
    final String json = dumper.getPactPlanAsJSON(sinks);
    try (JsonParser parser = new JsonFactory().createParser(json)) {
        // Consume every token; a malformed dump would make the parser throw.
        while (parser.nextToken() != null) {}
    }
}
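Usage mirrors the previous helper. A minimal sketch, where KMeans and its arguments are placeholders:

    // Hypothetical test case: KMeans and its arguments are placeholders.
    @Test
    public void testPreviewKMeansPlan() throws Exception {
        verifyPlanDump(KMeans.class, "--points", "/tmp/points", "--centroids", "/tmp/centroids");
    }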