Search in sources:

Example 11 with JobExecutionResult

use of org.apache.flink.api.common.JobExecutionResult in project flink by apache.

Source: the class DataSetUtils, method checksumHashCode.

// --------------------------------------------------------------------------------------------
//  Checksum
// --------------------------------------------------------------------------------------------
/**
 * Convenience method to get the count (number of elements) of a DataSet
 * as well as the checksum (sum over element hashes).
 *
 * @return A ChecksumHashCode that represents the count and checksum of elements in the data set.
 * @deprecated replaced with {@code org.apache.flink.graph.asm.dataset.ChecksumHashCode} in Gelly
 */
@Deprecated
public static <T> Utils.ChecksumHashCode checksumHashCode(DataSet<T> input) throws Exception {
    // Unique accumulator key so concurrently running jobs cannot collide.
    final String accumulatorId = new AbstractID().toString();
    // The helper output format accumulates count + hash sum under the generated key.
    input.output(new Utils.ChecksumHashCodeHelper<T>(accumulatorId)).name("ChecksumHashCode");
    // Triggers eager execution of the whole plan attached to this environment.
    final JobExecutionResult executionResult = input.getExecutionEnvironment().execute();
    return executionResult.<Utils.ChecksumHashCode>getAccumulatorResult(accumulatorId);
}
Also used : JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) AbstractID(org.apache.flink.util.AbstractID)

Example 12 with JobExecutionResult

use of org.apache.flink.api.common.JobExecutionResult in project beam by apache.

Source: the class FlinkRunner, method run.

/**
 * Translates the Beam {@link Pipeline} into a Flink program and executes it.
 *
 * <p>Returns a {@code FlinkDetachedRunnerResult} when the job was submitted in
 * detached mode, otherwise a {@code FlinkRunnerResult} carrying the final
 * accumulator values and the net runtime.
 *
 * @param pipeline the Beam pipeline to execute
 * @return the result of the pipeline execution
 */
@Override
public PipelineResult run(Pipeline pipeline) {
    logWarningIfPCollectionViewHasNonDeterministicKeyCoder(pipeline);
    MetricsEnvironment.setMetricsSupported(true);
    LOG.info("Executing pipeline using FlinkRunner.");
    FlinkPipelineExecutionEnvironment env = new FlinkPipelineExecutionEnvironment(options);
    LOG.info("Translating pipeline to Flink program.");
    env.translate(this, pipeline);
    JobExecutionResult result;
    try {
        LOG.info("Starting execution of Flink program.");
        result = env.executePipeline();
    } catch (Exception e) {
        // Preserve the cause so callers see the underlying failure.
        LOG.error("Pipeline execution failed", e);
        throw new RuntimeException("Pipeline execution failed", e);
    }
    if (result instanceof DetachedEnvironment.DetachedJobExecutionResult) {
        // Detached submission: no runtime or accumulator data is available.
        LOG.info("Pipeline submitted in Detached mode");
        return new FlinkDetachedRunnerResult();
    } else {
        LOG.info("Execution finished in {} msecs", result.getNetRuntime());
        Map<String, Object> accumulators = result.getAllAccumulatorResults();
        if (accumulators != null && !accumulators.isEmpty()) {
            LOG.info("Final accumulator values:");
            // Reuse the map fetched above instead of calling
            // result.getAllAccumulatorResults() a second time.
            for (Map.Entry<String, Object> entry : accumulators.entrySet()) {
                LOG.info("{} : {}", entry.getKey(), entry.getValue());
            }
        }
        return new FlinkRunnerResult(accumulators, result.getNetRuntime());
    }
}
Also used : JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) Map(java.util.Map) URISyntaxException(java.net.URISyntaxException)

Example 13 with JobExecutionResult

use of org.apache.flink.api.common.JobExecutionResult in project flink by apache.

Source: the class CliFrontend, method executeProgram.

// --------------------------------------------------------------------------------------------
//  Interaction with programs and JobManager
// --------------------------------------------------------------------------------------------
/**
 * Runs the given packaged program on the cluster and reports the outcome.
 *
 * @param program the user program to execute
 * @param client the client connected to the target cluster
 * @param parallelism the parallelism with which to run the program
 * @return 0 on success, 1 on failure (also used as the process exit code)
 */
protected int executeProgram(PackagedProgram program, ClusterClient client, int parallelism) {
    logAndSysout("Starting execution of program");
    JobSubmissionResult result;
    try {
        result = client.run(program, parallelism);
    } catch (ProgramParametrizationException e) {
        return handleParametrizationException(e);
    } catch (ProgramMissingJobException e) {
        return handleMissingJobException();
    } catch (ProgramInvocationException e) {
        return handleError(e);
    } finally {
        // Always clean up the jar files extracted from the user program.
        program.deleteExtractedLibraries();
    }
    if (null == result) {
        logAndSysout("No JobSubmissionResult returned, please make sure you called " + "ExecutionEnvironment.execute()");
        return 1;
    }
    if (result.isJobExecutionResult()) {
        // Blocking execution: the job already finished; print its summary.
        logAndSysout("Program execution finished");
        JobExecutionResult execResult = result.getJobExecutionResult();
        System.out.println("Job with JobID " + execResult.getJobID() + " has finished.");
        System.out.println("Job Runtime: " + execResult.getNetRuntime() + " ms");
        Map<String, Object> accumulatorsResult = execResult.getAllAccumulatorResults();
        // isEmpty() is the idiomatic emptiness check (was: size() > 0).
        if (!accumulatorsResult.isEmpty()) {
            System.out.println("Accumulator Results: ");
            System.out.println(AccumulatorHelper.getResultsFormated(accumulatorsResult));
        }
    } else {
        // Detached submission: only the JobID is known at this point.
        logAndSysout("Job has been submitted with JobID " + result.getJobID());
    }
    return 0;
}
Also used : JobSubmissionResult(org.apache.flink.api.common.JobSubmissionResult) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) ProgramParametrizationException(org.apache.flink.client.program.ProgramParametrizationException) ProgramInvocationException(org.apache.flink.client.program.ProgramInvocationException) ProgramMissingJobException(org.apache.flink.client.program.ProgramMissingJobException)

Example 14 with JobExecutionResult

use of org.apache.flink.api.common.JobExecutionResult in project flink by apache.

Source: the class EmptyFieldsCountAccumulator, method main.

/**
 * Filters out lines with empty fields and prints, per column, how many empty
 * fields were detected (collected via the EMPTY_FIELD_ACCUMULATOR key).
 */
public static void main(final String[] args) throws Exception {
    final ParameterTool parameters = ParameterTool.fromArgs(args);
    final ExecutionEnvironment environment = ExecutionEnvironment.getExecutionEnvironment();
    // Make the CLI parameters visible in the web interface.
    environment.getConfig().setGlobalJobParameters(parameters);
    // Read the input data set.
    final DataSet<StringTriple> inputLines = getDataSet(environment, parameters);
    // Drop lines that contain empty fields; further processing could go here.
    final DataSet<StringTriple> nonEmptyLines = inputLines.filter(new EmptyFieldFilter());
    final JobExecutionResult jobResult;
    if (parameters.has("output")) {
        // Write the filtered lines and trigger execution explicitly.
        nonEmptyLines.writeAsCsv(parameters.get("output"));
        jobResult = environment.execute("Accumulator example");
    } else {
        System.out.println("Printing result to stdout. Use --output to specify output path.");
        // print() triggers execution itself; fetch the result it produced.
        nonEmptyLines.print();
        jobResult = environment.getLastJobExecutionResult();
    }
    // Retrieve the accumulator value via its registration key.
    final List<Integer> emptyFields = jobResult.getAccumulatorResult(EMPTY_FIELD_ACCUMULATOR);
    System.out.format("Number of detected empty fields per column: %s\n", emptyFields);
}
Also used : ParameterTool(org.apache.flink.api.java.utils.ParameterTool) JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment)

Example 15 with JobExecutionResult

use of org.apache.flink.api.common.JobExecutionResult in project flink by apache.

Source: the class DataSet, method count.

/**
 * Convenience method to get the count (number of elements) of a DataSet.
 *
 * @return A long integer that represents the number of elements in the data set.
 */
public long count() throws Exception {
    // Register a counting sink under a unique accumulator key.
    final String accumulatorKey = new AbstractID().toString();
    output(new Utils.CountHelper<T>(accumulatorKey)).name("count()");
    // Execute the plan eagerly and read the accumulated count back.
    final JobExecutionResult executionResult = getExecutionEnvironment().execute();
    return executionResult.<Long>getAccumulatorResult(accumulatorKey);
}
Also used : JobExecutionResult(org.apache.flink.api.common.JobExecutionResult) AbstractID(org.apache.flink.util.AbstractID)

Aggregations

JobExecutionResult (org.apache.flink.api.common.JobExecutionResult)28 ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment)10 ParameterTool (org.apache.flink.api.java.utils.ParameterTool)7 ProgramParametrizationException (org.apache.flink.client.program.ProgramParametrizationException)7 NumberFormat (java.text.NumberFormat)6 JDKRandomGeneratorFactory (org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory)6 LongValue (org.apache.flink.types.LongValue)6 NullValue (org.apache.flink.types.NullValue)6 Graph (org.apache.flink.graph.Graph)5 GraphCsvReader (org.apache.flink.graph.GraphCsvReader)5 LongValueToUnsignedIntValue (org.apache.flink.graph.asm.translate.translators.LongValueToUnsignedIntValue)5 RMatGraph (org.apache.flink.graph.generator.RMatGraph)5 RandomGenerableFactory (org.apache.flink.graph.generator.random.RandomGenerableFactory)5 IntValue (org.apache.flink.types.IntValue)5 StringValue (org.apache.flink.types.StringValue)5 Test (org.junit.Test)5 IOException (java.io.IOException)4 DataSet (org.apache.flink.api.java.DataSet)4 ProgramInvocationException (org.apache.flink.client.program.ProgramInvocationException)3 GraphAnalytic (org.apache.flink.graph.GraphAnalytic)3