Use of org.apache.flink.types.IntValue in project flink by apache.
The class GenericCsvInputFormatTest, method testReadNoPosFirstN:
@Test
public void testReadNoPosFirstN() throws IOException {
    try {
        final String fileContent = "111|222|333|444|555|\n666|777|888|999|000|";
        final FileInputSplit split = createTempFile(fileContent);
        final Configuration parameters = new Configuration();

        format.setFieldDelimiter("|");
        format.setFieldTypesGeneric(IntValue.class, IntValue.class);
        format.configure(parameters);
        format.open(split);

        Value[] values = createIntValues(2);

        // if this parsed all five fields, we would get an index-out-of-bounds exception
        values = format.nextRecord(values);
        assertNotNull(values);
        assertEquals(111, ((IntValue) values[0]).getValue());
        assertEquals(222, ((IntValue) values[1]).getValue());

        values = format.nextRecord(values);
        assertNotNull(values);
        assertEquals(666, ((IntValue) values[0]).getValue());
        assertEquals(777, ((IntValue) values[1]).getValue());

        assertNull(format.nextRecord(values));
        assertTrue(format.reachedEnd());
    } catch (Exception ex) {
        fail("Test failed due to a " + ex.getClass().getSimpleName() + ": " + ex.getMessage());
    }
}
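The helper createIntValues comes from the test's superclass and is not shown on this page. As a hedged sketch (the shape below is an assumption, not the shipped helper), it only needs to allocate one reusable IntValue holder per projected field:

// hypothetical sketch of the superclass helper createIntValues(int):
// one mutable IntValue holder per field to be parsed
private static Value[] createIntValues(int num) {
    Value[] values = new Value[num];
    for (int i = 0; i < num; i++) {
        values[i] = new IntValue();
    }
    return values;
}

Because only two field types are configured against five-field records, nextRecord parses just the first two columns into these holders and skips the rest.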
Use of org.apache.flink.types.IntValue in project flink by apache.
The class IntValueComparatorTest, method getSortedTestData:
@Override
protected IntValue[] getSortedTestData() {
    // fixed seed keeps the test data deterministic across runs
    Random rnd = new Random(874597969123412338L);
    int rndInt = rnd.nextInt();
    if (rndInt < 0) {
        rndInt = -rndInt;
    }
    if (rndInt == Integer.MAX_VALUE) {
        rndInt -= 3;
    }
    if (rndInt <= 2) {
        rndInt += 3;
    }
    // rndInt is now clamped into (2, Integer.MAX_VALUE), so the array is strictly ascending
    return new IntValue[] {
        new IntValue(Integer.MIN_VALUE),
        new IntValue(-rndInt),
        new IntValue(-1),
        new IntValue(0),
        new IntValue(1),
        new IntValue(2),
        new IntValue(rndInt),
        new IntValue(Integer.MAX_VALUE)
    };
}
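The comparator test harness relies on this array being strictly ascending. A minimal standalone check (not part of the Flink test base) using IntValue's natural ordering, which it provides by implementing Comparable through the Key interface:

IntValue[] data = getSortedTestData();
for (int i = 1; i < data.length; i++) {
    // compareTo must be strictly negative for an ascending sequence
    assert data[i - 1].compareTo(data[i]) < 0 : "test data is not strictly ascending";
}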
Use of org.apache.flink.types.IntValue in project flink by apache.
The class IntValueSerializerTest, method getTestData:
@Override
protected IntValue[] getTestData() {
    Random rnd = new Random(874597969123412341L);
    int rndInt = rnd.nextInt();
    return new IntValue[] {
        new IntValue(0), new IntValue(1), new IntValue(-1),
        new IntValue(Integer.MAX_VALUE), new IntValue(Integer.MIN_VALUE),
        new IntValue(rndInt), new IntValue(-rndInt)
    };
}
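The serializer test base round-trips each of these values through IntValueSerializer. A minimal sketch of such a round trip, assuming the DataOutputSerializer and DataInputDeserializer views from org.apache.flink.core.memory:

// standalone round-trip sketch, not the test harness itself
IntValueSerializer serializer = new IntValueSerializer();
DataOutputSerializer out = new DataOutputSerializer(16);
serializer.serialize(new IntValue(42), out);

DataInputDeserializer in = new DataInputDeserializer(out.getCopyOfBuffer());
IntValue copy = serializer.deserialize(in);
// copy.getValue() is 42 after the round trip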
Use of org.apache.flink.types.IntValue in project flink by apache.
The class TriangleListing, method main:
public static void main(String[] args) throws Exception {
    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();

    ParameterTool parameters = ParameterTool.fromArgs(args);
    env.getConfig().setGlobalJobParameters(parameters);

    if (!parameters.has("directed")) {
        throw new ProgramParametrizationException(getUsage("must declare execution mode as '--directed true' or '--directed false'"));
    }
    boolean directedAlgorithm = parameters.getBoolean("directed");

    int little_parallelism = parameters.getInt("little_parallelism", PARALLELISM_DEFAULT);
    boolean triadic_census = parameters.getBoolean("triadic_census", DEFAULT_TRIADIC_CENSUS);

    GraphAnalytic tc = null;
    DataSet tl;
    switch (parameters.get("input", "")) {
        case "csv": {
            String lineDelimiter = StringEscapeUtils.unescapeJava(
                parameters.get("input_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
            String fieldDelimiter = StringEscapeUtils.unescapeJava(
                parameters.get("input_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));

            GraphCsvReader reader = Graph
                .fromCsvReader(parameters.getRequired("input_filename"), env)
                .ignoreCommentsEdges("#")
                .lineDelimiterEdges(lineDelimiter)
                .fieldDelimiterEdges(fieldDelimiter);

            switch (parameters.get("type", "")) {
                case "integer": {
                    Graph<LongValue, NullValue, NullValue> graph = reader.keyType(LongValue.class);

                    if (directedAlgorithm) {
                        if (parameters.getBoolean("simplify", false)) {
                            graph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>()
                                .setParallelism(little_parallelism));
                        }
                        if (triadic_census) {
                            tc = graph.run(new org.apache.flink.graph.library.clustering.directed.TriadicCensus<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                        }
                        tl = graph.run(new org.apache.flink.graph.library.clustering.directed.TriangleListing<LongValue, NullValue, NullValue>()
                            .setLittleParallelism(little_parallelism));
                    } else {
                        if (parameters.getBoolean("simplify", false)) {
                            graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(false)
                                .setParallelism(little_parallelism));
                        }
                        if (triadic_census) {
                            tc = graph.run(new org.apache.flink.graph.library.clustering.undirected.TriadicCensus<LongValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                        }
                        tl = graph.run(new org.apache.flink.graph.library.clustering.undirected.TriangleListing<LongValue, NullValue, NullValue>()
                            .setLittleParallelism(little_parallelism));
                    }
                }
                break;

                case "string": {
                    Graph<StringValue, NullValue, NullValue> graph = reader.keyType(StringValue.class);

                    if (directedAlgorithm) {
                        if (parameters.getBoolean("simplify", false)) {
                            graph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<StringValue, NullValue, NullValue>()
                                .setParallelism(little_parallelism));
                        }
                        if (triadic_census) {
                            tc = graph.run(new org.apache.flink.graph.library.clustering.directed.TriadicCensus<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                        }
                        tl = graph.run(new org.apache.flink.graph.library.clustering.directed.TriangleListing<StringValue, NullValue, NullValue>()
                            .setLittleParallelism(little_parallelism));
                    } else {
                        if (parameters.getBoolean("simplify", false)) {
                            graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<StringValue, NullValue, NullValue>(false)
                                .setParallelism(little_parallelism));
                        }
                        if (triadic_census) {
                            tc = graph.run(new org.apache.flink.graph.library.clustering.undirected.TriadicCensus<StringValue, NullValue, NullValue>()
                                .setLittleParallelism(little_parallelism));
                        }
                        tl = graph.run(new org.apache.flink.graph.library.clustering.undirected.TriangleListing<StringValue, NullValue, NullValue>()
                            .setLittleParallelism(little_parallelism));
                    }
                }
                break;

                default:
                    throw new ProgramParametrizationException(getUsage("invalid CSV type"));
            }
        }
        break;
case "rmat":
{
int scale = parameters.getInt("scale", DEFAULT_SCALE);
int edgeFactor = parameters.getInt("edge_factor", DEFAULT_EDGE_FACTOR);
RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();
long vertexCount = 1L << scale;
long edgeCount = vertexCount * edgeFactor;
Graph<LongValue, NullValue, NullValue> graph = new RMatGraph<>(env, rnd, vertexCount, edgeCount).generate();
if (directedAlgorithm) {
if (scale > 32) {
Graph<LongValue, NullValue, NullValue> simpleGraph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>().setParallelism(little_parallelism));
if (triadic_census) {
tc = simpleGraph.run(new org.apache.flink.graph.library.clustering.directed.TriadicCensus<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
}
tl = simpleGraph.run(new org.apache.flink.graph.library.clustering.directed.TriangleListing<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
} else {
Graph<LongValue, NullValue, NullValue> simpleGraph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>().setParallelism(little_parallelism));
if (triadic_census) {
tc = simpleGraph.run(new org.apache.flink.graph.library.clustering.directed.TriadicCensus<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
}
tl = simpleGraph.run(new org.apache.flink.graph.library.clustering.directed.TriangleListing<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
}
} else {
boolean clipAndFlip = parameters.getBoolean("clip_and_flip", DEFAULT_CLIP_AND_FLIP);
if (scale > 32) {
Graph<LongValue, NullValue, NullValue> simpleGraph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(clipAndFlip).setParallelism(little_parallelism));
if (triadic_census) {
tc = simpleGraph.run(new org.apache.flink.graph.library.clustering.undirected.TriadicCensus<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
}
tl = simpleGraph.run(new org.apache.flink.graph.library.clustering.undirected.TriangleListing<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
} else {
Graph<IntValue, NullValue, NullValue> simpleGraph = graph.run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()).setParallelism(little_parallelism)).run(new org.apache.flink.graph.asm.simple.undirected.Simplify<IntValue, NullValue, NullValue>(clipAndFlip).setParallelism(little_parallelism));
if (triadic_census) {
tc = simpleGraph.run(new org.apache.flink.graph.library.clustering.undirected.TriadicCensus<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
}
tl = simpleGraph.run(new org.apache.flink.graph.library.clustering.undirected.TriangleListing<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
}
}
}
break;
        default:
            throw new ProgramParametrizationException(getUsage("invalid input type"));
    }

    switch (parameters.get("output", "")) {
        case "print":
            System.out.println();
            if (directedAlgorithm) {
                for (Object e : tl.collect()) {
                    org.apache.flink.graph.library.clustering.directed.TriangleListing.Result result =
                        (org.apache.flink.graph.library.clustering.directed.TriangleListing.Result) e;
                    System.out.println(result.toPrintableString());
                }
            } else {
                tl.print();
            }
            break;

        case "hash":
            System.out.println();
            System.out.println(DataSetUtils.checksumHashCode(tl));
            break;

        case "csv":
            String filename = parameters.getRequired("output_filename");
            String lineDelimiter = StringEscapeUtils.unescapeJava(
                parameters.get("output_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
            String fieldDelimiter = StringEscapeUtils.unescapeJava(
                parameters.get("output_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));
            tl.writeAsCsv(filename, lineDelimiter, fieldDelimiter);
            env.execute();
            break;

        default:
            throw new ProgramParametrizationException(getUsage("invalid output type"));
    }

    if (tc != null) {
        System.out.print("Triadic census:\n ");
        System.out.println(tc.getResult().toString().replace(";", "\n "));
    }

    JobExecutionResult result = env.getLastJobExecutionResult();

    NumberFormat nf = NumberFormat.getInstance();
    System.out.println();
    System.out.println("Execution runtime: " + nf.format(result.getNetRuntime()) + " ms");
}
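When scale is at most 32, every generated vertex id fits into 32 bits, so the driver halves the key width by translating LongValue ids to IntValue with TranslateGraphIds and LongValueToUnsignedIntValue. A hedged sketch of such a translator against the TranslateFunction interface (the class name below is hypothetical; the shipped implementation is org.apache.flink.graph.asm.translate.LongValueToUnsignedIntValue and may differ in detail):

// hypothetical sketch, not the shipped LongValueToUnsignedIntValue
public class LongToUnsignedIntTranslator implements TranslateFunction<LongValue, IntValue> {

    @Override
    public IntValue translate(LongValue value, IntValue reuse) throws Exception {
        if (reuse == null) {
            reuse = new IntValue();
        }
        long id = value.getValue();
        if (id < 0 || id >= (1L << 32)) {
            throw new IllegalArgumentException("id " + id + " does not fit an unsigned 32-bit value");
        }
        // store the unsigned value in the int's raw bit pattern
        reuse.setValue((int) id);
        return reuse;
    }
}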
Use of org.apache.flink.types.IntValue in project flink by apache.
The class IterationSynchronizationSinkTask, method invoke:
// --------------------------------------------------------------------------------------------
@Override
public void invoke() throws Exception {
    this.headEventReader = new MutableRecordReader<>(
        getEnvironment().getInputGate(0),
        getEnvironment().getTaskManagerInfo().getTmpDirectories());

    TaskConfig taskConfig = new TaskConfig(getTaskConfiguration());

    // store all aggregators
    this.aggregators = new HashMap<>();
    for (AggregatorWithName<?> aggWithName : taskConfig.getIterationAggregators(getUserCodeClassLoader())) {
        aggregators.put(aggWithName.getName(), aggWithName.getAggregator());
    }

    // store the aggregator convergence criterion
    if (taskConfig.usesConvergenceCriterion()) {
        convergenceCriterion = taskConfig.getConvergenceCriterion(getUserCodeClassLoader());
        convergenceAggregatorName = taskConfig.getConvergenceCriterionAggregatorName();
        Preconditions.checkNotNull(convergenceAggregatorName);
    }

    // store the default aggregator convergence criterion
    if (taskConfig.usesImplicitConvergenceCriterion()) {
        implicitConvergenceCriterion = taskConfig.getImplicitConvergenceCriterion(getUserCodeClassLoader());
        implicitConvergenceAggregatorName = taskConfig.getImplicitConvergenceCriterionAggregatorName();
        Preconditions.checkNotNull(implicitConvergenceAggregatorName);
    }

    maxNumberOfIterations = taskConfig.getNumberOfIterations();

    // set up the event handler
    int numEventsTillEndOfSuperstep = taskConfig.getNumberOfEventsUntilInterruptInIterativeGate(0);
    eventHandler = new SyncEventHandler(numEventsTillEndOfSuperstep, aggregators, getEnvironment().getUserClassLoader());
    headEventReader.registerTaskEventListener(eventHandler, WorkerDoneEvent.class);

    IntValue dummy = new IntValue();

    while (!terminationRequested()) {
        if (log.isInfoEnabled()) {
            log.info(formatLogString("starting iteration [" + currentIteration + "]"));
        }

        // this call listens for events until the end-of-superstep is reached
        readHeadEventChannel(dummy);

        if (log.isInfoEnabled()) {
            log.info(formatLogString("finishing iteration [" + currentIteration + "]"));
        }

        if (checkForConvergence()) {
            if (log.isInfoEnabled()) {
                log.info(formatLogString("signaling that all workers are to terminate in iteration [" + currentIteration + "]"));
            }
            requestTermination();
            sendToAllWorkers(new TerminationEvent());
        } else {
            if (log.isInfoEnabled()) {
                log.info(formatLogString("signaling that all workers are done in iteration [" + currentIteration + "]"));
            }
            AllWorkersDoneEvent allWorkersDoneEvent = new AllWorkersDoneEvent(aggregators);
            sendToAllWorkers(allWorkersDoneEvent);

            // reset all aggregators
            for (Aggregator<?> agg : aggregators.values()) {
                agg.reset();
            }
            currentIteration++;
        }
    }
}
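The sync task reads the head event channel into a single reusable IntValue (dummy) because the records on that channel carry no payload of interest; one mutable holder is recycled across supersteps instead of allocating a new record each time. A standalone illustration of the same reuse pattern (plain Java, not Flink runtime code):

IntValue reuse = new IntValue();
for (int i = 0; i < 3; i++) {
    reuse.setValue(i); // overwrite the single holder in place
    System.out.println(reuse.getValue());
}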