
Example 36 with IntValue

Use of org.apache.flink.types.IntValue in the Apache Flink project.

From class OutputEmitterTest, method testWrongKeyClass.

@Test
public void testWrongKeyClass() {
    // The comparator is deliberately declared over DoubleValue, while the record built
    // below carries an IntValue in field 0: the "wrong key class" under test.
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> doubleComp =
            new RecordComparatorFactory(new int[] { 0 }, new Class[] { DoubleValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 =
            new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, doubleComp);
    final SerializationDelegate<Record> delegate =
            new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    Record rec = null;
    try {
        // Round-trip the record through binary serialization so that the IntValue key
        // field must be lazily deserialized when the partitioner later extracts the key.
        PipedInputStream pipedInput = new PipedInputStream(1024 * 1024);
        DataInputView in = new DataInputViewStreamWrapper(pipedInput);
        DataOutputView out = new DataOutputViewStreamWrapper(new PipedOutputStream(pipedInput));
        rec = new Record(1);
        rec.setField(0, new IntValue());
        rec.write(out);
        rec = new Record();
        rec.read(in);
    } catch (IOException e) {
        fail("Test erroneous");
    }
    try {
        delegate.setInstance(rec);
        oe1.selectChannels(delegate, 100);
    } catch (DeserializationException re) {
        // expected: the DoubleValue comparator cannot deserialize the IntValue key field
        return;
    }
    Assert.fail("Expected a DeserializationException.");
}
Also used: RecordComparatorFactory(org.apache.flink.runtime.testutils.recordutils.RecordComparatorFactory), DataInputView(org.apache.flink.core.memory.DataInputView), RecordSerializerFactory(org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory), SerializationDelegate(org.apache.flink.runtime.plugable.SerializationDelegate), DataOutputView(org.apache.flink.core.memory.DataOutputView), PipedOutputStream(java.io.PipedOutputStream), PipedInputStream(java.io.PipedInputStream), IOException(java.io.IOException), DataInputViewStreamWrapper(org.apache.flink.core.memory.DataInputViewStreamWrapper), DeserializationException(org.apache.flink.types.DeserializationException), OutputEmitter(org.apache.flink.runtime.operators.shipping.OutputEmitter), DataOutputViewStreamWrapper(org.apache.flink.core.memory.DataOutputViewStreamWrapper), DoubleValue(org.apache.flink.types.DoubleValue), Record(org.apache.flink.types.Record), IntValue(org.apache.flink.types.IntValue), Test(org.junit.Test)
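
For contrast, a minimal happy-path sketch (an illustrative addition, not part of OutputEmitterTest; the method name and the value 42 are arbitrary): when the comparator's key class matches the record's field type, key extraction succeeds and the hash partitioner selects channels instead of throwing.

@Test
public void testMatchingKeyClassSketch() {
    // comparator key class (IntValue) matches the type stored in field 0
    @SuppressWarnings({ "unchecked", "rawtypes" })
    final TypeComparator<Record> intComp =
            new RecordComparatorFactory(new int[] { 0 }, new Class[] { IntValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe =
            new OutputEmitter<Record>(ShipStrategyType.PARTITION_HASH, intComp);
    final SerializationDelegate<Record> delegate =
            new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());

    Record rec = new Record(1);
    rec.setField(0, new IntValue(42));
    delegate.setInstance(rec);

    // no exception expected; the key is hashed and mapped onto one of the 100 channels
    oe.selectChannels(delegate, 100);
}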

Example 37 with IntValue

Use of org.apache.flink.types.IntValue in the Apache Flink project.

From class GraphMetrics, method main.

public static void main(String[] args) throws Exception {
    // Set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    ParameterTool parameters = ParameterTool.fromArgs(args);
    env.getConfig().setGlobalJobParameters(parameters);
    if (!parameters.has("directed")) {
        throw new ProgramParametrizationException(getUsage("must declare execution mode as '--directed true' or '--directed false'"));
    }
    boolean directedAlgorithm = parameters.getBoolean("directed");
    GraphAnalytic vm;
    GraphAnalytic em;
    switch(parameters.get("input", "")) {
        case "csv":
            {
                String lineDelimiter = StringEscapeUtils.unescapeJava(
                        parameters.get("input_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
                String fieldDelimiter = StringEscapeUtils.unescapeJava(
                        parameters.get("input_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));
                GraphCsvReader reader = Graph.fromCsvReader(parameters.getRequired("input_filename"), env)
                        .ignoreCommentsEdges("#")
                        .lineDelimiterEdges(lineDelimiter)
                        .fieldDelimiterEdges(fieldDelimiter);
                switch(parameters.get("type", "")) {
                    case "integer":
                        {
                            Graph<LongValue, NullValue, NullValue> graph = reader.keyType(LongValue.class);
                            if (directedAlgorithm) {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>());
                                }
                                vm = graph.run(new org.apache.flink.graph.library.metric.directed.VertexMetrics<LongValue, NullValue, NullValue>());
                                em = graph.run(new org.apache.flink.graph.library.metric.directed.EdgeMetrics<LongValue, NullValue, NullValue>());
                            } else {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(false));
                                }
                                vm = graph.run(new org.apache.flink.graph.library.metric.undirected.VertexMetrics<LongValue, NullValue, NullValue>());
                                em = graph.run(new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<LongValue, NullValue, NullValue>());
                            }
                        }
                        break;
                    case "string":
                        {
                            Graph<StringValue, NullValue, NullValue> graph = reader.keyType(StringValue.class);
                            if (directedAlgorithm) {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<StringValue, NullValue, NullValue>());
                                }
                                vm = graph.run(new org.apache.flink.graph.library.metric.directed.VertexMetrics<StringValue, NullValue, NullValue>());
                                em = graph.run(new org.apache.flink.graph.library.metric.directed.EdgeMetrics<StringValue, NullValue, NullValue>());
                            } else {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<StringValue, NullValue, NullValue>(false));
                                }
                                vm = graph.run(new org.apache.flink.graph.library.metric.undirected.VertexMetrics<StringValue, NullValue, NullValue>());
                                em = graph.run(new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<StringValue, NullValue, NullValue>());
                            }
                        }
                        break;
                    default:
                        throw new ProgramParametrizationException(getUsage("invalid CSV type"));
                }
            }
            break;
        case "rmat":
            {
                int scale = parameters.getInt("scale", DEFAULT_SCALE);
                int edgeFactor = parameters.getInt("edge_factor", DEFAULT_EDGE_FACTOR);
                RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();
                long vertexCount = 1L << scale;
                long edgeCount = vertexCount * edgeFactor;
                Graph<LongValue, NullValue, NullValue> graph = new RMatGraph<>(env, rnd, vertexCount, edgeCount).generate();
                if (directedAlgorithm) {
                    if (scale > 32) {
                        Graph<LongValue, NullValue, NullValue> newGraph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>());
                        vm = newGraph.run(new org.apache.flink.graph.library.metric.directed.VertexMetrics<LongValue, NullValue, NullValue>());
                        em = newGraph.run(new org.apache.flink.graph.library.metric.directed.EdgeMetrics<LongValue, NullValue, NullValue>());
                    } else {
                        Graph<IntValue, NullValue, NullValue> newGraph = graph
                                .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()))
                                .run(new org.apache.flink.graph.asm.simple.directed.Simplify<IntValue, NullValue, NullValue>());
                        vm = newGraph.run(new org.apache.flink.graph.library.metric.directed.VertexMetrics<IntValue, NullValue, NullValue>());
                        em = newGraph.run(new org.apache.flink.graph.library.metric.directed.EdgeMetrics<IntValue, NullValue, NullValue>());
                    }
                } else {
                    boolean clipAndFlip = parameters.getBoolean("clip_and_flip", DEFAULT_CLIP_AND_FLIP);
                    if (scale > 32) {
                        Graph<LongValue, NullValue, NullValue> newGraph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(clipAndFlip));
                        vm = newGraph.run(new org.apache.flink.graph.library.metric.undirected.VertexMetrics<LongValue, NullValue, NullValue>());
                        em = newGraph.run(new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<LongValue, NullValue, NullValue>());
                    } else {
                        Graph<IntValue, NullValue, NullValue> newGraph = graph
                                .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()))
                                .run(new org.apache.flink.graph.asm.simple.undirected.Simplify<IntValue, NullValue, NullValue>(clipAndFlip));
                        vm = newGraph.run(new org.apache.flink.graph.library.metric.undirected.VertexMetrics<IntValue, NullValue, NullValue>());
                        em = newGraph.run(new org.apache.flink.graph.library.metric.undirected.EdgeMetrics<IntValue, NullValue, NullValue>());
                    }
                }
            }
            break;
        default:
            throw new ProgramParametrizationException(getUsage("invalid input type"));
    }
    env.execute("Graph Metrics");
    System.out.println();
    System.out.print("Vertex metrics:\n  ");
    System.out.println(vm.getResult().toString().replace(";", "\n "));
    System.out.println();
    System.out.print("Edge metrics:\n  ");
    System.out.println(em.getResult().toString().replace(";", "\n "));
    JobExecutionResult result = env.getLastJobExecutionResult();
    NumberFormat nf = NumberFormat.getInstance();
    System.out.println();
    System.out.println("Execution runtime: " + nf.format(result.getNetRuntime()) + " ms");
}
Also used: ParameterTool(org.apache.flink.api.java.utils.ParameterTool), LongValueToUnsignedIntValue(org.apache.flink.graph.asm.translate.translators.LongValueToUnsignedIntValue), ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment), RandomGenerableFactory(org.apache.flink.graph.generator.random.RandomGenerableFactory), GraphAnalytic(org.apache.flink.graph.GraphAnalytic), JDKRandomGeneratorFactory(org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory), NullValue(org.apache.flink.types.NullValue), StringValue(org.apache.flink.types.StringValue), IntValue(org.apache.flink.types.IntValue), JobExecutionResult(org.apache.flink.api.common.JobExecutionResult), GraphCsvReader(org.apache.flink.graph.GraphCsvReader), RMatGraph(org.apache.flink.graph.generator.RMatGraph), Graph(org.apache.flink.graph.Graph), ProgramParametrizationException(org.apache.flink.client.program.ProgramParametrizationException), LongValue(org.apache.flink.types.LongValue), NumberFormat(java.text.NumberFormat)
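
To isolate the IntValue-specific branch of the example above, here is a hedged sketch that reuses only classes already referenced by GraphMetrics (the scale of 16 is an arbitrary illustration): RMat graphs are generated with LongValue IDs, and while the scale fits in 32 bits the IDs are translated to IntValue before simplification and the metric analytics.

ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();

// small, arbitrary scale; the IntValue translation is only valid while scale <= 32
long vertexCount = 1L << 16;
long edgeCount = vertexCount * 16;

Graph<LongValue, NullValue, NullValue> longGraph =
        new RMatGraph<>(env, rnd, vertexCount, edgeCount).generate();

// map each 64-bit ID onto a 32-bit IntValue, then remove self-loops and duplicate edges
Graph<IntValue, NullValue, NullValue> intGraph = longGraph
        .run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()))
        .run(new org.apache.flink.graph.asm.simple.directed.Simplify<IntValue, NullValue, NullValue>());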

Example 38 with IntValue

Use of org.apache.flink.types.IntValue in the Apache Flink project.

From class LongValueToUnsignedIntValueTest, method testTranslation.

@Test
public void testTranslation() throws Exception {
    assertEquals(new IntValue(0), translator.translate(new LongValue(0L), reuse));
    assertEquals(new IntValue(Integer.MIN_VALUE), translator.translate(new LongValue((long) Integer.MAX_VALUE + 1), reuse));
    assertEquals(new IntValue(-1), translator.translate(new LongValue((1L << 32) - 1), reuse));
}
Also used: LongValue(org.apache.flink.types.LongValue), IntValue(org.apache.flink.types.IntValue), Test(org.junit.Test)
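
A plain-Java sketch of the mapping those assertions imply (an illustration of the semantics, not the translator's source; the helper name is hypothetical): the low 32 bits of the long are reinterpreted as a signed int, so inputs at or above 2^31 wrap into the negative range.

static int lowBitsAsSignedInt(long value) {
    // keep the low 32 bits; the narrowing cast reinterprets them as a signed int
    return (int) (value & 0xFFFFFFFFL);
}

// lowBitsAsSignedInt(0L)                           == 0
// lowBitsAsSignedInt((long) Integer.MAX_VALUE + 1) == Integer.MIN_VALUE
// lowBitsAsSignedInt((1L << 32) - 1)               == -1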

Example 39 with IntValue

Use of org.apache.flink.types.IntValue in the Apache Flink project.

From class ToNullValueTest, method testTranslation.

@Test
public void testTranslation() throws Exception {
    NullValue reuse = NullValue.getInstance();
    assertEquals(NullValue.getInstance(), new ToNullValue<>().translate(new DoubleValue(), reuse));
    assertEquals(NullValue.getInstance(), new ToNullValue<>().translate(new FloatValue(), reuse));
    assertEquals(NullValue.getInstance(), new ToNullValue<>().translate(new IntValue(), reuse));
    assertEquals(NullValue.getInstance(), new ToNullValue<>().translate(new LongValue(), reuse));
    assertEquals(NullValue.getInstance(), new ToNullValue<>().translate(new StringValue(), reuse));
}
Also used: NullValue(org.apache.flink.types.NullValue), DoubleValue(org.apache.flink.types.DoubleValue), LongValue(org.apache.flink.types.LongValue), FloatValue(org.apache.flink.types.FloatValue), StringValue(org.apache.flink.types.StringValue), IntValue(org.apache.flink.types.IntValue), Test(org.junit.Test)
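
A small usage sketch (the helper name and the input value 7 are arbitrary; the throws clause mirrors the test method above, which declares throws Exception): a single stateless ToNullValue instance can strip values of any type, always returning the NullValue singleton.

static NullValue dropValue(IntValue value) throws Exception {
    NullValue reuse = NullValue.getInstance();
    return new ToNullValue<IntValue>().translate(value, reuse);
}

// dropValue(new IntValue(7)) == NullValue.getInstance()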

Example 40 with IntValue

Use of org.apache.flink.types.IntValue in the Apache Flink project.

From class SimplifyTest, method setup.

@Before
public void setup() {
    ExecutionEnvironment env = ExecutionEnvironment.createCollectionsEnvironment();
    Object[][] edges = new Object[][] {
        new Object[] { 0, 0 },
        new Object[] { 0, 1 },
        new Object[] { 0, 1 },
        new Object[] { 0, 2 },
        new Object[] { 0, 2 },
        new Object[] { 1, 0 },
        new Object[] { 2, 2 }
    };
    List<Edge<IntValue, NullValue>> edgeList = new LinkedList<>();
    for (Object[] edge : edges) {
        edgeList.add(new Edge<>(new IntValue((int) edge[0]), new IntValue((int) edge[1]), NullValue.getInstance()));
    }
    graph = Graph.fromCollection(edgeList, env);
}
Also used: ExecutionEnvironment(org.apache.flink.api.java.ExecutionEnvironment), Edge(org.apache.flink.graph.Edge), IntValue(org.apache.flink.types.IntValue), LinkedList(java.util.LinkedList), Before(org.junit.Before)
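
A hedged sketch of an assertion over this fixture (the method name is hypothetical and this is not the project's actual test body; it assumes the usual JUnit and java.util.List imports): the directed Simplify should drop the self-loops (0, 0) and (2, 2) and deduplicate the repeated (0, 1) and (0, 2) edges, leaving three edges.

@Test
public void testDirectedSimplifySketch() throws Exception {
    Graph<IntValue, NullValue, NullValue> simplified =
            graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<IntValue, NullValue, NullValue>());

    // expected to leave exactly (0, 1), (0, 2), and (1, 0)
    List<Edge<IntValue, NullValue>> remaining = simplified.getEdges().collect();
    assertEquals(3, remaining.size());
}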

Aggregations

IntValue (org.apache.flink.types.IntValue): 65 usages
Test (org.junit.Test): 41 usages
StringValue (org.apache.flink.types.StringValue): 36 usages
IOException (java.io.IOException): 23 usages
Record (org.apache.flink.types.Record): 23 usages
LongValue (org.apache.flink.types.LongValue): 20 usages
DoubleValue (org.apache.flink.types.DoubleValue): 15 usages
Configuration (org.apache.flink.configuration.Configuration): 12 usages
FileInputSplit (org.apache.flink.core.fs.FileInputSplit): 12 usages
Value (org.apache.flink.types.Value): 12 usages
ArrayList (java.util.ArrayList): 9 usages
Before (org.junit.Before): 9 usages
OutputEmitter (org.apache.flink.runtime.operators.shipping.OutputEmitter): 8 usages
SerializationDelegate (org.apache.flink.runtime.plugable.SerializationDelegate): 8 usages
RecordSerializerFactory (org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory): 8 usages
NoSuchElementException (java.util.NoSuchElementException): 7 usages
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 7 usages
RecordComparatorFactory (org.apache.flink.runtime.testutils.recordutils.RecordComparatorFactory): 7 usages
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 6 usages
NullValue (org.apache.flink.types.NullValue): 6 usages