Example 1 with StringValue

Use of org.apache.flink.types.StringValue in project flink by apache.

From the class ClusteringCoefficient, method main:

public static void main(String[] args) throws Exception {
    // Set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    ParameterTool parameters = ParameterTool.fromArgs(args);
    env.getConfig().setGlobalJobParameters(parameters);
    if (!parameters.has("directed")) {
        throw new ProgramParametrizationException(getUsage("must declare execution mode as '--directed true' or '--directed false'"));
    }
    boolean directedAlgorithm = parameters.getBoolean("directed");
    int little_parallelism = parameters.getInt("little_parallelism", PARALLELISM_DEFAULT);
    // global, average, and local clustering coefficient results
    GraphAnalytic gcc;
    GraphAnalytic acc;
    DataSet lcc;
    switch(parameters.get("input", "")) {
        case "csv":
            {
                String lineDelimiter = StringEscapeUtils.unescapeJava(parameters.get("input_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
                String fieldDelimiter = StringEscapeUtils.unescapeJava(parameters.get("input_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));
                GraphCsvReader reader = Graph.fromCsvReader(parameters.getRequired("input_filename"), env).ignoreCommentsEdges("#").lineDelimiterEdges(lineDelimiter).fieldDelimiterEdges(fieldDelimiter);
                switch(parameters.get("type", "")) {
                    case "integer":
                        {
                            Graph<LongValue, NullValue, NullValue> graph = reader.keyType(LongValue.class);
                            if (directedAlgorithm) {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>().setParallelism(little_parallelism));
                                }
                                gcc = graph.run(new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                acc = graph.run(new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                lcc = graph.run(new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                            } else {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(false).setParallelism(little_parallelism));
                                }
                                gcc = graph.run(new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                acc = graph.run(new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                lcc = graph.run(new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                            }
                        }
                        break;
                    case "string":
                        {
                            Graph<StringValue, NullValue, NullValue> graph = reader.keyType(StringValue.class);
                            if (directedAlgorithm) {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<StringValue, NullValue, NullValue>().setParallelism(little_parallelism));
                                }
                                gcc = graph.run(new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                acc = graph.run(new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                lcc = graph.run(new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                            } else {
                                if (parameters.getBoolean("simplify", false)) {
                                    graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<StringValue, NullValue, NullValue>(false).setParallelism(little_parallelism));
                                }
                                gcc = graph.run(new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                acc = graph.run(new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                                lcc = graph.run(new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                            }
                        }
                        break;
                    default:
                        throw new ProgramParametrizationException(getUsage("invalid CSV type"));
                }
            }
            break;
        case "rmat":
            {
                int scale = parameters.getInt("scale", DEFAULT_SCALE);
                int edgeFactor = parameters.getInt("edge_factor", DEFAULT_EDGE_FACTOR);
                RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();
                long vertexCount = 1L << scale;
                long edgeCount = vertexCount * edgeFactor;
                Graph<LongValue, NullValue, NullValue> graph = new RMatGraph<>(env, rnd, vertexCount, edgeCount).setParallelism(little_parallelism).generate();
                if (directedAlgorithm) {
                    if (scale > 32) {
                        Graph<LongValue, NullValue, NullValue> newGraph = graph.run(new org.apache.flink.graph.asm.simple.directed.Simplify<LongValue, NullValue, NullValue>().setParallelism(little_parallelism));
                        gcc = newGraph.run(new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        acc = newGraph.run(new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        lcc = newGraph.run(new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<LongValue, NullValue, NullValue>().setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
                    } else {
                        Graph<IntValue, NullValue, NullValue> newGraph = graph.run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()).setParallelism(little_parallelism)).run(new org.apache.flink.graph.asm.simple.directed.Simplify<IntValue, NullValue, NullValue>().setParallelism(little_parallelism));
                        gcc = newGraph.run(new org.apache.flink.graph.library.clustering.directed.GlobalClusteringCoefficient<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        acc = newGraph.run(new org.apache.flink.graph.library.clustering.directed.AverageClusteringCoefficient<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        lcc = newGraph.run(new org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient<IntValue, NullValue, NullValue>().setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
                    }
                } else {
                    boolean clipAndFlip = parameters.getBoolean("clip_and_flip", DEFAULT_CLIP_AND_FLIP);
                    if (scale > 32) {
                        Graph<LongValue, NullValue, NullValue> newGraph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(clipAndFlip).setParallelism(little_parallelism));
                        gcc = newGraph.run(new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        acc = newGraph.run(new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        lcc = newGraph.run(new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<LongValue, NullValue, NullValue>().setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
                    } else {
                        Graph<IntValue, NullValue, NullValue> newGraph = graph.run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()).setParallelism(little_parallelism)).run(new org.apache.flink.graph.asm.simple.undirected.Simplify<IntValue, NullValue, NullValue>(clipAndFlip).setParallelism(little_parallelism));
                        gcc = newGraph.run(new org.apache.flink.graph.library.clustering.undirected.GlobalClusteringCoefficient<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        acc = newGraph.run(new org.apache.flink.graph.library.clustering.undirected.AverageClusteringCoefficient<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        lcc = newGraph.run(new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<IntValue, NullValue, NullValue>().setIncludeZeroDegreeVertices(false).setLittleParallelism(little_parallelism));
                    }
                }
            }
            break;
        default:
            throw new ProgramParametrizationException(getUsage("invalid input type"));
    }
    switch(parameters.get("output", "")) {
        case "print":
            if (directedAlgorithm) {
                for (Object e : lcc.collect()) {
                    org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient.Result result = (org.apache.flink.graph.library.clustering.directed.LocalClusteringCoefficient.Result) e;
                    System.out.println(result.toPrintableString());
                }
            } else {
                for (Object e : lcc.collect()) {
                    org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient.Result result = (org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient.Result) e;
                    System.out.println(result.toPrintableString());
                }
            }
            break;
        case "hash":
            System.out.println(DataSetUtils.checksumHashCode(lcc));
            break;
        case "csv":
            String filename = parameters.getRequired("output_filename");
            String lineDelimiter = StringEscapeUtils.unescapeJava(parameters.get("output_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
            String fieldDelimiter = StringEscapeUtils.unescapeJava(parameters.get("output_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));
            lcc.writeAsCsv(filename, lineDelimiter, fieldDelimiter);
            env.execute("Clustering Coefficient");
            break;
        default:
            throw new ProgramParametrizationException(getUsage("invalid output type"));
    }
    System.out.println(gcc.getResult());
    System.out.println(acc.getResult());
    JobExecutionResult result = env.getLastJobExecutionResult();
    NumberFormat nf = NumberFormat.getInstance();
    System.out.println("Execution runtime: " + nf.format(result.getNetRuntime()) + " ms");
}
Also used:
DataSet (org.apache.flink.api.java.DataSet)
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment)
ParameterTool (org.apache.flink.api.java.utils.ParameterTool)
JobExecutionResult (org.apache.flink.api.common.JobExecutionResult)
Graph (org.apache.flink.graph.Graph)
GraphAnalytic (org.apache.flink.graph.GraphAnalytic)
GraphCsvReader (org.apache.flink.graph.GraphCsvReader)
TranslateGraphIds (org.apache.flink.graph.asm.translate.TranslateGraphIds)
LongValueToUnsignedIntValue (org.apache.flink.graph.asm.translate.translators.LongValueToUnsignedIntValue)
RMatGraph (org.apache.flink.graph.generator.RMatGraph)
JDKRandomGeneratorFactory (org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory)
RandomGenerableFactory (org.apache.flink.graph.generator.random.RandomGenerableFactory)
ProgramParametrizationException (org.apache.flink.client.program.ProgramParametrizationException)
IntValue (org.apache.flink.types.IntValue)
LongValue (org.apache.flink.types.LongValue)
NullValue (org.apache.flink.types.NullValue)
StringValue (org.apache.flink.types.StringValue)
NumberFormat (java.text.NumberFormat)
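
The driver above is mostly parameter plumbing; the essential pattern is building a Graph and passing a clustering algorithm to run(). The following minimal sketch (not part of the Flink sources; the class name and the hard-coded triangle are hypothetical) applies the same undirected pipeline to an in-memory edge list:

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class LocalClusteringSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // A triangle: each vertex's neighbors are fully connected, so every
        // local clustering coefficient should be 1.0.
        Graph<LongValue, NullValue, NullValue> graph = Graph.fromDataSet(
            env.fromElements(
                new Edge<>(new LongValue(0), new LongValue(1), NullValue.getInstance()),
                new Edge<>(new LongValue(1), new LongValue(2), NullValue.getInstance()),
                new Edge<>(new LongValue(2), new LongValue(0), NullValue.getInstance())),
            env);
        // Symmetrize and deduplicate edges, as the driver does before running
        // the undirected algorithms.
        graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(false));
        graph.run(new org.apache.flink.graph.library.clustering.undirected.LocalClusteringCoefficient<LongValue, NullValue, NullValue>()).print();
    }
}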

Example 2 with StringValue

Use of org.apache.flink.types.StringValue in project flink by apache.

From the class JaccardIndex, method main:

public static void main(String[] args) throws Exception {
    // Set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
    env.getConfig().enableObjectReuse();
    ParameterTool parameters = ParameterTool.fromArgs(args);
    env.getConfig().setGlobalJobParameters(parameters);
    int little_parallelism = parameters.getInt("little_parallelism", PARALLELISM_DEFAULT);
    DataSet ji;
    switch(parameters.get("input", "")) {
        case "csv":
            {
                String lineDelimiter = StringEscapeUtils.unescapeJava(parameters.get("input_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
                String fieldDelimiter = StringEscapeUtils.unescapeJava(parameters.get("input_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));
                GraphCsvReader reader = Graph.fromCsvReader(parameters.getRequired("input_filename"), env).ignoreCommentsEdges("#").lineDelimiterEdges(lineDelimiter).fieldDelimiterEdges(fieldDelimiter);
                switch(parameters.get("type", "")) {
                    case "integer":
                        {
                            Graph<LongValue, NullValue, NullValue> graph = reader.keyType(LongValue.class);
                            if (parameters.getBoolean("simplify", false)) {
                                graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(false).setParallelism(little_parallelism));
                            }
                            ji = graph.run(new org.apache.flink.graph.library.similarity.JaccardIndex<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        }
                        break;
                    case "string":
                        {
                            Graph<StringValue, NullValue, NullValue> graph = reader.keyType(StringValue.class);
                            if (parameters.getBoolean("simplify", false)) {
                                graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<StringValue, NullValue, NullValue>(false).setParallelism(little_parallelism));
                            }
                            ji = graph.run(new org.apache.flink.graph.library.similarity.JaccardIndex<StringValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                        }
                        break;
                    default:
                        throw new ProgramParametrizationException(getUsage("invalid CSV type"));
                }
            }
            break;
        case "rmat":
            {
                int scale = parameters.getInt("scale", DEFAULT_SCALE);
                int edgeFactor = parameters.getInt("edge_factor", DEFAULT_EDGE_FACTOR);
                RandomGenerableFactory<JDKRandomGenerator> rnd = new JDKRandomGeneratorFactory();
                long vertexCount = 1L << scale;
                long edgeCount = vertexCount * edgeFactor;
                Graph<LongValue, NullValue, NullValue> graph = new RMatGraph<>(env, rnd, vertexCount, edgeCount).setParallelism(little_parallelism).generate();
                boolean clipAndFlip = parameters.getBoolean("clip_and_flip", DEFAULT_CLIP_AND_FLIP);
                if (scale > 32) {
                    ji = graph.run(new Simplify<LongValue, NullValue, NullValue>(clipAndFlip).setParallelism(little_parallelism)).run(new org.apache.flink.graph.library.similarity.JaccardIndex<LongValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                } else {
                    ji = graph.run(new TranslateGraphIds<LongValue, IntValue, NullValue, NullValue>(new LongValueToUnsignedIntValue()).setParallelism(little_parallelism)).run(new Simplify<IntValue, NullValue, NullValue>(clipAndFlip).setParallelism(little_parallelism)).run(new org.apache.flink.graph.library.similarity.JaccardIndex<IntValue, NullValue, NullValue>().setLittleParallelism(little_parallelism));
                }
            }
            break;
        default:
            throw new ProgramParametrizationException(getUsage("invalid input type"));
    }
    switch(parameters.get("output", "")) {
        case "print":
            System.out.println();
            for (Object e : ji.collect()) {
                Result result = (Result) e;
                System.out.println(result.toPrintableString());
            }
            break;
        case "hash":
            System.out.println();
            System.out.println(DataSetUtils.checksumHashCode(ji));
            break;
        case "csv":
            String filename = parameters.getRequired("output_filename");
            String lineDelimiter = StringEscapeUtils.unescapeJava(parameters.get("output_line_delimiter", CsvOutputFormat.DEFAULT_LINE_DELIMITER));
            String fieldDelimiter = StringEscapeUtils.unescapeJava(parameters.get("output_field_delimiter", CsvOutputFormat.DEFAULT_FIELD_DELIMITER));
            ji.writeAsCsv(filename, lineDelimiter, fieldDelimiter);
            env.execute("Jaccard Index");
            break;
        default:
            throw new ProgramParametrizationException(getUsage("invalid output type"));
    }
    JobExecutionResult result = env.getLastJobExecutionResult();
    NumberFormat nf = NumberFormat.getInstance();
    System.out.println();
    System.out.println("Execution runtime: " + nf.format(result.getNetRuntime()) + " ms");
}
Also used:
DataSet (org.apache.flink.api.java.DataSet)
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment)
ParameterTool (org.apache.flink.api.java.utils.ParameterTool)
JobExecutionResult (org.apache.flink.api.common.JobExecutionResult)
Graph (org.apache.flink.graph.Graph)
GraphCsvReader (org.apache.flink.graph.GraphCsvReader)
TranslateGraphIds (org.apache.flink.graph.asm.translate.TranslateGraphIds)
LongValueToUnsignedIntValue (org.apache.flink.graph.asm.translate.translators.LongValueToUnsignedIntValue)
RMatGraph (org.apache.flink.graph.generator.RMatGraph)
JDKRandomGeneratorFactory (org.apache.flink.graph.generator.random.JDKRandomGeneratorFactory)
RandomGenerableFactory (org.apache.flink.graph.generator.random.RandomGenerableFactory)
Result (org.apache.flink.graph.library.similarity.JaccardIndex.Result)
ProgramParametrizationException (org.apache.flink.client.program.ProgramParametrizationException)
IntValue (org.apache.flink.types.IntValue)
LongValue (org.apache.flink.types.LongValue)
NullValue (org.apache.flink.types.NullValue)
StringValue (org.apache.flink.types.StringValue)
NumberFormat (java.text.NumberFormat)
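
As in the clustering driver, the core of this example is a single run() call with the library JaccardIndex. A minimal hedged sketch (hypothetical class name and edge list, not from the Flink sources) on a tiny undirected graph:

import java.util.List;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Edge;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.library.similarity.JaccardIndex;
import org.apache.flink.types.LongValue;
import org.apache.flink.types.NullValue;

public class JaccardSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        // A two-edge path: vertices 0 and 2 share the single neighbor 1, so
        // their Jaccard score is |{1}| / |{1}| = 1.0.
        Graph<LongValue, NullValue, NullValue> graph = Graph.fromDataSet(
            env.fromElements(
                new Edge<>(new LongValue(0), new LongValue(1), NullValue.getInstance()),
                new Edge<>(new LongValue(1), new LongValue(2), NullValue.getInstance())),
            env);
        graph = graph.run(new org.apache.flink.graph.asm.simple.undirected.Simplify<LongValue, NullValue, NullValue>(false));
        List<JaccardIndex.Result<LongValue>> results =
            graph.run(new JaccardIndex<LongValue, NullValue, NullValue>()).collect();
        for (JaccardIndex.Result<LongValue> result : results) {
            System.out.println(result.toPrintableString());
        }
    }
}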

Example 3 with StringValue

Use of org.apache.flink.types.StringValue in project flink by apache.

From the class OutputEmitterTest, method testForcedRebalance:

@Test
public void testForcedRebalance() {
    // Test for IntValue
    int numChannels = 100;
    int toTaskIndex = numChannels * 6 / 7;
    int fromTaskIndex = toTaskIndex + numChannels;
    int extraRecords = numChannels / 3;
    int numRecords = 50000 + extraRecords;
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_FORCED_REBALANCE, fromTaskIndex);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    int[] hit = new int[numChannels];
    for (int i = 0; i < numRecords; i++) {
        IntValue k = new IntValue(i);
        Record rec = new Record(k);
        delegate.setInstance(rec);
        int[] chans = oe1.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    int cnt = 0;
    for (int i = 0; i < hit.length; i++) {
        if (toTaskIndex <= i || i < toTaskIndex + extraRecords - numChannels) {
            assertTrue(hit[i] == (numRecords / numChannels) + 1);
        } else {
            assertTrue(hit[i] == numRecords / numChannels);
        }
        cnt += hit[i];
    }
    assertTrue(cnt == numRecords);
    // Test for StringValue
    numChannels = 100;
    toTaskIndex = numChannels / 5;
    fromTaskIndex = toTaskIndex + 2 * numChannels;
    extraRecords = numChannels * 2 / 9;
    numRecords = 10000 + extraRecords;
    final ChannelSelector<SerializationDelegate<Record>> oe2 = new OutputEmitter<Record>(ShipStrategyType.PARTITION_FORCED_REBALANCE, fromTaskIndex);
    hit = new int[numChannels];
    for (int i = 0; i < numRecords; i++) {
        StringValue k = new StringValue(i + "");
        Record rec = new Record(k);
        delegate.setInstance(rec);
        int[] chans = oe2.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    cnt = 0;
    for (int i = 0; i < hit.length; i++) {
        if (toTaskIndex <= i && i < toTaskIndex + extraRecords) {
            assertTrue(hit[i] == (numRecords / numChannels) + 1);
        } else {
            assertTrue(hit[i] == numRecords / numChannels);
        }
        cnt += hit[i];
    }
    assertTrue(cnt == numRecords);
}
Also used:
OutputEmitter (org.apache.flink.runtime.operators.shipping.OutputEmitter)
SerializationDelegate (org.apache.flink.runtime.plugable.SerializationDelegate)
RecordSerializerFactory (org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory)
Record (org.apache.flink.types.Record)
IntValue (org.apache.flink.types.IntValue)
StringValue (org.apache.flink.types.StringValue)
Test (org.junit.Test)
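
The assertions above encode the round-robin contract of PARTITION_FORCED_REBALANCE: as the expected counts imply, the emitter starts at fromTaskIndex % numChannels and advances one channel per record, so the first numRecords % numChannels channels visited receive one extra record. A small plain-Java model of that expectation (an illustrative sketch, not Flink code):

public class RebalanceModel {

    // Models the per-channel hit counts the test expects from forced rebalancing.
    static int[] expectedHits(int numChannels, int fromTaskIndex, int numRecords) {
        int[] hits = new int[numChannels];
        int channel = fromTaskIndex % numChannels; // first channel written to
        for (int i = 0; i < numRecords; i++) {
            hits[channel]++;
            channel = (channel + 1) % numChannels; // advance round-robin
        }
        return hits;
    }

    public static void main(String[] args) {
        // The IntValue case above: 100 channels, fromTaskIndex 185, 50033 records.
        int[] hits = expectedHits(100, 185, 50033);
        System.out.println(hits[85]); // 501: one of the 33 channels with an extra record
        System.out.println(hits[20]); // 500
    }
}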

Example 4 with StringValue

Use of org.apache.flink.types.StringValue in project flink by apache.

From the class OutputEmitterTest, method testForward:

@Test
public void testForward() {
    // Test for IntValue
    @SuppressWarnings({ "unchecked", "rawtypes" }) final TypeComparator<Record> intComp = new RecordComparatorFactory(new int[] { 0 }, new Class[] { IntValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe1 = new OutputEmitter<Record>(ShipStrategyType.FORWARD, intComp);
    final SerializationDelegate<Record> delegate = new SerializationDelegate<Record>(new RecordSerializerFactory().getSerializer());
    int numChannels = 100;
    int numRecords = 50000 + numChannels / 2;
    int[] hit = new int[numChannels];
    for (int i = 0; i < numRecords; i++) {
        IntValue k = new IntValue(i);
        Record rec = new Record(k);
        delegate.setInstance(rec);
        int[] chans = oe1.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    assertTrue(hit[0] == numRecords);
    for (int i = 1; i < hit.length; i++) {
        assertTrue(hit[i] == 0);
    }
    // Test for StringValue
    @SuppressWarnings({ "unchecked", "rawtypes" }) final TypeComparator<Record> stringComp = new RecordComparatorFactory(new int[] { 0 }, new Class[] { StringValue.class }).createComparator();
    final ChannelSelector<SerializationDelegate<Record>> oe2 = new OutputEmitter<Record>(ShipStrategyType.FORWARD, stringComp);
    numChannels = 100;
    numRecords = 10000 + numChannels / 2;
    hit = new int[numChannels];
    for (int i = 0; i < numRecords; i++) {
        StringValue k = new StringValue(i + "");
        Record rec = new Record(k);
        delegate.setInstance(rec);
        int[] chans = oe2.selectChannels(delegate, hit.length);
        for (int chan : chans) {
            hit[chan]++;
        }
    }
    assertTrue(hit[0] == numRecords);
    for (int i = 1; i < hit.length; i++) {
        assertTrue(hit[i] == 0);
    }
}
Also used:
OutputEmitter (org.apache.flink.runtime.operators.shipping.OutputEmitter)
SerializationDelegate (org.apache.flink.runtime.plugable.SerializationDelegate)
RecordComparatorFactory (org.apache.flink.runtime.testutils.recordutils.RecordComparatorFactory)
RecordSerializerFactory (org.apache.flink.runtime.testutils.recordutils.RecordSerializerFactory)
Record (org.apache.flink.types.Record)
IntValue (org.apache.flink.types.IntValue)
StringValue (org.apache.flink.types.StringValue)
Test (org.junit.Test)
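
Both emitter tests wrap a single Value in a Record before handing it to the serialization delegate. For readers unfamiliar with the legacy Record API, here is a short sketch of writing and reading a StringValue field; the typed getField(int, Class) accessor is assumed from the old Record API:

import org.apache.flink.types.Record;
import org.apache.flink.types.StringValue;

public class RecordFieldSketch {
    public static void main(String[] args) {
        // new Record(Value) stores the value at field position 0, as in the tests above.
        Record rec = new Record(new StringValue("42"));
        // Read the field back with its concrete Value type (assumed accessor).
        StringValue key = rec.getField(0, StringValue.class);
        System.out.println(key.getValue()); // prints 42
    }
}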

Example 5 with StringValue

Use of org.apache.flink.types.StringValue in project flink by apache.

From the class SimpleStringUtilsTest, method testToLowerCaseConverting:

@Test
public void testToLowerCaseConverting() {
    StringValue testString = new StringValue("TEST");
    SimpleStringUtils.toLowerCase(testString);
    assertEquals(new StringValue("test"), testString);
}
Also used:
StringValue (org.apache.flink.types.StringValue)
Test (org.junit.Test)
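
The assertion works because SimpleStringUtils.toLowerCase mutates the StringValue in place rather than returning a new object. A short sketch of that behavior (assuming SimpleStringUtils resides in org.apache.flink.util):

import org.apache.flink.types.StringValue;
import org.apache.flink.util.SimpleStringUtils;

public class LowerCaseSketch {
    public static void main(String[] args) {
        StringValue s = new StringValue("Hello FLINK");
        SimpleStringUtils.toLowerCase(s); // modifies s in place, no new object allocated
        System.out.println(s.getValue()); // prints "hello flink"
    }
}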

Aggregations

StringValue (org.apache.flink.types.StringValue): 88 uses
Test (org.junit.Test): 61 uses
IntValue (org.apache.flink.types.IntValue): 35 uses
LongValue (org.apache.flink.types.LongValue): 21 uses
IOException (java.io.IOException): 17 uses
ArrayList (java.util.ArrayList): 15 uses
Record (org.apache.flink.types.Record): 13 uses
TupleTypeInfo (org.apache.flink.api.java.typeutils.TupleTypeInfo): 12 uses
ExecutionEnvironment (org.apache.flink.api.java.ExecutionEnvironment): 11 uses
Tuple2 (org.apache.flink.api.java.tuple.Tuple2): 11 uses
DoubleValue (org.apache.flink.types.DoubleValue): 11 uses
Value (org.apache.flink.types.Value): 10 uses
Tuple3 (org.apache.flink.api.java.tuple.Tuple3): 9 uses
ExecutionConfig (org.apache.flink.api.common.ExecutionConfig): 7 uses
Plan (org.apache.flink.api.common.Plan): 7 uses
Configuration (org.apache.flink.configuration.Configuration): 7 uses
FileInputSplit (org.apache.flink.core.fs.FileInputSplit): 7 uses
NoSuchElementException (java.util.NoSuchElementException): 6 uses
File (java.io.File): 5 uses
JobExecutionResult (org.apache.flink.api.common.JobExecutionResult): 5 uses