
Example 1 with HashingFn

Use of org.apache.beam.sdk.io.common.HashingFn in project beam by apache.

From the class HIFIOWithEmbeddedCassandraTest, method testHIFReadForCassandra:

/**
   * Test to read data from an embedded Cassandra instance and verify that the data is read
   * successfully.
   * @throws Exception
   */
@Test
public void testHIFReadForCassandra() throws Exception {
    // The expected hash is computed once when the test data is inserted and hardcoded here.
    String expectedHashCode = "1b9780833cce000138b9afa25ba63486";
    Configuration conf = getConfiguration();
    PCollection<KV<Long, String>> cassandraData =
        p.apply(
            HadoopInputFormatIO.<Long, String>read()
                .withConfiguration(conf)
                .withValueTranslation(myValueTranslate));
    // Verify that the number of rows read from Cassandra matches the expected count.
    PAssert.thatSingleton(cassandraData.apply("Count", Count.<KV<Long, String>>globally()))
        .isEqualTo(TEST_DATA_ROW_COUNT);
    PCollection<String> textValues = cassandraData.apply(Values.<String>create());
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    p.run().waitUntilFinish();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) KV(org.apache.beam.sdk.values.KV) HashingFn(org.apache.beam.sdk.io.common.HashingFn) Test(org.junit.Test)
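
Note that the pipeline above relies on a value-translation function, myValueTranslate, that is defined elsewhere in the test class. HadoopInputFormatIO hands back whatever value type the underlying InputFormat produces, so a SimpleFunction is needed to turn each value into the String that gets hashed. The snippet below is a minimal, hypothetical sketch of such a function; the com.datastax.driver.core.Row value type and the "field0" column name are illustrative assumptions, not the Beam test's actual code.

import com.datastax.driver.core.Row;
import org.apache.beam.sdk.transforms.SimpleFunction;

// Hypothetical sketch of a value translation like myValueTranslate: maps the
// InputFormat's value type (assumed here to be a Cassandra driver Row) to a String.
// The column name "field0" is an illustrative assumption.
SimpleFunction<Row, String> myValueTranslate =
    new SimpleFunction<Row, String>() {
      @Override
      public String apply(Row row) {
        return row.getString("field0");
      }
    };

Passing this function to withValueTranslation(myValueTranslate) is what makes the read produce a PCollection<KV<Long, String>> rather than the InputFormat's raw value type.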

Example 2 with HashingFn

Use of org.apache.beam.sdk.io.common.HashingFn in project beam by apache.

From the class HIFIOCassandraIT, method testHIFReadForCassandraQuery:

/**
   * This test reads data from the Cassandra instance based on a query and verifies that the
   * data is read successfully.
   */
@Test
public void testHIFReadForCassandraQuery() {
    String expectedHashCode = "7bead6d6385c5f4dd0524720cd320b49";
    Long expectedNumRows = 1L;
    Configuration conf = getConfiguration(options);
    conf.set("cassandra.input.cql", "select * from " + CASSANDRA_KEYSPACE + "." + CASSANDRA_TABLE + " where token(y_id) > ? and token(y_id) <= ? " + "and field0 = 'user48:field0:431531'");
    PCollection<KV<Long, String>> cassandraData = pipeline.apply(HadoopInputFormatIO.<Long, String>read().withConfiguration(conf).withValueTranslation(myValueTranslate));
    PAssert.thatSingleton(cassandraData.apply("Count", Count.<KV<Long, String>>globally())).isEqualTo(expectedNumRows);
    PCollection<String> textValues = cassandraData.apply(Values.<String>create());
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    pipeline.run().waitUntilFinish();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) KV(org.apache.beam.sdk.values.KV) HashingFn(org.apache.beam.sdk.io.common.HashingFn) Test(org.junit.Test)
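
Both Cassandra tests obtain their Hadoop Configuration from a getConfiguration(...) helper that is not part of this excerpt. The following is a rough sketch of what such a Configuration could look like when reading through Cassandra's CqlInputFormat; the host, ports, and partitioner shown are placeholders and assumptions for illustration, not the integration test's real settings.

import org.apache.hadoop.conf.Configuration;

// Hypothetical sketch of the Configuration a helper like getConfiguration(options)
// might build for CqlInputFormat; every concrete value below is a placeholder.
Configuration conf = new Configuration();
conf.set("mapreduce.job.inputformat.class", "org.apache.cassandra.hadoop.cql3.CqlInputFormat");
conf.set("key.class", "java.lang.Long");
conf.set("value.class", "com.datastax.driver.core.Row");
conf.set("cassandra.input.thrift.address", "127.0.0.1"); // Cassandra host (placeholder)
conf.set("cassandra.input.thrift.port", "9160"); // Thrift port (placeholder)
conf.set("cassandra.input.partitioner.class", "Murmur3Partitioner"); // assumed partitioner
conf.set("cassandra.input.keyspace", CASSANDRA_KEYSPACE); // keyspace under test
conf.set("cassandra.input.columnfamily", CASSANDRA_TABLE); // table under test

With key.class and value.class set this way, the Long keys come through as-is, while the Row values still need the value translation sketched earlier.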

Example 3 with HashingFn

Use of org.apache.beam.sdk.io.common.HashingFn in project beam by apache.

From the class HIFIOWithElasticTest, method testHifIOWithElasticQuery:

/**
   * Test to read data from an embedded Elasticsearch instance based on a query and verify that
   * the data is read successfully.
   */
@Test
public void testHifIOWithElasticQuery() {
    long expectedRowCount = 1L;
    String expectedHashCode = "cfbf3e5c993d44e57535a114e25f782d";
    Configuration conf = getConfiguration();
    String fieldValue = ELASTIC_TYPE_ID_PREFIX + "2";
    String query =
        "{" + "  \"query\": {" + "  \"match\" : {" + "    \"id\" : {"
            + "      \"query\" : \"" + fieldValue + "\","
            + "      \"type\" : \"boolean\""
            + "    }" + "  }" + "  }" + "}";
    conf.set(ConfigurationOptions.ES_QUERY, query);
    PCollection<KV<Text, LinkedMapWritable>> esData =
        pipeline.apply(
            HadoopInputFormatIO.<Text, LinkedMapWritable>read().withConfiguration(conf));
    PCollection<Long> count = esData.apply(Count.<KV<Text, LinkedMapWritable>>globally());
    // Verify that the count of objects fetched using HIFInputFormat IO is correct.
    PAssert.thatSingleton(count).isEqualTo(expectedRowCount);
    PCollection<LinkedMapWritable> values = esData.apply(Values.<LinkedMapWritable>create());
    PCollection<String> textValues = values.apply(transformFunc);
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    pipeline.run().waitUntilFinish();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) LinkedMapWritable(org.elasticsearch.hadoop.mr.LinkedMapWritable) Text(org.apache.hadoop.io.Text) KV(org.apache.beam.sdk.values.KV) HashingFn(org.apache.beam.sdk.io.common.HashingFn) Test(org.junit.Test)
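
The Elasticsearch tests pass each LinkedMapWritable through a transformFunc before hashing, and that transform is also defined outside this excerpt. Since values.apply(transformFunc) expects a PTransform, a MapElements built from a SimpleFunction is the natural shape for it; the sketch below is a hypothetical reconstruction that simply concatenates a couple of document fields, and the field names "id" and "scientist" are assumptions for illustration only.

import org.apache.beam.sdk.transforms.MapElements;
import org.apache.beam.sdk.transforms.SimpleFunction;
import org.apache.hadoop.io.Text;
import org.elasticsearch.hadoop.mr.LinkedMapWritable;

// Hypothetical sketch of transformFunc: a MapElements transform that flattens each
// Elasticsearch document (a LinkedMapWritable of field name -> value) into one String.
// The field names used here are illustrative assumptions.
MapElements<LinkedMapWritable, String> transformFunc =
    MapElements.via(
        new SimpleFunction<LinkedMapWritable, String>() {
          @Override
          public String apply(LinkedMapWritable document) {
            return document.get(new Text("id")) + "|" + document.get(new Text("scientist"));
          }
        });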

Example 4 with HashingFn

Use of org.apache.beam.sdk.io.common.HashingFn in project beam by apache.

From the class HIFIOElasticIT, method testHifIOWithElastic:

/**
   * This test reads data from the Elasticsearch instance and verifies whether data is read
   * successfully.
   */
@Test
public void testHifIOWithElastic() throws SecurityException, IOException {
    // The expected hash is computed once when the test data is inserted and hardcoded here.
    final long expectedRowCount = 1000L;
    String expectedHashCode = "42e254c8689050ed0a617ff5e80ea392";
    Configuration conf = getConfiguration(options);
    PCollection<KV<Text, LinkedMapWritable>> esData =
        pipeline.apply(
            HadoopInputFormatIO.<Text, LinkedMapWritable>read().withConfiguration(conf));
    // Verify that the count of objects fetched using HIFInputFormat IO is correct.
    PCollection<Long> count = esData.apply(Count.<KV<Text, LinkedMapWritable>>globally());
    PAssert.thatSingleton(count).isEqualTo(expectedRowCount);
    PCollection<LinkedMapWritable> values = esData.apply(Values.<LinkedMapWritable>create());
    PCollection<String> textValues = values.apply(transformFunc);
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    pipeline.run().waitUntilFinish();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) LinkedMapWritable(org.elasticsearch.hadoop.mr.LinkedMapWritable) Text(org.apache.hadoop.io.Text) KV(org.apache.beam.sdk.values.KV) HashingFn(org.apache.beam.sdk.io.common.HashingFn) Test(org.junit.Test)
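
Here again, getConfiguration(options) is a helper defined elsewhere; for Elasticsearch it has to point the elasticsearch-hadoop EsInputFormat at the cluster. The sketch below is a hypothetical reconstruction; the node address, port, and index/type resource are placeholders that would normally come from the pipeline options.

import org.apache.hadoop.conf.Configuration;
import org.elasticsearch.hadoop.cfg.ConfigurationOptions;

// Hypothetical sketch of the Configuration a helper like getConfiguration(options)
// might build for EsInputFormat; the concrete values are placeholders.
Configuration conf = new Configuration();
conf.set(ConfigurationOptions.ES_NODES, "10.0.0.1"); // Elasticsearch node (placeholder)
conf.set(ConfigurationOptions.ES_PORT, "9200"); // HTTP port (placeholder)
conf.set(ConfigurationOptions.ES_RESOURCE, "test_data/doc"); // index/type to read (placeholder)
conf.set("mapreduce.job.inputformat.class", "org.elasticsearch.hadoop.mr.EsInputFormat");
conf.set("key.class", "org.apache.hadoop.io.Text");
conf.set("value.class", "org.elasticsearch.hadoop.mr.LinkedMapWritable");

The query-based tests then layer ConfigurationOptions.ES_QUERY on top of this same Configuration, as their test bodies show.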

Example 5 with HashingFn

Use of org.apache.beam.sdk.io.common.HashingFn in project beam by apache.

From the class HIFIOElasticIT, method testHifIOWithElasticQuery:

/**
   * This test reads data from the Elasticsearch instance based on a query and verifies that
   * the data is read successfully.
   */
@Test
public void testHifIOWithElasticQuery() {
    String expectedHashCode = "d7a7e4e42c2ca7b83ef7c1ad1ebce000";
    Long expectedRecordsCount = 1L;
    Configuration conf = getConfiguration(options);
    String query =
        "{" + "  \"query\": {" + "  \"match\" : {" + "    \"Title\" : {"
            + "      \"query\" : \"Title9\","
            + "      \"type\" : \"boolean\""
            + "    }" + "  }" + "  }" + "}";
    conf.set(ConfigurationOptions.ES_QUERY, query);
    PCollection<KV<Text, LinkedMapWritable>> esData =
        pipeline.apply(
            HadoopInputFormatIO.<Text, LinkedMapWritable>read().withConfiguration(conf));
    PCollection<Long> count = esData.apply(Count.<KV<Text, LinkedMapWritable>>globally());
    // Verify that the count of objects fetched using HIFInputFormat IO is correct.
    PAssert.thatSingleton(count).isEqualTo(expectedRecordsCount);
    PCollection<LinkedMapWritable> values = esData.apply(Values.<LinkedMapWritable>create());
    PCollection<String> textValues = values.apply(transformFunc);
    // Verify the output values using checksum comparison.
    PCollection<String> consolidatedHashcode =
        textValues.apply(Combine.globally(new HashingFn()).withoutDefaults());
    PAssert.that(consolidatedHashcode).containsInAnyOrder(expectedHashCode);
    pipeline.run().waitUntilFinish();
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) LinkedMapWritable(org.elasticsearch.hadoop.mr.LinkedMapWritable) Text(org.apache.hadoop.io.Text) KV(org.apache.beam.sdk.values.KV) HashingFn(org.apache.beam.sdk.io.common.HashingFn) Test(org.junit.Test)
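
All five examples share the same verification pattern: Combine.globally(new HashingFn()).withoutDefaults() collapses the PCollection<String> into a single order-insensitive checksum, which PAssert compares against a hash recorded when the test data was loaded. The class below is a hypothetical sketch of how such a combiner can be built with Guava's hashing utilities; it only illustrates the idea and is not the source of org.apache.beam.sdk.io.common.HashingFn.

import com.google.common.hash.HashCode;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
import org.apache.beam.sdk.transforms.Combine;

// Hypothetical sketch of an order-insensitive checksum combiner in the spirit of HashingFn.
// A real pipeline would also need a Coder registered for the List<HashCode> accumulator.
public class ChecksumFn extends Combine.CombineFn<String, List<HashCode>, String> {

  @Override
  public List<HashCode> createAccumulator() {
    return new ArrayList<>();
  }

  @Override
  public List<HashCode> addInput(List<HashCode> accum, String input) {
    // Hash each element independently so the merge step can stay order-insensitive.
    accum.add(Hashing.murmur3_128().hashString(input, StandardCharsets.UTF_8));
    return accum;
  }

  @Override
  public List<HashCode> mergeAccumulators(Iterable<List<HashCode>> accums) {
    List<HashCode> merged = new ArrayList<>();
    for (List<HashCode> accum : accums) {
      merged.addAll(accum);
    }
    return merged;
  }

  @Override
  public String extractOutput(List<HashCode> accum) {
    // combineUnordered makes the checksum independent of element order and of how
    // the runner bundled the data; an empty input yields an empty marker string.
    return accum.isEmpty() ? "" : Hashing.combineUnordered(accum).toString();
  }
}

Because the per-element hashes are merged in an unordered fashion, the final checksum is deterministic regardless of parallelism, and withoutDefaults() keeps the combine from emitting a value at all when the input collection is empty.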

Aggregations

HashingFn (org.apache.beam.sdk.io.common.HashingFn): 8
KV (org.apache.beam.sdk.values.KV): 8
Configuration (org.apache.hadoop.conf.Configuration): 8
Test (org.junit.Test): 8
Text (org.apache.hadoop.io.Text): 4
LinkedMapWritable (org.elasticsearch.hadoop.mr.LinkedMapWritable): 4