Example use of com.datastax.oss.sink.pulsar.PulsarRecordImpl in the DataStax pulsar-sink project: method should_extract_ttl_and_timestamp_from_json_and_use_as_ttl_and_timestamp_columns of class WriteTimestampAndTtlCCMIT.
/**
 * Verifies that fields extracted from a JSON payload can drive both the row's TTL and its write
 * timestamp, with the configured time units applied during conversion (MILLISECONDS for the ttl
 * source, MICROSECONDS for the timestamp source).
 */
@Test
void should_extract_ttl_and_timestamp_from_json_and_use_as_ttl_and_timestamp_columns() {
  // given: route ttlcol -> __ttl and timestampcol -> __timestamp, with explicit time units
  String ttlUnitKey = String.format("topic.mytopic.%s.types.ttlTimeUnit", keyspaceName);
  String timestampUnitKey = String.format("topic.mytopic.%s.types.timestampTimeUnit", keyspaceName);
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=value.bigint, doublecol=value.double, "
              + "__ttl = value.ttlcol, __timestamp = value.timestampcol",
          ImmutableMap.of(ttlUnitKey, "MILLISECONDS", timestampUnitKey, "MICROSECONDS")));
  String json =
      "{\"bigint\": 1234567, \"double\": 42.0, \"ttlcol\": 1000000, \"timestampcol\": 1000}";
  PulsarRecordImpl jsonRecord =
      new PulsarRecordImpl("persistent://tenant/namespace/mytopic", null, json, Schema.STRING);

  // when
  runTaskWithRecords(jsonRecord);

  // then: exactly one row, with ttl and writetime derived from the payload fields
  List<Row> rows =
      session
          .execute("SELECT bigintcol, doublecol, ttl(doublecol), writetime(doublecol) FROM types")
          .all();
  assertThat(rows.size()).isEqualTo(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(1234567L);
  assertThat(inserted.getDouble("doublecol")).isEqualTo(42.0);
  // 1,000,000 ms becomes a 1,000 s ttl; the 1,000 value feeds writetime directly
  assertTtl(inserted.getInt(2), 1000);
  assertThat(inserted.getLong(3)).isEqualTo(1000L);
}
Example use of com.datastax.oss.sink.pulsar.PulsarRecordImpl in the DataStax pulsar-sink project: method should_insert_record_with_timestamp_provided_via_mapping of class WriteTimestampAndTtlCCMIT.
/**
 * Test for KAF-46: a timestamp supplied through the mapping ({@code __timestamp =
 * value.timestamp}) becomes the row's writetime.
 */
@Test
void should_insert_record_with_timestamp_provided_via_mapping() {
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=value.bigint, doublecol=value.double, __timestamp = value.timestamp"));

  // Build an Avro-backed generic schema exposing the three mapped fields.
  RecordSchemaBuilder schemaBuilder =
      org.apache.pulsar.client.api.schema.SchemaBuilder.record("MyBean");
  schemaBuilder.field("bigint").type(SchemaType.INT64);
  schemaBuilder.field("double").type(SchemaType.DOUBLE);
  schemaBuilder.field("timestamp").type(SchemaType.INT64);
  Schema schema =
      org.apache.pulsar.client.api.Schema.generic(schemaBuilder.build(SchemaType.AVRO));

  GenericRecordImpl payload =
      new GenericRecordImpl()
          .put("bigint", 1234567L)
          .put("double", 42.0)
          .put("timestamp", 12314L);
  PulsarRecordImpl record =
      new PulsarRecordImpl("persistent://tenant/namespace/mytopic", null, payload, schema);
  runTaskWithRecords(record);

  // Verify that the record was inserted properly in the database.
  List<Row> rows =
      session.execute("SELECT bigintcol, doublecol, writetime(doublecol) FROM types").all();
  assertThat(rows.size()).isEqualTo(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(1234567L);
  assertThat(inserted.getDouble("doublecol")).isEqualTo(42.0);
  assertThat(inserted.getLong(2)).isEqualTo(12314L);
}
Example use of com.datastax.oss.sink.pulsar.PulsarRecordImpl in the DataStax pulsar-sink project: method should_extract_ttl_from_json_and_use_existing_column_as_ttl of class WriteTimestampAndTtlCCMIT.
/**
 * Verifies that a column already present in the mapping (doublecol) can also serve as the TTL
 * source via {@code __ttl = value.double}, with MILLISECONDS conversion applied.
 */
@Test
void should_extract_ttl_from_json_and_use_existing_column_as_ttl() {
  // given: the same payload field feeds both doublecol and __ttl
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=value.bigint, doublecol=value.double, __ttl = value.double",
          ImmutableMap.of(
              String.format("topic.mytopic.%s.types.ttlTimeUnit", keyspaceName),
              "MILLISECONDS")));

  // when
  String json = "{\"bigint\": 1234567, \"double\": 1000000.0}";
  PulsarRecordImpl jsonRecord =
      new PulsarRecordImpl("persistent://tenant/namespace/mytopic", null, json, Schema.STRING);
  runTaskWithRecords(jsonRecord);

  // then: the double value was written as-is AND converted to a ttl (1,000,000 ms -> 1,000 s)
  List<Row> rows = session.execute("SELECT bigintcol, doublecol, ttl(doublecol) FROM types").all();
  assertThat(rows.size()).isEqualTo(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(1234567L);
  assertThat(inserted.getDouble("doublecol")).isEqualTo(1000000.0);
  assertTtl(inserted.getInt(2), 1000);
}
Example use of com.datastax.oss.sink.pulsar.PulsarRecordImpl in the DataStax pulsar-sink project: method raw_udt_value_and_cherry_pick_from_json_bytearray of class JsonEndToEndCCMIT.
/**
 * Verifies that a JSON value delivered as a UTF-8 byte array can be written whole into a UDT
 * column while one of its members is simultaneously cherry-picked into a scalar column
 * (udtmem1 -> intcol), with the key feeding bigintcol.
 */
@Test
void raw_udt_value_and_cherry_pick_from_json_bytearray() {
  taskConfigs.add(makeConnectorProperties("bigintcol=key, udtcol=value, intcol=value.udtmem1"));

  byte[] jsonBytes = "{\"udtmem1\": 42, \"udtmem2\": \"the answer\"}".getBytes(UTF_8);
  PulsarRecordImpl record =
      new PulsarRecordImpl(
          "persistent://tenant/namespace/mytopic", "98761234", jsonBytes, Schema.STRING);
  runTaskWithRecords(record);

  // Verify that the record was inserted properly in the database.
  List<Row> rows = session.execute("SELECT bigintcol, udtcol, intcol FROM types").all();
  assertThat(rows.size()).isEqualTo(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(98761234L);

  // Rebuild the expected UDT definition and attach it so newValue() can build a comparable value.
  UserDefinedType expectedType =
      new UserDefinedTypeBuilder(keyspaceName, "myudt")
          .withField("udtmem1", DataTypes.INT)
          .withField("udtmem2", DataTypes.TEXT)
          .build();
  expectedType.attach(session.getContext());
  assertThat(inserted.getUdtValue("udtcol")).isEqualTo(expectedType.newValue(42, "the answer"));
  assertThat(inserted.getInt("intcol")).isEqualTo(42);
}
Example use of com.datastax.oss.sink.pulsar.PulsarRecordImpl in the DataStax pulsar-sink project: method json_key_struct_value of class JsonEndToEndCCMIT.
/**
 * Exercises a record whose key is a JSON string and whose value is a generic struct, with the
 * mapping pulling some columns from each side; asserts each column against the side it was
 * mapped from.
 */
@Test
void json_key_struct_value() {
  // Map various fields from the key and value to columns.
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=key.bigint, "
              + "booleancol=value.boolean, "
              + "doublecol=key.double, "
              + "floatcol=value.float, "
              + "intcol=key.int, "
              + "textcol=key.text "));

  // Use a Struct for the value.
  Long baseValue = 98761234L;
  GenericRecordImpl structValue =
      new GenericRecordImpl()
          .put("bigint", baseValue)
          .put("boolean", (baseValue.intValue() & 1) == 1)
          .put("double", (double) baseValue + 0.123)
          .put("float", baseValue.floatValue() + 0.987f)
          .put("int", baseValue.intValue())
          .put("text", baseValue.toString());

  // Use JSON for the key.
  Long baseKey = 1234567L;
  String jsonKey =
      String.format(
          "{\"bigint\": %d, "
              + "\"boolean\": %b, "
              + "\"double\": %f, "
              + "\"float\": %f, "
              + "\"int\": %d, "
              + "\"smallint\": %d, "
              + "\"text\": \"%s\", "
              + "\"tinyint\": %d}",
          baseKey,
          (baseKey.intValue() & 1) == 1,
          (double) baseKey + 0.123,
          baseKey.floatValue() + 0.987f,
          baseKey.intValue(),
          baseKey.shortValue(),
          baseKey.toString(),
          baseKey.byteValue());

  PulsarRecordImpl record =
      new PulsarRecordImpl(
          "persistent://tenant/namespace/mytopic", jsonKey, structValue, recordType);
  runTaskWithRecords(record);

  // Verify that the record was inserted properly in the database.
  List<Row> rows = session.execute("SELECT * FROM types").all();
  assertThat(rows.size()).isEqualTo(1);
  Row inserted = rows.get(0);
  // Key-sourced columns carry baseKey-derived values; value-sourced columns carry baseValue's.
  assertThat(inserted.getLong("bigintcol")).isEqualTo(baseKey);
  assertThat(inserted.getBoolean("booleancol")).isEqualTo((baseValue.intValue() & 1) == 1);
  assertThat(inserted.getDouble("doublecol")).isEqualTo((double) baseKey + 0.123);
  assertThat(inserted.getFloat("floatcol")).isEqualTo(baseValue.floatValue() + 0.987f);
  assertThat(inserted.getInt("intcol")).isEqualTo(baseKey.intValue());
  assertThat(inserted.getString("textcol")).isEqualTo(baseKey.toString());
}
Aggregations