Example usage of com.datastax.oss.sink.pulsar.PulsarRecordImpl from the pulsar-sink project by DataStax:
the method should_use_ttl_from_config_and_use_as_ttl of the class WriteTimestampAndTtlCCMIT.
@Test
void should_use_ttl_from_config_and_use_as_ttl() {
  // given: a mapping with no TTL column, but a per-topic TTL of 100 seconds in the connector config
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=value.bigint, doublecol=value.double",
          ImmutableMap.of(
              String.format("topic.mytopic.%s.%s.ttl", keyspaceName, "types"), "100")));

  // when: a single record flows through the sink task
  GenericRecordImpl payload =
      new GenericRecordImpl().put("bigint", 1234567L).put("double", 1000.0);
  runTaskWithRecords(
      new PulsarRecordImpl("persistent://tenant/namespace/mytopic", null, payload, recordType));

  // then: exactly one row exists and it carries the configured TTL
  List<Row> rows =
      session.execute("SELECT bigintcol, doublecol, ttl(doublecol) FROM types").all();
  assertThat(rows).hasSize(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(1234567L);
  assertThat(inserted.getDouble("doublecol")).isEqualTo(1000.0);
  assertTtl(inserted.getInt(2), 100);
}
Example usage of com.datastax.oss.sink.pulsar.PulsarRecordImpl from the pulsar-sink project by DataStax:
the method should_extract_write_timestamp_from_json_and_use_as_write_time_column of the class WriteTimestampAndTtlCCMIT.
/**
 * Test for KAF-46.
 *
 * <p>Verifies that a timestamp extracted from a JSON payload via the {@code __timestamp} mapping
 * is applied as the row's write time.
 */
@Test
void should_extract_write_timestamp_from_json_and_use_as_write_time_column() {
  // given: value.timestampcol is routed to the internal __timestamp column
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=value.bigint, doublecol=value.double, __timestamp = value.timestampcol"));

  // when: a raw JSON string payload is delivered with a STRING schema
  String json = "{\"bigint\": 1234567, \"double\": 42.0, \"timestampcol\": 1000}";
  runTaskWithRecords(
      new PulsarRecordImpl("persistent://tenant/namespace/mytopic", null, json, Schema.STRING));

  // then: writetime(doublecol) reflects the timestamp taken from the JSON field
  List<Row> rows =
      session.execute("SELECT bigintcol, doublecol, writetime(doublecol) FROM types").all();
  assertThat(rows).hasSize(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(1234567L);
  assertThat(inserted.getDouble("doublecol")).isEqualTo(42.0);
  assertThat(inserted.getLong(2)).isEqualTo(1000L);
}
Example usage of com.datastax.oss.sink.pulsar.PulsarRecordImpl from the pulsar-sink project by DataStax:
the method makeRecord of the class SimpleEndToEndSimulacronIT.
/**
 * Builds a sink {@link Record} wrapping a single-field {@code GenericRecordImpl} payload.
 *
 * @param partition source partition, stored as the partition id (ignored by the sink)
 * @param key record key; {@code null} is allowed and preserved, otherwise its string form is used
 * @param value value placed into the payload's {@code field1}
 * @param timestamp event timestamp attached to the record
 * @param offset stored as the record sequence (ignored by the sink)
 * @return the assembled record
 */
private Record<GenericRecord> makeRecord(
    int partition, Object key, String value, long timestamp, long offset) {
  String stringKey = key == null ? null : key.toString();
  PulsarRecordImpl rec =
      new PulsarRecordImpl(
          "persistent://tenant/namespace/mytopic",
          stringKey,
          new GenericRecordImpl().put("field1", value),
          recordType,
          timestamp);
  // partition id does not matter to the sink
  rec.setPartitionId(String.valueOf(partition));
  // record sequence does not matter to the sink
  rec.setRecordSequence(offset);
  return rec;
}
Example usage of com.datastax.oss.sink.pulsar.PulsarRecordImpl from the pulsar-sink project by DataStax:
the method should_insert_record_with_ttl_provided_via_mapping of the class WriteTimestampAndTtlCCMIT.
/**
 * Test for KAF-107.
 *
 * <p>Verifies that a TTL value carried inside the record payload, routed through the
 * {@code __ttl} mapping, is applied to the inserted row.
 */
@Test
void should_insert_record_with_ttl_provided_via_mapping() {
  // given: value.ttlcol is routed to the internal __ttl column
  taskConfigs.add(
      makeConnectorProperties(
          "bigintcol=value.bigint, doublecol=value.double, __ttl = value.ttlcol"));

  // build a generic AVRO schema that includes the TTL field
  RecordSchemaBuilder builder =
      org.apache.pulsar.client.api.schema.SchemaBuilder.record("MyBean");
  builder.field("bigint").type(SchemaType.INT64);
  builder.field("double").type(SchemaType.DOUBLE);
  builder.field("ttlcol").type(SchemaType.INT64);
  Schema schema = org.apache.pulsar.client.api.Schema.generic(builder.build(SchemaType.AVRO));

  // when: a record carrying the TTL in its payload is written
  Number ttlValue = 1_000_000L;
  GenericRecordImpl payload =
      new GenericRecordImpl()
          .put("bigint", 1234567L)
          .put("double", 42.0)
          .put("ttlcol", ttlValue.longValue());
  runTaskWithRecords(
      new PulsarRecordImpl(
          "persistent://tenant/namespace/mytopic", null, payload, schema, 153000987L));

  // then: the row was inserted with the mapped TTL
  List<Row> rows =
      session.execute("SELECT bigintcol, doublecol, ttl(doublecol) FROM types").all();
  assertThat(rows).hasSize(1);
  Row inserted = rows.get(0);
  assertThat(inserted.getLong("bigintcol")).isEqualTo(1234567L);
  assertThat(inserted.getDouble("doublecol")).isEqualTo(42.0);
  assertTtl(inserted.getInt(2), ttlValue);
}
Example usage of com.datastax.oss.sink.pulsar.PulsarRecordImpl from the pulsar-sink project by DataStax:
the method should_insert_successfully_with_correct_credentials of the class PlaintextAuthCCMIT.
/**
 * Verifies that the sink inserts successfully when valid plaintext credentials are supplied,
 * for each credential-property variant produced by {@code correctCredentialsProvider}.
 */
@ParameterizedTest(name = "[{index}] extras={0}")
@MethodSource("correctCredentialsProvider")
void should_insert_successfully_with_correct_credentials(Map<String, String> extras) {
  taskConfigs.add(makeConnectorProperties(extras));

  GenericRecordImpl payload = new GenericRecordImpl().put("bigint", 5725368L);
  runTaskWithRecords(
      new PulsarRecordImpl("persistent://tenant/namespace/mytopic", null, payload, recordType));

  // the record must have landed in the database
  List<Row> rows = session.execute("SELECT bigintcol FROM types").all();
  assertThat(rows).hasSize(1);
  assertThat(rows.get(0).getLong("bigintcol")).isEqualTo(5725368L);

  // the configured auth provider must have been resolved to PLAIN
  assertThat(task.getInstanceState().getConfig().getAuthenticatorConfig().getProvider())
      .isEqualTo(Provider.PLAIN);
}
Aggregations