Use of io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer in project kafka-streams-examples by confluentinc.
The class KafkaMusicExampleDriver, method main.
public static void main(String[] args) throws Exception {
  final String bootstrapServers = args.length > 0 ? args[0] : "localhost:9092";
  final String schemaRegistryUrl = args.length > 1 ? args[1] : "http://localhost:8081";
  System.out.println("Connecting to Kafka cluster via bootstrap servers " + bootstrapServers);
  System.out.println("Connecting to Confluent schema registry at " + schemaRegistryUrl);
  // Read the comma-delimited file of songs into a List
  final List<Song> songs = new ArrayList<>();
  final String SONGFILENAME = "song_source.csv";
  final InputStream inputStream = KafkaMusicExample.class.getClassLoader().getResourceAsStream(SONGFILENAME);
  final InputStreamReader streamReader = new InputStreamReader(inputStream, UTF_8);
  try (final BufferedReader br = new BufferedReader(streamReader)) {
    String line = null;
    while ((line = br.readLine()) != null) {
      final String[] values = line.split(",");
      final Song newSong = new Song(Long.parseLong(values[0]), values[1], values[2], values[3], values[4]);
      songs.add(newSong);
    }
  }
  final Properties props = new Properties();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  final Map<String, String> serdeConfig = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
  final SpecificAvroSerializer<PlayEvent> playEventSerializer = new SpecificAvroSerializer<>();
  playEventSerializer.configure(serdeConfig, false);
  final SpecificAvroSerializer<Song> songSerializer = new SpecificAvroSerializer<>();
  songSerializer.configure(serdeConfig, false);
  final KafkaProducer<String, PlayEvent> playEventProducer = new KafkaProducer<>(props, Serdes.String().serializer(), playEventSerializer);
  final KafkaProducer<Long, Song> songProducer = new KafkaProducer<>(props, new LongSerializer(), songSerializer);
  songs.forEach(song -> {
    System.out.println("Writing song information for '" + song.getName() + "' to input topic " + KafkaMusicExample.SONG_FEED);
    songProducer.send(new ProducerRecord<>(KafkaMusicExample.SONG_FEED, song.getId(), song));
  });
  songProducer.close();
  final long duration = 60 * 1000L;
  final Random random = new Random();
  // send a play event every 100 milliseconds
  while (true) {
    final Song song = songs.get(random.nextInt(songs.size()));
    System.out.println("Writing play event for song " + song.getName() + " to input topic " + KafkaMusicExample.PLAY_EVENTS);
    playEventProducer.send(new ProducerRecord<>(KafkaMusicExample.PLAY_EVENTS, "uk", new PlayEvent(song.getId(), duration)));
    Thread.sleep(100L);
  }
}
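The records written by this driver can be read back with the matching deserializer from the same package. Below is a minimal consumer sketch that is not part of the original example: the class name SongFeedReader and the consumer group id are illustrative, and it assumes the same topic constant and local endpoints as the driver above.

import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroDeserializer;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.LongDeserializer;
import java.time.Duration;
import java.util.Collections;
import java.util.Map;
import java.util.Properties;

public class SongFeedReader { // illustrative name, not part of the example

  public static void main(final String[] args) {
    final Properties props = new Properties();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    props.put(ConsumerConfig.GROUP_ID_CONFIG, "song-feed-reader"); // illustrative group id
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    // The deserializer is configured exactly like the serializers above:
    // a single entry pointing at the schema registry, with isKey = false.
    final Map<String, String> serdeConfig = Collections.singletonMap(
        AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    final SpecificAvroDeserializer<Song> songDeserializer = new SpecificAvroDeserializer<>();
    songDeserializer.configure(serdeConfig, false);
    try (final KafkaConsumer<Long, Song> consumer =
             new KafkaConsumer<>(props, new LongDeserializer(), songDeserializer)) {
      consumer.subscribe(Collections.singletonList(KafkaMusicExample.SONG_FEED));
      final ConsumerRecords<Long, Song> records = consumer.poll(Duration.ofSeconds(5));
      for (final ConsumerRecord<Long, Song> record : records) {
        System.out.println(record.key() + " -> " + record.value().getName());
      }
    }
  }
}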
Use of io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer in project kafka-streams-examples by confluentinc.
The class KafkaMusicExampleTest, method shouldCreateChartsAndAccessThemViaInteractiveQueries.
@Test
public void shouldCreateChartsAndAccessThemViaInteractiveQueries() throws Exception {
  final Properties props = new Properties();
  props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
  final Map<String, String> serdeConfig = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());
  final SpecificAvroSerializer<PlayEvent> playEventSerializer = new SpecificAvroSerializer<>();
  playEventSerializer.configure(serdeConfig, false);
  final SpecificAvroSerializer<Song> songSerializer = new SpecificAvroSerializer<>();
  songSerializer.configure(serdeConfig, false);
  final KafkaProducer<String, PlayEvent> playEventProducer = new KafkaProducer<>(props, Serdes.String().serializer(), playEventSerializer);
  final KafkaProducer<Long, Song> songProducer = new KafkaProducer<>(props, new LongSerializer(), songSerializer);
  final List<Song> songs = Arrays.asList(
      new Song(1L, "Fresh Fruit For Rotting Vegetables", "Dead Kennedys", "Chemical Warfare", "Punk"),
      new Song(2L, "We Are the League", "Anti-Nowhere League", "Animal", "Punk"),
      new Song(3L, "Live In A Dive", "Subhumans", "All Gone Dead", "Punk"),
      new Song(4L, "PSI", "Wheres The Pope?", "Fear Of God", "Punk"),
      new Song(5L, "Totally Exploited", "The Exploited", "Punks Not Dead", "Punk"),
      new Song(6L, "The Audacity Of Hype", "Jello Biafra And The Guantanamo School Of Medicine", "Three Strikes", "Punk"),
      new Song(7L, "Licensed to Ill", "The Beastie Boys", "Fight For Your Right", "Hip Hop"),
      new Song(8L, "De La Soul Is Dead", "De La Soul", "Oodles Of O's", "Hip Hop"),
      new Song(9L, "Straight Outta Compton", "N.W.A", "Gangsta Gangsta", "Hip Hop"),
      new Song(10L, "Fear Of A Black Planet", "Public Enemy", "911 Is A Joke", "Hip Hop"),
      new Song(11L, "Curtain Call - The Hits", "Eminem", "Fack", "Hip Hop"),
      new Song(12L, "The Calling", "Hilltop Hoods", "The Calling", "Hip Hop"));
  songs.forEach(song -> songProducer.send(new ProducerRecord<Long, Song>(KafkaMusicExample.SONG_FEED, song.getId(), song)));
  songProducer.flush();
  songProducer.close();
  // create the play events we can use for charting
  sendPlayEvents(6, songs.get(0), playEventProducer);
  sendPlayEvents(5, songs.get(1), playEventProducer);
  sendPlayEvents(4, songs.get(2), playEventProducer);
  sendPlayEvents(3, songs.get(3), playEventProducer);
  sendPlayEvents(2, songs.get(4), playEventProducer);
  sendPlayEvents(1, songs.get(5), playEventProducer);
  sendPlayEvents(6, songs.get(6), playEventProducer);
  sendPlayEvents(5, songs.get(7), playEventProducer);
  sendPlayEvents(4, songs.get(8), playEventProducer);
  sendPlayEvents(3, songs.get(9), playEventProducer);
  sendPlayEvents(2, songs.get(10), playEventProducer);
  sendPlayEvents(1, songs.get(11), playEventProducer);
  playEventProducer.close();
  streams.start();
  // wait until the StreamsMetadata is available as this indicates that
  // KafkaStreams initialization has occurred
  TestUtils.waitForCondition(
      () -> !StreamsMetadata.NOT_AVAILABLE.equals(streams.allMetadataForStore(KafkaMusicExample.TOP_FIVE_SONGS_STORE)),
      MAX_WAIT_MS,
      "StreamsMetadata should be available");
  final String baseUrl = "http://localhost:" + appServerPort + "/kafka-music";
  final Client client = ClientBuilder.newClient();
  // Wait until the all-songs state store has some data in it
  TestUtils.waitForCondition(() -> {
    final ReadOnlyKeyValueStore<Long, Song> songsStore;
    try {
      songsStore = streams.store(KafkaMusicExample.ALL_SONGS, QueryableStoreTypes.<Long, Song>keyValueStore());
      return songsStore.all().hasNext();
    } catch (Exception e) {
      return false;
    }
  }, MAX_WAIT_MS, KafkaMusicExample.ALL_SONGS + " should be non-empty");
  final IntFunction<SongPlayCountBean> intFunction = index -> {
    final Song song = songs.get(index);
    return songCountPlayBean(song, 6L - (index % 6));
  };
  // Verify that the charts are as expected
  verifyChart(baseUrl + "/charts/genre/punk", client,
      IntStream.range(0, 5).mapToObj(intFunction).collect(Collectors.toList()));
  verifyChart(baseUrl + "/charts/genre/hip hop", client,
      IntStream.range(6, 11).mapToObj(intFunction).collect(Collectors.toList()));
  verifyChart(baseUrl + "/charts/top-five", client,
      Arrays.asList(
          songCountPlayBean(songs.get(0), 6L),
          songCountPlayBean(songs.get(6), 6L),
          songCountPlayBean(songs.get(1), 5L),
          songCountPlayBean(songs.get(7), 5L),
          songCountPlayBean(songs.get(2), 4L)));
}
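The Streams application queried by this test applies the same schema-registry configuration to SpecificAvroSerde instances rather than bare serializers. Below is a sketch of that wiring only, assuming the topic constants used above; the chart-building aggregations of the real KafkaMusicExample topology are omitted, and the class name is illustrative.

import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import java.util.Collections;
import java.util.Map;

public class AvroSerdeWiring { // illustrative name, not part of the example

  public static StreamsBuilder buildTopology(final String schemaRegistryUrl) {
    final Map<String, String> serdeConfig = Collections.singletonMap(
        AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    // A serde bundles a SpecificAvroSerializer and a SpecificAvroDeserializer;
    // configure(..., false) marks it as a value serde rather than a key serde.
    final SpecificAvroSerde<PlayEvent> playEventSerde = new SpecificAvroSerde<>();
    playEventSerde.configure(serdeConfig, false);
    final SpecificAvroSerde<Song> songSerde = new SpecificAvroSerde<>();
    songSerde.configure(serdeConfig, false);
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream(KafkaMusicExample.PLAY_EVENTS, Consumed.with(Serdes.String(), playEventSerde));
    builder.table(KafkaMusicExample.SONG_FEED, Consumed.with(Serdes.Long(), songSerde));
    return builder;
  }
}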
Use of io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer in project kafka-streams-examples by confluentinc.
The class SessionWindowsExampleTest, method shouldCountPlayEventsBySession.
@Test
public void shouldCountPlayEventsBySession() throws Exception {
  final Map<String, String> serdeConfig = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, CLUSTER.schemaRegistryUrl());
  final SpecificAvroSerializer<PlayEvent> playEventSerializer = new SpecificAvroSerializer<>();
  playEventSerializer.configure(serdeConfig, false);
  final Properties producerProperties = new Properties();
  producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
  final KafkaProducer<String, PlayEvent> playEventProducer = new KafkaProducer<>(producerProperties, Serdes.String().serializer(), playEventSerializer);
  final Properties consumerProps = new Properties();
  consumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, CLUSTER.bootstrapServers());
  consumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "session-windows-consumer");
  consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
  consumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, Serdes.String().deserializer().getClass());
  consumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, Serdes.Long().deserializer().getClass());
  final long start = System.currentTimeMillis();
  final String userId = "erica";
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start, userId, new PlayEvent(1L, 10L)));
  final List<KeyValue<String, Long>> firstSession = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerProps, SessionWindowsExample.PLAY_EVENTS_PER_SESSION, 1);
  // should have a session for erica with start and end time the same
  assertThat(firstSession.get(0), equalTo(KeyValue.pair(userId + "@" + start + "->" + start, 1L)));
  // also look in the store to find the same session
  final ReadOnlySessionStore<String, Long> playEventsPerSession = streams.store(SessionWindowsExample.PLAY_EVENTS_PER_SESSION, QueryableStoreTypes.<String, Long>sessionStore());
  final KeyValue<Windowed<String>, Long> next = fetchSessionsFromLocalStore(userId, playEventsPerSession).get(0);
  assertThat(next.key, equalTo(new Windowed<>(userId, new SessionWindow(start, start))));
  assertThat(next.value, equalTo(1L));
  // send another event that is after the inactivity gap, so we have 2 independent sessions
  final long secondSessionStart = start + SessionWindowsExample.INACTIVITY_GAP + 1;
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, secondSessionStart, userId, new PlayEvent(2L, 10L)));
  final List<KeyValue<String, Long>> secondSession = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerProps, SessionWindowsExample.PLAY_EVENTS_PER_SESSION, 1);
  // should have created a new session
  assertThat(secondSession.get(0), equalTo(KeyValue.pair(userId + "@" + secondSessionStart + "->" + secondSessionStart, 1L)));
  // should now have 2 active sessions in the store
  final List<KeyValue<Windowed<String>, Long>> results = fetchSessionsFromLocalStore(userId, playEventsPerSession);
  assertThat(results, equalTo(Arrays.asList(
      KeyValue.pair(new Windowed<>(userId, new SessionWindow(start, start)), 1L),
      KeyValue.pair(new Windowed<>(userId, new SessionWindow(secondSessionStart, secondSessionStart)), 1L))));
  // create an event between the two sessions to demonstrate merging
  final long mergeTime = start + SessionWindowsExample.INACTIVITY_GAP / 2;
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, mergeTime, userId, new PlayEvent(3L, 10L)));
  playEventProducer.close();
  final List<KeyValue<String, Long>> merged = IntegrationTestUtils.waitUntilMinKeyValueRecordsReceived(consumerProps, SessionWindowsExample.PLAY_EVENTS_PER_SESSION, 3);
  // should have merged all sessions into one and sent tombstones for the sessions that were merged
  assertThat(merged, equalTo(Arrays.asList(
      KeyValue.pair(userId + "@" + start + "->" + start, null),
      KeyValue.pair(userId + "@" + secondSessionStart + "->" + secondSessionStart, null),
      KeyValue.pair(userId + "@" + start + "->" + secondSessionStart, 3L))));
  // should only have the merged session in the store
  final List<KeyValue<Windowed<String>, Long>> mergedResults = fetchSessionsFromLocalStore(userId, playEventsPerSession);
  assertThat(mergedResults, equalTo(Collections.singletonList(
      KeyValue.pair(new Windowed<>(userId, new SessionWindow(start, secondSessionStart)), 3L))));
}
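The fetchSessionsFromLocalStore helper is referenced above but not included in this excerpt. A plausible reconstruction is shown below, assuming it simply drains the session store's iterator for the given key; the actual helper in the test class may differ.

import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.kstream.Windowed;
import org.apache.kafka.streams.state.KeyValueIterator;
import org.apache.kafka.streams.state.ReadOnlySessionStore;

// Hypothetical sketch of the helper used above: collects every session held
// locally for the given key, in the order the store's iterator returns them.
private static List<KeyValue<Windowed<String>, Long>> fetchSessionsFromLocalStore(
    final String userId, final ReadOnlySessionStore<String, Long> store) {
  final List<KeyValue<Windowed<String>, Long>> results = new ArrayList<>();
  try (final KeyValueIterator<Windowed<String>, Long> iterator = store.fetch(userId)) {
    iterator.forEachRemaining(results::add);
  }
  return results;
}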
Use of io.confluent.kafka.streams.serdes.avro.SpecificAvroSerializer in project kafka-streams-examples by confluentinc.
The class SessionWindowsExampleDriver, method producePlayEvents.
private static void producePlayEvents(final String bootstrapServers, final String schemaRegistryUrl) {
  final SpecificAvroSerializer<PlayEvent> playEventSerializer = new SpecificAvroSerializer<>();
  final Map<String, String> serdeConfig = Collections.singletonMap(AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
  playEventSerializer.configure(serdeConfig, false);
  final Properties producerProperties = new Properties();
  producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
  final KafkaProducer<String, PlayEvent> playEventProducer = new KafkaProducer<>(producerProperties, Serdes.String().serializer(), playEventSerializer);
  final long start = System.currentTimeMillis();
  final long billEventTime = start + SessionWindowsExample.INACTIVITY_GAP / 10;
  // create three sessions with different times
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start, "jo", new PlayEvent(1L, 10L)));
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, billEventTime, "bill", new PlayEvent(2L, 10L)));
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + SessionWindowsExample.INACTIVITY_GAP / 5, "sarah", new PlayEvent(2L, 10L)));
  // event for jo that is outside the inactivity gap, so it will create a new session
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + SessionWindowsExample.INACTIVITY_GAP + 1, "jo", new PlayEvent(1L, 10L)));
  // extend the current session for bill
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + SessionWindowsExample.INACTIVITY_GAP, "bill", new PlayEvent(2L, 10L)));
  // new session for sarah
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + 2 * SessionWindowsExample.INACTIVITY_GAP, "sarah", new PlayEvent(2L, 10L)));
  // send an earlier, out-of-order event for jo that will merge the 2 previous sessions
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + SessionWindowsExample.INACTIVITY_GAP / 2, "jo", new PlayEvent(1L, 10L)));
  // new session for bill
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + 3 * SessionWindowsExample.INACTIVITY_GAP, "bill", new PlayEvent(2L, 10L)));
  // extend the second session for sarah
  playEventProducer.send(new ProducerRecord<>(SessionWindowsExample.PLAY_EVENTS, null, start + 2 * SessionWindowsExample.INACTIVITY_GAP + SessionWindowsExample.INACTIVITY_GAP / 5, "sarah", new PlayEvent(2L, 10L)));
  playEventProducer.close();
}
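For context, these events feed a session-windowed count. Below is a minimal sketch of such a topology, assuming the constants from SessionWindowsExample; it is not the example's actual code (which also formats the windowed results for the output topic), the class name is illustrative, and SessionWindows.with is the pre-3.0 Kafka Streams API (newer versions use ofInactivityGapWithNoGrace).

import io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig;
import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.utils.Bytes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Grouped;
import org.apache.kafka.streams.kstream.Materialized;
import org.apache.kafka.streams.kstream.SessionWindows;
import org.apache.kafka.streams.state.SessionStore;
import java.time.Duration;
import java.util.Collections;
import java.util.Map;

public class SessionCountSketch { // illustrative name, not part of the example

  public static StreamsBuilder buildTopology(final String schemaRegistryUrl) {
    final Map<String, String> serdeConfig = Collections.singletonMap(
        AbstractKafkaAvroSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl);
    final SpecificAvroSerde<PlayEvent> playEventSerde = new SpecificAvroSerde<>();
    playEventSerde.configure(serdeConfig, false);
    final StreamsBuilder builder = new StreamsBuilder();
    builder.stream(SessionWindowsExample.PLAY_EVENTS, Consumed.with(Serdes.String(), playEventSerde))
        // One session per user: a session closes once the user has been
        // inactive for longer than INACTIVITY_GAP milliseconds.
        .groupByKey(Grouped.with(Serdes.String(), playEventSerde))
        .windowedBy(SessionWindows.with(Duration.ofMillis(SessionWindowsExample.INACTIVITY_GAP)))
        .count(Materialized.<String, Long, SessionStore<Bytes, byte[]>>as(
            SessionWindowsExample.PLAY_EVENTS_PER_SESSION));
    return builder;
  }
}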