use of com.thinkbiganalytics.provenance.api.ProvenanceEventService in project kylo by Teradata.
the class TestProvenanceRest method testProvenanceRest.
// @Test
public void testProvenanceRest() {
    // Point the REST provenance service at a local Kylo instance.
    ProvenanceEventService restProvenanceEventService = new KyloRestProvenanceEventService();
    Map<String, String> params = new HashMap<>();
    params.put(KyloRestProvenanceEventService.USERNAME_CONFIG, "dladmin");
    params.put(KyloRestProvenanceEventService.PASSWORD_CONFIG, "thinkbig");
    params.put(KyloRestProvenanceEventService.HOST_CONFIG, "localhost");
    params.put(KyloRestProvenanceEventService.PORT_CONFIG, "8400");
    restProvenanceEventService.configure(params);
    // Build three events (starting, intermediate, ending) for a single flowfile.
    String feedName = "provenance.provenance_test";
    String flowfileId = UUID.randomUUID().toString();
    DateTime startTime = DateTime.now().minusMinutes(1);
    Long start = startTime.getMillis();
    ProvenanceEventRecordDTO event1 = new ProvenanceEventDtoBuilder(feedName, flowfileId, "First Step").startingEvent(true).startTime(start).build();
    ProvenanceEventRecordDTO event2 = new ProvenanceEventDtoBuilder(feedName, flowfileId, "Second Step").startTime(startTime.plusSeconds(30).getMillis()).build();
    ProvenanceEventRecordDTO event3 = new ProvenanceEventDtoBuilder(feedName, flowfileId, "Final Step").endingEvent(true).build();
    List<ProvenanceEventRecordDTO> events = new ArrayList<>();
    events.add(event1);
    events.add(event2);
    events.add(event3);
    try {
        restProvenanceEventService.sendEvents(events);
    } catch (ProvenanceException e) {
        e.printStackTrace();
    }
}
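The test above never releases the connection; outside a unit test the service should be closed once the batch is sent. A minimal sketch of that pattern, assuming the same params and events as above and reusing only calls that appear in these examples (configure, sendEvents, closeConnection):

ProvenanceEventService service = new KyloRestProvenanceEventService();
service.configure(params); // same username/password/host/port map as above
try {
    service.sendEvents(events);
} catch (ProvenanceException e) {
    e.printStackTrace(); // handle or report as appropriate
} finally {
    service.closeConnection(); // release the underlying connection
}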
use of com.thinkbiganalytics.provenance.api.ProvenanceEventService in project kylo by Teradata.
the class ProvenanceServiceFactory method getProvenanceEventService.
public static ProvenanceEventService getProvenanceEventService(SparkProvenanceConfiguration config) {
    ProvenanceEventService service = null;
    Map<String, String> params = new HashMap<>();
    log.info("Creating a new Service of type {} ", config.getType());
    if (config.getType() == SparkProvenanceConfiguration.Type.KAFKA) {
        service = new KyloKafkaProvenanceEventService();
        params.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, config.getConnectionUrl());
        service.configure(params);
    } else if (config.getType() == SparkProvenanceConfiguration.Type.JMS) {
        service = new KyloJmsProvenanceEventService();
        params.put(KyloJmsProvenanceEventService.JMS_URL_CONFIG, config.getConnectionUrl());
        service.configure(params);
    } else {
        throw new UnsupportedOperationException("Unable to create Provenance Event service. Unknown Provenance Type for: " + config.getType());
    }
    return service;
}
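A hedged usage sketch of this factory, assuming an args array in the order documented by the run method below and an events list built as in the REST test above:

// Sketch: choose the JMS or Kafka transport from the parsed configuration.
SparkProvenanceConfiguration config = new SparkProvenanceConfiguration(args);
ProvenanceEventService service = ProvenanceServiceFactory.getProvenanceEventService(config);
try {
    service.sendEvents(events);
} catch (ProvenanceException e) {
    e.printStackTrace();
} finally {
    service.closeConnection();
}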
use of com.thinkbiganalytics.provenance.api.ProvenanceEventService in project kylo by Teradata.
the class SparkProvenance method run.
private void run(@Nonnull final PrintStream out, @Nonnull final String... args) {
    // Check how many arguments were passed in
    if (args.length < 5) {
        String msg = "Proper Usage is: <flowfile-id> <job-flowfile-id> <feed-name (category.feed)> <connection-url (url to connect to JMS or KAFKA)> <type (JMS, KAFKA)>. " + "You provided " + args.length + " args which are (comma separated): " + StringUtils.join(args, ",");
        out.println(msg);
        throw new IllegalArgumentException(msg);
    }
    ProvenanceEventService provenanceEventService = null;
    final SparkContext sparkContext = SparkContext.getOrCreate();
    try {
        final SparkProvenanceConfiguration params = new SparkProvenanceConfiguration(args);
        // Get the proper ProvenanceService
        provenanceEventService = ProvenanceServiceFactory.getProvenanceEventService(params);
        // Collection of custom Provenance Events we will be sending to Kylo
        List<ProvenanceEventRecordDTO> events = new ArrayList<>();
        // Do some work: look up the database names in Hive
        final HiveContext hiveContext = new HiveContext(sparkContext);
        ProvenanceEventRecordDTO event = newEvent("Databases", params);
        Dataset<Row> df = hiveContext.sql("show databases");
        event.getAttributeMap().put("databases", df.toJSON().collectAsList().toString());
        event.setEventTime(System.currentTimeMillis());
        events.add(event);
        event = newEvent("Another Step", params);
        event.getAttributeMap().put("UUID 1", UUID.randomUUID().toString());
        event.setEventTime(System.currentTimeMillis());
        event.getAttributeMap().put("timestamp", String.valueOf(System.currentTimeMillis()));
        events.add(event);
        // Send the events off
        provenanceEventService.sendEvents(events);
        log.info("Spark app finished");
    } catch (Exception e) {
        log.error("Failed to run Spark Provenance Job: {}", e.toString(), e);
    } finally {
        // Guard against the factory throwing before the service was created.
        if (provenanceEventService != null) {
            provenanceEventService.closeConnection();
        }
        sparkContext.stop();
        log.info("Exiting!!!!");
        System.exit(0);
    }
}
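The newEvent(String, SparkProvenanceConfiguration) helper is not part of this excerpt. A plausible sketch, assuming the configuration exposes the feed name and flowfile id passed on the command line (getFeedName and getFlowFileId are hypothetical accessors, not confirmed Kylo API) and using the same ProvenanceEventDtoBuilder as the REST test above:

// Hypothetical helper; the two accessor names are assumptions.
private ProvenanceEventRecordDTO newEvent(String stepName, SparkProvenanceConfiguration params) {
    return new ProvenanceEventDtoBuilder(params.getFeedName(), params.getFlowFileId(), stepName)
        .startTime(System.currentTimeMillis())
        .build();
}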