use of org.spark_project.guava.util.concurrent.RateLimiter in project auratrainingproject by liuqinghua666.
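Before the producer code, a minimal standalone sketch of how Guava's RateLimiter (here the copy shaded into Spark under org.spark_project.guava) paces a loop; the 10 permits-per-second rate matches the producer below, while the loop itself is illustrative only:

import org.spark_project.guava.util.concurrent.RateLimiter;

public class RateLimiterSketch {
    public static void main(String[] args) {
        // Hand out at most ~10 permits per second, the same rate the producer uses.
        RateLimiter limiter = RateLimiter.create(10);
        for (int i = 0; i < 30; i++) {
            limiter.acquire(); // blocks roughly 100 ms per iteration at this rate
            System.out.println("permit " + i + " acquired");
        }
    }
}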
The main method of the class JavaKafkaEventProducer:
public static void main(String[] args) throws Exception {
    String dataPath = "D:\\bigdata\\source\\auratrainingproject\\spark\\data\\IJCAI17_dataset";
    String topic = KafkaRedisConfig.KAFKA_USER_PAY_TOPIC;
    Properties props = getConfig();
    Producer<String, String> producer = new KafkaProducer<String, String>(props);
    // Prepare the input file path; a command-line argument overrides the default.
    if (args.length > 0) {
        dataPath = args[0];
    }
    String fileName = JavaSQLAliPayAnalyzer.getOSPath(dataPath + "/user_pay.txt");
    // Use RateLimiter for throttling: at most maxRatePerSecond messages per second.
    int maxRatePerSecond = 10;
    RateLimiter limiter = RateLimiter.create(maxRatePerSecond);
    File file = new File(fileName);
    BufferedReader reader = null;
    try {
        System.out.println("Reading the file line by line, one full line at a time:");
        reader = new BufferedReader(new FileReader(file));
        String tempString = null;
        int line = 1;
        // Read one line at a time; a null return marks the end of the file.
        while ((tempString = reader.readLine()) != null) {
            // Prepare the record fields.
            String[] row = tempString.split(",");
            if (row.length >= 3) {
                // Block until a permit is available (about 100 ms per message at 10 msg/s).
                limiter.acquire();
                // key: user_id
                String key = "" + row[0];
                // value: shop_id + "," + time_stamp
                String value = "" + row[1] + "," + row[2];
                // Send the record to Kafka.
                producer.send(new ProducerRecord<String, String>(topic, key, value));
                System.out.println("Message[" + line + "] sent: " + key + "=>" + value);
                line++;
            }
        }
        reader.close();
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException e1) {
            }
        }
        // Flush buffered records and release producer resources.
        producer.close();
    }
}
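The getConfig() helper is not shown on this page. Below is a minimal sketch of what such a method typically returns for a String-keyed, String-valued producer; the broker address and acks value are placeholder assumptions, not the project's actual settings:

private static Properties getConfig() {
    Properties props = new Properties();
    props.put("bootstrap.servers", "localhost:9092"); // placeholder broker address, an assumption
    props.put("acks", "all");                         // wait for full acknowledgement
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
    return props;
}

On the throttling choice: RateLimiter.acquire() blocks only as long as needed to keep the average rate at maxRatePerSecond, which gives smoother pacing than a fixed Thread.sleep per message and adapts automatically if the rate constant is changed.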