use of com.pamirs.attach.plugin.apache.kafka.origin.ConsumerMetaData in project LinkAgent by shulieTech.
In class ConsumerTraceInterceptor, method beforeTrace:
@Override
public SpanRecord beforeTrace(Advice advice) {
    Object result = advice.getReturnObj();
    KafkaConsumer kafkaConsumer = (KafkaConsumer) advice.getTarget();
    if (ConsumerHolder.isWorkWithOtherFramework(kafkaConsumer)) {
        return null;
    }
    ConsumerMetaData consumerMetaData = ConsumerHolder.getConsumerMetaData(kafkaConsumer);
    ConsumerRecords consumerRecords = (ConsumerRecords) result;
    if (consumerRecords.isEmpty()) {
        lastPollHasRecordsThreadLocal.set(false);
        return null;
    }
    Iterator iterator = consumerRecords.iterator();
    Object next = iterator.next();
    if (!(next instanceof ConsumerRecord)) {
        return null;
    }
    lastPollHasRecordsThreadLocal.set(true);
    ConsumerRecord consumerRecord = (ConsumerRecord) next;
    SpanRecord spanRecord = new SpanRecord();
    if (PradarSwitcher.isKafkaMessageHeadersEnabled()) {
        HeaderProcessor headerProcessor = HeaderProvider.getHeaderProcessor(consumerRecord);
        Map<String, String> ctx = headerProcessor.getHeaders(consumerRecord);
        spanRecord.setContext(ctx);
    }
    String topic = consumerRecord.topic();
    // TODO: the Kafka server info obtained here from the native client and the server info
    // obtained via Spring list the cluster nodes in a different order
    spanRecord.setRemoteIp(consumerMetaData.getBootstrapServers());
    spanRecord.setRequest(consumerRecords.count());
    spanRecord.setService(topic);
    boolean clusterTestPrefix = Pradar.isClusterTestPrefix(topic);
    spanRecord.setMethod(clusterTestPrefix ? consumerMetaData.getPtGroupId() : consumerMetaData.getGroupId());
    return spanRecord;
}
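The snippets on this page only read from ConsumerMetaData, so its expected shape can be inferred from the accessors they call. Below is a minimal sketch of such a metadata holder reconstructed from those calls; the field names and constructor are assumptions, and the real class in LinkAgent may carry additional state.
import java.util.Set;

// Sketch reconstructed from the accessors used by the interceptors on this page.
// Not the actual LinkAgent class: fields and constructor are assumptions.
public class ConsumerMetaDataSketch {
    private final String bootstrapServers; // reported as the span's remote IP
    private final String groupId;          // business consumer group
    private final String ptGroupId;        // shadow (pressure-test) consumer group
    private final Set<String> topics;      // subscribed topics
    private final boolean hasShadow;       // whether a shadow consumer is configured

    public ConsumerMetaDataSketch(String bootstrapServers, String groupId, String ptGroupId,
                                  Set<String> topics, boolean hasShadow) {
        this.bootstrapServers = bootstrapServers;
        this.groupId = groupId;
        this.ptGroupId = ptGroupId;
        this.topics = topics;
        this.hasShadow = hasShadow;
    }

    public String getBootstrapServers() { return bootstrapServers; }
    public String getGroupId() { return groupId; }
    public String getPtGroupId() { return ptGroupId; }
    public Set<String> getTopics() { return topics; }
    public boolean isHasShadow() { return hasShadow; }
}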
use of com.pamirs.attach.plugin.apache.kafka.origin.ConsumerMetaData in project LinkAgent by shulieTech.
In class ConsumerPollInterceptor, method doOriginIntercept:
private CutOffResult doOriginIntercept(Advice advice) {
    if (GlobalConfig.getInstance().getMqWhiteList() == null || GlobalConfig.getInstance().getMqWhiteList().isEmpty()) {
        return CutOffResult.PASSED;
    }
    KafkaConsumer consumer = (KafkaConsumer) advice.getTarget();
    ConsumerMetaData consumerMetaData = ConsumerHolder.getConsumerMetaData(consumer);
    if (consumerMetaData == null) {
        return CutOffResult.passed();
    }
    Object[] args = advice.getParameterArray();
    long timeout = 100L;
    if (args[0] instanceof Long) {
        timeout = (Long) args[0];
    /* } else if (args[0] instanceof Duration) {
        timeout = ((Duration) args[0]).toMillis(); */
    } else if (args[0] instanceof Timer) {
        timeout = ((Timer) args[0]).remainingMs();
    }
    if (consumerMetaData.isHasShadow()) {
        ConsumerProxy consumerProxy = ConsumerHolder.getProxyOrCreate(consumer, timeout);
        if (consumerProxy == null) {
            return CutOffResult.PASSED;
        }
        return CutOffResult.cutoff(consumerProxy.poll(timeout));
    } else {
        if (warnAlready.compareAndSet(false, true)) {
            LOGGER.warn("consumer with group id : {} topic : {} doesn't have a shadow consumer config",
                consumerMetaData.getGroupId(), consumerMetaData.getTopics());
        }
        return CutOffResult.passed();
    }
}
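The timeout handling above exists because KafkaConsumer.poll changed signature across client versions: older clients take poll(long), newer clients take poll(Duration), which delegates internally to a Timer, so the interceptor inspects the runtime type of the first argument. A hypothetical helper mirroring that resolution could look like the sketch below (the method name is illustrative only; the Duration branch is commented out in the interceptor itself).
import java.time.Duration;
import org.apache.kafka.common.utils.Timer;

// Illustrative helper mirroring the branch logic above; falls back to 100 ms
// when the argument type is not recognized.
static long resolveTimeoutMillis(Object firstPollArg) {
    long timeout = 100L;
    if (firstPollArg instanceof Long) {
        timeout = (Long) firstPollArg;
    } else if (firstPollArg instanceof Duration) {
        timeout = ((Duration) firstPollArg).toMillis();
    } else if (firstPollArg instanceof Timer) {
        timeout = ((Timer) firstPollArg).remainingMs();
    }
    return timeout;
}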
use of com.pamirs.attach.plugin.apache.kafka.origin.ConsumerMetaData in project LinkAgent by shulieTech.
In class ShadowConsumerDisableListenerImpl, method disableBatchOriginKafka:
private void disableBatchOriginKafka(String key, String topic, String group) {
    try {
        int code = ConsumerHolder.getShadowProxyMapping().get(key);
        ConsumerProxy consumerProxy = ConsumerHolder.getProxyMapping().get(code);
        consumerProxy.closePtConsumer();
        ConsumerHolder.getProxyMapping().remove(code);
        ConsumerHolder.getCache().remove(code);
        for (Map.Entry<Integer, ConsumerMetaData> entry : ConsumerHolder.getCache().entrySet()) {
            if (entry.getValue().getTopics().contains(Pradar.addClusterTestPrefix(topic))
                && entry.getValue().getGroupId().equals(Pradar.addClusterTestPrefix(group))) {
                ConsumerHolder.getCache().remove(entry.getKey());
                break;
            }
        }
        ConsumerHolder.getShadowProxyMapping().remove(key);
    } catch (Throwable t) {
        logger.error("[apache-kafka]: {}", Throwables.getStackTraceAsString(t));
    }
}
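The cache cleanup above relies on the Pradar cluster-test naming convention: the shadow topic and shadow group id are the business names with the prefix added by Pradar.addClusterTestPrefix. Assuming the conventional "PT_" prefix used in the Pradar ecosystem (an assumption; the real value lives in the Pradar class), the match performed in the loop reduces to the sketch below.
// Illustrative sketch of the shadow-entry match performed in the loop above.
// "PT_" stands in for Pradar.addClusterTestPrefix and is an assumption.
static boolean matchesShadowEntry(java.util.Set<String> entryTopics, String entryGroupId,
                                  String businessTopic, String businessGroup) {
    String shadowTopic = "PT_" + businessTopic;
    String shadowGroup = "PT_" + businessGroup;
    return entryTopics.contains(shadowTopic) && entryGroupId.equals(shadowGroup);
}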