-
Notifications
You must be signed in to change notification settings - Fork 284
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
Kafka-clients 3.8 support #7626
Draft
nayeem-kamal
wants to merge
6
commits into
master
Choose a base branch
from
nayeem-kamal/spring-kafka-fix
base: master
Could not load branches
Branch not found: {{ refName }}
Loading
Could not load tags
Nothing to show
Loading
Are you sure you want to change the base?
Some commits from the old base branch may be removed from the timeline,
and old review comments may become outdated.
Draft
Changes from 1 commit
Commits
Show all changes
6 commits
Select commit
Hold shift + click to select a range
a6829ea
WIP testing tests
nayeem-kamal 6a59de1
Working instrumentation
nayeem-kamal bcfa8af
restructured instrumentation to build java17 separately
nayeem-kamal 97ffb1a
marked tests flaky that pass locally
nayeem-kamal e6aab6e
updated dependencies
nayeem-kamal 855401d
update with master
nayeem-kamal File filter
Filter by extension
Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
There are no files selected for viewing
59 changes: 59 additions & 0 deletions
59
dd-java-agent/instrumentation/kafka-clients-3.1/build.gradle
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,59 @@ | ||
// Muzzle verifies at build time that this instrumentation's references resolve
// against the declared kafka-clients version range.
muzzle {
  pass {
    group = "org.apache.kafka"
    module = "kafka-clients"
    versions = "[3.1.0,)"
    // NOTE(review): assertInverse=false means muzzle does NOT verify that the
    // instrumentation fails to apply below 3.1.0 -- confirm this is intentional.
    assertInverse = false
  }
}

apply from: "$rootDir/gradle/java.gradle"

// Extra test suites beyond the default 'test' task.
addTestSuite('latestDepTest')
addTestSuite('iastLatestDepTest3')

// This module compiles with a Java 17 toolchain (built separately from the
// Java 8 modules -- see commit "restructured instrumentation to build java17 separately").
java {
  toolchain {
    languageVersion.set(JavaLanguageVersion.of(17))
  }
}
dependencies {
  // Compile against the minimum supported client version; the user's own
  // kafka-clients jar is what is present at runtime.
  compileOnly group: 'org.apache.kafka', name: 'kafka-clients', version: '3.1.0'
  implementation project(':dd-java-agent:instrumentation:kafka-common')

  testImplementation group: 'org.apache.kafka', name: 'kafka-clients', version: '3.1.0'
  testImplementation group: 'org.springframework.kafka', name: 'spring-kafka', version: '3.1.0'
  testImplementation group: 'org.springframework.kafka', name: 'spring-kafka-test', version: '3.1.0'
  testImplementation group: 'org.testcontainers', name: 'kafka', version: '1.17.0'
  testImplementation group: 'javax.xml.bind', name: 'jaxb-api', version: '2.2.3'
  testImplementation group: 'org.assertj', name: 'assertj-core', version: '2.9.+'
  testImplementation group: 'org.mockito', name: 'mockito-core', version: '2.19.0'
  testRuntimeOnly project(':dd-java-agent:instrumentation:spring-scheduling-3.1')
  testImplementation(testFixtures(project(':dd-java-agent:agent-iast')))

  // IAST: runtime instrumenters + sources/sinks needed by the iast test suites.
  testRuntimeOnly project(':dd-java-agent:instrumentation:iast-instrumenter')
  testRuntimeOnly project(':dd-java-agent:instrumentation:java-lang')
  testRuntimeOnly project(':dd-java-agent:instrumentation:java-io')
  testRuntimeOnly project(':dd-java-agent:instrumentation:jackson-core')
  testImplementation(group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.9.10')

  // Include latest version of kafka itself along with latest version of client libs.
  // This seems to help with jar compatibility hell.
  // NOTE(review): kafka_2.13/kafka-clients are pinned to '2.+' although this
  // module's muzzle range is [3.1.0,) and spring-kafka is '3.+' -- confirm
  // whether '3.+' was intended for the latestDepTest client as well.
  latestDepTestImplementation group: 'org.apache.kafka', name: 'kafka_2.13', version: '2.+'
  latestDepTestImplementation group: 'org.apache.kafka', name: 'kafka-clients', version: '2.+'
  latestDepTestImplementation group: 'org.springframework.kafka', name: 'spring-kafka', version: '3.+'
  latestDepTestImplementation group: 'org.springframework.kafka', name: 'spring-kafka-test', version: '3.+'
  latestDepTestImplementation group: 'org.assertj', name: 'assertj-core', version: '3.19.+'
  latestDepTestImplementation libs.guava

}

configurations.testRuntimeClasspath {
  // spock-core depends on assertj version that is not compatible with kafka-clients
  resolutionStrategy.force 'org.assertj:assertj-core:2.9.1'
}
54 changes: 54 additions & 0 deletions
54
...ts-3.1/src/main/java/datadog/trace/instrumentation/kafka_clients38/KafkaConsumerInfo.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change | ||||
---|---|---|---|---|---|---|
@@ -0,0 +1,54 @@ | ||||||
package datadog.trace.instrumentation.kafka_clients38; | ||||||
|
||||||
import java.util.Objects; | ||||||
import javax.annotation.Nullable; | ||||||
import org.apache.kafka.clients.consumer.ConsumerGroupMetadata; | ||||||
|
||||||
public class KafkaConsumerInfo { | ||||||
private final String consumerGroup; | ||||||
private final ConsumerGroupMetadata clientMetadata; | ||||||
private final String bootstrapServers; | ||||||
|
||||||
public KafkaConsumerInfo( | ||||||
String consumerGroup, ConsumerGroupMetadata clientMetadata, String bootstrapServers) { | ||||||
this.consumerGroup = consumerGroup; | ||||||
this.clientMetadata = clientMetadata; | ||||||
this.bootstrapServers = bootstrapServers; | ||||||
} | ||||||
|
||||||
public KafkaConsumerInfo(String consumerGroup, String bootstrapServers) { | ||||||
this.consumerGroup = consumerGroup; | ||||||
this.clientMetadata = null; | ||||||
this.bootstrapServers = bootstrapServers; | ||||||
} | ||||||
|
||||||
@Nullable | ||||||
public String getConsumerGroup() { | ||||||
return consumerGroup; | ||||||
} | ||||||
|
||||||
@Nullable | ||||||
public ConsumerGroupMetadata getClientMetadata() { | ||||||
return clientMetadata; | ||||||
} | ||||||
|
||||||
@Nullable | ||||||
public String getBootstrapServers() { | ||||||
return bootstrapServers; | ||||||
} | ||||||
|
||||||
@Override | ||||||
public boolean equals(Object o) { | ||||||
if (this == o) return true; | ||||||
if (o == null || getClass() != o.getClass()) return false; | ||||||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. ⚪ Code Quality Violation
Suggested change
single if statement should be wrapped in a brace (...read more)Omitting braces |
||||||
KafkaConsumerInfo consumerInfo = (KafkaConsumerInfo) o; | ||||||
return Objects.equals(consumerGroup, consumerInfo.consumerGroup) | ||||||
&& Objects.equals(clientMetadata, consumerInfo.clientMetadata); | ||||||
} | ||||||
|
||||||
@Override | ||||||
public int hashCode() { | ||||||
return 31 * (null == consumerGroup ? 0 : consumerGroup.hashCode()) | ||||||
+ (null == clientMetadata ? 0 : clientMetadata.hashCode()); | ||||||
} | ||||||
} |
191 changes: 191 additions & 0 deletions
191
.../java/datadog/trace/instrumentation/kafka_clients38/KafkaConsumerInfoInstrumentation.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,191 @@ | ||
package datadog.trace.instrumentation.kafka_clients38; | ||
|
||
import static datadog.trace.agent.tooling.bytebuddy.matcher.HierarchyMatchers.extendsClass; | ||
import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.nameStartsWith; | ||
import static datadog.trace.agent.tooling.bytebuddy.matcher.NameMatchers.named; | ||
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activateSpan; | ||
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.activeSpan; | ||
import static datadog.trace.bootstrap.instrumentation.api.AgentTracer.startSpan; | ||
import static datadog.trace.bootstrap.instrumentation.api.InstrumentationTags.KAFKA_RECORDS_COUNT; | ||
import static datadog.trace.instrumentation.kafka_clients38.KafkaDecorator.KAFKA_POLL; | ||
import static net.bytebuddy.matcher.ElementMatchers.isConstructor; | ||
import static net.bytebuddy.matcher.ElementMatchers.isMethod; | ||
import static net.bytebuddy.matcher.ElementMatchers.isPublic; | ||
import static net.bytebuddy.matcher.ElementMatchers.returns; | ||
import static net.bytebuddy.matcher.ElementMatchers.takesArgument; | ||
import static net.bytebuddy.matcher.ElementMatchers.takesArguments; | ||
|
||
import com.google.auto.service.AutoService; | ||
import datadog.trace.agent.tooling.Instrumenter; | ||
import datadog.trace.agent.tooling.InstrumenterModule; | ||
import datadog.trace.api.Config; | ||
import datadog.trace.bootstrap.InstrumentationContext; | ||
import datadog.trace.bootstrap.instrumentation.api.AgentScope; | ||
import datadog.trace.bootstrap.instrumentation.api.AgentSpan; | ||
import java.util.HashMap; | ||
import java.util.List; | ||
import java.util.Map; | ||
import net.bytebuddy.asm.Advice; | ||
import net.bytebuddy.description.type.TypeDescription; | ||
import net.bytebuddy.matcher.ElementMatcher; | ||
import org.apache.kafka.clients.consumer.ConsumerConfig; | ||
import org.apache.kafka.clients.consumer.ConsumerGroupMetadata; | ||
import org.apache.kafka.clients.consumer.ConsumerRecord; | ||
import org.apache.kafka.clients.consumer.ConsumerRecords; | ||
import org.apache.kafka.clients.consumer.KafkaConsumer; | ||
|
||
/**
 * This instrumentation saves additional information from the KafkaConsumer, such as consumer group
 * and cluster ID, in the context store for later use.
 *
 * <p>Two advices are applied: one on the KafkaConsumer constructor to capture the consumer group
 * and bootstrap servers, and one on {@code poll(...)} to transfer that info onto the returned
 * {@code ConsumerRecords} and (when data streams is enabled) record a poll span.
 */
@AutoService(InstrumenterModule.class)
public final class KafkaConsumerInfoInstrumentation extends InstrumenterModule.Tracing
    implements Instrumenter.ForTypeHierarchy {

  public KafkaConsumerInfoInstrumentation() {
    super("kafka");
  }

  /** Declares the context-store key/value class mappings this module relies on. */
  @Override
  public Map<String, String> contextStore() {
    Map<String, String> contextStores = new HashMap<>();
    contextStores.put("org.apache.kafka.clients.Metadata", "java.lang.String");
    contextStores.put(
        "org.apache.kafka.clients.consumer.ConsumerRecords", KafkaConsumerInfo.class.getName());
    // NOTE(review): the Metadata and ConsumerCoordinator mappings are declared but not written
    // by this class (see the commented-out coordinator code below) -- confirm a sibling
    // instrumentation uses them, otherwise they can be removed.
    contextStores.put(
        "org.apache.kafka.clients.consumer.internals.ConsumerCoordinator",
        KafkaConsumerInfo.class.getName());
    contextStores.put(
        "org.apache.kafka.clients.consumer.KafkaConsumer", KafkaConsumerInfo.class.getName());
    return contextStores;
  }

  @Override
  public String hierarchyMarkerType() {
    return "org.apache.kafka.clients.consumer.KafkaConsumer";
  }

  // Matches KafkaConsumer subclasses whose name starts with the marker type's name.
  @Override
  public ElementMatcher<TypeDescription> hierarchyMatcher() {
    return extendsClass(nameStartsWith(hierarchyMarkerType()));
  }

  @Override
  public String[] helperClassNames() {
    return new String[] {
      packageName + ".KafkaDecorator", packageName + ".KafkaConsumerInfo",
    };
  }

  @Override
  public void methodAdvice(MethodTransformer transformer) {
    // KafkaConsumer(ConsumerConfig, Deserializer, Deserializer) -- the constructor shape
    // used by kafka-clients 3.x.
    transformer.applyAdvice(
        isConstructor()
            .and(takesArgument(0, named("org.apache.kafka.clients.consumer.ConsumerConfig")))
            .and(takesArgument(1, named("org.apache.kafka.common.serialization.Deserializer")))
            .and(takesArgument(2, named("org.apache.kafka.common.serialization.Deserializer"))),
        KafkaConsumerInfoInstrumentation.class.getName() + "$ConstructorAdvice");

    // public ConsumerRecords poll(<one arg>) -- covers both poll(long) and poll(Duration).
    transformer.applyAdvice(
        isMethod()
            .and(isPublic())
            .and(named("poll"))
            .and(takesArguments(1))
            .and(returns(named("org.apache.kafka.clients.consumer.ConsumerRecords"))),
        KafkaConsumerInfoInstrumentation.class.getName() + "$RecordsAdvice");
  }

  /** Captures consumer group and bootstrap servers when the consumer is constructed. */
  public static class ConstructorAdvice {
    @Advice.OnMethodExit(suppress = Throwable.class)
    public static void captureGroup(
        @Advice.This KafkaConsumer consumer, @Advice.Argument(0) ConsumerConfig consumerConfig) {
      ConsumerGroupMetadata groupMetadata = consumer.groupMetadata();

      // Prefer the explicitly configured group.id; treat an empty string as absent.
      String consumerGroup = consumerConfig.getString(ConsumerConfig.GROUP_ID_CONFIG);
      String normalizedConsumerGroup =
          consumerGroup != null && !consumerGroup.isEmpty() ? consumerGroup : null;

      // Fall back to the group id reported by the client's own group metadata.
      if (normalizedConsumerGroup == null) {
        if (groupMetadata != null) {
          normalizedConsumerGroup = groupMetadata.groupId();
        }
      }
      List<String> bootstrapServersList =
          consumerConfig.getList(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG);
      String bootstrapServers = null;
      if (bootstrapServersList != null && !bootstrapServersList.isEmpty()) {
        bootstrapServers = String.join(",", bootstrapServersList);
      }

      // Group metadata is only retained when data streams monitoring is on.
      KafkaConsumerInfo kafkaConsumerInfo;
      if (Config.get().isDataStreamsEnabled()) {
        kafkaConsumerInfo =
            new KafkaConsumerInfo(normalizedConsumerGroup, groupMetadata, bootstrapServers);
      } else {
        kafkaConsumerInfo = new KafkaConsumerInfo(normalizedConsumerGroup, bootstrapServers);
      }

      // Only store when there is something useful to propagate later.
      if (kafkaConsumerInfo.getConsumerGroup() != null
          || kafkaConsumerInfo.getClientMetadata() != null) {
        InstrumentationContext.get(KafkaConsumer.class, KafkaConsumerInfo.class)
            .put(consumer, kafkaConsumerInfo);
        // if (coordinator != null) {
        //   InstrumentationContext.get(ConsumerCoordinator.class, KafkaConsumerInfo.class)
        //       .put(coordinator, kafkaConsumerInfo);
        // }
      }
    }

    public static void muzzleCheck(ConsumerRecord record) {
      // KafkaConsumerInstrumentation only applies for kafka versions with headers
      // Make an explicit call so KafkaConsumerGroupInstrumentation does the same
      record.headers();
    }
  }

  /**
   * this method transfers the consumer group from the KafkaConsumer class key to the
   * ConsumerRecords key. This is necessary because in the poll method, we don't have access to the
   * KafkaConsumer class.
   */
  public static class RecordsAdvice {
    // Opens a KAFKA_POLL span scope only when data streams is enabled; the returned scope
    // (possibly null) is handed to the exit advice via @Advice.Enter.
    @Advice.OnMethodEnter(suppress = Throwable.class)
    public static AgentScope onEnter() {
      // Prefer the per-trace config when a span is active; otherwise use the global config.
      boolean dataStreamsEnabled;
      if (activeSpan() != null) {
        dataStreamsEnabled = activeSpan().traceConfig().isDataStreamsEnabled();
      } else {
        dataStreamsEnabled = Config.get().isDataStreamsEnabled();
      }
      if (dataStreamsEnabled) {
        final AgentSpan span = startSpan(KAFKA_POLL);
        return activateSpan(span);
      }
      return null;
    }

    // Copies the stored KafkaConsumerInfo from the consumer onto the returned records,
    // then tags and finishes the poll span opened in onEnter (if any).
    @Advice.OnMethodExit(suppress = Throwable.class)
    public static void captureGroup(
        @Advice.Enter final AgentScope scope,
        @Advice.This KafkaConsumer consumer,
        @Advice.Return ConsumerRecords records) {
      int recordsCount = 0;
      if (records != null) {
        KafkaConsumerInfo kafkaConsumerInfo =
            InstrumentationContext.get(KafkaConsumer.class, KafkaConsumerInfo.class).get(consumer);
        if (kafkaConsumerInfo != null) {
          InstrumentationContext.get(ConsumerRecords.class, KafkaConsumerInfo.class)
              .put(records, kafkaConsumerInfo);
        }
        recordsCount = records.count();
      }
      if (scope == null) {
        return;
      }
      AgentSpan span = scope.span();
      span.setTag(KAFKA_RECORDS_COUNT, recordsCount);
      span.finish();
      scope.close();
    }
  }
}
Oops, something went wrong.
Add this suggestion to a batch that can be applied as a single commit.
This suggestion is invalid because no changes were made to the code.
Suggestions cannot be applied while the pull request is closed.
Suggestions cannot be applied while viewing a subset of changes.
Only one suggestion per line can be applied in a batch.
Add this suggestion to a batch that can be applied as a single commit.
Applying suggestions on deleted lines is not supported.
You must change the existing code in this line in order to create a valid suggestion.
Outdated suggestions cannot be applied.
This suggestion has been applied or marked resolved.
Suggestions cannot be applied from pending reviews.
Suggestions cannot be applied on multi-line comments.
Suggestions cannot be applied while the pull request is queued to merge.
Suggestion cannot be applied right now. Please check back later.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
⚪ Code Quality Violation
single if statement should be wrapped in a brace (...read more)
Omitting braces
{}
is valid in multiple statements, such as, for loops, if statements, and while loops. However, enforcing the use of control braces throughout your codebase will make the code more consistent and can make it easier to add statements in the future.