Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
import io.sentry.util.SpanUtils;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import org.apache.kafka.clients.consumer.ConsumerRecord;
Expand Down Expand Up @@ -215,9 +215,8 @@ private void finishTransaction(
private <K, V> @Nullable TransactionContext continueTrace(
final @NotNull IScopes forkedScopes, final @NotNull ConsumerRecord<K, V> record) {
final @Nullable String sentryTrace = headerValue(record, SentryTraceHeader.SENTRY_TRACE_HEADER);
final @Nullable String baggage = headerValue(record, BaggageHeader.BAGGAGE_HEADER);
final @Nullable List<String> baggageHeaders =
baggage != null ? Collections.singletonList(baggage) : null;
headerValues(record, BaggageHeader.BAGGAGE_HEADER);
return forkedScopes.continueTrace(sentryTrace, baggageHeaders);
}

Expand Down Expand Up @@ -265,4 +264,18 @@ private void finishTransaction(
}
return new String(header.value(), StandardCharsets.UTF_8);
}

/**
 * Collects the decoded values of every header with the given name on the record.
 *
 * <p>Kafka allows the same header key to appear multiple times; this returns all
 * occurrences, in encounter order, decoded as UTF-8.
 *
 * @param record the consumer record whose headers are inspected
 * @param headerName the header key to look up (may occur more than once)
 * @return the decoded values, or {@code null} when no header with a non-null value exists
 */
private <K, V> @Nullable List<String> headerValues(
    final @NotNull ConsumerRecord<K, V> record, final @NotNull String headerName) {
  final List<String> decoded = new ArrayList<>();
  for (final @NotNull Header candidate : record.headers().headers(headerName)) {
    final byte[] raw = candidate.value();
    if (raw != null) {
      decoded.add(new String(raw, StandardCharsets.UTF_8));
    }
  }
  // Callers expect null (not an empty list) when the header is absent.
  return decoded.isEmpty() ? null : decoded;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -111,6 +111,21 @@ class SentryKafkaConsumerTracingTest {
verify(lifecycleToken).close()
}

@Test
fun `withTracing passes all baggage headers to continueTrace`() {
  // Given: a record carrying a sentry-trace header plus multiple baggage headers.
  val trace = "2722d9f6ec019ade60c776169d9a8904-cedf5b7571cb4972-1"
  val baggage = listOf("third=party", "sentry-sample_rate=1")
  val record = createRecord(sentryTrace = trace, baggageHeaders = baggage)

  // When: the record is processed under tracing.
  tracing.withTracingImpl(record, Callable { "done" })

  // Then: every baggage header reaches continueTrace, not only the first one.
  verify(forkedScopes).continueTrace(eq(trace), eq(baggage))
}

@Test
fun `withTracing skips scope forking when queue tracing is disabled`() {
options.isEnableQueueTracing = false
Expand Down Expand Up @@ -193,6 +208,7 @@ class SentryKafkaConsumerTracingTest {
topic: String = "my-topic",
sentryTrace: String? = null,
baggage: String? = null,
baggageHeaders: List<String>? = null,
messageId: String? = null,
deliveryAttempt: Int? = null,
enqueuedTime: String? = null,
Expand All @@ -205,6 +221,9 @@ class SentryKafkaConsumerTracingTest {
baggage?.let {
headers.add(BaggageHeader.BAGGAGE_HEADER, it.toByteArray(StandardCharsets.UTF_8))
}
baggageHeaders?.forEach {
headers.add(BaggageHeader.BAGGAGE_HEADER, it.toByteArray(StandardCharsets.UTF_8))
}
messageId?.let {
headers.add(SpanDataConvention.MESSAGING_MESSAGE_ID, it.toByteArray(StandardCharsets.UTF_8))
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
import io.sentry.util.SpanUtils;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.ArrayList;
import java.util.List;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
Expand Down Expand Up @@ -126,9 +126,8 @@ private boolean isIgnored() {
private @Nullable TransactionContext continueTrace(
final @NotNull IScopes forkedScopes, final @NotNull ConsumerRecord<K, V> record) {
final @Nullable String sentryTrace = headerValue(record, SentryTraceHeader.SENTRY_TRACE_HEADER);
final @Nullable String baggage = headerValue(record, BaggageHeader.BAGGAGE_HEADER);
final @Nullable List<String> baggageHeaders =
baggage != null ? Collections.singletonList(baggage) : null;
headerValues(record, BaggageHeader.BAGGAGE_HEADER);
return forkedScopes.continueTrace(sentryTrace, baggageHeaders);
}

Expand Down Expand Up @@ -243,6 +242,20 @@ private void finishSpan(final @NotNull SpanStatus status, final @Nullable Throwa
return new String(header.value(), StandardCharsets.UTF_8);
}

/**
 * Collects the decoded values of every header with the given name on the record.
 *
 * <p>Kafka allows the same header key to appear multiple times; this returns all
 * occurrences, in encounter order, decoded as UTF-8.
 *
 * @param record the consumer record whose headers are inspected
 * @param headerName the header key to look up (may occur more than once)
 * @return the decoded values, or {@code null} when no header with a non-null value exists
 */
private @Nullable List<String> headerValues(
    final @NotNull ConsumerRecord<K, V> record, final @NotNull String headerName) {
  // null (rather than an empty list) signals "header absent" to the caller.
  @Nullable List<String> values = null;
  for (final @NotNull Header header : record.headers().headers(headerName)) {
    if (header.value() != null) {
      // Allocate lazily so the common no-header case does not create a list.
      if (values == null) {
        values = new ArrayList<>();
      }
      values.add(new String(header.value(), StandardCharsets.UTF_8));
    }
  }
  return values;
}

private static final class SentryRecordContext {
final @NotNull ISentryLifecycleToken lifecycleToken;
final @Nullable ITransaction transaction;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ class SentryKafkaRecordInterceptorTest {
private fun createRecordWithHeaders(
sentryTrace: String? = null,
baggage: String? = null,
baggageHeaders: List<String>? = null,
enqueuedTime: String? = null,
deliveryAttempt: Int? = null,
): ConsumerRecord<String, String> {
Expand All @@ -91,6 +92,9 @@ class SentryKafkaRecordInterceptorTest {
baggage?.let {
headers.add(BaggageHeader.BAGGAGE_HEADER, it.toByteArray(StandardCharsets.UTF_8))
}
baggageHeaders?.forEach {
headers.add(BaggageHeader.BAGGAGE_HEADER, it.toByteArray(StandardCharsets.UTF_8))
}
enqueuedTime?.let {
headers.add(
SentryKafkaProducerInterceptor.SENTRY_ENQUEUED_TIME_HEADER,
Expand Down Expand Up @@ -141,6 +145,25 @@ class SentryKafkaRecordInterceptorTest {
verify(forkedScopes).continueTrace(org.mockito.kotlin.isNull(), org.mockito.kotlin.isNull())
}

@Test
fun `intercept passes all baggage headers to continueTrace`() {
val interceptor = SentryKafkaRecordInterceptor<String, String>(scopes)
val sentryTraceValue = "2722d9f6ec019ade60c776169d9a8904-cedf5b7571cb4972-1"
val record =
createRecordWithHeaders(
sentryTrace = sentryTraceValue,
baggageHeaders = listOf("third=party", "sentry-sample_rate=1"),
)

interceptor.intercept(record, consumer)

verify(forkedScopes)
.continueTrace(
org.mockito.kotlin.eq(sentryTraceValue),
org.mockito.kotlin.eq(listOf("third=party", "sentry-sample_rate=1")),
)
}

@Test
fun `sets retry count from delivery attempt header`() {
val interceptor = SentryKafkaRecordInterceptor<String, String>(scopes)
Expand Down
Loading