Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
19 commits
Select commit Hold shift + click to select a range
85cff2c
feat(api): add off-CPU and wall-clock park hooks to ProfilingContextI…
kaahos Apr 26, 2026
65eab27
feat(profiling): wire TaskBlock, park, ticks, and encode to JavaProfiler
kaahos Apr 27, 2026
3ccd0cd
feat(profiling): delegate park/TaskBlock/ticks in DatadogProfilingInt…
kaahos Apr 27, 2026
88b1f2c
feat(profiling): add LockSupport.park and Object.wait instrumentation
kaahos Apr 27, 2026
9701ba6
fix
kaahos Apr 27, 2026
769b169
test
kaahos May 3, 2026
3614653
Merge branch 'master' into paul.fournillon/wallclock-signals-mitigation
kaahos May 3, 2026
e2f9451
fix test
kaahos May 3, 2026
eba5e29
fix(profiling): fix instrumentation
kaahos May 3, 2026
ec177c9
fix(profiling): add lock-support and object-wait env var to supported …
kaahos May 4, 2026
0084005
chore: rename lock-support-profiling to lock-support and object-wait-…
kaahos May 4, 2026
f56f267
feat(profiling): add shared task block helper
kaahos May 7, 2026
0e5a046
fix(profiling): use task block helper for object wait
kaahos May 7, 2026
fb099b6
test(profiling): harden lock support parked state coverage
kaahos May 7, 2026
9c1deae
Merge branch 'master' into paul.fournillon/wallclock-signals-mitigation
kaahos May 7, 2026
bc251ce
fix(profiling): instrument LockSupport from bootstrap
kaahos May 11, 2026
5b35daf
test(profiling): add LockSupport TaskBlock smoke test
kaahos May 11, 2026
584d2fc
fix: LockSupportTaskBlock _dd.trace.operation
kaahos May 11, 2026
9119ec3
Merge branch 'master' into paul.fournillon/wallclock-signals-mitigation
kaahos May 12, 2026
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
133 changes: 122 additions & 11 deletions .gitlab-ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,10 @@ variables:
description: "Enable flaky tests"
value: "false"

JAVA_PROFILER_REF:
description: "When non-empty, clone DataDog/java-profiler at this Git ref (branch or tag), build ddprof, and use it as ddprof.jar for Gradle jobs instead of the Maven dependency."
value: "paul.fournillon/wallclock_precheck"

# One pipeline injection package size ratchet
OCI_PACKAGE_MAX_SIZE_BYTES: 40_000_000
LIB_INJECTION_IMAGE_MAX_SIZE_BYTES: 40_000_000
Expand Down Expand Up @@ -172,9 +176,21 @@ default:
echo "Failed to find base ref for PR" >&2
fi

# When build_java_profiler_ddprof ran, its artifact is available at custom-ddprof/ddprof.jar.
# Append root project property expected by dd-java-agent/ddprof-lib/build.gradle.
.inject_custom_ddprof_jar: &inject_custom_ddprof_jar
- |
if [ -f "${CI_PROJECT_DIR}/custom-ddprof/ddprof.jar" ]; then
echo "ddprof.jar=${CI_PROJECT_DIR}/custom-ddprof/ddprof.jar" >> gradle.properties
echo "Using custom ddprof.jar from java-profiler build"
fi

.gradle_build: &gradle_build
image: ${BUILDER_IMAGE_REPO}:${BUILDER_IMAGE_VERSION_PREFIX}base
stage: build
needs:
- job: build_java_profiler_ddprof
optional: true
variables:
MAVEN_OPTS: "-Xms256M -Xmx1024M"
GRADLE_WORKERS: 6
Expand Down Expand Up @@ -224,6 +240,7 @@ default:
org.gradle.java.installations.auto-download=false
org.gradle.java.installations.fromEnv=$JAVA_HOMES
EOF
- *inject_custom_ddprof_jar
- mkdir -p .gradle
- export GRADLE_USER_HOME=$(pwd)/.gradle
# replace maven central part by MAVEN_REPOSITORY_PROXY in .mvn/wrapper/maven-wrapper.properties
Expand Down Expand Up @@ -293,8 +310,73 @@ dd-octo-sts-pre-release-check:
max: 2
when: always

# Builds java-profiler from JAVA_PROFILER_REF and publishes custom-ddprof/ddprof.jar for downstream Gradle jobs.
# Uses :ddprof-lib:assembleReleaseJar (not assembleRelease, which is native-only). JDK 21+ for release + JDK 17+ for Gradle 9.
build_java_profiler_ddprof:
image: ${BUILDER_IMAGE_REPO}:${BUILDER_IMAGE_VERSION_PREFIX}base
stage: build
rules:
- if: '$JAVA_PROFILER_REF =~ /.+/'
when: on_success
variables:
FF_USE_FASTZIP: "true"
CACHE_COMPRESSION_LEVEL: "slowest"
KUBERNETES_CPU_REQUEST: 10
KUBERNETES_MEMORY_REQUEST: 20Gi
KUBERNETES_MEMORY_LIMIT: 20Gi
before_script:
- |
# java-profiler uses Gradle 9.x; Gradle requires JVM 17+. Builder image default java is often JDK 8.
if [ -n "${JAVA_21_HOME:-}" ] && [ -x "${JAVA_21_HOME}/bin/java" ]; then
export JAVA_HOME="$JAVA_21_HOME"
elif [ -n "${JAVA_17_HOME:-}" ] && [ -x "${JAVA_17_HOME}/bin/java" ]; then
export JAVA_HOME="$JAVA_17_HOME"
else
shopt -s nullglob
for d in /usr/lib/jvm/java-21-* /usr/lib/jvm/temurin-21-* /usr/lib/jvm/java-17-*; do
if [ -x "${d}/bin/java" ]; then
export JAVA_HOME="$d"
break
fi
done
shopt -u nullglob
fi
if [ -z "${JAVA_HOME:-}" ] || ! [ -x "${JAVA_HOME}/bin/java" ]; then
echo "Could not find JDK 17+ for Gradle 9 (set JAVA_21_HOME or JAVA_17_HOME, or install JDK 21 under /usr/lib/jvm)." >&2
ls -la /usr/lib/jvm 2>/dev/null || true
exit 1
fi
export PATH="${JAVA_HOME}/bin:${PATH}"
java -version
script:
- |
set -euo pipefail
mkdir -p "${CI_PROJECT_DIR}/custom-ddprof"
SRCDIR="${CI_PROJECT_DIR}/java-profiler-src"
rm -rf "$SRCDIR"
git clone --depth 1 --branch "$JAVA_PROFILER_REF" https://github.com/DataDog/java-profiler.git "$SRCDIR"
cd "$SRCDIR"
chmod +x ./gradlew
./gradlew --version
# assembleRelease is the native link/assemble task only; the packaged jar is assembleReleaseJar.
./gradlew :ddprof-lib:assembleReleaseJar -Pskip-tests -Pskip-gtest
JAR=$(find ddprof-lib/build/libs -maxdepth 1 -type f \( -name 'ddprof-*.jar' \) ! -name '*-sources*' ! -name '*-javadoc*' | head -1)
if [ -z "$JAR" ] || [ ! -f "$JAR" ]; then
echo "No ddprof jar found under ddprof-lib/build/libs" >&2
ls -la ddprof-lib/build/libs 2>/dev/null || ls -laR ddprof-lib/build 2>/dev/null || true
exit 1
fi
cp "$JAR" "${CI_PROJECT_DIR}/custom-ddprof/ddprof.jar"
ls -la "${CI_PROJECT_DIR}/custom-ddprof/"
artifacts:
when: on_success
paths:
- custom-ddprof/ddprof.jar

build:
needs:
- job: build_java_profiler_ddprof
optional: true
- job: maven-central-pre-release-check
optional: true
- job: dd-octo-sts-pre-release-check
Expand Down Expand Up @@ -405,7 +487,9 @@ publish-artifacts-to-s3:
spotless:
extends: .gradle_build
stage: tests
needs: []
needs:
- job: build_java_profiler_ddprof
optional: true
variables:
GRADLE_MEMORY_MAX: 6G
script:
Expand All @@ -415,15 +499,19 @@ spotless:
check-instrumentation-naming:
extends: .gradle_build
stage: tests
needs: [ ]
needs:
- job: build_java_profiler_ddprof
optional: true
script:
- ./gradlew --version
- ./gradlew checkInstrumentationNaming

config-inversion-linter:
extends: .gradle_build
stage: tests
needs: []
needs:
- job: build_java_profiler_ddprof
optional: true
script:
- ./gradlew --version
- ./gradlew checkConfigurations
Expand All @@ -432,7 +520,10 @@ test_published_artifacts:
extends: .gradle_build
image: ${BUILDER_IMAGE_REPO}:${BUILDER_IMAGE_VERSION_PREFIX}7 # Needs Java7 for some tests
stage: tests
needs: [ build ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build
variables:
CACHE_TYPE: "lib"
script:
Expand All @@ -459,7 +550,10 @@ test_published_artifacts:

.check_job:
extends: .gradle_build
needs: [ build ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build
stage: tests
variables:
CACHE_TYPE: "lib"
Expand Down Expand Up @@ -495,7 +589,9 @@ test_published_artifacts:

check_build_src:
extends: .check_job
needs: []
needs:
- job: build_java_profiler_ddprof
optional: true
variables:
GRADLE_TARGET: ":buildSrc:build"

Expand Down Expand Up @@ -530,7 +626,10 @@ check_debugger:

muzzle:
extends: .gradle_build
needs: [ build_tests ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build_tests
stage: tests
rules:
- if: '$CI_COMMIT_BRANCH =~ /^mq-working-branch-/'
Expand Down Expand Up @@ -568,7 +667,10 @@ muzzle:

muzzle-dep-report:
extends: .gradle_build
needs: [ build_tests ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build_tests
stage: tests
rules:
- if: '$CI_COMMIT_BRANCH =~ /^mq-working-branch-/'
Expand Down Expand Up @@ -611,7 +713,10 @@ muzzle-dep-report:
extends: .gradle_build
image: ${BUILDER_IMAGE_REPO}:${BUILDER_IMAGE_VERSION_PREFIX}$testJvm
tags: [ "docker-in-docker:amd64" ] # use docker-in-docker runner for testcontainers
needs: [ build_tests ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build_tests
stage: tests
variables:
GRADLE_PARAMS: "-PskipFlakyTests"
Expand Down Expand Up @@ -919,7 +1024,10 @@ deploy_to_di_backend:manual:
deploy_to_maven_central:
extends: .gradle_build
stage: publish
needs: [ build ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build
variables:
CACHE_TYPE: "lib"
rules:
Expand Down Expand Up @@ -947,7 +1055,10 @@ deploy_to_maven_central:
deploy_snapshot_with_ddprof_snapshot:
extends: .gradle_build
stage: publish
needs: [ build ]
needs:
- job: build_java_profiler_ddprof
optional: true
- build
variables:
CACHE_TYPE: "lib"
rules:
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
package datadog.trace.bootstrap.instrumentation.java.concurrent;

import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import datadog.trace.bootstrap.instrumentation.api.AgentTracer;
import datadog.trace.bootstrap.instrumentation.api.ProfilerContext;
import datadog.trace.bootstrap.instrumentation.api.ProfilingContextIntegration;
import java.util.concurrent.ConcurrentHashMap;

/**
 * Helper for profiling {@code LockSupport.park*} intervals from bootstrap classes.
 *
 * <p>Lifecycle: the instrumented park call site invokes {@link #captureState(Object)} on entry and
 * {@link #finish(ParkState)} on exit; {@link #recordUnpark(Thread)} is invoked from the
 * instrumented {@code unpark()} call site on the <em>unparking</em> thread to attribute the wakeup
 * to that thread's active span.
 */
public final class LockSupportHelper {
  /**
   * Maps a target thread to the span ID of the thread that most recently called {@code unpark()}
   * on it. Written by {@link #recordUnpark(Thread)} on the unparking thread; drained by
   * {@link #finish(ParkState)} on the parked thread itself.
   *
   * <p>NOTE(review): keys are live {@link Thread} objects. If a thread is unparked but never
   * completes an instrumented park, its entry (and the Thread reference) is retained until the
   * next instrumented {@code finish} on that thread — confirm drain coverage is sufficient to
   * avoid unbounded retention.
   */
  public static final ConcurrentHashMap<Thread, Long> UNPARKING_SPAN = new ConcurrentHashMap<>();

  // Static-only helper; never instantiated.
  private LockSupportHelper() {}

  /** Captured state for a {@code LockSupport.park*} interval. */
  public static final class ParkState {
    /** Profiling integration whose {@code parkEnter} was called; used to record the exit. */
    public final ProfilingContextIntegration profiling;

    /** {@code System.identityHashCode} of the park blocker object, or 0 when none was supplied. */
    public final long blockerHash;

    /** Span active when the park began, or 0 when there was no active span. */
    public final long spanId;

    /** Root span of the trace active when the park began, or 0 when there was no active span. */
    public final long rootSpanId;

    public ParkState(
        ProfilingContextIntegration profiling, long blockerHash, long spanId, long rootSpanId) {
      this.profiling = profiling;
      this.blockerHash = blockerHash;
      this.spanId = spanId;
      this.rootSpanId = rootSpanId;
    }
  }

  /**
   * Captures park state using the globally registered profiling context and the currently active
   * span.
   *
   * @param blocker the blocker passed to {@code LockSupport.park(Object)}; may be null
   * @return state to hand to {@link #finish(ParkState)}, or null when profiling is unavailable
   */
  public static ParkState captureState(Object blocker) {
    return captureState(blocker, AgentTracer.get().getProfilingContext(), AgentTracer.activeSpan());
  }

  /**
   * Captures park state against an explicit profiling integration and span.
   *
   * @param blocker the blocker passed to {@code LockSupport.park(Object)}; may be null
   * @param profiling profiling integration; a null value disables capture entirely
   * @param span the active span, or null when none is active
   * @return state to hand to {@link #finish(ParkState)}, or null when {@code profiling} is null
   */
  public static ParkState captureState(
      Object blocker, ProfilingContextIntegration profiling, AgentSpan span) {
    if (profiling == null) {
      return null;
    }
    // Always call parkEnter for signal suppression, even without an active span.
    // spanId/rootSpanId = 0 when no active span, and native TaskBlock eligibility filters out
    // zero-span intervals at exit.
    long spanId = 0L;
    long rootSpanId = 0L;
    if (span != null && span.context() instanceof ProfilerContext) {
      ProfilerContext ctx = (ProfilerContext) span.context();
      spanId = ctx.getSpanId();
      rootSpanId = ctx.getRootSpanId();
    }
    profiling.parkEnter(spanId, rootSpanId);
    // identityHashCode is an int; the widening to long here is lossless.
    long blockerHash = blocker != null ? System.identityHashCode(blocker) : 0L;
    return new ParkState(profiling, blockerHash, spanId, rootSpanId);
  }

  /**
   * Finishes a park interval on the current (previously parked) thread, consuming any unblocking
   * span ID recorded by {@link #recordUnpark(Thread)}.
   *
   * @param state state returned by {@code captureState}; may be null (exit still drains the map)
   */
  public static void finish(ParkState state) {
    // Always drain the map entry before any early return. If we returned first, a stale
    // unblocking-span ID placed by a prior unpark() would persist and be incorrectly
    // attributed to the next TaskBlock event emitted on this thread.
    Long unblockingSpanId = UNPARKING_SPAN.remove(Thread.currentThread());
    finish(state, unblockingSpanId != null ? unblockingSpanId : 0L);
  }

  /**
   * Finishes a park interval with an explicit unblocking span ID.
   *
   * @param state state returned by {@code captureState}; null means capture was disabled
   * @param unblockingSpanId span of the thread that called {@code unpark()}, or 0 when unknown
   */
  public static void finish(ParkState state, long unblockingSpanId) {
    if (state == null) {
      return;
    }
    // parkExit() clears native parked state and records an eligible TaskBlock using the entry
    // tick saved by parkEnter().
    state.profiling.parkExit(state.blockerHash, unblockingSpanId);
  }

  /**
   * Records, from the unparking thread, which span triggered the wakeup of {@code thread}.
   * No-op when there is no active span carrying a profiler context.
   *
   * @param thread the target of {@code LockSupport.unpark(Thread)}; may be null (ignored)
   */
  public static void recordUnpark(Thread thread) {
    if (thread == null) {
      return;
    }
    AgentSpan span = AgentTracer.activeSpan();
    if (span == null || !(span.context() instanceof ProfilerContext)) {
      return;
    }
    ProfilerContext ctx = (ProfilerContext) span.context();
    UNPARKING_SPAN.put(thread, ctx.getSpanId());
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
package datadog.trace.bootstrap.instrumentation.java.concurrent;

import datadog.trace.bootstrap.instrumentation.api.AgentSpan;
import datadog.trace.bootstrap.instrumentation.api.AgentTracer;
import datadog.trace.bootstrap.instrumentation.api.ProfilerContext;
import datadog.trace.bootstrap.instrumentation.api.ProfilingContextIntegration;

/**
 * Helper for Java-level instrumentation that emits {@code datadog.TaskBlock} intervals.
 *
 * <p>Usage: call {@link #capture(long)} on entry to a potentially blocking operation and
 * {@link #finish(State)} on exit; intervals shorter than {@link #MIN_TASK_BLOCK_NANOS} are
 * discarded.
 */
public final class TaskBlockHelper {
  /** Minimum interval duration (1 ms) below which no TaskBlock is recorded. */
  static final long MIN_TASK_BLOCK_NANOS = 1_000_000L;

  // Static-only helper; never instantiated.
  private TaskBlockHelper() {}

  /** Captured state for a potential blocking interval. */
  public static final class State {
    final ProfilingContextIntegration profiling;
    final long startTicks;
    final long startNanos;
    final long spanId;
    final long rootSpanId;
    final long blocker;

    State(
        ProfilingContextIntegration profiling,
        long startTicks,
        long startNanos,
        long spanId,
        long rootSpanId,
        long blocker) {
      this.profiling = profiling;
      this.startTicks = startTicks;
      this.startNanos = startNanos;
      this.spanId = spanId;
      this.rootSpanId = rootSpanId;
      this.blocker = blocker;
    }
  }

  /**
   * Captures interval state from the globally registered profiling context and the currently
   * active span.
   *
   * @param blocker opaque identifier of the object being blocked on
   * @return state to hand to {@link #finish(State)}, or null when nothing should be recorded
   */
  public static State capture(long blocker) {
    return capture(blocker, AgentTracer.get().getProfilingContext(), AgentTracer.activeSpan());
  }

  /**
   * Captures interval state against an explicit profiling integration and span.
   *
   * @param blocker opaque identifier of the object being blocked on
   * @param profiling profiling integration; null disables capture
   * @param span active span; capture requires a span whose context is a {@link ProfilerContext}
   * @return state to hand to {@link #finish(State)}, or null when capture is disabled
   */
  static State capture(long blocker, ProfilingContextIntegration profiling, AgentSpan span) {
    if (profiling == null) {
      return null;
    }
    if (span == null || !(span.context() instanceof ProfilerContext)) {
      return null;
    }
    ProfilerContext profilerContext = (ProfilerContext) span.context();
    long entryTicks = profiling.getCurrentTicks();
    long entryNanos = System.nanoTime();
    return new State(
        profiling,
        entryTicks,
        entryNanos,
        profilerContext.getSpanId(),
        profilerContext.getRootSpanId(),
        blocker);
  }

  /**
   * Finishes the interval, recording a TaskBlock only when it lasted at least
   * {@link #MIN_TASK_BLOCK_NANOS}.
   *
   * @param state state returned by {@code capture}; null means capture was disabled
   */
  public static void finish(State state) {
    if (state == null) {
      return;
    }
    long elapsedNanos = System.nanoTime() - state.startNanos;
    if (elapsedNanos < MIN_TASK_BLOCK_NANOS) {
      return;
    }
    state.profiling.recordTaskBlock(
        state.startTicks, state.spanId, state.rootSpanId, state.blocker, 0L);
  }
}
Loading
Loading