Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -758,7 +758,7 @@ public void attach(String pid, String agentPath, String sysCp, String bootCp) th
}
}

void connectAndListProbes(String host, CommandListener listener) throws IOException {
public void connectAndListProbes(String host, CommandListener listener) throws IOException {
if (sock != null) {
throw new IllegalStateException();
}
Expand Down Expand Up @@ -1071,11 +1071,11 @@ public synchronized void close() throws IOException {
reset();
}

boolean isDisconnected() {
public boolean isDisconnected() {
return disconnected;
}

void disconnect() throws IOException {
public void disconnect() throws IOException {
disconnected = true;
if (log.isDebugEnabled()) {
log.debug("sending DISCONNECT request to agent");
Expand Down
79 changes: 79 additions & 0 deletions btrace-dist/src/main/resources/samples/GpuBridge.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
import org.openjdk.btrace.core.annotations.*;
import org.openjdk.btrace.core.extensions.Injected;
import org.openjdk.btrace.gpu.GpuBridgeService;

import static org.openjdk.btrace.core.BTraceUtils.*;

/**
* Traces GPU model inference via ONNX Runtime and DJL (Deep Java Library).
* Tracks inference latency, batch sizes, and model load times.
*
* <p>Attach to a JVM running ONNX or DJL inference:
* <pre>
* btrace &lt;pid&gt; GpuBridge.java
* </pre>
*/
@BTrace
public class GpuBridge {

    // Service implemented by the btrace-gpu-bridge extension; the BTrace agent
    // injects a live instance before any probe below fires. All aggregation
    // (latency histograms, counts) happens inside the service, keeping the
    // probe bodies minimal as required by the BTrace verifier.
    @Injected
    private static GpuBridgeService gpu;

    // ==================== ONNX Runtime ====================

    // Fires when OrtSession.run() returns; `dur` is the call duration in
    // nanoseconds (BTrace @Duration convention — TODO confirm unit against
    // GpuBridgeService.recordInference's expectation).
    @OnMethod(
        clazz = "ai.onnxruntime.OrtSession",
        method = "run",
        location = @Location(Kind.RETURN))
    public static void onOnnxInference(@Duration long dur) {
        gpu.recordInference("onnx", "session", dur);
    }

    // Fires when an OrtSession is constructed; treats constructor time as the
    // model-load cost for ONNX.
    @OnMethod(
        clazz = "ai.onnxruntime.OrtSession",
        method = "<init>",
        location = @Location(Kind.RETURN))
    public static void onOnnxModelLoad(@Duration long dur) {
        gpu.recordModelLoad("onnx", "session", dur);
    }

    // ==================== DJL (Deep Java Library) ====================

    // Regex class matcher (BTrace /.../ syntax) — matches the Predictor class
    // exactly; predict() completion is one inference.
    @OnMethod(
        clazz = "/ai\\.djl\\.inference\\.Predictor/",
        method = "predict",
        location = @Location(Kind.RETURN))
    public static void onDjlPredict(@Duration long dur) {
        gpu.recordInference("djl", "predictor", dur);
    }

    // ModelZoo.loadModel() covers DJL's model resolution + load path.
    @OnMethod(
        clazz = "/ai\\.djl\\.repository\\.zoo\\.ModelZoo/",
        method = "loadModel",
        location = @Location(Kind.RETURN))
    public static void onDjlModelLoad(@Duration long dur) {
        gpu.recordModelLoad("djl", "model-zoo", dur);
    }

    // ==================== TensorFlow Java ====================

    // Session.run() is TF Java's synchronous execution entry point.
    @OnMethod(
        clazz = "/org\\.tensorflow\\.Session/",
        method = "run",
        location = @Location(Kind.RETURN))
    public static void onTensorFlowRun(@Duration long dur) {
        gpu.recordInference("tensorflow", "session", dur);
    }

    // ==================== Periodic summary ====================

    // Prints aggregated stats every 30 seconds.
    @OnTimer(30000)
    public static void periodicSummary() {
        println(gpu.getSummary());
    }

    // Prints stats on demand when the client sends the named event "summary".
    @OnEvent("summary")
    public static void onDemandSummary() {
        println(gpu.getSummary());
    }
}
114 changes: 114 additions & 0 deletions btrace-dist/src/main/resources/samples/LlmTrace.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
/*
* Copyright (c) 2024, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the Classpath exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/

import static org.openjdk.btrace.core.BTraceUtils.*;

import org.openjdk.btrace.core.annotations.*;
import org.openjdk.btrace.llm.LlmTraceService;

/**
* Sample BTrace script that traces LLM API calls using the btrace-llm-trace extension.
*
* <p>This is a generic template. It intercepts Langchain4j's ChatLanguageModel.generate()
* method and records call metrics. Adapt the @OnMethod annotations to target your specific
* LLM SDK (OpenAI Java SDK, Anthropic Java SDK, Spring AI, etc.).
*
* <p>Usage:
* btrace &lt;PID&gt; LlmTrace.java
*
* <p>Send a named event "summary" to print the current stats:
* (Ctrl-C, option 3, enter "summary")
*/
@BTrace
public class LlmTrace {

    // Extension service injected by the BTrace agent; accumulates per-call
    // metrics (counts, latency, token totals) and renders getSummary().
    @Injected
    private static LlmTraceService llm;

    /**
     * Trace Langchain4j ChatLanguageModel.generate() calls.
     * Captures latency on every call completion.
     *
     * <p>The leading '+' in the clazz spec instruments all subtypes of the
     * interface, i.e. every concrete model implementation.
     */
    @OnMethod(
        clazz = "+dev.langchain4j.model.chat.ChatLanguageModel",
        method = "generate",
        location = @Location(Kind.RETURN))
    public static void onLangchain4jGenerate(
        @ProbeClassName String className,
        @ProbeMethodName String methodName,
        @Duration long duration) {
        // Model name extracted from the class; token counts need return value parsing
        // For a production script, parse the Response<AiMessage> return value
        // (hence the hard-coded 0 prompt/completion token counts here).
        llm.recordCall("langchain4j", className, 0, 0, duration);
        // @Duration is in nanoseconds; divide by 1e6 for milliseconds.
        println(strcat(strcat(strcat("LLM call: ", className), " "),
            strcat(str(duration / 1000000L), "ms")));
    }

    /**
     * Trace Langchain4j StreamingChatLanguageModel calls.
     *
     * <p>Note: RETURN fires when generate() returns, which for streaming APIs
     * is when the stream is initiated — not when the last token arrives.
     * Token/chunk counts are recorded as 0 for the same reason as above.
     */
    @OnMethod(
        clazz = "+dev.langchain4j.model.chat.StreamingChatLanguageModel",
        method = "generate",
        location = @Location(Kind.RETURN))
    public static void onLangchain4jStreaming(
        @ProbeClassName String className,
        @Duration long duration) {
        llm.recordStreamingCall(className, 0, 0, duration, 0);
    }

    /**
     * Trace errors from any ChatLanguageModel implementation.
     *
     * <p>Kind.ERROR fires when generate() exits by throwing; the thrown
     * Throwable is bound to the trailing untyped parameter.
     */
    @OnMethod(
        clazz = "+dev.langchain4j.model.chat.ChatLanguageModel",
        method = "generate",
        location = @Location(Kind.ERROR))
    public static void onLangchain4jError(
        @ProbeClassName String className,
        @Duration long duration,
        Throwable error) {
        llm.recordError(className, Strings.str(error.getClass()), duration);
        println(strcat("LLM ERROR: ", Strings.str(error)));
    }

    /**
     * Print summary on named event "summary".
     */
    @OnEvent("summary")
    public static void onSummary() {
        println(llm.getSummary());
    }

    /**
     * Print summary periodically (every 30 seconds).
     * Skips printing while no calls have been observed to keep output quiet.
     */
    @OnTimer(30000)
    public static void onTimer() {
        if (llm.getTotalCalls() > 0) {
            println(llm.getSummary());
        }
    }
}
73 changes: 73 additions & 0 deletions btrace-dist/src/main/resources/samples/RagQuality.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
import org.openjdk.btrace.core.annotations.*;
import org.openjdk.btrace.core.extensions.Injected;
import org.openjdk.btrace.rag.RagQualityService;

import static org.openjdk.btrace.core.BTraceUtils.*;

/**
* Traces RAG pipeline performance: vector DB query latency, similarity
* scores, and empty retrieval rates. Targets Pinecone Java client.
*
* <p>Attach to a JVM running a RAG pipeline:
* <pre>
* btrace &lt;pid&gt; RagQuality.java
* </pre>
*/
@BTrace
public class RagQuality {

    // Extension service injected by the BTrace agent; aggregates per-backend
    // query latency stats rendered by getSummary().
    @Injected
    private static RagQualityService rag;

    // ==================== Pinecone ====================

    // Regex clazz spec: instruments "query" on every class under io.pinecone.
    // Broad on purpose — the client's entry class varies across SDK versions.
    @OnMethod(
        clazz = "/io\\.pinecone\\..*/",
        method = "query",
        location = @Location(Kind.RETURN))
    public static void onPineconeQuery(@Duration long dur) {
        // dur is the call duration (nanoseconds per BTrace @Duration convention).
        rag.recordQuery("pinecone", dur);
    }

    // ==================== Milvus ====================

    // Milvus client exposes vector search as search(), not query().
    @OnMethod(
        clazz = "/io\\.milvus\\.client\\..*/",
        method = "search",
        location = @Location(Kind.RETURN))
    public static void onMilvusSearch(@Duration long dur) {
        rag.recordQuery("milvus", dur);
    }

    // ==================== Weaviate ====================

    // Method regex matches either get or search across the Weaviate client.
    @OnMethod(
        clazz = "/io\\.weaviate\\.client\\..*/",
        method = "/get|search/",
        location = @Location(Kind.RETURN))
    public static void onWeaviateQuery(@Duration long dur) {
        rag.recordQuery("weaviate", dur);
    }

    // ==================== Chroma ====================

    // Targets the community Java client (tech.amikos.chromadb).
    @OnMethod(
        clazz = "/tech\\.amikos\\.chromadb\\..*/",
        method = "query",
        location = @Location(Kind.RETURN))
    public static void onChromaQuery(@Duration long dur) {
        rag.recordQuery("chroma", dur);
    }

    // ==================== Periodic summary ====================

    // Prints aggregated stats every 30 seconds.
    @OnTimer(30000)
    public static void periodicSummary() {
        println(rag.getSummary());
    }

    // Prints stats on demand when the client sends the named event "summary".
    @OnEvent("summary")
    public static void onDemandSummary() {
        println(rag.getSummary());
    }
}
69 changes: 69 additions & 0 deletions btrace-dist/src/main/resources/samples/VibeGuard.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
import org.openjdk.btrace.core.annotations.*;
import org.openjdk.btrace.core.extensions.Injected;
import org.openjdk.btrace.vibeguard.VibeGuardService;

import static org.openjdk.btrace.core.BTraceUtils.*;

/**
* Runtime behavioral contracts for AI-generated code. Enforces latency
* budgets, call rate limits, and null-safety on methods that may have
* been generated by an LLM.
*
* <p>Attach to a JVM running AI-generated service code:
* <pre>
* btrace &lt;pid&gt; VibeGuard.java
* </pre>
*/
@BTrace
public class VibeGuard {

    // Extension service injected by the BTrace agent; records contract
    // violations (latency-budget breaches, null returns) for getSummary().
    @Injected
    private static VibeGuardService guard;

    // ==================== Latency budgets ====================

    /**
     * Enforce 500ms latency budget on all methods in the AI-generated package.
     * Adjust the clazz pattern to match your project structure.
     *
     * <p>NOTE(review): method regex "/.*/" matches every method, including
     * constructors and trivial getters — expect instrumentation overhead on
     * hot paths; narrow the pattern for production use.
     */
    @OnMethod(
        clazz = "/com\\.myapp\\.ai\\..*/",
        method = "/.*/",
        location = @Location(Kind.RETURN))
    public static void checkAiLatency(
        @ProbeClassName String cls,
        @ProbeMethodName String method,
        @Duration long dur) {
        // Budget is 500_000_000 ns = 500ms; the service compares dur against it
        // under the "cls.method" key.
        guard.checkLatency(Strings.strcat(cls, Strings.strcat(".", method)),
            dur, 500_000_000L);
    }

    // ==================== Null safety on return values ====================

    // Flags methods in the target package that return null. @Return binds the
    // probed method's return value; primitives are boxed, so only reference
    // returns can actually be null here.
    @OnMethod(
        clazz = "/com\\.myapp\\.ai\\..*/",
        method = "/.*/",
        location = @Location(Kind.RETURN))
    public static void checkNullReturn(
        @ProbeClassName String cls,
        @ProbeMethodName String method,
        @Return Object ret) {
        guard.checkNotNull(
            Strings.strcat(cls, Strings.strcat(".", method)), ret);
    }

    // ==================== Alert on violations ====================

    // Every 10 seconds, print an alert banner + summary only if new
    // violations were recorded (keeps output quiet otherwise).
    @OnTimer(10000)
    public static void checkViolations() {
        if (guard.hasViolations()) {
            println("=== VIBE GUARD ALERT ===");
            println(guard.getSummary());
        }
    }

    // On-demand summary via the named client event "summary".
    @OnEvent("summary")
    public static void onDemandSummary() {
        println(guard.getSummary());
    }
}
33 changes: 33 additions & 0 deletions btrace-extensions/btrace-gpu-bridge/build.gradle
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
// Build script for the btrace-gpu-bridge extension module.
plugins {
    // Project-local convention plugin that wires extension packaging/metadata.
    id 'org.openjdk.btrace.extension'
    // Shadow plugin (from the version catalog) produces the fat/shaded jar.
    alias(libs.plugins.shadow)
}

java {
    // Emit Java 8 bytecode so the extension can attach to old target JVMs.
    sourceCompatibility = 8
    targetCompatibility = 8
}

compileJava {
    // Compile WITH a JDK 11 toolchain (newer compiler, -source/-target 8 above).
    javaCompiler = javaToolchains.compilerFor {
        languageVersion.set(JavaLanguageVersion.of(11))
    }
}

// Extension metadata consumed by the BTrace extension packaging plugin.
btraceExtension {
    id = 'btrace-gpu-bridge'
    name = 'BTrace GPU Bridge'
    description = 'GPU and model inference observability: ONNX Runtime, DJL, TensorFlow Java, and Panama FFM compute tracing'
    // Service class(es) the agent injects into @Injected fields of scripts.
    services = ['org.openjdk.btrace.gpu.GpuBridgeService']
}

dependencies {
    // Custom configurations declared by the extension convention plugin:
    // the public API only compiles against core; the implementation bundles it.
    apiCompileOnly project(':btrace-core')
    implImplementation project(':btrace-core')

    testImplementation libs.junit.jupiter
}

test {
    useJUnitPlatform()
}
Loading
Loading