diff --git a/openig-ai/pom.xml b/openig-ai/pom.xml new file mode 100644 index 000000000..0b8a98b65 --- /dev/null +++ b/openig-ai/pom.xml @@ -0,0 +1,72 @@ + + + + 4.0.0 + + org.openidentityplatform.openig + openig-project + 6.0.3-SNAPSHOT + + + openig-ai + OpenIG AI Module + + + org.openidentityplatform.openig + openig-core + ${project.version} + + + + + com.bucket4j + bucket4j_jdk11-core + 8.15.0 + + + com.github.ben-manes.caffeine + caffeine + 3.2.3 + + + + + + org.mockito + mockito-core + test + + + org.testng + testng + test + + + org.assertj + assertj-core + test + + + com.google.guava + guava-testlib + 33.5.0-jre + test + + + \ No newline at end of file diff --git a/openig-ai/src/main/java/org/openidentityplatform/openig/ai/AiClassAliasResolver.java b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/AiClassAliasResolver.java new file mode 100644 index 000000000..f54277de2 --- /dev/null +++ b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/AiClassAliasResolver.java @@ -0,0 +1,41 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2026 3A Systems LLC. 
+ */ + +package org.openidentityplatform.openig.ai; + +import org.forgerock.openig.alias.ClassAliasResolver; +import org.openidentityplatform.openig.ai.filter.LLMProxyFilter; +import org.openidentityplatform.openig.ai.filter.MCPServerFeaturesFilter; + +import java.util.HashMap; +import java.util.Map; + +/** + * Register all the aliases supported by the {@literal openig-ai2} module. + */ +public class AiClassAliasResolver implements ClassAliasResolver { + private static final Map> ALIASES = new HashMap<>(); + + static { + ALIASES.put("LLMProxyFilter", LLMProxyFilter.class); + ALIASES.put("MCPServerFeaturesFilter", MCPServerFeaturesFilter.class); + } + + @Override + public Class resolve(final String alias) { + return ALIASES.get(alias); + } +} diff --git a/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/LLMProxyFilter.java b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/LLMProxyFilter.java new file mode 100644 index 000000000..cadbea7f0 --- /dev/null +++ b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/LLMProxyFilter.java @@ -0,0 +1,315 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2026 3A Systems LLC. 
+ */ + +package org.openidentityplatform.openig.ai.filter; + +import com.github.benmanes.caffeine.cache.Ticker; +import org.forgerock.http.Filter; +import org.forgerock.http.Handler; +import org.forgerock.http.protocol.Request; +import org.forgerock.http.protocol.Response; +import org.forgerock.http.protocol.Status; +import org.forgerock.json.JsonValue; +import org.forgerock.openig.el.Expression; +import org.forgerock.openig.heap.GenericHeaplet; +import org.forgerock.openig.heap.HeapException; +import org.forgerock.services.context.Context; +import org.forgerock.util.promise.NeverThrowsException; +import org.forgerock.util.promise.Promise; +import org.forgerock.util.promise.Promises; +import org.forgerock.util.time.Duration; +import org.openidentityplatform.openig.ai.filter.llm.LLMProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.net.URI; +import java.net.URISyntaxException; +import java.util.concurrent.TimeUnit; + +import static org.forgerock.json.JsonValue.json; +import static org.forgerock.json.JsonValueFunctions.duration; +import static org.forgerock.openig.el.Bindings.bindings; + + +/** + * Identity-aware LLM proxy filter. + * + *
    + *
+ * <ol>
+ *   <li>Provider normalization – rewrites the upstream URI and injects the
+ *       correct authentication header for the configured {@link LLMProvider}.</li>
+ *   <li>Identity extraction – reads the caller's identity from a configurable
+ *       request attribute (populated upstream by, e.g., {@code OAuth2ResourceServerFilter}).</li>
+ *   <li>Token-based rate limiting – estimates the prompt-token cost of each
+ *       request and enforces per-identity limits via {@link TokenRateLimiter}.
+ *       Returns {@code 429 Too Many Requests} (with an exact {@code Retry-After} header)
+ *       when the bucket is exhausted.</li>
+ *   <li>Response enrichment – adds {@code X-LLM-*} observability headers.</li>
+ * </ol>
+ * + * + *

+ * <p><b>Heap configuration</b></p>

+ *
{@code
+ * {
+ *   "name": "OpenAIProxy",
+ *   "type": "LLMProxyFilter",
+ *   "config": {
+ *     "provider"          : "OPENAI",
+ *     "baseUrl"           : "https://api.openai.com/v1",  // optional
+ *     "apiKey"            : "${system['llm.apiKey']}",
+ *     "sub"               : "${attributes.sub}",          // optional, expression, default "anonymous"
+ *     "rateLimitEnabled"  : true,                         // optional, default true
+ *     "rate": {
+ *       "numberOfTokens"    : 10000,    // tokens per window (burst capacity)
+ *       "duration"          : "1 minute",
+ *       "cleaningInterval"  : "5 minutes"  // optional, bucket eviction period
+ *     }
+ *   }
+ * }
+ * }
+ * + *

+ * <p><b>Response headers</b></p>

+ * + */ +public class LLMProxyFilter implements Filter { + + private static final Logger logger = LoggerFactory.getLogger(LLMProxyFilter.class); + + public static final String HEADER_LLM_PROVIDER = "X-LLM-Provider"; + public static final String HEADER_LLM_IDENTITY = "X-LLM-Identity"; + public static final String HEADER_RATE_LIMIT_REMAINING = "X-RateLimit-Remaining"; + + static final long DEFAULT_RATE_LIMIT_NUMBER_OF_TOKENS = 10_000L; + static final String DEFAULT_RATE_LIMIT_DURATION = "1 minute"; + static final String DEFAULT_CLEANING_INTERVAL = "5 minutes"; + + static final long CHARS_PER_TOKEN = 4L; + + private final LLMProvider provider; + private final String baseUrl; + private final String apiKey; + + private final Expression sub; + + private final boolean rateLimitEnabled; + + private final TokenRateLimiter rateLimiter; + + public LLMProxyFilter(LLMProvider provider, + String baseUrl, + String apiKey, + Expression sub, + boolean rateLimitEnabled, + TokenRateLimiter rateLimiter) { + this.provider = provider; + this.baseUrl = (baseUrl != null && !baseUrl.isEmpty()) ? 
baseUrl : provider.getDefaultBaseUrl(); + this.apiKey = apiKey; + this.sub = sub; + this.rateLimitEnabled = rateLimitEnabled; + this.rateLimiter = rateLimiter; + } + + @Override + public Promise filter(Context context, Request request, Handler next) { + String identity = getIdentity(context, request); + + if (rateLimitEnabled) { + + long tokenCost = estimateTokenCost(request); + + long waitNs = rateLimiter.tryConsume(identity, tokenCost); + + if (waitNs > 0) { + logger.info("LLMProxyFilter: rate limit exceeded " + + "identity={} cost={} waitNs={}", identity, tokenCost, waitNs); + return Promises.newResultPromise( + rateLimitedResponse(identity, waitNs)); + } + } + + rewriteRequest(request); + + return next.handle(context, request) + .thenOnResult(response -> enrichResponse(response, identity)); + } + + private String getIdentity(Context context, Request request) { + String identity = sub.eval(bindings(context, request)); + if(identity == null || identity.isEmpty()) { + identity = "anonymous"; + } + return identity; + } + + + /** + * Estimates prompt-token cost: sums {@code content} lengths across all + * {@code messages} entries and divides by 4 (chars-per-token rule of thumb). + * Falls back to 500 if the body is absent or unparseable. 
+ */ + + long estimateTokenCost(Request request) { + try { + JsonValue jsonEntity; + byte[] body; + try { + body = request.getEntity().getBytes(); + jsonEntity = json(request.getEntity().getJson()); + } catch (IOException e) { + logger.debug("Error parsing JSON request body", e); + throw e; + } + JsonValue messages = jsonEntity.get("messages"); + if (messages.isNull() || !messages.isList()) { + return Math.max(1L, body.length / CHARS_PER_TOKEN); + } + long charCount = 0; + for (Object msg : messages.asList()) { + JsonValue content = json(msg).get("content"); + if (content.isString()) { + charCount += content.asString().length(); + } else if (content.isList()) { + for (Object block : content.asList()) { + JsonValue text = json(block).get("text"); + if (text.isString()) charCount += text.asString().length(); + } + } + } + return Math.max(1L, charCount / CHARS_PER_TOKEN); + } catch (Exception e) { + logger.debug("LLMProxyFilter: token cost estimation failed — using 500", e); + return 500L; + } + } + + + /** + * Rewrites the upstream request: + *
    + *
+ * <ul>
+ *   <li>Removes the original {@code Authorization} / {@code x-api-key} headers.</li>
+ *   <li>Injects the provider's auth header with the configured API key.</li>
+ *   <li>Rewrites the URI: replaces it with the configured base URL, keeping the
+ *       original query string. NOTE(review): the implementation builds
+ *       {@code baseUrl + "?" + query} and therefore DROPS the original request
+ *       path (e.g. {@code /chat/completions}) — confirm whether the path should
+ *       be appended to the base URL before merging.</li>
+ * </ul>
+ */ + void rewriteRequest(Request request) { + // set the auth header according to the LLM provider settings + request.getHeaders().remove("Authorization"); + request.getHeaders().remove("x-api-key"); + request.getHeaders().put(provider.getAuthHeaderName(), + provider.buildAuthHeaderValue(apiKey)); + + // URI rewrite + URI original = request.getUri().asURI(); + String query = original.getRawQuery(); + + try { + String newUriStr = baseUrl + + (query != null ? "?" + query : ""); + request.setUri(new URI(newUriStr)); + } catch (URISyntaxException e) { + logger.warn("LLMProxyFilter: could not rewrite URI – leaving original", e); + } + } + + + private void enrichResponse(Response response, String identity) { + response.getHeaders().put(HEADER_LLM_PROVIDER, provider.name()); + response.getHeaders().put(HEADER_LLM_IDENTITY, identity); + if (rateLimitEnabled) { + response.getHeaders().put(HEADER_RATE_LIMIT_REMAINING, + String.valueOf(rateLimiter.availableTokens(identity))); + } + } + + /** + * Builds a {@code 429 Too Many Requests} response. + * {@code Retry-After} is the ceiling-seconds computed directly from the + * nanosecond wait returned by {@link TokenRateLimiter#tryConsume}. + */ + private Response rateLimitedResponse(String identity, long waitNs) { + long retryAfterSec = TimeUnit.NANOSECONDS.toSeconds(waitNs); + Response response = new Response(Status.TOO_MANY_REQUESTS); + response.getHeaders().put("Content-Type", "application/json"); + response.getHeaders().put("Retry-After", String.valueOf(retryAfterSec)); + response.getHeaders().put(HEADER_LLM_PROVIDER, provider.name()); + response.getHeaders().put(HEADER_LLM_IDENTITY, identity); + if (rateLimitEnabled) { + response.getHeaders().put(HEADER_RATE_LIMIT_REMAINING, "0"); + } + response.setEntity("{\"error\":{\"message\":\"Token rate limit exceeded. 
" + + "Retry after " + retryAfterSec + " second(s).\"," + + "\"type\":\"rate_limit_error\",\"code\":\"rate_limit_exceeded\"}}"); + return response; + } + + public static class Heaplet extends GenericHeaplet { + + @Override + public Object create() throws HeapException { + + JsonValue evaluatedConfig = config.as(evaluatedWithHeapProperties()); + + String providerStr = evaluatedConfig.get("provider").required().asString(); + LLMProvider provider; + try { + provider = LLMProvider.valueOf(providerStr.toUpperCase()); + } catch (IllegalArgumentException e) { + throw new HeapException("Unknown LLM provider '" + providerStr + + "'. Valid values: OPENAI, ANTHROPIC, MISTRAL, AZURE_OPENAI, OPENAI_COMPATIBLE", e); + } + String apiKey = evaluatedConfig.get("apiKey").required().asString(); + + String baseUrl = evaluatedConfig.get("baseUrl").defaultTo(provider.getDefaultBaseUrl()).asString(); + + boolean rateLimitEnabled = evaluatedConfig.get("rateLimitEnabled").defaultTo(true).asBoolean(); + + Expression sub = evaluatedConfig.get("sub").defaultTo("anonymous").as(expression(String.class)); + + TokenRateLimiter rateLimiter = null; + if (rateLimitEnabled) { + JsonValue rate = config.get("rate").defaultTo(null); + + long numberOfTokens = DEFAULT_RATE_LIMIT_NUMBER_OF_TOKENS; + + Duration duration = Duration.duration(DEFAULT_RATE_LIMIT_DURATION); + + Duration cleaningInterval = Duration.duration(DEFAULT_CLEANING_INTERVAL); + + if (rate != null && !rate.isNull()) { + numberOfTokens = rate.get("numberOfTokens") + .defaultTo(DEFAULT_RATE_LIMIT_NUMBER_OF_TOKENS).asLong(); + duration = rate.get("duration").defaultTo(DEFAULT_RATE_LIMIT_DURATION).as(duration()); + + cleaningInterval = rate.get("cleaningInterval").defaultTo(DEFAULT_CLEANING_INTERVAL).as(duration()); + } + + try { + rateLimiter = new TokenRateLimiter(numberOfTokens, duration, + Ticker.systemTicker(), cleaningInterval); + } catch (IllegalArgumentException e) { + throw new HeapException("LLMProxyFilter: invalid rate 
configuration", e); + } + } + + return new LLMProxyFilter(provider, baseUrl, apiKey, sub, rateLimitEnabled, rateLimiter); + } + } +} diff --git a/openig-core/src/main/java/org/openidentityplatform/openig/filter/MCPServerFeaturesFilter.java b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilter.java similarity index 98% rename from openig-core/src/main/java/org/openidentityplatform/openig/filter/MCPServerFeaturesFilter.java rename to openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilter.java index 419182b92..5b53c7f53 100644 --- a/openig-core/src/main/java/org/openidentityplatform/openig/filter/MCPServerFeaturesFilter.java +++ b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilter.java @@ -14,7 +14,7 @@ * Copyright 2026 3A Systems LLC. */ -package org.openidentityplatform.openig.filter; +package org.openidentityplatform.openig.ai.filter; import org.forgerock.http.Filter; import org.forgerock.http.Handler; @@ -23,7 +23,6 @@ import org.forgerock.http.protocol.Status; import org.forgerock.json.JsonValue; import org.forgerock.openig.heap.GenericHeaplet; -import org.forgerock.openig.heap.HeapException; import org.forgerock.services.context.Context; import org.forgerock.util.promise.NeverThrowsException; import org.forgerock.util.promise.Promise; @@ -270,7 +269,7 @@ public List filterResponseFeature(MCPFeature mcpFeature, public static class Heaplet extends GenericHeaplet { @Override - public Object create() throws HeapException { + public Object create() { MCPServerFeaturesFilter filter = new MCPServerFeaturesFilter(); JsonValue evaluatedConfig = config.as(evaluatedWithHeapProperties()); JsonValue allowConfig = evaluatedConfig.get("allow"); diff --git a/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/TokenRateLimiter.java b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/TokenRateLimiter.java new file mode 100644 index 
000000000..f00c0a437 --- /dev/null +++ b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/TokenRateLimiter.java @@ -0,0 +1,156 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2026 3A Systems LLC. + */ + +package org.openidentityplatform.openig.ai.filter; + +import com.github.benmanes.caffeine.cache.Caffeine; +import com.github.benmanes.caffeine.cache.LoadingCache; +import com.github.benmanes.caffeine.cache.Ticker; +import io.github.bucket4j.Bandwidth; +import io.github.bucket4j.BandwidthBuilder; +import io.github.bucket4j.Bucket; +import io.github.bucket4j.ConsumptionProbe; +import io.github.bucket4j.TimeMeter; +import org.forgerock.util.time.Duration; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.util.concurrent.TimeUnit; + + +/** + * Identity-aware, in-memory token-bucket rate limiter for LLM prompt tokens. + * + *

+ * <p><b>Configuration</b></p>

+ *
{@code
+ *   "rate": {
+ *     "numberOfTokens": 10000,
+ *     "duration": "1 minute"
+ *   }
+ * }
+ */ + +public class TokenRateLimiter { + private static final Logger logger = LoggerFactory.getLogger(TokenRateLimiter.class); + + private final long numberOfTokens; + + /** Refill window in nanoseconds. */ + private final long windowMs; + + private final LoadingCache rateLimiters; + + private final Ticker ticker; + + + /** + * Minimal constructor, with system ticker and no bucket eviction + * + * @param numberOfTokens maximum tokens per window (= burst capacity) + * @param duration window size — OpenIG {@link Duration} syntax + */ + public TokenRateLimiter(long numberOfTokens, Duration duration) { + this(numberOfTokens, duration, Ticker.systemTicker(), null); + } + + /** + * Full constructor, matching the pattern of + * {@code TokenBucketThrottlingStrategy(Ticker, ScheduledExecutorService, Duration)}. + * + *

When {@code cleaningInterval} is non-null, a + * periodic eviction task removes buckets that have been idle for at least one full + * window, preventing unbounded heap growth in long-running deployments. + * + * @param numberOfTokens maximum tokens per window (= burst capacity) + * @param duration window size — OpenIG {@link Duration} syntax + * @param ticker monotonic time source; use {@link Ticker#systemTicker()} + * in production and {@code FakeTicker} in tests + * @param cleaningInterval eviction period; ignored when executor is {@code null} + */ + public TokenRateLimiter(long numberOfTokens, + Duration duration, + Ticker ticker, + Duration cleaningInterval) { + if (numberOfTokens <= 0) { + throw new IllegalArgumentException("numberOfTokens must be > 0, got: " + numberOfTokens); + } + if (duration == null || duration.isUnlimited()) { + throw new IllegalArgumentException("duration must be a finite positive value"); + } + long windowMs = duration.to(TimeUnit.MILLISECONDS); + if (windowMs <= 0) { + throw new IllegalArgumentException("duration must resolve to > 0 ms, got: " + duration); + } + + this.numberOfTokens = numberOfTokens; + this.windowMs = windowMs; + + this.ticker = ticker; + + + Caffeine builder = Caffeine.newBuilder(); + + if (cleaningInterval != null) { + long intervalMs = cleaningInterval.to(TimeUnit.MILLISECONDS); + builder.expireAfterAccess(intervalMs, TimeUnit.MILLISECONDS).ticker(this.ticker); + } + rateLimiters = builder.build(this::createNewBucket); + + + } + private Bucket createNewBucket(String sub) { + + Bandwidth limit = BandwidthBuilder.builder().capacity(numberOfTokens) + .refillGreedy(numberOfTokens, java.time.Duration.ofMillis(windowMs)) + .build(); + + return Bucket.builder() + .addLimit(limit) + .withCustomTimePrecision(new TimeMeter() { + @Override + public long currentTimeNanos() { + return ticker.read(); + } + + @Override + public boolean isWallClockBased() { + return false; + } + }) + .build(); + } + + /** + * Try to consume N 
tokens (e.g. costly endpoints) for {@code identity} + */ + public long tryConsume(String sub, long cost) { + Bucket bucket = rateLimiters.get(sub); + ConsumptionProbe probe = bucket.tryConsumeAndReturnRemaining(cost); + return probe.getNanosToWaitForRefill(); + } + + + public long availableTokens(String sub) { + Bucket bucket = rateLimiters.get(sub); + if (bucket == null) return numberOfTokens; + return bucket.getAvailableTokens(); + } + + public void stop() { + rateLimiters.cleanUp(); + logger.debug("TokenRateLimiter: stopped"); + } +} diff --git a/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/llm/LLMProvider.java b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/llm/LLMProvider.java new file mode 100644 index 000000000..f51f54588 --- /dev/null +++ b/openig-ai/src/main/java/org/openidentityplatform/openig/ai/filter/llm/LLMProvider.java @@ -0,0 +1,60 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2026 3A Systems LLC. + */ + +package org.openidentityplatform.openig.ai.filter.llm; + +/** + * LLM providers whose APIs can be proxied by {@link org.openidentityplatform.openig.ai.filter.LLMProxyFilter}. 
+ */ +public enum LLMProvider { + + OPENAI("https://api.openai.com/v1", "Authorization", "Bearer"), + + ANTHROPIC("https://api.anthropic.com/v1", "x-api-key", ""), + + MISTRAL("https://api.mistral.ai/v1", "Authorization", "Bearer"), + + AZURE_OPENAI("", "api-key", ""), + + OPENAI_COMPATIBLE("", "Authorization", "Bearer"); + + private final String defaultBaseUrl; + + private final String authHeaderName; + + private final String authScheme; + + LLMProvider(String defaultBaseUrl, String authHeaderName, String authScheme) { + this.defaultBaseUrl = defaultBaseUrl; + this.authHeaderName = authHeaderName; + this.authScheme = authScheme; + } + + public String getDefaultBaseUrl() { + return defaultBaseUrl; + } + + public String getAuthHeaderName() { + return authHeaderName; + } + + public String buildAuthHeaderValue(String apiKey) { + if (authScheme == null || authScheme.isEmpty()) { + return apiKey; + } + return authScheme + " " + apiKey; + } +} \ No newline at end of file diff --git a/openig-ai/src/main/resources/META-INF/services/org.forgerock.openig.alias.ClassAliasResolver b/openig-ai/src/main/resources/META-INF/services/org.forgerock.openig.alias.ClassAliasResolver new file mode 100644 index 000000000..714db3d2b --- /dev/null +++ b/openig-ai/src/main/resources/META-INF/services/org.forgerock.openig.alias.ClassAliasResolver @@ -0,0 +1,17 @@ +# +# The contents of this file are subject to the terms of the Common Development and +# Distribution License (the License). You may not use this file except in compliance with the +# License. +# +# You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the +# specific language governing permission and limitations under the License. +# +# When distributing Covered Software, include this CDDL Header Notice in each file and include +# the License file at legal/CDDLv1.0.txt. 
If applicable, add the following below the CDDL +# Header, with the fields enclosed by brackets [] replaced by your own identifying +# information: "Portions copyright [year] [name of copyright owner]". +# +# Copyright 2026 3A Systems LLC. +# + +org.openidentityplatform.openig.ai.AiClassAliasResolver \ No newline at end of file diff --git a/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/LLMProxyFilterTest.java b/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/LLMProxyFilterTest.java new file mode 100644 index 000000000..473c2e8e2 --- /dev/null +++ b/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/LLMProxyFilterTest.java @@ -0,0 +1,226 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2026 3A Systems LLC. 
+ */ + +package org.openidentityplatform.openig.ai.filter; + +import org.forgerock.http.Handler; +import org.forgerock.http.protocol.Request; +import org.forgerock.http.protocol.Response; +import org.forgerock.http.protocol.Status; +import org.forgerock.services.context.AttributesContext; +import org.forgerock.services.context.RootContext; +import org.forgerock.util.promise.Promises; +import org.openidentityplatform.openig.ai.filter.llm.LLMProvider; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +import java.net.URI; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.forgerock.json.JsonValue.json; +import static org.forgerock.openig.util.JsonValues.expression; +import static org.forgerock.util.time.Duration.duration; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.argThat; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.never; +import static org.mockito.Mockito.times; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; + +public class LLMProxyFilterTest { + + private static final String CHAT_BODY = "{" + + "\"model\":\"gpt-5.2\"," + + "\"messages\":[" + + " {\"role\":\"user\",\"content\":\"Hello, how are you?\"}" + + "]}"; + + private static final String LONG_CHAT_BODY = "{" + + "\"model\":\"gpt-4o\"," + + "\"messages\":[" + + " {\"role\":\"user\",\"content\":\"" + + "a".repeat(400) // 400 chars ≈ 100 tokens + + "\"}" + + "]}"; + + private Handler mockNextHandler; + + @BeforeMethod + public void setUp() { + mockNextHandler = mock(Handler.class); + Response upstreamResponse = new Response(Status.OK); + upstreamResponse.setEntity("{\"choices\":[]}"); + when(mockNextHandler.handle(any(), any())) + .thenReturn(Promises.newResultPromise(upstreamResponse)); + } + + @Test + public void testValidRequest() throws Exception { + + LLMProxyFilter filter = buildFilter(LLMProvider.OPENAI, false); + Request 
request = chatRequest(); + Response response = filter.filter(new RootContext(), request, mockNextHandler).get(); + + verify(mockNextHandler, times(1)).handle(any(), any()); + assertThat(response.getStatus()).isEqualTo(Status.OK); + } + + @Test + public void shouldAddHeadersToResponse() throws Exception { + LLMProxyFilter filter = buildFilter(LLMProvider.OPENAI, true); + Response response = filter.filter(contextWithSubject("alice"), chatRequest(), mockNextHandler).get(); + + assertThat(response.getHeaders().getFirst(LLMProxyFilter.HEADER_LLM_PROVIDER)) + .isEqualTo("OPENAI"); + + assertThat(response.getHeaders().getFirst(LLMProxyFilter.HEADER_LLM_IDENTITY)) + .isEqualTo("alice"); + + assertThat(response.getHeaders().getFirst(LLMProxyFilter.HEADER_RATE_LIMIT_REMAINING)) + .isNotNull(); + } + + @Test + public void shouldNotAddRateLimitHeaderWhenRateLimitDisabled() throws Exception { + LLMProxyFilter filter = buildFilter(LLMProvider.OPENAI, false); + Response response = filter.filter(new RootContext(), chatRequest(), mockNextHandler).get(); + + assertThat(response.getHeaders().getFirst(LLMProxyFilter.HEADER_RATE_LIMIT_REMAINING)) + .isNull(); + } + + @Test + public void shouldRewriteUriToProviderBaseUrl() throws Exception { + LLMProxyFilter filter = buildFilter(LLMProvider.OPENAI, false); + Request request = chatRequest(); + request.setUri(new URI("http://openig.local/v1/chat/completions")); + + filter.filter(new RootContext(), request, mockNextHandler).get(); + + verify(mockNextHandler).handle(any(), argThat(req -> + req.getUri().toString().startsWith("https://api.openai.com/v1"))); + } + + @Test + public void shouldInjectBearerTokenForOpenAI() throws Exception { + LLMProxyFilter filter = buildFilter(LLMProvider.OPENAI, false); + Request request = chatRequest(); + request.getHeaders().put("Authorization", "Bearer dummy-key"); + + filter.filter(new RootContext(), request, mockNextHandler).get(); + + verify(mockNextHandler).handle(any(), argThat(req -> { + String auth = 
req.getHeaders().getFirst("Authorization"); + return auth != null && auth.equals("Bearer test-api-key"); + })); + } + + @Test + public void shouldInjectXApiKeyHeaderForAnthropic() throws Exception { + LLMProxyFilter filter = buildFilter(LLMProvider.ANTHROPIC, false); + + filter.filter(new RootContext(), chatRequest(), mockNextHandler).get(); + + verify(mockNextHandler).handle(any(), argThat(req -> { + String xApiKey = req.getHeaders().getFirst("x-api-key"); + return "test-api-key".equals(xApiKey); + })); + } + @Test + public void shouldUseCustomBaseUrlOverProviderDefault() throws Exception { + LLMProxyFilter filter = new LLMProxyFilter( + LLMProvider.OPENAI_COMPATIBLE, "http://localhost:11434/v1", + "ollama-key", json("${attributes.sub}").as(expression(String.class)), false, null); + + Request request = chatRequest(); + request.setUri(new URI("http://openig.local/v1/chat/completions")); + + filter.filter(new RootContext(), request, mockNextHandler).get(); + + verify(mockNextHandler).handle(any(), argThat(req -> + req.getUri().toString().startsWith("http://localhost:11434/v1"))); + } + + + @Test + public void shouldReturn429WhenTokenBucketExhausted() throws Exception { + // Tiny bucket – 50 tokens, slow refill + TokenRateLimiter limiter = new TokenRateLimiter(50, duration("1 hour")); + LLMProxyFilter filter = new LLMProxyFilter( + LLMProvider.OPENAI, "https://api.openai.com/v1", "key", + json("${attributes.sub}").as(expression(String.class)), true, limiter); + + // Drain the bucket with a big request (LONG_CHAT_BODY ≈ 100 tokens) + Request bigRequest = new Request(); + bigRequest.setMethod("POST"); + bigRequest.setUri(new URI("http://openig.local/v1/chat/completions")); + bigRequest.setEntity(LONG_CHAT_BODY); + + Response response = filter.filter(new RootContext(), bigRequest, mockNextHandler).get(); + + assertThat(response.getStatus()).isEqualTo(Status.TOO_MANY_REQUESTS); + assertThat(response.getHeaders().getFirst("Retry-After")).isNotNull(); + + 
verify(mockNextHandler, never()).handle(any(), any()); + } + + @Test + public void shouldAllowRequestForDifferentIdentityWhenOneIsExhausted() throws Exception { + TokenRateLimiter limiter = new TokenRateLimiter(500, duration("1 hour")); + LLMProxyFilter filter = new LLMProxyFilter(LLMProvider.OPENAI, + LLMProvider.OPENAI.getDefaultBaseUrl(), + "test-api-key", + json("${attributes.sub}").as(expression(String.class)), + true, + limiter); + + // Drain alice's bucket + limiter.tryConsume("alice", 500); + + // Bob's request should still go through + Response response = filter.filter(contextWithSubject("bob"), chatRequest(), mockNextHandler).get(); + assertThat(response.getStatus()).isEqualTo(Status.OK); + } + + private LLMProxyFilter buildFilter(LLMProvider provider, + boolean rateLimitEnabled) { + TokenRateLimiter limiter = rateLimitEnabled + ? new TokenRateLimiter(10000, duration("1 minute")) + : null; + return new LLMProxyFilter(provider, + provider.getDefaultBaseUrl(), + "test-api-key", + json("${attributes.sub}").as(expression(String.class)), + rateLimitEnabled, + limiter); + } + + private Request chatRequest() throws Exception { + Request request = new Request(); + request.setMethod("POST"); + request.setUri(new URI("http://openig.local/v1/chat/completions")); + request.getHeaders().put("Content-Type", "application/json"); + request.setEntity(CHAT_BODY); + return request; + } + + private AttributesContext contextWithSubject(String sub) { + AttributesContext ctx = new AttributesContext(new RootContext()); + ctx.getAttributes().put("sub", sub); + return ctx; + } +} \ No newline at end of file diff --git a/openig-core/src/test/java/org/openidentityplatform/openig/filter/MCPServerFeaturesFilterTest.java b/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilterTest.java similarity index 88% rename from openig-core/src/test/java/org/openidentityplatform/openig/filter/MCPServerFeaturesFilterTest.java rename to 
openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilterTest.java index 10fe67613..cb2c6801a 100644 --- a/openig-core/src/test/java/org/openidentityplatform/openig/filter/MCPServerFeaturesFilterTest.java +++ b/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilterTest.java @@ -14,22 +14,25 @@ * Copyright 2026 3A Systems LLC. */ -package org.openidentityplatform.openig.filter; +package org.openidentityplatform.openig.ai.filter; import org.forgerock.http.Handler; import org.forgerock.http.protocol.Request; import org.forgerock.http.protocol.Response; import org.forgerock.http.protocol.Status; +import org.forgerock.http.routing.Router; import org.forgerock.json.JsonValue; +import org.forgerock.openig.decoration.baseuri.BaseUriDecorator; import org.forgerock.openig.heap.HeapException; import org.forgerock.openig.heap.HeapImpl; -import org.forgerock.openig.heap.HeapUtilsTest; import org.forgerock.openig.heap.Name; +import org.forgerock.openig.http.EndpointRegistry; import org.forgerock.services.context.Context; import org.forgerock.services.context.RootContext; import org.forgerock.util.promise.NeverThrowsException; import org.forgerock.util.promise.Promise; import org.forgerock.util.promise.Promises; +import org.forgerock.util.time.TimeService; import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.mockito.stubbing.Answer; @@ -41,10 +44,15 @@ import java.util.stream.Collectors; import static org.assertj.core.api.Assertions.assertThat; +import static org.forgerock.http.io.IO.newTemporaryStorage; import static org.forgerock.json.JsonValue.array; import static org.forgerock.json.JsonValue.field; import static org.forgerock.json.JsonValue.json; import static org.forgerock.json.JsonValue.object; +import static org.forgerock.openig.heap.Keys.BASEURI_HEAP_KEY; +import static org.forgerock.openig.heap.Keys.ENDPOINT_REGISTRY_HEAP_KEY; +import static 
org.forgerock.openig.heap.Keys.TEMPORARY_STORAGE_HEAP_KEY; +import static org.forgerock.openig.heap.Keys.TIME_SERVICE_HEAP_KEY; import static org.forgerock.openig.util.JsonValues.readJson; import static org.mockito.Mockito.when; import static org.testng.Assert.assertTrue; @@ -60,11 +68,17 @@ public class MCPServerFeaturesFilterTest { private AutoCloseable closeable; @BeforeMethod - public void setUp() throws Exception { + public void setUp() { closeable = MockitoAnnotations.openMocks(this); - heap = HeapUtilsTest.buildDefaultHeap(); + + heap = new HeapImpl(Name.of("default")); + heap.put(TEMPORARY_STORAGE_HEAP_KEY, newTemporaryStorage()); + heap.put(BASEURI_HEAP_KEY, new BaseUriDecorator(BASEURI_HEAP_KEY)); + heap.put(ENDPOINT_REGISTRY_HEAP_KEY, new EndpointRegistry(new Router(), "/")); + heap.put(TIME_SERVICE_HEAP_KEY, TimeService.SYSTEM); + config = json(object()); config.put("allow", object( diff --git a/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/TokenRateLimiterTest.java b/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/TokenRateLimiterTest.java new file mode 100644 index 000000000..94cfd2448 --- /dev/null +++ b/openig-ai/src/test/java/org/openidentityplatform/openig/ai/filter/TokenRateLimiterTest.java @@ -0,0 +1,124 @@ +/* + * The contents of this file are subject to the terms of the Common Development and + * Distribution License (the License). You may not use this file except in compliance with the + * License. + * + * You can obtain a copy of the License at legal/CDDLv1.0.txt. See the License for the + * specific language governing permission and limitations under the License. + * + * When distributing Covered Software, include this CDDL Header Notice in each file and include + * the License file at legal/CDDLv1.0.txt. 
If applicable, add the following below the CDDL + * Header, with the fields enclosed by brackets [] replaced by your own identifying + * information: "Portions copyright [year] [name of copyright owner]". + * + * Copyright 2026 3A Systems LLC. + */ + +package org.openidentityplatform.openig.ai.filter; + +import com.google.common.testing.FakeTicker; +import org.testng.annotations.Test; + +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.assertj.core.api.Assertions.assertThat; +import static org.forgerock.util.time.Duration.duration; + +public class TokenRateLimiterTest { + + private static final String USER_A = "alice"; + private static final String USER_B = "bob"; + + @Test + public void shouldReturnZeroWhenAllowed() throws Exception { + TokenRateLimiter limiter = new TokenRateLimiter(10_000, duration("1 minute")); + long result = limiter.tryConsume(USER_A, 500); + assertThat(result).isEqualTo(0L); + } + + @Test + public void shouldReturnPositiveNanosWhenDenied() throws Exception { + TokenRateLimiter limiter = new TokenRateLimiter(100, duration("1 minute")); + // Cost exceeds full capacity + long result = limiter.tryConsume(USER_A, 200); + assertThat(result).isGreaterThan(0L); + } + + @Test + public void shouldReturnZeroForExactCapacityCost() throws Exception { + TokenRateLimiter limiter = new TokenRateLimiter(1_000, duration("1 minute")); + long result = limiter.tryConsume(USER_A, 1_000); + assertThat(result).isEqualTo(0L); + } + + @Test + public void shouldDenySubsequentRequestWhenBucketExhausted() throws Exception { + TokenRateLimiter limiter = new TokenRateLimiter(1_000, duration("1 minute")); + limiter.tryConsume(USER_A, 900); // 100 left + long result = limiter.tryConsume(USER_A, 200); // needs 200 + assertThat(result).isGreaterThan(0L); + } + + @Test + public void shouldTrackBucketsPerIdentityIndependently() throws Exception { + TokenRateLimiter limiter = new TokenRateLimiter(500, duration("1 
minute")); + + assertThat(limiter.tryConsume(USER_A, 500)).isEqualTo(0L); + assertThat(limiter.tryConsume(USER_A, 1)).isGreaterThan(0L); + // USER_B has a full bucket + assertThat(limiter.tryConsume(USER_B, 500)).isEqualTo(0L); + } + + @Test + public void shouldRefillTokensAfterElapsedTime() throws Exception { + FakeTicker ticker = new FakeTicker(); + + TokenRateLimiter limiter = new TokenRateLimiter(1_000, duration("1 minute"), ticker::read, null); + limiter.tryConsume(USER_A, 970); + ticker.advance(3, TimeUnit.SECONDS); + long result = limiter.tryConsume(USER_A, 60); + assertThat(result).isEqualTo(0L); + } + + @Test + public void shouldNotExceedCapacityDuringRefill() { + FakeTicker ticker = new FakeTicker(); + TokenRateLimiter limiter = new TokenRateLimiter(1_000, duration("1 second"), ticker::read, null); + + ticker.advance(10, TimeUnit.MINUTES); + + assertThat(limiter.availableTokens(USER_A)).isEqualTo(1_000); + } + + @Test + public void shouldHandleConcurrentConsumersWithoutExceedingCapacity() throws InterruptedException { + final int THREADS = 100; + final long CAPACITY = 1_000; + TokenRateLimiter limiter = new TokenRateLimiter(CAPACITY, duration("1 minute")); + + AtomicInteger allowed = new AtomicInteger(0); + Thread[] threads = new Thread[THREADS]; + for (int i = 0; i < THREADS; i++) { + threads[i] = new Thread(() -> { + try { + if (limiter.tryConsume(USER_A, 100) == 0L) { + allowed.incrementAndGet(); + } + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } + for (Thread t : threads) { + t.start(); + } + for (Thread t : threads) { + t.join(); + } + + assertThat(allowed.get()).isLessThanOrEqualTo((int) (CAPACITY / 100)); + } + + +} diff --git a/openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-req.json b/openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-req.json similarity index 100% rename from 
openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-req.json rename to openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-req.json diff --git a/openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-resp.json b/openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-resp.json similarity index 100% rename from openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-resp.json rename to openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/allowed-tool-call-resp.json diff --git a/openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/disallowed-tool-call-req.json b/openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/disallowed-tool-call-req.json similarity index 100% rename from openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/disallowed-tool-call-req.json rename to openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/disallowed-tool-call-req.json diff --git a/openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-req.json b/openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-req.json similarity index 100% rename from openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-req.json rename to openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-req.json diff --git a/openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-resp.json b/openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-resp.json similarity index 100% rename from openig-core/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-resp.json rename to openig-ai/src/test/resources/org/openidentityplatrform/openig/mcp/tools-list-resp.json diff --git a/openig-core/src/main/java/org/forgerock/openig/alias/CoreClassAliasResolver.java 
b/openig-core/src/main/java/org/forgerock/openig/alias/CoreClassAliasResolver.java index 4d6a6e75c..2ad37dc94 100644 --- a/openig-core/src/main/java/org/forgerock/openig/alias/CoreClassAliasResolver.java +++ b/openig-core/src/main/java/org/forgerock/openig/alias/CoreClassAliasResolver.java @@ -63,7 +63,6 @@ import org.forgerock.openig.thread.ScheduledExecutorServiceHeaplet; import org.openidentityplatform.openig.filter.ICAPFilter; import org.openidentityplatform.openig.filter.JwtBuilderFilter; -import org.openidentityplatform.openig.filter.MCPServerFeaturesFilter; import org.openidentityplatform.openig.mq.EmbeddedKafka; import org.openidentityplatform.openig.mq.MQ_IBM; import org.openidentityplatform.openig.mq.MQ_Kafka; @@ -125,9 +124,6 @@ public class CoreClassAliasResolver implements ClassAliasResolver { ALIASES.put("MQ_IBM", MQ_IBM.class); ALIASES.put("ICAP", ICAPFilter.class); - //AI features - ALIASES.put("MCPServerFeaturesFilter", MCPServerFeaturesFilter.class); - //Secrets ALIASES.put("SystemAndEnvSecretStore", SystemAndEnvSecretStore.class); } diff --git a/openig-doc/src/main/asciidoc/reference/filters-conf.adoc b/openig-doc/src/main/asciidoc/reference/filters-conf.adoc index 3e349e5ed..e7b1c25ac 100644 --- a/openig-doc/src/main/asciidoc/reference/filters-conf.adoc +++ b/openig-doc/src/main/asciidoc/reference/filters-conf.adoc @@ -863,6 +863,108 @@ Default: `true` ==== Javadoc link:{apidocs-url}/index.html?org/forgerock/openig/filter/HttpBasicAuthFilter.html[org.forgerock.openig.filter.HttpBasicAuthFilter, window=\_blank] +''' +[#LLMProxyFilter] +=== LLMProxyFilter — controls token usage per user + +==== Description + +* Provider normalization – rewrites the upstream URI and injects the +correct authentication header for the configured provider. +* Token-based rate limiting – estimates the prompt-token cost of each +request and enforces per-identity limits. +Returns `429 Too Many Requests` with an exact `Retry-After` header when the bucket is exhausted. 
+ +==== Usage + +[source, javascript] +---- +{ + "type": "LLMProxyFilter", + "config": { + "provider" : string, expression, required, + "baseUrl" : string, expression, optional, + "apiKey" : string, expression, required, + "sub" : string, optional, expression, default "anonymous" + "rateLimitEnabled" : boolean, optional, default true + "rate": { + "numberOfTokens" : optional, expression, number, + "duration" : duration, + "cleaningInterval" : duration, expression, optional + } + } +} +---- + +==== Properties + +`"provider"`: __expression, required__:: +LLM Provider, allowed values are: `OPENAI`, `ANTHROPIC`, `MISTRAL`, `AZURE_OPENAI`, +`OPENAI_COMPATIBLE`. + +`"baseUrl"`: __expression, optional__:: +Custom URL for an LLM provider. + +`"apiKey"`: __expression, required__:: +API-key for an LLM provider, if using Ollama, set any non-empty value. + +`"sub"`: __expression, optional__:: +Identity subject to restrict token usage. ++ +Default `anonymous`. + +`"rateLimitEnabled"`: __expression, optional__:: +Enables token usage control. ++ +Default: true + +`"rate"`: __object, optional__:: +[open] +==== +Rate limit settings: + +`"numberOfTokens"`: __number, optional:: +Amount of tokens per time window. ++ +Default: 10000 + +`"duration"`: __expression, optional:: +Token usage duration (time window). 
++ +Default: 1 minute + +`"cleaningInterval"`: __expression, optional:: +Token bucket eviction period ++ +Default: 5 minutes +==== + +==== Example + +[source, json] +---- +{ + "name": "OllamaProxy", + "type": "LLMProxyFilter", + "config": { + "provider" : "OPENAI_COMPATIBLE", + "baseUrl" : "http://ollama.local:11434/v1/chat/completions", + "apiKey" : "test-api-key", + "sub" : "${attributes.sub}", + "rateLimitEnabled" : true, + "rate": { + "numberOfTokens" : 10000, + "duration" : "1 minute", + "cleaningInterval" : "5 minutes" + } + } +} +---- + +==== Javadoc +link:{apidocs-url}/index.html/org/openidentityplatform/openig/ai/filter/LLMProxyFilter.java.html[org.openidentityplatform.openig.ai.filter.LLMProxyFilter, window=\_blank] + + ''' [#LocationHeaderFilter] === LocationHeaderFilter — rewrites Location headers @@ -1012,7 +1114,7 @@ Default: Empty object. ---- ==== Javadoc -link:{apidocs-url}/index.html?org/openidentityplatform/openig/filter/MCPServerFeaturesFilter[org.openidentityplatform.openig.filter.MCPServerFeaturesFilter, window=\_blank] +link:{apidocs-url}/org/openidentityplatform/openig/ai/filter/MCPServerFeaturesFilter[org.openidentityplatform.openig.ai.filter.MCPServerFeaturesFilter, window=\_blank] ''' diff --git a/openig-war/pom.xml b/openig-war/pom.xml index 6624823cd..b0e27b0b3 100644 --- a/openig-war/pom.xml +++ b/openig-war/pom.xml @@ -71,6 +71,11 @@ openig-openam ${project.version} + + org.openidentityplatform.openig + openig-ai + ${project.version} + org.openidentityplatform.commons.audit handler-csv diff --git a/pom.xml b/pom.xml index d579046d4..c2c767ab1 100644 --- a/pom.xml +++ b/pom.xml @@ -148,6 +148,7 @@ openig-openam openig-ui openig-docker + openig-ai