Merge pull request #1910 from code-yeongyu/fix/1753-context-window-hardcoded

fix: use ModelCacheState for context window limit instead of env var (#1753)
This commit is contained in:
YeonGyu-Kim
2026-02-17 10:53:58 +09:00
committed by GitHub
15 changed files with 420 additions and 41 deletions

View File

@@ -3,6 +3,7 @@ import type { HookName, OhMyOpenCodeConfig } from "./config"
import type { LoadedSkill } from "./features/opencode-skill-loader/types"
import type { BackgroundManager } from "./features/background-agent"
import type { PluginContext } from "./plugin/types"
import type { ModelCacheState } from "./plugin-state"
import { createCoreHooks } from "./plugin/hooks/create-core-hooks"
import { createContinuationHooks } from "./plugin/hooks/create-continuation-hooks"
@@ -13,6 +14,7 @@ export type CreatedHooks = ReturnType<typeof createHooks>
export function createHooks(args: {
ctx: PluginContext
pluginConfig: OhMyOpenCodeConfig
modelCacheState: ModelCacheState
backgroundManager: BackgroundManager
isHookEnabled: (hookName: HookName) => boolean
safeHookEnabled: boolean
@@ -22,6 +24,7 @@ export function createHooks(args: {
const {
ctx,
pluginConfig,
modelCacheState,
backgroundManager,
isHookEnabled,
safeHookEnabled,
@@ -32,6 +35,7 @@ export function createHooks(args: {
const core = createCoreHooks({
ctx,
pluginConfig,
modelCacheState,
isHookEnabled,
safeHookEnabled,
})

View File

@@ -1,6 +1,28 @@
import { describe, it, expect, mock, beforeEach } from "bun:test"
/// <reference types="bun-types" />
import { describe, it, expect, mock, beforeEach, afterEach } from "bun:test"
import { createContextWindowMonitorHook } from "./context-window-monitor"
// Env keys that toggle Anthropic's 1M-token context window. Tests mutate
// these, so capture whatever values were present at module load for restore.
const ANTHROPIC_CONTEXT_ENV_KEY = "ANTHROPIC_1M_CONTEXT"
const VERTEX_CONTEXT_ENV_KEY = "VERTEX_ANTHROPIC_1M_CONTEXT"
const originalAnthropicContextEnv = process.env[ANTHROPIC_CONTEXT_ENV_KEY]
const originalVertexContextEnv = process.env[VERTEX_CONTEXT_ENV_KEY]
/**
 * Restores both context-limit env vars to their module-load values,
 * deleting a key entirely when it was originally unset.
 */
function resetContextLimitEnv(): void {
  const restore = (key: string, original: string | undefined): void => {
    if (original === undefined) {
      delete process.env[key]
    } else {
      process.env[key] = original
    }
  }
  restore(ANTHROPIC_CONTEXT_ENV_KEY, originalAnthropicContextEnv)
  restore(VERTEX_CONTEXT_ENV_KEY, originalVertexContextEnv)
}
function createMockCtx() {
return {
client: {
@@ -17,6 +39,12 @@ describe("context-window-monitor", () => {
beforeEach(() => {
ctx = createMockCtx()
delete process.env[ANTHROPIC_CONTEXT_ENV_KEY]
delete process.env[VERTEX_CONTEXT_ENV_KEY]
})
afterEach(() => {
resetContextLimitEnv()
})
// #given event caches token info from message.updated
@@ -218,4 +246,81 @@ describe("context-window-monitor", () => {
)
expect(output.output).toBe("test")
})
it("should use 1M limit when model cache flag is enabled", async () => {
//#given
const hook = createContextWindowMonitorHook(ctx as never, {
anthropicContext1MEnabled: true,
})
const sessionID = "ses_1m_flag"
await hook.event({
event: {
type: "message.updated",
properties: {
info: {
role: "assistant",
sessionID,
providerID: "anthropic",
finish: true,
tokens: {
input: 300000,
output: 1000,
reasoning: 0,
cache: { read: 0, write: 0 },
},
},
},
},
})
//#when
const output = { title: "", output: "original", metadata: null }
await hook["tool.execute.after"](
{ tool: "bash", sessionID, callID: "call_1" },
output
)
//#then
expect(output.output).toBe("original")
})
it("should keep env var fallback when model cache flag is disabled", async () => {
//#given
process.env[ANTHROPIC_CONTEXT_ENV_KEY] = "true"
const hook = createContextWindowMonitorHook(ctx as never, {
anthropicContext1MEnabled: false,
})
const sessionID = "ses_env_fallback"
await hook.event({
event: {
type: "message.updated",
properties: {
info: {
role: "assistant",
sessionID,
providerID: "anthropic",
finish: true,
tokens: {
input: 300000,
output: 1000,
reasoning: 0,
cache: { read: 0, write: 0 },
},
},
},
},
})
//#when
const output = { title: "", output: "original", metadata: null }
await hook["tool.execute.after"](
{ tool: "bash", sessionID, callID: "call_1" },
output
)
//#then
expect(output.output).toBe("original")
})
})

View File

@@ -2,13 +2,21 @@ import type { PluginInput } from "@opencode-ai/plugin"
import { createSystemDirective, SystemDirectiveTypes } from "../shared/system-directive"
const ANTHROPIC_DISPLAY_LIMIT = 1_000_000
const ANTHROPIC_ACTUAL_LIMIT =
process.env.ANTHROPIC_1M_CONTEXT === "true" ||
process.env.VERTEX_ANTHROPIC_1M_CONTEXT === "true"
? 1_000_000
: 200_000
// Conservative default: Anthropic models without the 1M beta cap at 200K.
const DEFAULT_ANTHROPIC_ACTUAL_LIMIT = 200_000
// Warn once a session crosses this fraction of the effective limit.
const CONTEXT_WARNING_THRESHOLD = 0.70
// Minimal structural slice of ModelCacheState this hook consumes; keeps the
// hook decoupled from the full plugin-state module.
type ModelCacheStateLike = {
anthropicContext1MEnabled: boolean
}
/**
 * Resolves the effective Anthropic context limit. The model-cache flag takes
 * precedence; the two env vars remain as a backwards-compatible fallback.
 */
function getAnthropicActualLimit(modelCacheState?: ModelCacheStateLike): number {
  const oneMillionEnabled =
    modelCacheState?.anthropicContext1MEnabled === true ||
    process.env.ANTHROPIC_1M_CONTEXT === "true" ||
    process.env.VERTEX_ANTHROPIC_1M_CONTEXT === "true"
  return oneMillionEnabled ? 1_000_000 : DEFAULT_ANTHROPIC_ACTUAL_LIMIT
}
const CONTEXT_REMINDER = `${createSystemDirective(SystemDirectiveTypes.CONTEXT_WINDOW_MONITOR)}
You are using Anthropic Claude with 1M context window.
@@ -31,7 +39,10 @@ function isAnthropicProvider(providerID: string): boolean {
return providerID === "anthropic" || providerID === "google-vertex-anthropic"
}
export function createContextWindowMonitorHook(_ctx: PluginInput) {
export function createContextWindowMonitorHook(
_ctx: PluginInput,
modelCacheState?: ModelCacheStateLike,
) {
const remindedSessions = new Set<string>()
const tokenCache = new Map<string, CachedTokenState>()
@@ -51,7 +62,8 @@ export function createContextWindowMonitorHook(_ctx: PluginInput) {
const lastTokens = cached.tokens
const totalInputTokens = (lastTokens?.input ?? 0) + (lastTokens?.cache?.read ?? 0)
const actualUsagePercentage = totalInputTokens / ANTHROPIC_ACTUAL_LIMIT
const actualUsagePercentage =
totalInputTokens / getAnthropicActualLimit(modelCacheState)
if (actualUsagePercentage < CONTEXT_WARNING_THRESHOLD) return

View File

@@ -27,9 +27,12 @@ interface EventInput {
};
}
export function createDirectoryAgentsInjectorHook(ctx: PluginInput) {
export function createDirectoryAgentsInjectorHook(
ctx: PluginInput,
modelCacheState?: { anthropicContext1MEnabled: boolean },
) {
const sessionCaches = new Map<string, Set<string>>();
const truncator = createDynamicTruncator(ctx);
const truncator = createDynamicTruncator(ctx, modelCacheState);
const toolExecuteAfter = async (input: ToolExecuteInput, output: ToolExecuteOutput) => {
const toolName = input.tool.toLowerCase();

View File

@@ -27,9 +27,12 @@ interface EventInput {
};
}
export function createDirectoryReadmeInjectorHook(ctx: PluginInput) {
export function createDirectoryReadmeInjectorHook(
ctx: PluginInput,
modelCacheState?: { anthropicContext1MEnabled: boolean },
) {
const sessionCaches = new Map<string, Set<string>>();
const truncator = createDynamicTruncator(ctx);
const truncator = createDynamicTruncator(ctx, modelCacheState);
const toolExecuteAfter = async (input: ToolExecuteInput, output: ToolExecuteOutput) => {
const toolName = input.tool.toLowerCase();

View File

@@ -1,4 +1,26 @@
import { describe, it, expect, mock, beforeEach } from "bun:test"
/// <reference types="bun-types" />
import { describe, it, expect, mock, beforeEach, afterEach } from "bun:test"
// Context-limit env keys the suite mutates; snapshot their load-time values
// so afterEach can put the environment back exactly as it found it.
const ANTHROPIC_CONTEXT_ENV_KEY = "ANTHROPIC_1M_CONTEXT"
const VERTEX_CONTEXT_ENV_KEY = "VERTEX_ANTHROPIC_1M_CONTEXT"
const originalAnthropicContextEnv = process.env[ANTHROPIC_CONTEXT_ENV_KEY]
const originalVertexContextEnv = process.env[VERTEX_CONTEXT_ENV_KEY]
/**
 * Restores both env vars to the snapshot taken at module load; a key that
 * was originally absent is removed rather than set to "undefined".
 */
function resetContextLimitEnv(): void {
  const entries: Array<[string, string | undefined]> = [
    [ANTHROPIC_CONTEXT_ENV_KEY, originalAnthropicContextEnv],
    [VERTEX_CONTEXT_ENV_KEY, originalVertexContextEnv],
  ]
  for (const [key, original] of entries) {
    if (original === undefined) {
      delete process.env[key]
    } else {
      process.env[key] = original
    }
  }
}
const logMock = mock(() => {})
@@ -29,6 +51,12 @@ describe("preemptive-compaction", () => {
beforeEach(() => {
ctx = createMockCtx()
logMock.mockClear()
delete process.env[ANTHROPIC_CONTEXT_ENV_KEY]
delete process.env[VERTEX_CONTEXT_ENV_KEY]
})
afterEach(() => {
resetContextLimitEnv()
})
// #given event caches token info from message.updated
@@ -238,4 +266,81 @@ describe("preemptive-compaction", () => {
error: String(summarizeError),
})
})
it("should use 1M limit when model cache flag is enabled", async () => {
//#given
const hook = createPreemptiveCompactionHook(ctx as never, {
anthropicContext1MEnabled: true,
})
const sessionID = "ses_1m_flag"
await hook.event({
event: {
type: "message.updated",
properties: {
info: {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
finish: true,
tokens: {
input: 300000,
output: 1000,
reasoning: 0,
cache: { read: 0, write: 0 },
},
},
},
},
})
//#when
await hook["tool.execute.after"](
{ tool: "bash", sessionID, callID: "call_1" },
{ title: "", output: "test", metadata: null }
)
//#then
expect(ctx.client.session.summarize).not.toHaveBeenCalled()
})
it("should keep env var fallback when model cache flag is disabled", async () => {
//#given
process.env[ANTHROPIC_CONTEXT_ENV_KEY] = "true"
const hook = createPreemptiveCompactionHook(ctx as never, {
anthropicContext1MEnabled: false,
})
const sessionID = "ses_env_fallback"
await hook.event({
event: {
type: "message.updated",
properties: {
info: {
role: "assistant",
sessionID,
providerID: "anthropic",
modelID: "claude-sonnet-4-5",
finish: true,
tokens: {
input: 300000,
output: 1000,
reasoning: 0,
cache: { read: 0, write: 0 },
},
},
},
},
})
//#when
await hook["tool.execute.after"](
{ tool: "bash", sessionID, callID: "call_1" },
{ title: "", output: "test", metadata: null }
)
//#then
expect(ctx.client.session.summarize).not.toHaveBeenCalled()
})
})

View File

@@ -2,11 +2,17 @@ import { log } from "../shared/logger"
// Default input-token ceiling for Anthropic models without the 1M beta.
const DEFAULT_ACTUAL_LIMIT = 200_000
// NOTE(review): this boolean const appears to be leftover from the
// pre-refactor env-var check and looks unused after the helper below —
// confirm and delete.
const ANTHROPIC_ACTUAL_LIMIT =
  process.env.ANTHROPIC_1M_CONTEXT === "true" ||
  process.env.VERTEX_ANTHROPIC_1M_CONTEXT === "true"
// Minimal view of ModelCacheState needed by this hook.
type ModelCacheStateLike = {
anthropicContext1MEnabled: boolean
}
/**
 * Effective Anthropic context limit: the model-cache flag wins, with the
 * legacy env vars retained as a backwards-compatible fallback.
 */
function getAnthropicActualLimit(modelCacheState?: ModelCacheStateLike): number {
  if (modelCacheState?.anthropicContext1MEnabled) {
    return 1_000_000
  }
  const envEnabled =
    process.env.ANTHROPIC_1M_CONTEXT === "true" ||
    process.env.VERTEX_ANTHROPIC_1M_CONTEXT === "true"
  return envEnabled ? 1_000_000 : DEFAULT_ACTUAL_LIMIT
}
const PREEMPTIVE_COMPACTION_THRESHOLD = 0.78
@@ -43,7 +49,10 @@ type PluginInput = {
directory: string
}
export function createPreemptiveCompactionHook(ctx: PluginInput) {
export function createPreemptiveCompactionHook(
ctx: PluginInput,
modelCacheState?: ModelCacheStateLike,
) {
const compactionInProgress = new Set<string>()
const compactedSessions = new Set<string>()
const tokenCache = new Map<string, CachedCompactionState>()
@@ -60,7 +69,7 @@ export function createPreemptiveCompactionHook(ctx: PluginInput) {
const actualLimit =
isAnthropicProvider(cached.providerID)
? ANTHROPIC_ACTUAL_LIMIT
? getAnthropicActualLimit(modelCacheState)
: DEFAULT_ACTUAL_LIMIT
const lastTokens = cached.tokens

View File

@@ -29,8 +29,11 @@ interface EventInput {
const TRACKED_TOOLS = ["read", "write", "edit", "multiedit"];
export function createRulesInjectorHook(ctx: PluginInput) {
const truncator = createDynamicTruncator(ctx);
export function createRulesInjectorHook(
ctx: PluginInput,
modelCacheState?: { anthropicContext1MEnabled: boolean },
) {
const truncator = createDynamicTruncator(ctx, modelCacheState);
const { getSessionCache, clearSessionCache } = createSessionCacheStore();
const { processFilePathForInjection } = createRuleInjectionProcessor({
workspaceDirectory: ctx.directory,

View File

@@ -27,11 +27,12 @@ const TOOL_SPECIFIC_MAX_TOKENS: Record<string, number> = {
}
interface ToolOutputTruncatorOptions {
modelCacheState?: { anthropicContext1MEnabled: boolean }
experimental?: ExperimentalConfig
}
export function createToolOutputTruncatorHook(ctx: PluginInput, options?: ToolOutputTruncatorOptions) {
const truncator = createDynamicTruncator(ctx)
const truncator = createDynamicTruncator(ctx, options?.modelCacheState)
const truncateAll = options?.experimental?.truncate_all_tool_outputs ?? false
const toolExecuteAfter = async (

View File

@@ -56,6 +56,7 @@ const OhMyOpenCodePlugin: Plugin = async (ctx) => {
const hooks = createHooks({
ctx,
pluginConfig,
modelCacheState,
backgroundManager: managers.backgroundManager,
isHookEnabled,
safeHookEnabled,

View File

@@ -1,5 +1,6 @@
import type { HookName, OhMyOpenCodeConfig } from "../../config"
import type { PluginContext } from "../types"
import type { ModelCacheState } from "../../plugin-state"
import { createSessionHooks } from "./create-session-hooks"
import { createToolGuardHooks } from "./create-tool-guard-hooks"
@@ -8,14 +9,16 @@ import { createTransformHooks } from "./create-transform-hooks"
export function createCoreHooks(args: {
ctx: PluginContext
pluginConfig: OhMyOpenCodeConfig
modelCacheState: ModelCacheState
isHookEnabled: (hookName: HookName) => boolean
safeHookEnabled: boolean
}) {
const { ctx, pluginConfig, isHookEnabled, safeHookEnabled } = args
const { ctx, pluginConfig, modelCacheState, isHookEnabled, safeHookEnabled } = args
const session = createSessionHooks({
ctx,
pluginConfig,
modelCacheState,
isHookEnabled,
safeHookEnabled,
})
@@ -23,6 +26,7 @@ export function createCoreHooks(args: {
const tool = createToolGuardHooks({
ctx,
pluginConfig,
modelCacheState,
isHookEnabled,
safeHookEnabled,
})

View File

@@ -1,4 +1,5 @@
import type { OhMyOpenCodeConfig, HookName } from "../../config"
import type { ModelCacheState } from "../../plugin-state"
import type { PluginContext } from "../types"
import {
@@ -55,21 +56,24 @@ export type SessionHooks = {
export function createSessionHooks(args: {
ctx: PluginContext
pluginConfig: OhMyOpenCodeConfig
modelCacheState: ModelCacheState
isHookEnabled: (hookName: HookName) => boolean
safeHookEnabled: boolean
}): SessionHooks {
const { ctx, pluginConfig, isHookEnabled, safeHookEnabled } = args
const { ctx, pluginConfig, modelCacheState, isHookEnabled, safeHookEnabled } = args
const safeHook = <T>(hookName: HookName, factory: () => T): T | null =>
safeCreateHook(hookName, factory, { enabled: safeHookEnabled })
const contextWindowMonitor = isHookEnabled("context-window-monitor")
? safeHook("context-window-monitor", () => createContextWindowMonitorHook(ctx))
? safeHook("context-window-monitor", () =>
createContextWindowMonitorHook(ctx, modelCacheState))
: null
const preemptiveCompaction =
isHookEnabled("preemptive-compaction") &&
pluginConfig.experimental?.preemptive_compaction
? safeHook("preemptive-compaction", () => createPreemptiveCompactionHook(ctx))
? safeHook("preemptive-compaction", () =>
createPreemptiveCompactionHook(ctx, modelCacheState))
: null
const sessionRecovery = isHookEnabled("session-recovery")

View File

@@ -1,4 +1,5 @@
import type { HookName, OhMyOpenCodeConfig } from "../../config"
import type { ModelCacheState } from "../../plugin-state"
import type { PluginContext } from "../types"
import {
@@ -35,10 +36,11 @@ export type ToolGuardHooks = {
export function createToolGuardHooks(args: {
ctx: PluginContext
pluginConfig: OhMyOpenCodeConfig
modelCacheState: ModelCacheState
isHookEnabled: (hookName: HookName) => boolean
safeHookEnabled: boolean
}): ToolGuardHooks {
const { ctx, pluginConfig, isHookEnabled, safeHookEnabled } = args
const { ctx, pluginConfig, modelCacheState, isHookEnabled, safeHookEnabled } = args
const safeHook = <T>(hookName: HookName, factory: () => T): T | null =>
safeCreateHook(hookName, factory, { enabled: safeHookEnabled })
@@ -48,7 +50,10 @@ export function createToolGuardHooks(args: {
const toolOutputTruncator = isHookEnabled("tool-output-truncator")
? safeHook("tool-output-truncator", () =>
createToolOutputTruncatorHook(ctx, { experimental: pluginConfig.experimental }))
createToolOutputTruncatorHook(ctx, {
modelCacheState,
experimental: pluginConfig.experimental,
}))
: null
let directoryAgentsInjector: ReturnType<typeof createDirectoryAgentsInjectorHook> | null = null
@@ -62,12 +67,14 @@ export function createToolGuardHooks(args: {
nativeVersion: OPENCODE_NATIVE_AGENTS_INJECTION_VERSION,
})
} else {
directoryAgentsInjector = safeHook("directory-agents-injector", () => createDirectoryAgentsInjectorHook(ctx))
directoryAgentsInjector = safeHook("directory-agents-injector", () =>
createDirectoryAgentsInjectorHook(ctx, modelCacheState))
}
}
const directoryReadmeInjector = isHookEnabled("directory-readme-injector")
? safeHook("directory-readme-injector", () => createDirectoryReadmeInjectorHook(ctx))
? safeHook("directory-readme-injector", () =>
createDirectoryReadmeInjectorHook(ctx, modelCacheState))
: null
const emptyTaskResponseDetector = isHookEnabled("empty-task-response-detector")
@@ -75,7 +82,8 @@ export function createToolGuardHooks(args: {
: null
const rulesInjector = isHookEnabled("rules-injector")
? safeHook("rules-injector", () => createRulesInjectorHook(ctx))
? safeHook("rules-injector", () =>
createRulesInjectorHook(ctx, modelCacheState))
: null
const tasksTodowriteDisabler = isHookEnabled("tasks-todowrite-disabler")

View File

@@ -0,0 +1,102 @@
/// <reference types="bun-types" />
import { describe, expect, it, afterEach } from "bun:test"
import { getContextWindowUsage } from "./dynamic-truncator"
// Env vars that widen the Anthropic context window; values observed at module
// load are snapshotted so tests can restore the environment afterwards.
const ANTHROPIC_CONTEXT_ENV_KEY = "ANTHROPIC_1M_CONTEXT"
const VERTEX_CONTEXT_ENV_KEY = "VERTEX_ANTHROPIC_1M_CONTEXT"
const originalAnthropicContextEnv = process.env[ANTHROPIC_CONTEXT_ENV_KEY]
const originalVertexContextEnv = process.env[VERTEX_CONTEXT_ENV_KEY]
/**
 * Puts both context-limit env vars back to their module-load state,
 * deleting keys that were originally unset.
 */
function resetContextLimitEnv(): void {
  const snapshot: Record<string, string | undefined> = {
    [ANTHROPIC_CONTEXT_ENV_KEY]: originalAnthropicContextEnv,
    [VERTEX_CONTEXT_ENV_KEY]: originalVertexContextEnv,
  }
  for (const key of Object.keys(snapshot)) {
    const original = snapshot[key]
    if (original === undefined) {
      delete process.env[key]
    } else {
      process.env[key] = original
    }
  }
}
/**
 * Builds the minimal ctx stub getContextWindowUsage() reads: a session whose
 * message list holds one finished assistant message reporting `inputTokens`
 * of input and zero output/reasoning/cache tokens.
 */
function createContextUsageMockContext(inputTokens: number) {
  return {
    client: {
      session: {
        // Fresh response object per call, mirroring a real client.
        messages: async () => {
          const assistantMessage = {
            info: {
              role: "assistant",
              tokens: {
                input: inputTokens,
                output: 0,
                reasoning: 0,
                cache: { read: 0, write: 0 },
              },
            },
          }
          return { data: [assistantMessage] }
        },
      },
    },
  }
}
describe("getContextWindowUsage", () => {
afterEach(() => {
resetContextLimitEnv()
})
it("uses 1M limit when model cache flag is enabled", async () => {
//#given
delete process.env[ANTHROPIC_CONTEXT_ENV_KEY]
delete process.env[VERTEX_CONTEXT_ENV_KEY]
const ctx = createContextUsageMockContext(300000)
//#when
const usage = await getContextWindowUsage(ctx as never, "ses_1m_flag", {
anthropicContext1MEnabled: true,
})
//#then
expect(usage?.usagePercentage).toBe(0.3)
expect(usage?.remainingTokens).toBe(700000)
})
it("uses 200K limit when model cache flag is disabled and env vars are unset", async () => {
//#given
delete process.env[ANTHROPIC_CONTEXT_ENV_KEY]
delete process.env[VERTEX_CONTEXT_ENV_KEY]
const ctx = createContextUsageMockContext(150000)
//#when
const usage = await getContextWindowUsage(ctx as never, "ses_default", {
anthropicContext1MEnabled: false,
})
//#then
expect(usage?.usagePercentage).toBe(0.75)
expect(usage?.remainingTokens).toBe(50000)
})
it("keeps env var fallback when model cache flag is disabled", async () => {
//#given
process.env[ANTHROPIC_CONTEXT_ENV_KEY] = "true"
const ctx = createContextUsageMockContext(300000)
//#when
const usage = await getContextWindowUsage(ctx as never, "ses_env_fallback", {
anthropicContext1MEnabled: false,
})
//#then
expect(usage?.usagePercentage).toBe(0.3)
expect(usage?.remainingTokens).toBe(700000)
})
})

View File

@@ -1,14 +1,22 @@
import type { PluginInput } from "@opencode-ai/plugin";
import { normalizeSDKResponse } from "./normalize-sdk-response"
const ANTHROPIC_ACTUAL_LIMIT =
process.env.ANTHROPIC_1M_CONTEXT === "true" ||
process.env.VERTEX_ANTHROPIC_1M_CONTEXT === "true"
? 1_000_000
: 200_000;
// 200K is the standard Anthropic input window when the 1M beta is off.
const DEFAULT_ANTHROPIC_ACTUAL_LIMIT = 200_000;
// Rough chars-per-token ratio used for token estimates from raw text length.
const CHARS_PER_TOKEN_ESTIMATE = 4;
// Ceiling applied to a single truncated tool output, in tokens.
const DEFAULT_TARGET_MAX_TOKENS = 50_000;
// Structural subset of ModelCacheState consumed here.
type ModelCacheStateLike = {
anthropicContext1MEnabled: boolean;
}
/**
 * Effective Anthropic context limit. The model-cache flag wins; the legacy
 * env vars are kept as a fallback for configurations that still set them.
 */
function getAnthropicActualLimit(modelCacheState?: ModelCacheStateLike): number {
  if (modelCacheState?.anthropicContext1MEnabled) {
    return 1_000_000;
  }
  const envEnabled =
    process.env.ANTHROPIC_1M_CONTEXT === "true" ||
    process.env.VERTEX_ANTHROPIC_1M_CONTEXT === "true";
  return envEnabled ? 1_000_000 : DEFAULT_ANTHROPIC_ACTUAL_LIMIT;
}
interface AssistantMessageInfo {
role: "assistant";
tokens: {
@@ -110,6 +118,7 @@ export function truncateToTokenLimit(
export async function getContextWindowUsage(
ctx: PluginInput,
sessionID: string,
modelCacheState?: ModelCacheStateLike,
): Promise<{
usedTokens: number;
remainingTokens: number;
@@ -134,12 +143,13 @@ export async function getContextWindowUsage(
(lastTokens?.input ?? 0) +
(lastTokens?.cache?.read ?? 0) +
(lastTokens?.output ?? 0);
const remainingTokens = ANTHROPIC_ACTUAL_LIMIT - usedTokens;
const anthropicActualLimit = getAnthropicActualLimit(modelCacheState);
const remainingTokens = anthropicActualLimit - usedTokens;
return {
usedTokens,
remainingTokens,
usagePercentage: usedTokens / ANTHROPIC_ACTUAL_LIMIT,
usagePercentage: usedTokens / anthropicActualLimit,
};
} catch {
return null;
@@ -151,6 +161,7 @@ export async function dynamicTruncate(
sessionID: string,
output: string,
options: TruncationOptions = {},
modelCacheState?: ModelCacheStateLike,
): Promise<TruncationResult> {
if (typeof output !== 'string') {
return { result: String(output ?? ''), truncated: false };
@@ -161,7 +172,7 @@ export async function dynamicTruncate(
preserveHeaderLines = 3,
} = options;
const usage = await getContextWindowUsage(ctx, sessionID);
const usage = await getContextWindowUsage(ctx, sessionID, modelCacheState);
if (!usage) {
// Fallback: apply conservative truncation when context usage unavailable
@@ -183,15 +194,19 @@ export async function dynamicTruncate(
return truncateToTokenLimit(output, maxOutputTokens, preserveHeaderLines);
}
export function createDynamicTruncator(ctx: PluginInput) {
export function createDynamicTruncator(
ctx: PluginInput,
modelCacheState?: ModelCacheStateLike,
) {
return {
truncate: (
sessionID: string,
output: string,
options?: TruncationOptions,
) => dynamicTruncate(ctx, sessionID, output, options),
) => dynamicTruncate(ctx, sessionID, output, options, modelCacheState),
getUsage: (sessionID: string) => getContextWindowUsage(ctx, sessionID),
getUsage: (sessionID: string) =>
getContextWindowUsage(ctx, sessionID, modelCacheState),
truncateSync: (
output: string,