feat(hook): add context window monitor for Anthropic token usage tracking

Author: YeonGyu-Kim
Date: 2025-12-03 13:28:41 +09:00
parent 9054973f23
commit 9a11901590
3 changed files with 110 additions and 2 deletions

View File

@@ -0,0 +1,101 @@
import type { PluginInput } from "@opencode-ai/plugin"

// Anthropic Claude 1M-token context window and the usage ratio that triggers the reminder
const ANTHROPIC_CONTEXT_LIMIT = 1_000_000
const CONTEXT_WARNING_THRESHOLD = 0.75
const CONTEXT_REMINDER = `[SYSTEM REMINDER - 1M Context Window]
You are using Anthropic Claude with a 1M-token context window.
Current usage has exceeded 75%.
RECOMMENDATIONS:
- Consider compacting the session if available
- Break complex tasks into smaller, focused sessions
- Be concise in your responses
- Avoid redundant file reads
You have access to 1M tokens - use them wisely. Do NOT rush or skip tasks.
Complete your work thoroughly despite the context usage warning.`
// Token usage metadata attached to assistant messages by the session API
interface AssistantMessageInfo {
  role: "assistant"
  providerID: string
  tokens: {
    input: number
    output: number
    reasoning: number
    cache: { read: number; write: number }
  }
}

// Messages arrive wrapped; only assistant entries carry the full token info
interface MessageWrapper {
  info: { role: string } & Partial<AssistantMessageInfo>
}
// Watches tool executions and, once per session, appends a context-usage
// reminder to the tool output when Anthropic token usage crosses the threshold.
export function createContextWindowMonitorHook(ctx: PluginInput) {
  // Sessions that have already received the reminder
  const remindedSessions = new Set<string>()

  const toolExecuteAfter = async (
    input: { tool: string; sessionID: string; callID: string },
    output: { title: string; output: string; metadata: unknown }
  ) => {
    const { sessionID } = input
    if (remindedSessions.has(sessionID)) return
    try {
      // Fetch the message history for this session
      const response = await ctx.client.session.messages({
        path: { id: sessionID },
      })
      const messages = (response.data ?? response) as MessageWrapper[]
      const assistantMessages = messages
        .filter((m) => m.info.role === "assistant")
        .map((m) => m.info as AssistantMessageInfo)
      if (assistantMessages.length === 0) return

      // Only monitor sessions whose latest assistant turn came from Anthropic
      const lastAssistant = assistantMessages[assistantMessages.length - 1]
      if (lastAssistant.providerID !== "anthropic") return

      // Sum input and cache-read tokens across all assistant messages
      const totalInputTokens = assistantMessages.reduce((sum, m) => {
        const inputTokens = m.tokens?.input ?? 0
        const cacheReadTokens = m.tokens?.cache?.read ?? 0
        return sum + inputTokens + cacheReadTokens
      }, 0)

      const usagePercentage = totalInputTokens / ANTHROPIC_CONTEXT_LIMIT
      if (usagePercentage < CONTEXT_WARNING_THRESHOLD) return
      // Remind each session at most once
      remindedSessions.add(sessionID)
      const usedPct = (usagePercentage * 100).toFixed(1)
      const remainingPct = ((1 - usagePercentage) * 100).toFixed(1)
      const usedTokens = totalInputTokens.toLocaleString()
      const limitTokens = ANTHROPIC_CONTEXT_LIMIT.toLocaleString()
      // Append the reminder and a status line to the tool output
      output.output += `\n\n${CONTEXT_REMINDER}
[Context Status: ${usedPct}% used (${usedTokens}/${limitTokens} tokens), ${remainingPct}% remaining]`
    } catch {
      // Graceful degradation - do not disrupt tool execution
    }
  }
  // Clear per-session state when a session is deleted
  const eventHandler = async ({ event }: { event: { type: string; properties?: unknown } }) => {
    const props = event.properties as Record<string, unknown> | undefined
    if (event.type === "session.deleted") {
      const sessionInfo = props?.info as { id?: string } | undefined
      if (sessionInfo?.id) {
        remindedSessions.delete(sessionInfo.id)
      }
    }
  }

  return {
    "tool.execute.after": toolExecuteAfter,
    event: eventHandler,
  }
}
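
A quick way to see the hook fire is to call the returned "tool.execute.after" handler directly against a stubbed PluginInput. The sketch below is illustrative only: the stubbed client shape, session ID, and call ID are made up, and only the fields the hook actually reads are faked.

import { createContextWindowMonitorHook } from "./context-window-monitor"

// Hypothetical stub: fake only what the hook touches (client.session.messages).
const fakeCtx = {
  client: {
    session: {
      messages: async (_args: { path: { id: string } }) => ({
        data: [
          {
            info: {
              role: "assistant",
              providerID: "anthropic",
              tokens: {
                input: 700_000,
                output: 2_000,
                reasoning: 0,
                cache: { read: 100_000, write: 0 },
              },
            },
          },
        ],
      }),
    },
  },
} as unknown as Parameters<typeof createContextWindowMonitorHook>[0]

const hook = createContextWindowMonitorHook(fakeCtx)
const output = { title: "read", output: "file contents", metadata: {} }
await hook["tool.execute.after"](
  { tool: "read", sessionID: "ses_demo", callID: "call_1" },
  output
)
// 700,000 input + 100,000 cache-read = 800,000 tokens -> 80% of the 1M limit,
// above the 75% threshold, so the reminder is appended once for this session.
console.log(output.output.includes("SYSTEM REMINDER"))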

View File

@@ -1 +1,2 @@
export * from "./todo-continuation-enforcer"
export * from "./context-window-monitor"

View File

@@ -1,9 +1,10 @@
import type { Plugin } from "@opencode-ai/plugin"
import { builtinAgents } from "./agents"
import { createTodoContinuationEnforcer } from "./hooks"
import { createTodoContinuationEnforcer, createContextWindowMonitorHook } from "./hooks"
const OhMyOpenCodePlugin: Plugin = async (ctx) => {
const todoContinuationEnforcer = createTodoContinuationEnforcer(ctx)
const contextWindowMonitor = createContextWindowMonitorHook(ctx)
return {
config: async (config) => {
@@ -13,7 +14,12 @@ const OhMyOpenCodePlugin: Plugin = async (ctx) => {
}
},
event: todoContinuationEnforcer,
event: async (input) => {
await todoContinuationEnforcer(input)
await contextWindowMonitor.event(input)
},
"tool.execute.after": contextWindowMonitor["tool.execute.after"],
}
}
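
If later hooks also expose "tool.execute.after", they could be fanned out the same way the event handlers are combined above. The helper below is a hypothetical composition sketch, not part of this commit.

// Hypothetical helper: run several "tool.execute.after" handlers in order.
type ToolAfterHandler = (
  input: { tool: string; sessionID: string; callID: string },
  output: { title: string; output: string; metadata: unknown }
) => Promise<void>

const composeToolAfter =
  (...handlers: ToolAfterHandler[]): ToolAfterHandler =>
  async (input, output) => {
    for (const handler of handlers) {
      await handler(input, output)
    }
  }

// Usage (hypothetical):
// "tool.execute.after": composeToolAfter(
//   contextWindowMonitor["tool.execute.after"],
//   someOtherHook["tool.execute.after"],
// ),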