diff --git a/src/hooks/anthropic-auto-compact/executor.ts b/src/hooks/anthropic-auto-compact/executor.ts
new file mode 100644
index 0000000..eb97d97
--- /dev/null
+++ b/src/hooks/anthropic-auto-compact/executor.ts
@@ -0,0 +1,74 @@
+import type { AutoCompactState } from "./types"
+
+type Client = {
+  session: {
+    messages: (opts: { path: { id: string }; query?: { directory?: string } }) => Promise<unknown>
+    summarize: (opts: {
+      path: { id: string }
+      body: { providerID: string; modelID: string }
+      query: { directory: string }
+    }) => Promise<unknown>
+  }
+  tui: {
+    submitPrompt: (opts: { query: { directory: string } }) => Promise<unknown>
+  }
+}
+
+export async function getLastAssistant(
+  sessionID: string,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  client: any,
+  directory: string
+): Promise<Record<string, unknown> | null> {
+  try {
+    const resp = await (client as Client).session.messages({
+      path: { id: sessionID },
+      query: { directory },
+    })
+
+    const data = (resp as { data?: unknown[] }).data
+    if (!Array.isArray(data)) return null
+
+    const reversed = [...data].reverse()
+    const last = reversed.find((m) => {
+      const msg = m as Record<string, unknown>
+      const info = msg.info as Record<string, unknown> | undefined
+      return info?.role === "assistant"
+    })
+    if (!last) return null
+    return (last as { info?: Record<string, unknown> }).info ?? null
+  } catch {
+    return null
+  }
+}
+
+export async function executeCompact(
+  sessionID: string,
+  msg: Record<string, unknown>,
+  autoCompactState: AutoCompactState,
+  // eslint-disable-next-line @typescript-eslint/no-explicit-any
+  client: any,
+  directory: string
+): Promise<void> {
+  try {
+    const providerID = msg.providerID as string | undefined
+    const modelID = msg.modelID as string | undefined
+
+    if (providerID && modelID) {
+      await (client as Client).session.summarize({
+        path: { id: sessionID },
+        body: { providerID, modelID },
+        query: { directory },
+      })
+
+      setTimeout(async () => {
+        try {
+          await (client as Client).tui.submitPrompt({ query: { directory } })
+        } catch {}
+      }, 500)
+    }
+
+    autoCompactState.pendingCompact.delete(sessionID)
+    autoCompactState.errorDataBySession.delete(sessionID)
+  } catch {}
+}
diff --git a/src/hooks/anthropic-auto-compact/index.ts b/src/hooks/anthropic-auto-compact/index.ts
new file mode 100644
index 0000000..71f98d5
--- /dev/null
+++ b/src/hooks/anthropic-auto-compact/index.ts
@@ -0,0 +1,123 @@
+import type { PluginInput } from "@opencode-ai/plugin"
+import type { AutoCompactState, ParsedTokenLimitError } from "./types"
+import { parseAnthropicTokenLimitError } from "./parser"
+import { executeCompact, getLastAssistant } from "./executor"
+
+function createAutoCompactState(): AutoCompactState {
+  return {
+    pendingCompact: new Set(),
+    errorDataBySession: new Map(),
+  }
+}
+
+export function createAnthropicAutoCompactHook(ctx: PluginInput) {
+  const autoCompactState = createAutoCompactState()
+
+  const eventHandler = async ({ event }: { event: { type: string; properties?: unknown } }) => {
+    const props = event.properties as Record<string, unknown> | undefined
+
+    if (event.type === "session.deleted") {
+      const sessionInfo = props?.info as { id?: string } | undefined
+      if (sessionInfo?.id) {
+        autoCompactState.pendingCompact.delete(sessionInfo.id)
+        autoCompactState.errorDataBySession.delete(sessionInfo.id)
+      }
+      return
+    }
+
+    if (event.type === "session.error") {
+      const sessionID = props?.sessionID as string | undefined
+      if (!sessionID) return
+
+      const parsed = parseAnthropicTokenLimitError(props?.error)
+      if (parsed) {
+        autoCompactState.pendingCompact.add(sessionID)
+        autoCompactState.errorDataBySession.set(sessionID, parsed)
+      }
+      return
+    }
+
+    if (event.type === "message.updated") {
+      const info = props?.info as Record<string, unknown> | undefined
+      const sessionID = info?.sessionID as string | undefined
+
+      if (sessionID && info?.role === "assistant" && info.error) {
+        const parsed = parseAnthropicTokenLimitError(info.error)
+        if (parsed) {
+          parsed.providerID = info.providerID as string | undefined
+          parsed.modelID = info.modelID as string | undefined
+          autoCompactState.pendingCompact.add(sessionID)
+          autoCompactState.errorDataBySession.set(sessionID, parsed)
+        }
+      }
+      return
+    }
+
+    if (event.type === "session.idle") {
+      const sessionID = props?.sessionID as string | undefined
+      if (!sessionID) return
+
+      if (!autoCompactState.pendingCompact.has(sessionID)) return
+
+      const errorData = autoCompactState.errorDataBySession.get(sessionID)
+      if (errorData?.providerID && errorData?.modelID) {
+        await ctx.client.tui
+          .showToast({
+            body: {
+              title: "Auto Compact",
+              message: "Token limit exceeded. Summarizing session...",
+              variant: "warning" as const,
+              duration: 3000,
+            },
+          })
+          .catch(() => {})
+
+        await executeCompact(
+          sessionID,
+          { providerID: errorData.providerID, modelID: errorData.modelID },
+          autoCompactState,
+          ctx.client,
+          ctx.directory
+        )
+        return
+      }
+
+      const lastAssistant = await getLastAssistant(sessionID, ctx.client, ctx.directory)
+      if (!lastAssistant) {
+        autoCompactState.pendingCompact.delete(sessionID)
+        return
+      }
+
+      if (lastAssistant.summary === true) {
+        autoCompactState.pendingCompact.delete(sessionID)
+        return
+      }
+
+      if (!lastAssistant.modelID || !lastAssistant.providerID) {
+        autoCompactState.pendingCompact.delete(sessionID)
+        return
+      }
+
+      await ctx.client.tui
+        .showToast({
+          body: {
+            title: "Auto Compact",
+            message: "Token limit exceeded. Summarizing session...",
+            variant: "warning" as const,
+            duration: 3000,
+          },
+        })
+        .catch(() => {})
+
+      await executeCompact(sessionID, lastAssistant, autoCompactState, ctx.client, ctx.directory)
+    }
+  }
+
+  return {
+    event: eventHandler,
+  }
+}
+
+export type { AutoCompactState, ParsedTokenLimitError } from "./types"
+export { parseAnthropicTokenLimitError } from "./parser"
+export { executeCompact, getLastAssistant } from "./executor"
diff --git a/src/hooks/anthropic-auto-compact/parser.ts b/src/hooks/anthropic-auto-compact/parser.ts
new file mode 100644
index 0000000..68f18e6
--- /dev/null
+++ b/src/hooks/anthropic-auto-compact/parser.ts
@@ -0,0 +1,154 @@
+import type { ParsedTokenLimitError } from "./types"
+
+interface AnthropicErrorData {
+  type: "error"
+  error: {
+    type: string
+    message: string
+  }
+  request_id?: string
+}
+
+const TOKEN_LIMIT_PATTERNS = [
+  /(\d+)\s*tokens?\s*>\s*(\d+)\s*maximum/i,
+  /prompt.*?(\d+).*?tokens.*?exceeds.*?(\d+)/i,
+  /(\d+).*?tokens.*?limit.*?(\d+)/i,
+  /context.*?length.*?(\d+).*?maximum.*?(\d+)/i,
+  /max.*?context.*?(\d+).*?but.*?(\d+)/i,
+]
+
+const TOKEN_LIMIT_KEYWORDS = [
+  "prompt is too long",
+  "is too long",
+  "context_length_exceeded",
+  "max_tokens",
+  "token limit",
+  "context length",
+  "too many tokens",
+]
+
+function extractTokensFromMessage(message: string): { current: number; max: number } | null {
+  for (const pattern of TOKEN_LIMIT_PATTERNS) {
+    const match = message.match(pattern)
+    if (match) {
+      const num1 = parseInt(match[1], 10)
+      const num2 = parseInt(match[2], 10)
+      return num1 > num2 ? { current: num1, max: num2 } : { current: num2, max: num1 }
+    }
+  }
+  return null
+}
+
+function isTokenLimitError(text: string): boolean {
+  const lower = text.toLowerCase()
+  return TOKEN_LIMIT_KEYWORDS.some((kw) => lower.includes(kw.toLowerCase()))
+}
+
+export function parseAnthropicTokenLimitError(err: unknown): ParsedTokenLimitError | null {
+  if (typeof err === "string") {
+    if (isTokenLimitError(err)) {
+      const tokens = extractTokensFromMessage(err)
+      return {
+        currentTokens: tokens?.current ?? 0,
+        maxTokens: tokens?.max ?? 0,
+        errorType: "token_limit_exceeded_string",
+      }
+    }
+    return null
+  }
+
+  if (!err || typeof err !== "object") return null
+
+  const errObj = err as Record<string, unknown>
+
+  const dataObj = errObj.data as Record<string, unknown> | undefined
+  const responseBody = dataObj?.responseBody
+  const errorMessage = errObj.message as string | undefined
+  const errorData = errObj.error as Record<string, unknown> | undefined
+  const nestedError = errorData?.error as Record<string, unknown> | undefined
+
+  const textSources: string[] = []
+
+  if (typeof responseBody === "string") textSources.push(responseBody)
+  if (typeof errorMessage === "string") textSources.push(errorMessage)
+  if (typeof errorData?.message === "string") textSources.push(errorData.message as string)
+  if (typeof errObj.body === "string") textSources.push(errObj.body as string)
+  if (typeof errObj.details === "string") textSources.push(errObj.details as string)
+  if (typeof errObj.reason === "string") textSources.push(errObj.reason as string)
+  if (typeof errObj.description === "string") textSources.push(errObj.description as string)
+  if (typeof nestedError?.message === "string") textSources.push(nestedError.message as string)
+  if (typeof dataObj?.message === "string") textSources.push(dataObj.message as string)
+  if (typeof dataObj?.error === "string") textSources.push(dataObj.error as string)
+
+  if (textSources.length === 0) {
+    try {
+      const jsonStr = JSON.stringify(errObj)
+      if (isTokenLimitError(jsonStr)) {
+        textSources.push(jsonStr)
+      }
+    } catch {}
+  }
+
+  const combinedText = textSources.join(" ")
+  if (!isTokenLimitError(combinedText)) return null
+
+  if (typeof responseBody === "string") {
+    try {
+      const jsonPatterns = [
+        /data:\s*(\{[\s\S]*?\})\s*$/m,
+        /(\{"type"\s*:\s*"error"[\s\S]*?\})/,
+        /(\{[\s\S]*?"error"[\s\S]*?\})/,
+      ]
+
+      for (const pattern of jsonPatterns) {
+        const dataMatch = responseBody.match(pattern)
+        if (dataMatch) {
+          try {
+            const jsonData: AnthropicErrorData = JSON.parse(dataMatch[1])
+            const message = jsonData.error?.message || ""
+            const tokens = extractTokensFromMessage(message)
+
+            if (tokens) {
+              return {
+                currentTokens: tokens.current,
+                maxTokens: tokens.max,
+                requestId: jsonData.request_id,
+                errorType: jsonData.error?.type || "token_limit_exceeded",
+              }
+            }
+          } catch {}
+        }
+      }
+
+      const bedrockJson = JSON.parse(responseBody)
+      if (typeof bedrockJson.message === "string" && isTokenLimitError(bedrockJson.message)) {
+        return {
+          currentTokens: 0,
+          maxTokens: 0,
+          errorType: "bedrock_input_too_long",
+        }
+      }
+    } catch {}
+  }
+
+  for (const text of textSources) {
+    const tokens = extractTokensFromMessage(text)
+    if (tokens) {
+      return {
+        currentTokens: tokens.current,
+        maxTokens: tokens.max,
+        errorType: "token_limit_exceeded",
+      }
+    }
+  }
+
+  if (isTokenLimitError(combinedText)) {
+    return {
+      currentTokens: 0,
+      maxTokens: 0,
+      errorType: "token_limit_exceeded_unknown",
+    }
+  }
+
+  return null
+}
diff --git a/src/hooks/anthropic-auto-compact/types.ts b/src/hooks/anthropic-auto-compact/types.ts
new file mode 100644
index 0000000..a15f052
--- /dev/null
+++ b/src/hooks/anthropic-auto-compact/types.ts
@@ -0,0 +1,13 @@
+export interface ParsedTokenLimitError {
+  currentTokens: number
+  maxTokens: number
+  requestId?: string
+  errorType: string
+  providerID?: string
+  modelID?: string
+}
+
+export interface AutoCompactState {
+  pendingCompact: Set<string>
+  errorDataBySession: Map<string, ParsedTokenLimitError>
+}
diff --git a/src/hooks/index.ts b/src/hooks/index.ts
index 0180d8c..f17970d 100644
--- a/src/hooks/index.ts
+++ b/src/hooks/index.ts
@@ -4,6 +4,6 @@ export { createSessionNotification } from "./session-notification";
 export { createSessionRecoveryHook } from "./session-recovery";
 export { createCommentCheckerHooks } from "./comment-checker";
 export { createGrepOutputTruncatorHook } from "./grep-output-truncator";
-export { createPulseMonitorHook } from "./pulse-monitor";
 export { createDirectoryAgentsInjectorHook } from "./directory-agents-injector";
 export { createEmptyTaskResponseDetectorHook } from "./empty-task-response-detector";
+export { createAnthropicAutoCompactHook } from "./anthropic-auto-compact";
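
The diff only wires the new hook into src/hooks/index.ts as an export. The snippet below is a hypothetical usage sketch, not part of this change: it assumes a plugin entry point that receives a PluginInput and returns its hook handlers, which is implied by the ctx: PluginInput parameter and the { event } return shape of createAnthropicAutoCompactHook. The AnthropicAutoCompactPlugin name and the file layout are illustrative only.

import type { PluginInput } from "@opencode-ai/plugin"
import { createAnthropicAutoCompactHook } from "./hooks/anthropic-auto-compact"

// Hypothetical plugin entry point: forwards session.error, message.updated,
// and session.idle events to the auto-compact hook's event handler.
export async function AnthropicAutoCompactPlugin(ctx: PluginInput) {
  const autoCompact = createAnthropicAutoCompactHook(ctx)
  return {
    event: autoCompact.event,
  }
}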