oh-my-opencode-free-fork/src/hooks/anthropic-auto-compact/storage.ts
YeonGyu-Kim 8cf713e149 feat(config): add experimental config for gating unstable features (#110)
* feat(anthropic-auto-compact): add aggressive truncation and empty message recovery

Add truncateUntilTargetTokens method, empty content recovery mechanism, and
emptyContentAttemptBySession tracking for robust message handling.

🤖 GENERATED WITH ASSISTANCE OF [OhMyOpenCode](https://github.com/code-yeongyu/oh-my-opencode)

* feat(session-recovery): add auto-resume and recovery callbacks

Implement ResumeConfig, resumeSession() method, and callback support for
enhanced session recovery and resume functionality.

🤖 GENERATED WITH ASSISTANCE OF [OhMyOpenCode](https://github.com/code-yeongyu/oh-my-opencode)

* feat(config): add experimental config schema for gating unstable features

This adds a new 'experimental' config field to the OhMyOpenCode schema that enables fine-grained control over unstable/experimental features:

- aggressive_truncation: Enables aggressive truncation of stored tool outputs in the anthropic-auto-compact hook, so a session that exceeds the token limit can be cut back under it
- empty_message_recovery: Enables the empty-message recovery mechanism in the anthropic-auto-compact hook, which repairs messages left empty by truncation
- auto_resume: Enables automatic session resume after recovery in the session-recovery hook for a seamless recovery experience

The experimental config is optional and all experimental features are disabled by default, ensuring backward compatibility while letting early adopters opt in to cutting-edge features (a sketch of the opt-in shape follows below).

🤖 Generated with assistance of [OhMyOpenCode](https://github.com/code-yeongyu/oh-my-opencode)
2025-12-19 02:45:59 +09:00
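
A minimal sketch of what opting in might look like, written as a TypeScript object for illustration. The three field names come from the commit message above; the `experimental` nesting, the types, and the surrounding config shape are assumptions, not the actual OhMyOpenCode schema.

// Hypothetical illustration only: field names taken from the commit message,
// everything else assumed.
interface ExperimentalConfig {
  aggressive_truncation?: boolean // aggressive token truncation (anthropic-auto-compact)
  empty_message_recovery?: boolean // empty-message recovery (anthropic-auto-compact)
  auto_resume?: boolean // auto-resume after recovery (session-recovery)
}

const experimental: ExperimentalConfig = {
  aggressive_truncation: true,
  empty_message_recovery: true,
  // auto_resume omitted: experimental features are off by default
}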


import { existsSync, readdirSync, readFileSync, writeFileSync } from "node:fs"
import { homedir } from "node:os"
import { join } from "node:path"
import { xdgData } from "xdg-basedir"
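
/**
 * Storage helpers for the anthropic-auto-compact hook: locate OpenCode's
 * on-disk message/part files for a session and truncate oversized tool
 * outputs so a session that hit the context limit can be recovered.
 */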
let OPENCODE_STORAGE = join(xdgData ?? "", "opencode", "storage")

// Fix for macOS where xdg-basedir points to ~/Library/Application Support
// but OpenCode (cli) uses ~/.local/share
if (process.platform === "darwin" && !existsSync(OPENCODE_STORAGE)) {
  const localShare = join(homedir(), ".local", "share", "opencode", "storage")
  if (existsSync(localShare)) {
    OPENCODE_STORAGE = localShare
  }
}
const MESSAGE_STORAGE = join(OPENCODE_STORAGE, "message")
const PART_STORAGE = join(OPENCODE_STORAGE, "part")
const TRUNCATION_MESSAGE =
  "[TOOL RESULT TRUNCATED - Context limit exceeded. Original output was too large and has been truncated to recover the session. Please re-run this tool if you need the full output.]"
interface StoredToolPart {
  id: string
  sessionID: string
  messageID: string
  type: "tool"
  callID: string
  tool: string
  state: {
    status: "pending" | "running" | "completed" | "error"
    input: Record<string, unknown>
    output?: string
    error?: string
    time?: {
      start: number
      end?: number
      compacted?: number
    }
  }
  truncated?: boolean
  originalSize?: number
}
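
/** Path, identifiers, and output size for a single stored tool result. */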
export interface ToolResultInfo {
  partPath: string
  partId: string
  messageID: string
  toolName: string
  outputSize: number
}
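
/**
 * Locates the message directory for a session: first tries
 * message/<sessionID> directly, then scans each immediate subdirectory of the
 * message store for <subdir>/<sessionID>. Returns an empty string if nothing
 * matches.
 */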
function getMessageDir(sessionID: string): string {
  if (!existsSync(MESSAGE_STORAGE)) return ""
  const directPath = join(MESSAGE_STORAGE, sessionID)
  if (existsSync(directPath)) {
    return directPath
  }
  for (const dir of readdirSync(MESSAGE_STORAGE)) {
    const sessionPath = join(MESSAGE_STORAGE, dir, sessionID)
    if (existsSync(sessionPath)) {
      return sessionPath
    }
  }
  return ""
}
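
/** Returns the message IDs for a session, one per *.json file in its message directory. */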
function getMessageIds(sessionID: string): string[] {
  const messageDir = getMessageDir(sessionID)
  if (!messageDir || !existsSync(messageDir)) return []
  const messageIds: string[] = []
  for (const file of readdirSync(messageDir)) {
    if (!file.endsWith(".json")) continue
    const messageId = file.replace(".json", "")
    messageIds.push(messageId)
  }
  return messageIds
}
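
/**
 * Collects every tool part that still has an untruncated output for the given
 * session and returns them sorted by output size, largest first. Unreadable
 * or malformed part files are skipped.
 */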
export function findToolResultsBySize(sessionID: string): ToolResultInfo[] {
  const messageIds = getMessageIds(sessionID)
  const results: ToolResultInfo[] = []
  for (const messageID of messageIds) {
    const partDir = join(PART_STORAGE, messageID)
    if (!existsSync(partDir)) continue
    for (const file of readdirSync(partDir)) {
      if (!file.endsWith(".json")) continue
      try {
        const partPath = join(partDir, file)
        const content = readFileSync(partPath, "utf-8")
        const part = JSON.parse(content) as StoredToolPart
        if (part.type === "tool" && part.state?.output && !part.truncated) {
          results.push({
            partPath,
            partId: part.id,
            messageID,
            toolName: part.tool,
            outputSize: part.state.output.length,
          })
        }
      } catch {
        continue
      }
    }
  }
  return results.sort((a, b) => b.outputSize - a.outputSize)
}
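
/** Returns the tool result with the largest untruncated output, or null if there is none. */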
export function findLargestToolResult(sessionID: string): ToolResultInfo | null {
  const results = findToolResultsBySize(sessionID)
  return results.length > 0 ? results[0] : null
}
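
/**
 * Rewrites a single part file so its output becomes TRUNCATION_MESSAGE,
 * recording the original output size, the truncated flag, and a compaction
 * timestamp. Returns success: false if the part has no output or the file
 * cannot be read, parsed, or written.
 */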
export function truncateToolResult(partPath: string): {
  success: boolean
  toolName?: string
  originalSize?: number
} {
  try {
    const content = readFileSync(partPath, "utf-8")
    const part = JSON.parse(content) as StoredToolPart
    if (!part.state?.output) {
      return { success: false }
    }
    const originalSize = part.state.output.length
    const toolName = part.tool
    part.truncated = true
    part.originalSize = originalSize
    part.state.output = TRUNCATION_MESSAGE
    if (!part.state.time) {
      part.state.time = { start: Date.now() }
    }
    part.state.time.compacted = Date.now()
    writeFileSync(partPath, JSON.stringify(part, null, 2))
    return { success: true, toolName, originalSize }
  } catch {
    return { success: false }
  }
}
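
/** Sums the output sizes (in characters) of all untruncated tool results for a session. */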
export function getTotalToolOutputSize(sessionID: string): number {
  const results = findToolResultsBySize(sessionID)
  return results.reduce((sum, r) => sum + r.outputSize, 0)
}
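
/** Counts how many part files for a session have already been marked as truncated. */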
export function countTruncatedResults(sessionID: string): number {
  const messageIds = getMessageIds(sessionID)
  let count = 0
  for (const messageID of messageIds) {
    const partDir = join(PART_STORAGE, messageID)
    if (!existsSync(partDir)) continue
    for (const file of readdirSync(partDir)) {
      if (!file.endsWith(".json")) continue
      try {
        const content = readFileSync(join(partDir, file), "utf-8")
        const part = JSON.parse(content)
        if (part.truncated === true) {
          count++
        }
      } catch {
        continue
      }
    }
  }
  return count
}
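
/**
 * Outcome of a truncateUntilTargetTokens pass. Sizes are measured in
 * characters of stored tool output; `sufficient` indicates whether the
 * removed total reached the requested target.
 */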
export interface AggressiveTruncateResult {
  success: boolean
  sufficient: boolean
  truncatedCount: number
  totalBytesRemoved: number
  targetBytesToRemove: number
  truncatedTools: Array<{ toolName: string; originalSize: number }>
}
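
/**
 * Aggressive truncation for context-limit recovery: computes how many
 * characters must be removed to bring currentTokens down to
 * targetRatio * maxTokens (assuming roughly charsPerToken characters per
 * token), then truncates every remaining tool result, largest first, and
 * reports whether the total removed met that target.
 *
 * Illustrative call (values are made up; the real caller is the
 * anthropic-auto-compact hook described in the commit message above):
 *
 *   const result = truncateUntilTargetTokens("ses_example", 210_000, 200_000)
 *   if (!result.sufficient) {
 *     // not enough tool output existed to reach the target - handle separately
 *   }
 */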
export function truncateUntilTargetTokens(
  sessionID: string,
  currentTokens: number,
  maxTokens: number,
  targetRatio: number = 0.8,
  charsPerToken: number = 4
): AggressiveTruncateResult {
  const targetTokens = Math.floor(maxTokens * targetRatio)
  const tokensToReduce = currentTokens - targetTokens
  const charsToReduce = tokensToReduce * charsPerToken
  if (tokensToReduce <= 0) {
    return {
      success: true,
      sufficient: true,
      truncatedCount: 0,
      totalBytesRemoved: 0,
      targetBytesToRemove: 0,
      truncatedTools: [],
    }
  }
  const results = findToolResultsBySize(sessionID)
  if (results.length === 0) {
    return {
      success: false,
      sufficient: false,
      truncatedCount: 0,
      totalBytesRemoved: 0,
      targetBytesToRemove: charsToReduce,
      truncatedTools: [],
    }
  }
  let totalRemoved = 0
  let truncatedCount = 0
  const truncatedTools: Array<{ toolName: string; originalSize: number }> = []
  for (const result of results) {
    const truncateResult = truncateToolResult(result.partPath)
    if (truncateResult.success) {
      truncatedCount++
      const removedSize = truncateResult.originalSize ?? result.outputSize
      totalRemoved += removedSize
      truncatedTools.push({
        toolName: truncateResult.toolName ?? result.toolName,
        originalSize: removedSize,
      })
    }
  }
  const sufficient = totalRemoved >= charsToReduce
  return {
    success: truncatedCount > 0,
    sufficient,
    truncatedCount,
    totalBytesRemoved: totalRemoved,
    targetBytesToRemove: charsToReduce,
    truncatedTools,
  }
}