add OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX to override 32k default (#5679)

Co-authored-by: qio <handsomehust@gmail.com>
This commit is contained in:
Qio
2025-12-18 00:35:43 +08:00
committed by GitHub
parent 204e3bf382
commit 1e4bfbcf6f
4 changed files with 5 additions and 2 deletions

View File

@@ -15,7 +15,7 @@ import { Flag } from "@/flag/flag"
export namespace LLM {
const log = Log.create({ service: "llm" })
-  export const OUTPUT_TOKEN_MAX = 32_000
+  export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000
export type StreamInput = {
user: MessageV2.User

View File

@@ -28,6 +28,7 @@ import { LSP } from "../lsp"
import { ReadTool } from "../tool/read"
import { ListTool } from "../tool/ls"
import { FileTime } from "../file/time"
+import { Flag } from "../flag/flag"
import { ulid } from "ulid"
import { spawn } from "child_process"
import { Command } from "../command"
@@ -48,7 +49,7 @@ globalThis.AI_SDK_LOG_WARNINGS = false
export namespace SessionPrompt {
const log = Log.create({ service: "session.prompt" })
-  export const OUTPUT_TOKEN_MAX = 32_000
+  export const OUTPUT_TOKEN_MAX = Flag.OPENCODE_EXPERIMENTAL_OUTPUT_TOKEN_MAX || 32_000
const state = Instance.state(
() => {