Set promptCacheKey for OpenAI-compatible providers (#4203)

Co-authored-by: GitHub Action <action@github.com>
This commit is contained in:
Shantur Rathore
2025-11-14 17:41:01 +00:00
committed by GitHub
parent 5fc26c958a
commit 986c60353e
2 changed files with 9 additions and 4 deletions

View File

@@ -266,7 +266,7 @@ export namespace SessionPrompt {
: undefined,
topP: agent.topP ?? ProviderTransform.topP(model.providerID, model.modelID),
options: {
...ProviderTransform.options(model.providerID, model.modelID, input.sessionID),
...ProviderTransform.options(model.providerID, model.modelID, model.npm ?? "", input.sessionID),
...model.info.options,
...agent.options,
},
@@ -1819,7 +1819,7 @@ export namespace SessionPrompt {
const small =
(await Provider.getSmallModel(input.providerID)) ?? (await Provider.getModel(input.providerID, input.modelID))
const options = {
...ProviderTransform.options(small.providerID, small.modelID, input.session.id),
...ProviderTransform.options(small.providerID, small.modelID, small.npm ?? "", input.session.id),
...small.info.options,
}
if (small.providerID === "openai" || small.modelID.includes("gpt-5")) {