Commit f45afdc

Update inference options for GPT-5 (#951)

# why

OpenAI decided to change their inference configuration.

# what changed

New GPT-5 family models don't accept `temperature` as an option; instead, they declare this new format:

```
text={
  "format": { "type": "text" },
  "verbosity": "low"
},
reasoning={
  "effort": "minimal"
},
```

# test plan
1 parent b769206 commit f45afdc
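
For context, a minimal sketch of what this request shape looks like from TypeScript, assuming the official `openai` npm package's Responses API; the model name and prompt are placeholders, and this snippet is not part of the commit:

```ts
// Sketch only: GPT-5 drops custom temperature in favor of
// text.verbosity and reasoning.effort on the Responses API.
import OpenAI from "openai";

const client = new OpenAI();

const response = await client.responses.create({
  model: "gpt-5",
  input: "Extract the page title.",
  text: { format: { type: "text" }, verbosity: "low" },
  reasoning: { effort: "minimal" },
});

console.log(response.output_text);
```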

File tree

3 files changed: +22 −3 lines

.changeset/bright-candies-cover.md

Lines changed: 5 additions & 0 deletions

```diff
@@ -0,0 +1,5 @@
+---
+"@browserbasehq/stagehand": patch
+---
+
+Patch GPT-5 new api format
```

lib/inference.ts

Lines changed: 6 additions & 3 deletions

```diff
@@ -69,6 +69,7 @@ export async function extract({
   type MetadataResponse = z.infer<typeof metadataSchema>;

   const isUsingAnthropic = llmClient.type === "anthropic";
+  const isGPT5 = llmClient.modelName.includes("gpt-5"); // TODO: remove this as we update support for gpt-5 configuration options

   const extractCallMessages: ChatMessage[] = [
     buildExtractSystemPrompt(isUsingAnthropic, userProvidedInstructions),
@@ -100,7 +101,7 @@ export async function extract({
       schema,
       name: "Extraction",
     },
-    temperature: 0.1,
+    temperature: isGPT5 ? 1 : 0.1,
     top_p: 1,
     frequency_penalty: 0,
     presence_penalty: 0,
@@ -167,7 +168,7 @@ export async function extract({
       name: "Metadata",
       schema: metadataSchema,
     },
-    temperature: 0.1,
+    temperature: isGPT5 ? 1 : 0.1,
     top_p: 1,
     frequency_penalty: 0,
     presence_penalty: 0,
@@ -254,6 +255,8 @@ export async function observe({
   logInferenceToFile?: boolean;
   fromAct?: boolean;
 }) {
+  const isGPT5 = llmClient.modelName.includes("gpt-5"); // TODO: remove this as we update support for gpt-5 configuration options
+
   const observeSchema = z.object({
     elements: z
       .array(
@@ -321,7 +324,7 @@ export async function observe({
       schema: observeSchema,
       name: "Observation",
     },
-    temperature: 0.1,
+    temperature: isGPT5 ? 1 : 0.1,
     top_p: 1,
     frequency_penalty: 0,
     presence_penalty: 0,
```
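
The diff hard-codes `1`, the only temperature GPT-5 models accept, wherever the old `0.1` default was used. A hypothetical way to express the same guard once instead of at each call site (this helper is not in the commit):

```ts
// Hypothetical helper: GPT-5 family models reject non-default temperatures,
// so fall back to 1 for them and keep the requested value otherwise.
function resolveTemperature(modelName: string, requested: number): number {
  return modelName.includes("gpt-5") ? 1 : requested;
}

// Equivalent to the repeated `isGPT5 ? 1 : 0.1` in the diff above:
const temperature = resolveTemperature(llmClient.modelName, 0.1);
```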

lib/llm/aisdk.ts

Lines changed: 11 additions & 0 deletions

```diff
@@ -158,12 +158,22 @@ export class AISdkClient extends LLMClient {
     });

     let objectResponse: Awaited<ReturnType<typeof generateObject>>;
+    const isGPT5 = this.model.modelId.includes("gpt-5");
     if (options.response_model) {
       try {
         objectResponse = await generateObject({
           model: this.model,
           messages: formattedMessages,
           schema: options.response_model.schema,
+          temperature: options.temperature,
+          providerOptions: isGPT5
+            ? {
+                openai: {
+                  textVerbosity: "low", // Making these the default for gpt-5 for now
+                  reasoningEffort: "minimal",
+                },
+              }
+            : undefined,
         });
       } catch (err) {
         if (NoObjectGeneratedError.isInstance(err)) {
@@ -267,6 +277,7 @@ export class AISdkClient extends LLMClient {
     const textResponse = await generateText({
       model: this.model,
       messages: formattedMessages,
+      temperature: options.temperature,
       tools,
     });
```
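
As a standalone usage example of the provider options introduced above, assuming the Vercel AI SDK (`ai`) with the `@ai-sdk/openai` provider; the prompt is a placeholder and this snippet is not part of the commit:

```ts
import { generateText } from "ai";
import { openai } from "@ai-sdk/openai";

const { text } = await generateText({
  model: openai("gpt-5"),
  prompt: "Describe the visible form fields.",
  providerOptions: {
    openai: {
      // Same defaults the commit applies for GPT-5 models:
      textVerbosity: "low",
      reasoningEffort: "minimal",
    },
  },
});
```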
