diff --git a/app/(chat)/api/chat/route.ts b/app/(chat)/api/chat/route.ts index a9b2428674..391faad83e 100644 --- a/app/(chat)/api/chat/route.ts +++ b/app/(chat)/api/chat/route.ts @@ -13,6 +13,7 @@ import { createResumableStreamContext } from "resumable-stream"; import { auth, type UserType } from "@/app/(auth)/auth"; import { entitlementsByUserType } from "@/lib/ai/entitlements"; import { type RequestHints, systemPrompt } from "@/lib/ai/prompts"; +import { allowedModelIds } from "@/lib/ai/models"; import { getLanguageModel } from "@/lib/ai/providers"; import { createDocument } from "@/lib/ai/tools/create-document"; import { getWeather } from "@/lib/ai/tools/get-weather"; @@ -74,6 +75,10 @@ export async function POST(request: Request) { return new ChatbotError("unauthorized:chat").toResponse(); } + if (!allowedModelIds.has(selectedChatModel)) { + return new ChatbotError("bad_request:api").toResponse(); + } + await checkIpRateLimit(ipAddress(request)); const userType: UserType = session.user.type; @@ -139,8 +144,9 @@ export async function POST(request: Request) { } const isReasoningModel = - selectedChatModel.includes("reasoning") || - selectedChatModel.includes("thinking"); + selectedChatModel.endsWith("-thinking") || + (selectedChatModel.includes("reasoning") && + !selectedChatModel.includes("non-reasoning")); const modelMessages = await convertToModelMessages(uiMessages); @@ -179,7 +185,9 @@ export async function POST(request: Request) { }, }); - dataStream.merge(result.toUIMessageStream({ sendReasoning: true })); + dataStream.merge( + result.toUIMessageStream({ sendReasoning: isReasoningModel }), + ); if (titlePromise) { const title = await titlePromise; diff --git a/components/message.tsx b/components/message.tsx index 5ffe67e75d..bc6d268b53 100644 --- a/components/message.tsx +++ b/components/message.tsx @@ -110,13 +110,14 @@ const PurePreviewMessage = ({ if (type === "reasoning") { const hasContent = part.text?.trim().length > 0; - const isStreaming = "state" 
in part && part.state === "streaming"; - if (hasContent || isStreaming) { + if (hasContent) { + const isStreaming = + "state" in part && part.state === "streaming"; return ( ); } diff --git a/lib/ai/models.ts b/lib/ai/models.ts index 2241611750..fb5cc70725 100644 --- a/lib/ai/models.ts +++ b/lib/ai/models.ts @@ -24,8 +24,8 @@ export const chatModels: ChatModel[] = [ description: "Fast and cost-effective for simple tasks", }, { - id: "openai/gpt-5.2", - name: "GPT-5.2", + id: "openai/gpt-5-mini", + name: "GPT-5 Mini", provider: "openai", - description: "Most capable OpenAI model", + description: "Fast, cost-efficient OpenAI model", }, @@ -65,6 +65,8 @@ export const chatModels: ChatModel[] = [ ]; // Group models by provider for UI +export const allowedModelIds = new Set(chatModels.map((m) => m.id)); + export const modelsByProvider = chatModels.reduce( (acc, model) => { if (!acc[model.provider]) { diff --git a/lib/ai/providers.ts b/lib/ai/providers.ts index 13ba6d3ef6..a93fc8a4b8 100644 --- a/lib/ai/providers.ts +++ b/lib/ai/providers.ts @@ -33,7 +33,8 @@ export function getLanguageModel(modelId: string) { } const isReasoningModel = - modelId.includes("reasoning") || modelId.endsWith("-thinking"); + modelId.endsWith("-thinking") + (modelId.includes("reasoning") && !modelId.includes("non-reasoning")); if (isReasoningModel) { const gatewayModelId = modelId.replace(THINKING_SUFFIX_REGEX, "");