diff --git a/.speakeasy/gen.lock b/.speakeasy/gen.lock
index 5f27b81..c614707 100644
--- a/.speakeasy/gen.lock
+++ b/.speakeasy/gen.lock
@@ -5,8 +5,8 @@ management:
   docVersion: 1.0.0
   speakeasyVersion: 1.660.0
   generationVersion: 2.760.2
-  releaseVersion: 0.1.24
-  configChecksum: 475b25558977e68908a4d0653d872817
+  releaseVersion: 0.1.25
+  configChecksum: ec2472ec2f065c6df73009a241b9f6cb
   repoURL: https://github.com/OpenRouterTeam/typescript-sdk.git
   installationURL: https://github.com/OpenRouterTeam/typescript-sdk
   published: true
diff --git a/.speakeasy/gen.yaml b/.speakeasy/gen.yaml
index 1c0234d..d6a821e 100644
--- a/.speakeasy/gen.yaml
+++ b/.speakeasy/gen.yaml
@@ -30,7 +30,7 @@ generation:
   generateNewTests: true
   skipResponseBodyAssertions: false
 typescript:
-  version: 0.1.24
+  version: 0.1.25
   acceptHeaderEnum: false
   additionalDependencies:
     dependencies: {}
diff --git a/.speakeasy/workflow.lock b/.speakeasy/workflow.lock
index 12f59e2..fd86e10 100644
--- a/.speakeasy/workflow.lock
+++ b/.speakeasy/workflow.lock
@@ -14,7 +14,7 @@ targets:
     sourceRevisionDigest: sha256:ffe0e925561a55a1b403667fe33bb3158e05892ef1e66f56211544c9a890b301
     sourceBlobDigest: sha256:18aa7b22686c2f559af1062fea408a9f80146231027ed1fd62b68df38c71f65d
     codeSamplesNamespace: open-router-chat-completions-api-typescript-code-samples
-    codeSamplesRevisionDigest: sha256:f856e6a616f0d8edab5b1a77e49bfd32584caeb323d4ee7b740c6a7791c222fb
+    codeSamplesRevisionDigest: sha256:e88cd9ad795f165e1caced900c25a1fc13e944c9936ed229e43a1140d6c0b52c
 workflow:
   workflowVersion: 1.0.0
   speakeasyVersion: latest
diff --git a/jsr.json b/jsr.json
index 8178e4c..b0f9a56 100644
--- a/jsr.json
+++ b/jsr.json
@@ -2,7 +2,7 @@
 {
   "name": "@openrouter/sdk",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "exports": {
     ".": "./src/index.ts",
     "./models/errors": "./src/models/errors/index.ts",
diff --git a/package-lock.json b/package-lock.json
index 9361585..79640f3 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "@openrouter/sdk",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@openrouter/sdk",
-      "version": "0.1.24",
+      "version": "0.1.25",
       "license": "Apache-2.0",
       "dependencies": {
         "zod": "^3.25.0 || ^4.0.0"
diff --git a/package.json b/package.json
index 45b2b9d..6ef2d95 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "@openrouter/sdk",
-  "version": "0.1.24",
+  "version": "0.1.25",
   "author": "OpenRouter",
   "description": "The OpenRouter TypeScript SDK is a type-safe toolkit for building AI applications with access to 300+ language models through a unified API.",
   "keywords": [
diff --git a/src/funcs/callModel.ts b/src/funcs/callModel.ts
index b0dafcf..625724f 100644
--- a/src/funcs/callModel.ts
+++ b/src/funcs/callModel.ts
@@ -5,6 +5,110 @@ import * as models from "../models/index.js";
 import { EnhancedTool, MaxToolRounds } from "../lib/tool-types.js";
 import { convertEnhancedToolsToAPIFormat } from "../lib/tool-executor.js";
 
+/**
+ * Input type that accepts both chat-style messages and responses-style input
+ */
+export type CallModelInput =
+  | models.OpenResponsesInput
+  | models.Message[];
+
+/**
+ * Tool type that accepts chat-style, responses-style, or enhanced tools
+ */
+export type CallModelTools =
+  | EnhancedTool[]
+  | models.ToolDefinitionJson[]
+  | models.OpenResponsesRequest["tools"];
+
+/**
+ * Check if input is chat-style messages (Message[])
+ */
+function isChatStyleMessages(input: CallModelInput): input is models.Message[] {
+  if (!Array.isArray(input)) return false;
+  if (input.length === 0) return false;
+
+  const first = input[0] as any;
+  // Chat-style messages have role but no 'type' field at top level
+  // Responses-style items have 'type' field (like 'message', 'function_call', etc.)
+  return first && 'role' in first && !('type' in first);
+}
+
+/**
+ * Check if tools are chat-style (ToolDefinitionJson[])
+ */
+function isChatStyleTools(tools: CallModelTools): tools is models.ToolDefinitionJson[] {
+  if (!Array.isArray(tools)) return false;
+  if (tools.length === 0) return false;
+
+  const first = tools[0] as any;
+  // Chat-style tools have nested 'function' property with 'name' inside
+  // Enhanced tools have 'function' with 'inputSchema'
+  // Responses-style tools have 'name' at top level
+  return first && 'function' in first && first.function && 'name' in first.function && !('inputSchema' in first.function);
+}
+
+/**
+ * Convert chat-style tools to responses-style
+ */
+function convertChatToResponsesTools(tools: models.ToolDefinitionJson[]): models.OpenResponsesRequest["tools"] {
+  return tools.map((tool): models.OpenResponsesRequestToolFunction => ({
+    type: "function",
+    name: tool.function.name,
+    description: tool.function.description ?? null,
+    strict: tool.function.strict ?? null,
+    parameters: tool.function.parameters ?? null,
+  }));
+}
+
+/**
+ * Convert chat-style messages to responses-style input
+ */
+function convertChatToResponsesInput(messages: models.Message[]): models.OpenResponsesInput {
+  return messages.map((msg): models.OpenResponsesEasyInputMessage | models.OpenResponsesFunctionCallOutput => {
+    // Extract extra fields like cache_control
+    const { role, content, ...extraFields } = msg as any;
+
+    if (role === "tool") {
+      const toolMsg = msg as models.ToolResponseMessage;
+      return {
+        type: "function_call_output",
+        callId: toolMsg.toolCallId,
+        output: typeof toolMsg.content === "string" ? toolMsg.content : JSON.stringify(toolMsg.content),
+        ...extraFields,
+      } as models.OpenResponsesFunctionCallOutput;
+    }
+
+    // Handle assistant messages with tool calls
+    if (role === "assistant") {
+      const assistantMsg = msg as models.AssistantMessage;
+      // If it has tool calls, we need to convert them
+      // For now, just convert the content part
+      return {
+        role: "assistant",
+        content: typeof assistantMsg.content === "string"
+          ? assistantMsg.content
+          : assistantMsg.content === null
+            ? ""
+            : JSON.stringify(assistantMsg.content),
+        ...extraFields,
+      } as models.OpenResponsesEasyInputMessage;
+    }
+
+    // System, user, developer messages
+    const convertedContent = typeof content === "string"
+      ? content
+      : content === null || content === undefined
+        ? ""
+        : JSON.stringify(content);
+
+    return {
+      role: role as "user" | "system" | "developer",
+      content: convertedContent,
+      ...extraFields,
+    } as models.OpenResponsesEasyInputMessage;
+  }) as models.OpenResponsesInput;
+}
+
 /**
  * Get a response with multiple consumption patterns
  *
@@ -75,24 +179,46 @@
  */
 export function callModel(
   client: OpenRouterCore,
-  request: Omit<models.OpenResponsesRequest, "tools"> & {
-    tools?: EnhancedTool[] | models.OpenResponsesRequest["tools"];
+  request: Omit<models.OpenResponsesRequest, "tools" | "input"> & {
+    input?: CallModelInput;
+    tools?: CallModelTools;
     maxToolRounds?: MaxToolRounds;
   },
   options?: RequestOptions,
 ): ResponseWrapper {
-  const { tools, maxToolRounds, ...apiRequest } = request;
+  const { tools, maxToolRounds, input, ...restRequest } = request;
+
+  // Convert chat-style messages to responses-style input if needed
+  const convertedInput = input && isChatStyleMessages(input)
+    ? convertChatToResponsesInput(input)
+    : input;
+
+  const apiRequest = {
+    ...restRequest,
+    input: convertedInput,
+  };
 
-  // Separate enhanced tools from API tools
+  // Determine tool type and convert as needed
   let isEnhancedTools = false;
-  if (tools && tools.length > 0) {
+  let isChatTools = false;
+
+  if (tools && Array.isArray(tools) && tools.length > 0) {
     const firstTool = tools[0] as any;
     isEnhancedTools = "function" in firstTool && firstTool.function && "inputSchema" in firstTool.function;
+    isChatTools = !isEnhancedTools && isChatStyleTools(tools);
   }
+
   const enhancedTools = isEnhancedTools ? (tools as EnhancedTool[]) : undefined;
 
-  // Convert enhanced tools to API format if provided, otherwise use tools as-is
-  const apiTools = enhancedTools ? convertEnhancedToolsToAPIFormat(enhancedTools) : (tools as models.OpenResponsesRequest["tools"]);
+  // Convert tools to API format based on their type
+  let apiTools: models.OpenResponsesRequest["tools"];
+  if (enhancedTools) {
+    apiTools = convertEnhancedToolsToAPIFormat(enhancedTools);
+  } else if (isChatTools) {
+    apiTools = convertChatToResponsesTools(tools as models.ToolDefinitionJson[]);
+  } else {
+    apiTools = tools as models.OpenResponsesRequest["tools"];
+  }
 
   // Build the request with converted tools
   const finalRequest: models.OpenResponsesRequest = {
diff --git a/src/lib/config.ts b/src/lib/config.ts
index ad573f5..572f8e8 100644
--- a/src/lib/config.ts
+++ b/src/lib/config.ts
@@ -69,7 +69,7 @@ export function serverURLFromOptions(options: SDKOptions): URL | null {
 export const SDK_METADATA = {
   language: "typescript",
   openapiDocVersion: "1.0.0",
-  sdkVersion: "0.1.24",
+  sdkVersion: "0.1.25",
   genVersion: "2.760.2",
-  userAgent: "speakeasy-sdk/typescript 0.1.24 2.760.2 1.0.0 @openrouter/sdk",
+  userAgent: "speakeasy-sdk/typescript 0.1.25 2.760.2 1.0.0 @openrouter/sdk",
 } as const;
diff --git a/src/sdk/sdk.ts b/src/sdk/sdk.ts
index 68ef8da..0e0566f 100644
--- a/src/sdk/sdk.ts
+++ b/src/sdk/sdk.ts
@@ -94,8 +94,9 @@
 
   // #region sdk-class-body
   callModel(
-    request: Omit<models.OpenResponsesRequest, "tools"> & {
-      tools?: EnhancedTool[] | models.OpenResponsesRequest["tools"];
+    request: Omit<models.OpenResponsesRequest, "tools" | "input"> & {
+      input?: import("../funcs/callModel.js").CallModelInput;
+      tools?: import("../funcs/callModel.js").CallModelTools;
       maxToolRounds?: MaxToolRounds;
     },
     options?: RequestOptions,
diff --git a/tests/e2e/callModel.test.ts b/tests/e2e/callModel.test.ts
index d0af194..17a210e 100644
--- a/tests/e2e/callModel.test.ts
+++ b/tests/e2e/callModel.test.ts
@@ -21,6 +21,151 @@ describe("callModel E2E Tests", () => {
     });
   });
 
+  describe("Chat-style messages support", () => {
+    it("should accept chat-style Message array as input", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.2-1b-instruct",
+        input: [
+          {
+            role: "system",
+            content: "You are a helpful assistant.",
+          },
+          {
+            role: "user",
+            content: "Say 'chat test' and nothing else.",
+          },
+        ],
+      });
+
+      const text = await response.getText();
+
+      expect(text).toBeDefined();
+      expect(typeof text).toBe("string");
+      expect(text.length).toBeGreaterThan(0);
+    });
+
+    it("should handle multi-turn chat-style conversation", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.2-1b-instruct",
+        input: [
+          {
+            role: "user",
+            content: "My favorite color is blue.",
+          },
+          {
+            role: "assistant",
+            content: "That's nice! Blue is a calming color.",
+          },
+          {
+            role: "user",
+            content: "What is my favorite color?",
+          },
+        ],
+      });
+
+      const text = await response.getText();
+
+      expect(text).toBeDefined();
+      expect(text.toLowerCase()).toContain("blue");
+    });
+
+    it("should handle system message in chat-style input", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.2-1b-instruct",
+        input: [
+          {
+            role: "system",
+            content: "Always respond with exactly one word.",
+          },
+          {
+            role: "user",
+            content: "Say hello.",
+          },
+        ],
+      });
+
+      const text = await response.getText();
+
+      expect(text).toBeDefined();
+      expect(typeof text).toBe("string");
+    });
+
+    it("should accept chat-style tools (ToolDefinitionJson)", async () => {
+      const response = client.callModel({
+        model: "qwen/qwen3-vl-8b-instruct",
+        input: [
+          {
+            role: "user",
+            content: "What's the weather in Paris? Use the get_weather tool.",
+          },
+        ],
+        tools: [
+          {
+            type: "function" as const,
+            function: {
+              name: "get_weather",
+              description: "Get weather for a location",
+              parameters: {
+                type: "object",
+                properties: {
+                  location: {
+                    type: "string",
+                    description: "City name",
+                  },
+                },
+                required: ["location"],
+              },
+            },
+          },
+        ],
+      });
+
+      const toolCalls = await response.getToolCalls();
+
+      // Model should call the tool
+      expect(toolCalls.length).toBeGreaterThan(0);
+      expect(toolCalls[0].name).toBe("get_weather");
+      expect(toolCalls[0].arguments).toBeDefined();
+    }, 30000);
+
+    it("should work with chat-style messages and chat-style tools together", async () => {
+      const response = client.callModel({
+        model: "meta-llama/llama-3.1-8b-instruct",
+        input: [
+          {
+            role: "system",
+            content: "You are a helpful assistant. Use tools when needed.",
+          },
+          {
+            role: "user",
+            content: "Get the weather in Tokyo using the weather tool.",
+          },
+        ],
+        tools: [
+          {
+            type: "function" as const,
+            function: {
+              name: "get_weather",
+              description: "Get current weather",
+              parameters: {
+                type: "object",
+                properties: {
+                  city: { type: "string" },
+                },
+                required: ["city"],
+              },
+            },
+          },
+        ],
+      });
+
+      const toolCalls = await response.getToolCalls();
+
+      expect(toolCalls.length).toBeGreaterThan(0);
+      expect(toolCalls[0].name).toBe("get_weather");
+    }, 30000);
+  });
+
   describe("response.text - Text extraction", () => {
     it("should successfully get text from a response", async () => {
       const response = client.callModel({
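Reviewer note (not part of the diff): a minimal usage sketch of the surface this change adds — chat-style `input` messages and chat-completions-style `tools` passed straight to `callModel`, which now converts them to Responses-style input and tools internally. It mirrors the new e2e tests above; the `@openrouter/sdk` import and the `apiKey` client option are assumptions taken from the SDK README rather than shown in this diff.

```typescript
import { OpenRouter } from "@openrouter/sdk";

// Assumption: the client is constructed with an `apiKey` option, as in the SDK README.
const client = new OpenRouter({ apiKey: process.env["OPENROUTER_API_KEY"] });

// Chat-style messages plus a chat-style tool definition, passed directly as
// `input`/`tools`; callModel converts them to the Responses-style format internally.
const response = client.callModel({
  model: "meta-llama/llama-3.2-1b-instruct",
  input: [
    { role: "system", content: "You are a helpful assistant." },
    { role: "user", content: "What's the weather in Paris? Use the get_weather tool." },
  ],
  tools: [
    {
      type: "function" as const,
      function: {
        name: "get_weather",
        description: "Get weather for a location",
        parameters: {
          type: "object",
          properties: { location: { type: "string", description: "City name" } },
          required: ["location"],
        },
      },
    },
  ],
});

// Same consumption patterns as before: getText() for plain text,
// getToolCalls() for any tool calls the model decided to make.
const toolCalls = await response.getToolCalls();
console.log(toolCalls[0]?.name); // expected: "get_weather"
```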