import { isTraceableFunction, traceable } from "../traceable.js";

// Combines the streamed choice deltas that share a single choice index into
// one complete chat completion choice.
function _combineChatCompletionChoices(choices) {
  const reversedChoices = choices.slice().reverse();
  const message = {
    role: "assistant",
    content: "",
  };
  // The role is only set on one delta; take the most recent occurrence.
  for (const c of reversedChoices) {
    if (c.delta.role) {
      message["role"] = c.delta.role;
      break;
    }
  }
  const toolCalls = {};
  for (const c of choices) {
    if (c.delta.content) {
      message.content = message.content.concat(c.delta.content);
    }
    if (c.delta.function_call) {
      if (!message.function_call) {
        message.function_call = { name: "", arguments: "" };
      }
      if (c.delta.function_call.name) {
        message.function_call.name += c.delta.function_call.name;
      }
      if (c.delta.function_call.arguments) {
        message.function_call.arguments += c.delta.function_call.arguments;
      }
    }
    if (c.delta.tool_calls) {
      for (const tool_call of c.delta.tool_calls) {
        // Group chunks by the tool call's own index so that parallel tool
        // calls are aggregated separately.
        if (!toolCalls[tool_call.index]) {
          toolCalls[tool_call.index] = [];
        }
        toolCalls[tool_call.index].push(tool_call);
      }
    }
  }
  if (Object.keys(toolCalls).length > 0) {
    message.tool_calls = [...Array(Object.keys(toolCalls).length)];
    for (const [index, toolCallChunks] of Object.entries(toolCalls)) {
      const idx = parseInt(index, 10);
      message.tool_calls[idx] = {
        index: idx,
        id: toolCallChunks.find((c) => c.id)?.id || null,
        type: toolCallChunks.find((c) => c.type)?.type || null,
      };
      for (const chunk of toolCallChunks) {
        if (chunk.function) {
          if (!message.tool_calls[idx].function) {
            message.tool_calls[idx].function = {
              name: "",
              arguments: "",
            };
          }
          if (chunk.function.name) {
            message.tool_calls[idx].function.name += chunk.function.name;
          }
          if (chunk.function.arguments) {
            message.tool_calls[idx].function.arguments +=
              chunk.function.arguments;
          }
        }
      }
    }
  }
  return {
    index: choices[0].index,
    // Extract the finish_reason string from the last chunk that set one,
    // rather than returning the entire choice object.
    finish_reason: reversedChoices.find((c) => c.finish_reason)?.finish_reason ?? null,
    message: message,
  };
}
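
// Illustrative sketch (hypothetical stream contents): given choice deltas for
// index 0 such as
//   { index: 0, delta: { role: "assistant" } }
//   { index: 0, delta: { content: "Hel" } }
//   { index: 0, delta: { content: "lo" }, finish_reason: "stop" }
// _combineChatCompletionChoices returns
//   {
//     index: 0,
//     finish_reason: "stop",
//     message: { role: "assistant", content: "Hello" },
//   }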
// Aggregates a list of streamed chat completion chunks into a single
// chat-completion-shaped response.
const chatAggregator = (chunks) => {
  if (!chunks || chunks.length === 0) {
    return { choices: [{ message: { role: "assistant", content: "" } }] };
  }
  const choicesByIndex = {};
  for (const chunk of chunks) {
    for (const choice of chunk.choices) {
      if (choicesByIndex[choice.index] === undefined) {
        choicesByIndex[choice.index] = [];
      }
      choicesByIndex[choice.index].push(choice);
    }
  }
  // Base the output on a copy of the last chunk so top-level fields (id,
  // model, usage, ...) reflect the end of the stream, without mutating the
  // caller's chunk.
  const aggregatedOutput = { ...chunks[chunks.length - 1] };
  aggregatedOutput.choices = Object.values(choicesByIndex).map((choices) =>
    _combineChatCompletionChoices(choices));
  return aggregatedOutput;
};
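
// Illustrative note (hypothetical stream): with n = 2 choices streaming in
// parallel, chatAggregator collects two entries in `choicesByIndex` and
// combines each one independently via _combineChatCompletionChoices.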
// Aggregates streamed text completion chunks into a single completion response.
const textAggregator = (allChunks) => {
  if (!allChunks || allChunks.length === 0) {
    return { choices: [{ text: "" }] };
  }
  const allContent = [];
  for (const chunk of allChunks) {
    const content = chunk.choices[0].text;
    if (content != null) {
      allContent.push(content);
    }
  }
  const content = allContent.join("");
  const aggregatedOutput = { ...allChunks[allChunks.length - 1] };
  aggregatedOutput.choices = [
    { ...aggregatedOutput.choices[0], text: content },
  ];
  return aggregatedOutput;
};
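
// Illustrative note (hypothetical chunks): for chunks whose first choices
// carry text "foo", "bar", "baz", textAggregator returns the last chunk with
// choices[0].text replaced by "foobarbaz".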
// Converts the OpenAI `usage` field into LangSmith `usage_metadata` on the
// traced output.
function processChatCompletion(outputs) {
  const chatCompletion = outputs;
  // copy the original object, minus usage
  const result = { ...chatCompletion };
  const usage = chatCompletion.usage;
  if (usage) {
    // `!= null` excludes both null and undefined, so absent details objects
    // don't produce keys holding undefined values.
    const inputTokenDetails = {
      ...(usage.prompt_tokens_details?.audio_tokens != null && {
        audio: usage.prompt_tokens_details?.audio_tokens,
      }),
      ...(usage.prompt_tokens_details?.cached_tokens != null && {
        cache_read: usage.prompt_tokens_details?.cached_tokens,
      }),
    };
    const outputTokenDetails = {
      ...(usage.completion_tokens_details?.audio_tokens != null && {
        audio: usage.completion_tokens_details?.audio_tokens,
      }),
      ...(usage.completion_tokens_details?.reasoning_tokens != null && {
        reasoning: usage.completion_tokens_details?.reasoning_tokens,
      }),
    };
    result.usage_metadata = {
      input_tokens: usage.prompt_tokens ?? 0,
      output_tokens: usage.completion_tokens ?? 0,
      total_tokens: usage.total_tokens ?? 0,
      ...(Object.keys(inputTokenDetails).length > 0 && {
        input_token_details: inputTokenDetails,
      }),
      ...(Object.keys(outputTokenDetails).length > 0 && {
        output_token_details: outputTokenDetails,
      }),
    };
  }
  delete result.usage;
  return result;
}
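
// Illustrative sketch (hypothetical usage payload): an input with
//   usage: { prompt_tokens: 10, completion_tokens: 5, total_tokens: 15,
//            prompt_tokens_details: { cached_tokens: 4 } }
// is returned with `usage` removed and
//   usage_metadata: { input_tokens: 10, output_tokens: 5, total_tokens: 15,
//                     input_token_details: { cache_read: 4 } }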
/**
 * Wraps an OpenAI client's completion methods, enabling automatic LangSmith
 * tracing. Method signatures are unchanged, except that you can pass an
 * optional "langsmithExtra" field within the second parameter.
 * @param openai An OpenAI client instance.
 * @param options LangSmith options.
 * @example
 * ```ts
 * import { OpenAI } from "openai";
 * import { wrapOpenAI } from "langsmith/wrappers/openai";
 *
 * const patchedClient = wrapOpenAI(new OpenAI());
 *
 * const patchedStream = await patchedClient.chat.completions.create(
 *   {
 *     messages: [{ role: "user", content: `Say 'foo'` }],
 *     model: "gpt-4.1-mini",
 *     stream: true,
 *   },
 *   {
 *     langsmithExtra: {
 *       metadata: {
 *         additional_data: "bar",
 *       },
 *     },
 *   },
 * );
 * ```
 */
export const wrapOpenAI = (openai, options) => {
  if (isTraceableFunction(openai.chat.completions.create) ||
    isTraceableFunction(openai.completions.create)) {
    throw new Error("This OpenAI client instance has already been wrapped.");
  }
  // Some internal OpenAI methods call each other, so we need to preserve the
  // original OpenAI methods.
  const tracedOpenAIClient = { ...openai };
  if (openai.beta &&
    openai.beta.chat &&
    openai.beta.chat.completions &&
    typeof openai.beta.chat.completions.parse === "function") {
    tracedOpenAIClient.beta = {
      ...openai.beta,
      chat: {
        ...openai.beta.chat,
        completions: {
          ...openai.beta.chat.completions,
          parse: traceable(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
            name: "ChatOpenAI",
            run_type: "llm",
            aggregator: chatAggregator,
            argsConfigPath: [1, "langsmithExtra"],
            getInvocationParams: (payload) => {
              if (typeof payload !== "object" || payload == null)
                return undefined;
              // Safe to treat the payload as invocation params; the
              // corresponding types are not exported by the SDK.
              const params = payload;
              const ls_stop = (typeof params.stop === "string"
                ? [params.stop]
                : params.stop) ?? undefined;
              return {
                ls_provider: "openai",
                ls_model_type: "chat",
                ls_model_name: params.model,
                ls_max_tokens: params.max_tokens ?? undefined,
                ls_temperature: params.temperature ?? undefined,
                ls_stop,
              };
            },
            ...options,
          }),
        },
      },
    };
  }
  tracedOpenAIClient.chat = {
    ...openai.chat,
    completions: {
      ...openai.chat.completions,
      create: traceable(openai.chat.completions.create.bind(openai.chat.completions), {
        name: "ChatOpenAI",
        run_type: "llm",
        aggregator: chatAggregator,
        argsConfigPath: [1, "langsmithExtra"],
        getInvocationParams: (payload) => {
          if (typeof payload !== "object" || payload == null)
            return undefined;
          // Safe to treat the payload as invocation params; the
          // corresponding types are not exported by the SDK.
          const params = payload;
          const ls_stop = (typeof params.stop === "string"
            ? [params.stop]
            : params.stop) ?? undefined;
          return {
            ls_provider: "openai",
            ls_model_type: "chat",
            ls_model_name: params.model,
            ls_max_tokens: params.max_tokens ?? undefined,
            ls_temperature: params.temperature ?? undefined,
            ls_stop,
          };
        },
        processOutputs: processChatCompletion,
        ...options,
      }),
    },
  };
  tracedOpenAIClient.completions = {
    ...openai.completions,
    create: traceable(openai.completions.create.bind(openai.completions), {
      name: "OpenAI",
      run_type: "llm",
      aggregator: textAggregator,
      argsConfigPath: [1, "langsmithExtra"],
      getInvocationParams: (payload) => {
        if (typeof payload !== "object" || payload == null)
          return undefined;
        // Safe to treat the payload as invocation params; the
        // corresponding types are not exported by the SDK.
        const params = payload;
        const ls_stop = (typeof params.stop === "string"
          ? [params.stop]
          : params.stop) ?? undefined;
        return {
          ls_provider: "openai",
          ls_model_type: "llm",
          ls_model_name: params.model,
          ls_max_tokens: params.max_tokens ?? undefined,
          ls_temperature: params.temperature ?? undefined,
          ls_stop,
        };
      },
      ...options,
    }),
  };
  return tracedOpenAIClient;
};
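
// Usage sketch for the legacy completions endpoint (hypothetical prompt and
// model name; the wrapping mirrors chat.completions.create above):
//
//   const client = wrapOpenAI(new OpenAI());
//   const res = await client.completions.create({
//     model: "gpt-3.5-turbo-instruct",
//     prompt: "Say 'foo'",
//   });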