// openai.js — LangSmith tracing wrappers for the OpenAI client.
  1. import { isTraceableFunction, traceable } from "../traceable.js";
  2. function _combineChatCompletionChoices(choices
  3. // eslint-disable-next-line @typescript-eslint/no-explicit-any
  4. ) {
  5. const reversedChoices = choices.slice().reverse();
  6. // eslint-disable-next-line @typescript-eslint/no-explicit-any
  7. const message = {
  8. role: "assistant",
  9. content: "",
  10. };
  11. for (const c of reversedChoices) {
  12. if (c.delta.role) {
  13. message["role"] = c.delta.role;
  14. break;
  15. }
  16. }
  17. const toolCalls = {};
  18. for (const c of choices) {
  19. if (c.delta.content) {
  20. message.content = message.content.concat(c.delta.content);
  21. }
  22. if (c.delta.function_call) {
  23. if (!message.function_call) {
  24. message.function_call = { name: "", arguments: "" };
  25. }
  26. if (c.delta.function_call.name) {
  27. message.function_call.name += c.delta.function_call.name;
  28. }
  29. if (c.delta.function_call.arguments) {
  30. message.function_call.arguments += c.delta.function_call.arguments;
  31. }
  32. }
  33. if (c.delta.tool_calls) {
  34. for (const tool_call of c.delta.tool_calls) {
  35. if (!toolCalls[c.index]) {
  36. toolCalls[c.index] = [];
  37. }
  38. toolCalls[c.index].push(tool_call);
  39. }
  40. }
  41. }
  42. if (Object.keys(toolCalls).length > 0) {
  43. message.tool_calls = [...Array(Object.keys(toolCalls).length)];
  44. for (const [index, toolCallChunks] of Object.entries(toolCalls)) {
  45. const idx = parseInt(index);
  46. message.tool_calls[idx] = {
  47. index: idx,
  48. id: toolCallChunks.find((c) => c.id)?.id || null,
  49. type: toolCallChunks.find((c) => c.type)?.type || null,
  50. };
  51. for (const chunk of toolCallChunks) {
  52. if (chunk.function) {
  53. if (!message.tool_calls[idx].function) {
  54. message.tool_calls[idx].function = {
  55. name: "",
  56. arguments: "",
  57. };
  58. }
  59. if (chunk.function.name) {
  60. message.tool_calls[idx].function.name += chunk.function.name;
  61. }
  62. if (chunk.function.arguments) {
  63. message.tool_calls[idx].function.arguments +=
  64. chunk.function.arguments;
  65. }
  66. }
  67. }
  68. }
  69. }
  70. return {
  71. index: choices[0].index,
  72. finish_reason: reversedChoices.find((c) => c.finish_reason) || null,
  73. message: message,
  74. };
  75. }
  76. const chatAggregator = (chunks) => {
  77. if (!chunks || chunks.length === 0) {
  78. return { choices: [{ message: { role: "assistant", content: "" } }] };
  79. }
  80. const choicesByIndex = {};
  81. for (const chunk of chunks) {
  82. for (const choice of chunk.choices) {
  83. if (choicesByIndex[choice.index] === undefined) {
  84. choicesByIndex[choice.index] = [];
  85. }
  86. choicesByIndex[choice.index].push(choice);
  87. }
  88. }
  89. const aggregatedOutput = chunks[chunks.length - 1];
  90. aggregatedOutput.choices = Object.values(choicesByIndex).map((choices) => _combineChatCompletionChoices(choices));
  91. return aggregatedOutput;
  92. };
  93. const textAggregator = (allChunks
  94. // eslint-disable-next-line @typescript-eslint/no-explicit-any
  95. ) => {
  96. if (allChunks.length === 0) {
  97. return { choices: [{ text: "" }] };
  98. }
  99. const allContent = [];
  100. for (const chunk of allChunks) {
  101. const content = chunk.choices[0].text;
  102. if (content != null) {
  103. allContent.push(content);
  104. }
  105. }
  106. const content = allContent.join("");
  107. const aggregatedOutput = allChunks[allChunks.length - 1];
  108. aggregatedOutput.choices = [
  109. { ...aggregatedOutput.choices[0], text: content },
  110. ];
  111. return aggregatedOutput;
  112. };
  113. function processChatCompletion(outputs) {
  114. const chatCompletion = outputs;
  115. // copy the original object, minus usage
  116. const result = { ...chatCompletion };
  117. const usage = chatCompletion.usage;
  118. if (usage) {
  119. const inputTokenDetails = {
  120. ...(usage.prompt_tokens_details?.audio_tokens !== null && {
  121. audio: usage.prompt_tokens_details?.audio_tokens,
  122. }),
  123. ...(usage.prompt_tokens_details?.cached_tokens !== null && {
  124. cache_read: usage.prompt_tokens_details?.cached_tokens,
  125. }),
  126. };
  127. const outputTokenDetails = {
  128. ...(usage.completion_tokens_details?.audio_tokens !== null && {
  129. audio: usage.completion_tokens_details?.audio_tokens,
  130. }),
  131. ...(usage.completion_tokens_details?.reasoning_tokens !== null && {
  132. reasoning: usage.completion_tokens_details?.reasoning_tokens,
  133. }),
  134. };
  135. result.usage_metadata = {
  136. input_tokens: usage.prompt_tokens ?? 0,
  137. output_tokens: usage.completion_tokens ?? 0,
  138. total_tokens: usage.total_tokens ?? 0,
  139. ...(Object.keys(inputTokenDetails).length > 0 && {
  140. input_token_details: inputTokenDetails,
  141. }),
  142. ...(Object.keys(outputTokenDetails).length > 0 && {
  143. output_token_details: outputTokenDetails,
  144. }),
  145. };
  146. }
  147. delete result.usage;
  148. return result;
  149. }
  150. /**
  151. * Wraps an OpenAI client's completion methods, enabling automatic LangSmith
  152. * tracing. Method signatures are unchanged, with the exception that you can pass
  153. * an additional and optional "langsmithExtra" field within the second parameter.
  154. * @param openai An OpenAI client instance.
  155. * @param options LangSmith options.
  156. * @example
  157. * ```ts
  158. * import { OpenAI } from "openai";
  159. * import { wrapOpenAI } from "langsmith/wrappers/openai";
  160. *
  161. * const patchedClient = wrapOpenAI(new OpenAI());
  162. *
  163. * const patchedStream = await patchedClient.chat.completions.create(
  164. * {
  165. * messages: [{ role: "user", content: `Say 'foo'` }],
  166. * model: "gpt-4.1-mini",
  167. * stream: true,
  168. * },
  169. * {
  170. * langsmithExtra: {
  171. * metadata: {
  172. * additional_data: "bar",
  173. * },
  174. * },
  175. * },
  176. * );
  177. * ```
  178. */
  179. export const wrapOpenAI = (openai, options) => {
  180. if (isTraceableFunction(openai.chat.completions.create) ||
  181. isTraceableFunction(openai.completions.create)) {
  182. throw new Error("This instance of OpenAI client has been already wrapped once.");
  183. }
  184. // Some internal OpenAI methods call each other, so we need to preserve original
  185. // OpenAI methods.
  186. const tracedOpenAIClient = { ...openai };
  187. if (openai.beta &&
  188. openai.beta.chat &&
  189. openai.beta.chat.completions &&
  190. typeof openai.beta.chat.completions.parse === "function") {
  191. tracedOpenAIClient.beta = {
  192. ...openai.beta,
  193. chat: {
  194. ...openai.beta.chat,
  195. completions: {
  196. ...openai.beta.chat.completions,
  197. parse: traceable(openai.beta.chat.completions.parse.bind(openai.beta.chat.completions), {
  198. name: "ChatOpenAI",
  199. run_type: "llm",
  200. aggregator: chatAggregator,
  201. argsConfigPath: [1, "langsmithExtra"],
  202. getInvocationParams: (payload) => {
  203. if (typeof payload !== "object" || payload == null)
  204. return undefined;
  205. // we can safely do so, as the types are not exported in TSC
  206. const params = payload;
  207. const ls_stop = (typeof params.stop === "string"
  208. ? [params.stop]
  209. : params.stop) ?? undefined;
  210. return {
  211. ls_provider: "openai",
  212. ls_model_type: "chat",
  213. ls_model_name: params.model,
  214. ls_max_tokens: params.max_tokens ?? undefined,
  215. ls_temperature: params.temperature ?? undefined,
  216. ls_stop,
  217. };
  218. },
  219. ...options,
  220. }),
  221. },
  222. },
  223. };
  224. }
  225. tracedOpenAIClient.chat = {
  226. ...openai.chat,
  227. completions: {
  228. ...openai.chat.completions,
  229. create: traceable(openai.chat.completions.create.bind(openai.chat.completions), {
  230. name: "ChatOpenAI",
  231. run_type: "llm",
  232. aggregator: chatAggregator,
  233. argsConfigPath: [1, "langsmithExtra"],
  234. getInvocationParams: (payload) => {
  235. if (typeof payload !== "object" || payload == null)
  236. return undefined;
  237. // we can safely do so, as the types are not exported in TSC
  238. const params = payload;
  239. const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
  240. undefined;
  241. return {
  242. ls_provider: "openai",
  243. ls_model_type: "chat",
  244. ls_model_name: params.model,
  245. ls_max_tokens: params.max_tokens ?? undefined,
  246. ls_temperature: params.temperature ?? undefined,
  247. ls_stop,
  248. };
  249. },
  250. processOutputs: processChatCompletion,
  251. ...options,
  252. }),
  253. },
  254. };
  255. tracedOpenAIClient.completions = {
  256. ...openai.completions,
  257. create: traceable(openai.completions.create.bind(openai.completions), {
  258. name: "OpenAI",
  259. run_type: "llm",
  260. aggregator: textAggregator,
  261. argsConfigPath: [1, "langsmithExtra"],
  262. getInvocationParams: (payload) => {
  263. if (typeof payload !== "object" || payload == null)
  264. return undefined;
  265. // we can safely do so, as the types are not exported in TSC
  266. const params = payload;
  267. const ls_stop = (typeof params.stop === "string" ? [params.stop] : params.stop) ??
  268. undefined;
  269. return {
  270. ls_provider: "openai",
  271. ls_model_type: "llm",
  272. ls_model_name: params.model,
  273. ls_max_tokens: params.max_tokens ?? undefined,
  274. ls_temperature: params.temperature ?? undefined,
  275. ls_stop,
  276. };
  277. },
  278. ...options,
  279. }),
  280. };
  281. return tracedOpenAIClient;
  282. };