// class-chat-completion.ts
  1. export interface TestChatMessage{
  2. role:string
  3. content:string
  4. }
  5. export class TestChatCompletion{
  6. messageList:Array<TestChatMessage>
  7. constructor(messageList:Array<TestChatMessage>){
  8. this.messageList = messageList
  9. }
  10. async createCompletionByStream() {
  11. let token = localStorage.getItem("token");
  12. let bodyJson = {
  13. "token": `Bearer ${token}`,
  14. "messages": this.messageList,
  15. "model": "gpt-3.5-turbo",
  16. "temperature": 0.5,
  17. "presence_penalty": 0,
  18. "frequency_penalty": 0,
  19. "top_p": 1,
  20. "stream":true
  21. };
  22. let response = await fetch("https://test.fmode.cn/api/apig/aigc/gpt/v1/chat/completions", {
  23. "headers": {
  24. "accept": "text/event-stream",
  25. "sec-fetch-dest": "empty",
  26. "sec-fetch-mode": "cors",
  27. "sec-fetch-site": "same-site"
  28. },
  29. "referrer": "https://ai.fmode.cn/",
  30. "referrerPolicy": "strict-origin-when-cross-origin",
  31. "body": JSON.stringify(bodyJson),
  32. "method": "POST",
  33. "mode": "cors",
  34. "credentials": "omit"
  35. });
  36. let messageAiReply = ""
  37. let messageIndex = this.messageList.length
  38. let reader = response.body?.getReader();
  39. if (!reader) {
  40. throw new Error("Failed to get the response reader.");
  41. }
  42. let decoder = new TextDecoder();
  43. let buffer = "";
  44. while (true) {
  45. let { done, value } = await reader.read();
  46. if (done) {
  47. break;
  48. }
  49. buffer += decoder.decode(value);
  50. // Split the buffer by newlines to get individual messages
  51. let messages = buffer.split("\n");
  52. // Process each message
  53. for (let i = 0; i < messages.length - 1; i++) {
  54. let message = messages[i];
  55. // Process the message as needed
  56. /**
  57. * data: {"id":"chatcmpl-y2PLKqPDnwAFJIj2L5aqdH5TWK9Yv","object":"chat.completion.chunk","created":1696770162,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}
  58. * data: {"id":"chatcmpl-y2PLKqPDnwAFJIj2L5aqdH5TWK9Yv","object":"chat.completion.chunk","created":1696770162,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}
  59. * data: [DONE]
  60. */
  61. let dataText = message.replace("data:\ ","")
  62. if(dataText.startsWith("{")){
  63. try{
  64. let dataJson = JSON.parse(dataText)
  65. console.log(dataJson)
  66. messageAiReply += dataJson?.choices?.[0]?.delta?.content || ""
  67. this.messageList[messageIndex] = {
  68. role:"assistant",
  69. content:messageAiReply
  70. }
  71. }catch(err){}
  72. }
  73. if(dataText.startsWith("[")){
  74. console.log(message)
  75. console.log("完成")
  76. this.messageList[messageIndex] = {
  77. role:"assistant",
  78. content:messageAiReply
  79. }
  80. messageAiReply = ""
  81. }
  82. // Parse the message as JSON
  83. // let data = JSON.parse(message);
  84. // Clear the processed message from the buffer
  85. buffer = buffer.slice(message.length + 1);
  86. }
  87. }
  88. }
  89. }