base.js 2.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081
  1. import { insecureHash } from "../utils/hash.js";
  2. import { mapStoredMessageToChatMessage } from "../messages/utils.js";
  3. /**
  4. * This cache key should be consistent across all versions of LangChain.
  5. * It is currently NOT consistent across versions of LangChain.
  6. *
  7. * A huge benefit of having a remote cache (like redis) is that you can
  8. * access the cache from different processes/machines. The allows you to
  9. * separate concerns and scale horizontally.
  10. *
  11. * TODO: Make cache key consistent across versions of LangChain.
  12. */
  13. export const getCacheKey = (...strings) => insecureHash(strings.join("_"));
  14. export function deserializeStoredGeneration(storedGeneration) {
  15. if (storedGeneration.message !== undefined) {
  16. return {
  17. text: storedGeneration.text,
  18. message: mapStoredMessageToChatMessage(storedGeneration.message),
  19. };
  20. }
  21. else {
  22. return { text: storedGeneration.text };
  23. }
  24. }
  25. export function serializeGeneration(generation) {
  26. const serializedValue = {
  27. text: generation.text,
  28. };
  29. if (generation.message !== undefined) {
  30. serializedValue.message = generation.message.toDict();
  31. }
  32. return serializedValue;
  33. }
/**
 * Base class for all caches. All caches should extend this class.
 *
 * NOTE(review): the class body is empty here; subclasses appear expected to
 * implement `lookup(prompt, llmKey)` and `update(prompt, llmKey, value)`
 * (inferred from InMemoryCache in this file) — confirm against the
 * TypeScript declarations.
 */
export class BaseCache {
}
// Module-level Map backing InMemoryCache.global(): every caller of the
// static factory in this process shares this single store.
const GLOBAL_MAP = new Map();
  40. /**
  41. * A cache for storing LLM generations that stores data in memory.
  42. */
  43. export class InMemoryCache extends BaseCache {
  44. constructor(map) {
  45. super();
  46. Object.defineProperty(this, "cache", {
  47. enumerable: true,
  48. configurable: true,
  49. writable: true,
  50. value: void 0
  51. });
  52. this.cache = map ?? new Map();
  53. }
  54. /**
  55. * Retrieves data from the cache using a prompt and an LLM key. If the
  56. * data is not found, it returns null.
  57. * @param prompt The prompt used to find the data.
  58. * @param llmKey The LLM key used to find the data.
  59. * @returns The data corresponding to the prompt and LLM key, or null if not found.
  60. */
  61. lookup(prompt, llmKey) {
  62. return Promise.resolve(this.cache.get(getCacheKey(prompt, llmKey)) ?? null);
  63. }
  64. /**
  65. * Updates the cache with new data using a prompt and an LLM key.
  66. * @param prompt The prompt used to store the data.
  67. * @param llmKey The LLM key used to store the data.
  68. * @param value The data to be stored.
  69. */
  70. async update(prompt, llmKey, value) {
  71. this.cache.set(getCacheKey(prompt, llmKey), value);
  72. }
  73. /**
  74. * Returns a global instance of InMemoryCache using a predefined global
  75. * map as the initial cache.
  76. * @returns A global instance of InMemoryCache.
  77. */
  78. static global() {
  79. return new InMemoryCache(GLOBAL_MAP);
  80. }
  81. }