// base.d.ts — type declarations for LangChain's cache base classes.
import type { Generation } from "../outputs.js";
import { type StoredGeneration } from "../messages/base.js";
  3. /**
  4. * This cache key should be consistent across all versions of LangChain.
  5. * It is currently NOT consistent across versions of LangChain.
  6. *
  7. * A huge benefit of having a remote cache (like redis) is that you can
  8. * access the cache from different processes/machines. The allows you to
  9. * separate concerns and scale horizontally.
  10. *
  11. * TODO: Make cache key consistent across versions of LangChain.
  12. */
  13. export declare const getCacheKey: (...strings: string[]) => string;
  14. export declare function deserializeStoredGeneration(storedGeneration: StoredGeneration): {
  15. text: string;
  16. message: import("../messages/tool.js").ToolMessage | import("../messages/ai.js").AIMessage | import("../messages/chat.js").ChatMessage | import("../messages/function.js").FunctionMessage | import("../messages/human.js").HumanMessage | import("../messages/system.js").SystemMessage;
  17. } | {
  18. text: string;
  19. message?: undefined;
  20. };
  21. export declare function serializeGeneration(generation: Generation): StoredGeneration;
  22. /**
  23. * Base class for all caches. All caches should extend this class.
  24. */
  25. export declare abstract class BaseCache<T = Generation[]> {
  26. abstract lookup(prompt: string, llmKey: string): Promise<T | null>;
  27. abstract update(prompt: string, llmKey: string, value: T): Promise<void>;
  28. }
  29. /**
  30. * A cache for storing LLM generations that stores data in memory.
  31. */
  32. export declare class InMemoryCache<T = Generation[]> extends BaseCache<T> {
  33. private cache;
  34. constructor(map?: Map<string, T>);
  35. /**
  36. * Retrieves data from the cache using a prompt and an LLM key. If the
  37. * data is not found, it returns null.
  38. * @param prompt The prompt used to find the data.
  39. * @param llmKey The LLM key used to find the data.
  40. * @returns The data corresponding to the prompt and LLM key, or null if not found.
  41. */
  42. lookup(prompt: string, llmKey: string): Promise<T | null>;
  43. /**
  44. * Updates the cache with new data using a prompt and an LLM key.
  45. * @param prompt The prompt used to store the data.
  46. * @param llmKey The LLM key used to store the data.
  47. * @param value The data to be stored.
  48. */
  49. update(prompt: string, llmKey: string, value: T): Promise<void>;
  50. /**
  51. * Returns a global instance of InMemoryCache using a predefined global
  52. * map as the initial cache.
  53. * @returns A global instance of InMemoryCache.
  54. */
  55. static global(): InMemoryCache;
  56. }