Memory Store Langchain

How do you configure an external memory store — for instance, PostgresStore — instead of the default InMemoryStore?

You can use any implementation of BaseStore that is backed by external storage — see our official Postgres store in the "Storage" section of the docs.

Here are some more examples from our community: https://python.langchain.com/api_reference/community/storage/langchain_community.storage.mongodb.MongoDBStore.html

Hi @DivyanshJain0001

Do examples like these cover what you need?

1) Basic: use PostgresStore (key-value) outside of a graph

from langgraph.store.postgres import PostgresStore

# Key-value usage outside of any graph: namespace tuple + key + JSON-able value.
with PostgresStore.from_conn_string(DB_URI) as store:
    store.setup()  # run once to create tables/indexes

    # Save the user's preferences under the ("users", "123") namespace.
    store.put(("users", "123"), "prefs", {"theme": "dark"})

    # Read them back; get() returns None when the key is absent.
    prefs = store.get(("users", "123"), "prefs")
    print(prefs.value if prefs else None)  # {'theme': 'dark'}

2) Use PostgresStore inside a LangGraph app (long-term memory across threads)

from langchain.chat_models import init_chat_model
from langchain_core.runnables import RunnableConfig
from langgraph.graph import StateGraph, START, MessagesState
from langgraph.store.base import BaseStore
from langgraph.store.postgres import PostgresStore
from langgraph.checkpoint.postgres import PostgresSaver  # short-term memory (checkpointer)

model = init_chat_model("openai:gpt-4o-mini")

# Keep both connections open for the app's lifetime: the store holds
# cross-thread (long-term) memories, the checkpointer persists per-thread state.
with (
    PostgresStore.from_conn_string(DB_URI) as store,
    PostgresSaver.from_conn_string(DB_URI) as checkpointer,
):
    # store.setup(); checkpointer.setup()  # uncomment on first run

    def chat(state: MessagesState, config: RunnableConfig, *, store: BaseStore):
        """Single chat node: recall memories, optionally save one, then reply.

        The ``store`` keyword argument is injected by LangGraph because the
        graph is compiled with ``store=store`` below.
        """
        user_id = config["configurable"]["user_id"]
        # Namespace memories per user so users never see each other's data.
        ns = ("memories", user_id)

        # recall relevant memories for the current message
        # NOTE(review): a natural-language `query` presumably requires the
        # store's semantic index to be configured (see the pgvector example
        # below) — confirm against your store configuration.
        hits = store.search(ns, query=state["messages"][-1].content, limit=3)
        mems = "\n".join([h.value.get("text", "") for h in hits])
        system = f"User memories:\n{mems}" if mems else "No memories yet."

        # optionally write a memory if user asks
        last = state["messages"][-1].content.lower()
        if "remember:" in last:
            import uuid
            # Everything after "remember:" becomes a single memory entry.
            to_remember = last.split("remember:", 1)[1].strip()
            store.put(ns, str(uuid.uuid4()), {"text": to_remember})

        # Prepend recalled memories as a system message, then generate a reply.
        resp = model.invoke([{"role": "system", "content": system}, *state["messages"]])
        return {"messages": [resp]}

    builder = StateGraph(MessagesState)
    builder.add_node(chat)  # node name defaults to the function name, "chat"
    builder.add_edge(START, "chat")
    graph = builder.compile(checkpointer=checkpointer, store=store)

    # thread_id scopes the checkpointer; user_id scopes the memory namespace.
    cfg = {"configurable": {"thread_id": "t1", "user_id": "u1"}}
    graph.invoke({"messages": [{"role": "user", "content": "remember: I like pizza"}]}, cfg)
    out = graph.invoke({"messages": [{"role": "user", "content": "What do I like?"}]}, cfg)
    out["messages"][-1].pretty_print()

3) Enable semantic search in the store (requires pgvector)

from langchain.embeddings import init_embeddings
from langgraph.store.postgres import PostgresStore

embeddings = init_embeddings("openai:text-embedding-3-small")  # dims=1536

# Supplying `index` enables vector (semantic) search over stored values.
index_config = {
    "dims": 1536,
    "embed": embeddings,
    "fields": ["text"],  # which fields to embed (default is entire object)
}

with PostgresStore.from_conn_string(DB_URI, index=index_config) as store:
    store.setup()
    store.put(("docs",), "1", {"text": "I love pizza"})
    store.put(("docs",), "2", {"text": "I am a plumber"})
    # Natural-language query — matches on meaning, not exact words.
    matches = store.search(("docs",), query="I'm hungry", limit=1)
    print(matches[0].value)

4) Async variant

import asyncio

from langgraph.store.postgres.aio import AsyncPostgresStore


async def main() -> None:
    """Demonstrate the async store: connect, write one item, read it back."""
    async with AsyncPostgresStore.from_conn_string(DB_URI) as store:
        await store.setup()  # run once to create tables/indexes
        await store.aput(("users", "123"), "prefs", {"theme": "dark"})
        item = await store.aget(("users", "123"), "prefs")
        # aget() returns None for a missing key — guard before reading .value
        # (the original printed item.value unconditionally, crashing on None).
        print(item.value if item else None)


if __name__ == "__main__":
    # Guard the entry point so importing this module doesn't open a DB connection.
    asyncio.run(main())