Help with connecting an agent to FastMCP

I am making an agent that can use tools. But when I switched from langgraph.prebuilt to langchain.agents, create_react_agent asked for a prompt, so I changed my code a little, and it stopped working. My code is:

import asyncio
from langchain_mcp_adapters.client import MultiServerMCPClient
import os
from google import genai
from langchain.agents import AgentExecutor, create_react_agent
from langchain_google_genai import ChatGoogleGenerativeAI
from dotenv import load_dotenv
from datetime import datetime
from langchain_core.prompts import PromptTemplate
from langchain_core.messages import AIMessage, HumanMessage

load_dotenv()

api_key = os.getenv("GOOGLE_API_KEY")

llm = ChatGoogleGenerativeAI(
    model= "gemini-2.5-flash",
    temperature=1.0,
    max_retries=2,
    google_api_key=api_key,
)

async def main():
    client = MultiServerMCPClient(
        {
            "GithubTools": {
                "command": "python",
                "args": ["server.py"],
                "transport": "stdio",
            }
        }
    )
    
    tools = await client.get_tools()
    template = '''Answer the following questions as best you can. You have access to the following tools:

{tools}

Use the following format:

Question: the input question you must answer
Thought: you should always think about what to do
Action: the action to take, should be one of [{tool_names}]
Action Input: the input to the action
Observation: the result of the action
... (this Thought/Action/Action Input/Observation can repeat N times)
Thought: I now know the final answer
Final Answer: the final answer to the original input question

Begin!

Question: {input}
Thought:{agent_scratchpad}'''
    prompt = PromptTemplate.from_template(template)

    
    agent = create_react_agent(
        llm,tools, prompt
    )

    while True:
        query = input("You: ")
        response = await agent.invoke(
                    {"messages": [{"role": "user", "content": query}]}
        )
        
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        with open("agents.log", "a", encoding="utf-8") as file:
            file.write(f"[{current_time}] {response}\n")

        for msg in response['messages']:
            if msg.__class__.__name__ == 'AIMessage':
                print(msg.content)

It throws this error:

C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1> uv run client.py
WARNING: All log messages before absl::InitializeLog() is called are written to STDERR
E0000 00:00:1760884078.990770   16656 alts_credentials.cc:93] ALTS creds ignored. Not running on GCP and untrusted ALTS is not enabled.
You: Hi
Traceback (most recent call last):
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\client.py", line 84, in <module>
    asyncio.run(main())
    ~~~~~~~~~~~^^^^^^^^
  File "C:\Users\himan\AppData\Local\Python\pythoncore-3.13-64\Lib\asyncio\runners.py", line 195, in run
    return runner.run(main)
           ~~~~~~~~~~^^^^^^
  File "C:\Users\himan\AppData\Local\Python\pythoncore-3.13-64\Lib\asyncio\runners.py", line 118, in run
    return self._loop.run_until_complete(task)
           ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~^^^^^^
  File "C:\Users\himan\AppData\Local\Python\pythoncore-3.13-64\Lib\asyncio\base_events.py", line 725, in run_until_complete
    return future.result()
           ~~~~~~~~~~~~~^^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\client.py", line 68, in main
    response = await agent.invoke(
                     ~~~~~~~~~~~~^
                {"messages": [{"role": "user", "content": query}]}
                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    )
    ^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 3244, in invoke
    input_ = context.run(step.invoke, input_, config, **kwargs)
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\passthrough.py", line 530, in invoke
    return self._call_with_config(self._invoke, input, config, **kwargs)
           ~~~~~~~~~~~~~~~~~~~~~~^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 2092, in _call_with_config
    context.run(
    ~~~~~~~~~~~^
        call_func_with_variable_args,  # type: ignore[arg-type]
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ...<4 lines>...
        **kwargs,
        ^^^^^^^^^
    ),
    ^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\config.py", line 430, in call_func_with_variable_args
    return func(input, **kwargs)  # type: ignore[call-arg]
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\passthrough.py", line 516, in _invoke
    **self.mapper.invoke(
      ~~~~~~~~~~~~~~~~~~^
        value,
        ^^^^^^
        patch_config(config, callbacks=run_manager.get_child()),
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
        **kwargs,
        ^^^^^^^^^
    ),
    ^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 4001, in invoke
    output = {key: future.result() for key, future in zip(steps, futures)}
                   ~~~~~~~~~~~~~^^
  File "C:\Users\himan\AppData\Local\Python\pythoncore-3.13-64\Lib\concurrent\futures\_base.py", line 449, in result
    return self.__get_result()
           ~~~~~~~~~~~~~~~~~^^
  File "C:\Users\himan\AppData\Local\Python\pythoncore-3.13-64\Lib\concurrent\futures\_base.py", line 401, in __get_result
    raise self._exception
  File "C:\Users\himan\AppData\Local\Python\pythoncore-3.13-64\Lib\concurrent\futures\thread.py", line 59, in run
    result = self.fn(*self.args, **self.kwargs)
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 3985, in _invoke_step
    return context.run(
           ~~~~~~~~~~~^
        step.invoke,
        ^^^^^^^^^^^^
        input_,
        ^^^^^^^
        child_config,
        ^^^^^^^^^^^^^
    )
    ^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 5025, in invoke
    return self._call_with_config(
           ~~~~~~~~~~~~~~~~~~~~~~^
        self._invoke,
        ^^^^^^^^^^^^^
    ...<2 lines>...
        **kwargs,
        ^^^^^^^^^
    )
    ^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 2092, in _call_with_config
    context.run(
    ~~~~~~~~~~~^
        call_func_with_variable_args,  # type: ignore[arg-type]
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
    ...<4 lines>...
        **kwargs,
        ^^^^^^^^^
    ),
    ^
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\config.py", line 430, in call_func_with_variable_args
    return func(input, **kwargs)  # type: ignore[call-arg]
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\base.py", line 4882, in _invoke
    output = call_func_with_variable_args(
        self.func, input_, config, run_manager, **kwargs
    )
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain_core\runnables\config.py", line 430, in call_func_with_variable_args
    return func(input, **kwargs)  # type: ignore[call-arg]
  File "C:\Users\himan\Desktop\AI MCP\Github Reader\v0.1\.venv\Lib\site-packages\langchain\agents\react\agent.py", line 139, in <lambda>
    agent_scratchpad=lambda x: format_log_to_str(x["intermediate_steps"]),
                                                 ~^^^^^^^^^^^^^^^^^^^^^^
KeyError: 'intermediate_steps'

Can someone help me figure out what I am doing wrong?

Hi @Himanshu

what is your client.py?

The code I shared is “client.py”

Does this mean you’re running client.py, which starts a server that runs the same file?

It runs server.py, which has custom tools built with FastMCP (I've put a minimal sketch of such a server after the snippet below). Those tools are obtained through:

client = MultiServerMCPClient(
    {
        "GithubTools": {
            "command": "python",
            "args": ["server.py"],
            "transport": "stdio",
        }
    }
)

tools = await client.get_tools()
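
A minimal sketch of what server.py roughly looks like (the tool here is just a made-up placeholder, and I'm assuming the FastMCP class from the MCP Python SDK; the standalone fastmcp package has the same shape):

from mcp.server.fastmcp import FastMCP

mcp = FastMCP("GithubTools")

@mcp.tool()
def list_repo_files(repo: str) -> str:
    """Placeholder tool: list the files of a repository."""
    return f"Files in {repo}: README.md, pyproject.toml"

if __name__ == "__main__":
    # stdio transport matches "transport": "stdio" in the client config above
    mcp.run(transport="stdio")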

ooo true, sorry, my eyes saw client.py there :smiley:

It is ok. Can you help me fix this?

Let me investigate


I fixed it by shifting to create_agent. I had switched because of the deprecation, but I did not know I was supposed to use create_agent, which is explained at: What's new in v1 - Docs by LangChain
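
(For anyone hitting the same error: as far as I can tell, the KeyError comes from invoking the legacy agent directly. The create_react_agent from langchain.agents returns a runnable that expects an AgentExecutor to supply intermediate_steps and build the agent_scratchpad, so calling it with only a messages dict never provides that key.) A rough sketch of that legacy route, assuming the pre-1.0 langchain.agents API and reusing llm, tools, prompt and query from my original code:

from langchain.agents import AgentExecutor, create_react_agent

agent = create_react_agent(llm, tools, prompt)
executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

# AgentExecutor expects an "input" key (not a "messages" list) and fills in
# intermediate_steps / agent_scratchpad on its own
result = await executor.ainvoke({"input": query})
print(result["output"])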

Here is the code I implemented, if anyone wants to see the fix:

import asyncio
from langchain_mcp_adapters.client import MultiServerMCPClient
import os
from langchain.agents import create_agent
from langchain_google_genai import ChatGoogleGenerativeAI
from dotenv import load_dotenv
from datetime import datetime
from langchain_core.messages import AIMessage
from langgraph.checkpoint.memory import InMemorySaver

load_dotenv()

api_key = os.getenv("GOOGLE_API_KEY")

llm = ChatGoogleGenerativeAI(
    model="gemini-2.5-flash",
    temperature=1.0,
    max_retries=2,
    google_api_key=api_key,
)

async def main():
    client = MultiServerMCPClient(
        {
            "GithubTools": {
                "command": "python",
                "args": ["server.py"],
                "transport": "stdio",
            }
        }
    )
    
    tools = await client.get_tools()

    
    agent = create_agent(
        llm,
        tools,
        system_prompt="You are a helpful assistant. When you call tool and want to give input, you have to tell yourself because it is not visible directly",
        checkpointer=InMemorySaver()
    )

    while True:
        query = input("You: ")
        response = await agent.ainvoke(
            {"messages": [{"role": "user", "content": query}]},
            {"configurable": {"thread_id": "1"}},
        )
        
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        with open("agents.log", "a", encoding="utf-8") as file:
            file.write(f"[{current_time}] {response}\n")

        # Print only the final AI message; Gemini can return content either as a
        # plain string or as a list of parts
        for msg in response['messages'][-1:]:
            if isinstance(msg, AIMessage):
                if isinstance(msg.content, list):
                    for part in msg.content:
                        print(part["text"])
                else:
                    print(msg.content)

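The checkpointer=InMemorySaver() together with the fixed thread_id "1" is what keeps the conversation history across iterations of the while loop; without it, each ainvoke call would start from a fresh state.
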
Thanks for the effort, @pawel-twardziak
