No, I am not using `create_react_agent`.
This is my code:
async def pdf_fill_agent_scratch(
    state: State, config: RunnableConfig
) -> Dict[str, List[AIMessage]]:
    """Call the LLM powering the PDF-filling agent.

    Prepares the system prompt (optionally augmented with the form
    creator's instructions), binds the PDF tools to the model, invokes it
    on the conversation, and returns the response message.

    Args:
        state (State): The current state of the conversation.
        config (RunnableConfig): Configuration for the model run.

    Returns:
        dict: ``{"messages": [...]}`` with the model's response, or
        ``{"error_messages": "..."}`` if an exception occurred.
    """
    try:
        logger.info("Entering pdf_fill_agent_scratch")
        configuration = CombinedConfiguration.from_runnable_config(config)
        configuration_dict = asdict(configuration)  # fixed typo: was "coniguration_dict"

        # Pick the configured model if provided, otherwise a default.
        model_name = configuration.pdf_model
        if model_name:
            llm = load_chat_model(model_name)
        else:
            llm = ChatOpenAI(
                model="gpt-4.1",
                temperature=0,
            )
        model = llm.bind_tools(PDF_FILL_TOOLS)

        # Build the system prompt, appending creator instructions if any.
        system_prompt_text = PDF_FILLING_PROMPTS_OPTIONS
        creator_instructions = state.get("creator_instructions", "")
        if creator_instructions:
            system_prompt_text += (
                "\n\n# FORM FILLING INSTRUCTIONS FROM FORM CREATOR\n"
                "Follow these guidelines to help the user correctly fill out the form:\n"
                f"{creator_instructions}\n"
            )

        # Copy the message list so we never mutate the shared state in place,
        # and tolerate a missing/None "messages" entry.
        messages: list = list(state.get("messages") or [])

        # Surface any manually filled field to the model as a user message.
        updated_form_field = state.get("updated_form_field")
        if updated_form_field:
            field_json = json.dumps(updated_form_field, ensure_ascii=False)
            messages.append(
                {
                    "role": "user",
                    "content": f"The following field has been filled manually: {field_json}",
                }
            )

        # Call the model with the system prompt prepended.
        response = cast(
            AIMessage,
            await model.ainvoke(
                [
                    {"role": "system", "content": system_prompt_text},
                    *messages,
                ],
                config=configuration_dict,
            ),
        )

        # If we are out of steps but the model still wants tools, bail out
        # with an apology instead of looping forever.
        if state["is_last_step"] and response.tool_calls:
            logger.warning(
                "could not find an answer in the specified number of steps."
            )
            return {
                "messages": [
                    AIMessage(
                        id=response.id,
                        content="Sorry, I could not find an answer to your question in the specified number of steps.",
                    )
                ]
            }
        return {"messages": [response]}
    except Exception as e:
        logger.exception("An error occurred in pdf_fill_agent_scratch: %s", e)
        # Fixed typos in the error string ("occcured isnide").
        return {"error_messages": f"Error occurred inside pdf agent node :{e}"}
def route_after_pdf_agent(state: State) -> Literal["__end__", "tools", "error"]:
    """Route after the PDF agent node.

    Returns:
        "error" if error info is present in the state, "tools" if the last
        AI message requested tool calls, otherwise "__end__".

    Raises:
        ValueError: If there are no messages, or the last message is not
        an AIMessage.
    """
    if state.get("error_messages"):
        return "error"
    messages = state.get("messages", [])
    # Guard the empty case explicitly instead of raising a bare IndexError.
    if not messages:
        raise ValueError("Expected at least one message in state, but got none")
    last_message = messages[-1]
    if not isinstance(last_message, AIMessage):
        raise ValueError(
            f"Expected AIMessage in output edges, but got {type(last_message).__name__}"
        )
    if not last_message.tool_calls:
        return "__end__"
    return "tools"
# Assemble the PDF-filling subgraph: agent node, tool executor, and error handler.
builder = StateGraph(State, config_schema=CombinedConfiguration)
builder.add_node("pdf_agent", pdf_fill_agent_scratch)
builder.add_node("tools", ToolNode(PDF_FILL_TOOLS))
builder.add_node("error", validator_node)

# Entry point, conditional routing out of the agent, and the tool loop.
builder.add_edge(START, "pdf_agent")
# Each routing outcome maps to the node of the same name.
builder.add_conditional_edges(
    "pdf_agent",
    route_after_pdf_agent,
    {name: name for name in ("__end__", "tools", "error")},
)
builder.add_edge("tools", "pdf_agent")

graph = builder.compile()
graph.name = "pdf_agent"
async def pdf_node(state: State, config: RunnableConfig) -> Command[Literal["__end__", "error"]]:
    """Run the compiled PDF-filling subgraph and route on its result.

    Args:
        state: Current workflow state, passed through to the subgraph.
        config: Configuration for the subgraph run.

    Returns:
        A Command routing to "error" (carrying workflow logs) if the
        subgraph reported an error, otherwise to "__end__" with the
        subgraph's messages.
    """
    # Fixed log label (was "Code agent") and return annotation (the
    # original omitted "error" even though this node can go there).
    logger.info("PDF agent starting task")
    result = await graph.ainvoke(state, config)
    if "error_messages" in result:
        return Command(
            update={"workflow_logs": result["error_messages"]},
            goto="error",
        )
    return Command(
        update={"messages": result.get("messages", [])},
        goto="__end__",
    )