Simplify the boilerplate

Signed-off-by: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com>
Author: William Fu-Hinthorn
Date: 2025-05-08 13:48:39 -07:00
parent 049ec8952e
commit 6627a2a05f

7 changed files with 52 additions and 74 deletions

.gitignore

@@ -161,3 +161,4 @@ cython_debug/
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
 uv.lock
+.langgraph_api/


@@ -54,3 +54,9 @@ lint.ignore = [
 "tests/*" = ["D", "UP"]
 [tool.ruff.lint.pydocstyle]
 convention = "google"
+
+[dependency-groups]
+dev = [
+    "anyio>=4.7.0",
+    "langgraph-cli[inmem]>=0.2.8",
+]


@@ -1,28 +0,0 @@
-"""Define the configurable parameters for the agent."""
-
-from __future__ import annotations
-
-from dataclasses import dataclass, fields
-from typing import Optional
-
-from langchain_core.runnables import RunnableConfig
-
-
-@dataclass(kw_only=True)
-class Configuration:
-    """The configuration for the agent."""
-
-    # Changeme: Add configurable values here!
-    # these values can be pre-set when you
-    # create assistants (https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/)
-    # and when you invoke the graph
-    my_configurable_param: str = "changeme"
-
-    @classmethod
-    def from_runnable_config(
-        cls, config: Optional[RunnableConfig] = None
-    ) -> Configuration:
-        """Create a Configuration instance from a RunnableConfig object."""
-        configurable = (config.get("configurable") or {}) if config else {}
-        _fields = {f.name for f in fields(cls) if f.init}
-        return cls(**{k: v for k, v in configurable.items() if k in _fields})


@@ -1,38 +1,52 @@
-"""Define a simple chatbot agent.
-
-This agent returns a predefined response without using an actual LLM.
+"""LangGraph single-node graph template.
+
+Returns a predefined response. Replace logic and configuration as needed.
 """
 
-from typing import Any, Dict
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Dict, TypedDict
 
 from langchain_core.runnables import RunnableConfig
 from langgraph.graph import StateGraph
 
-from agent.configuration import Configuration
-from agent.state import State
+
+class Configuration(TypedDict):
+    """Configurable parameters for the agent.
+
+    Set these when creating assistants OR when invoking the graph.
+    See: https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/
+    """
+
+    my_configurable_param: str
+
+
+@dataclass
+class State:
+    """Input state for the agent.
+
+    Defines the initial structure of incoming data.
+    See: https://langchain-ai.github.io/langgraph/concepts/low_level/#state
+    """
+
+    changeme: str = "example"
 
 
 async def my_node(state: State, config: RunnableConfig) -> Dict[str, Any]:
-    """Each node does work."""
-    configuration = Configuration.from_runnable_config(config)
-    # configuration = Configuration.from_runnable_config(config)
-    # You can use runtime configuration to alter the behavior of your
-    # graph.
+    """Example node: processes input and returns output.
+
+    Can use runtime configuration to alter behavior.
+    """
+    configuration = config["configurable"]
     return {
         "changeme": "output from my_node. "
-        f"Configured with {configuration.my_configurable_param}"
+        f'Configured with {configuration.get("my_configurable_param")}'
     }
 
 
-# Define a new graph
-workflow = StateGraph(State, config_schema=Configuration)
-
-# Add the node to the graph
-workflow.add_node("my_node", my_node)
-
-# Set the entrypoint as `call_model`
-workflow.add_edge("__start__", "my_node")
-
-# Compile the workflow into an executable graph
-graph = workflow.compile()
-graph.name = "New Graph"  # This defines the custom name in LangSmith
+# Define the graph
+graph = (
+    StateGraph(State, config_schema=Configuration)
+    .add_node(my_node)
+    .add_edge("__start__", "my_node")
+    .compile(name="New Graph")
+)

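Not part of this commit, but for context: a minimal usage sketch of the simplified graph above. It assumes the template package is importable as `agent` and that runtime values are passed under the standard "configurable" key, as my_node expects.

import asyncio

from agent.graph import graph


async def main() -> None:
    # my_node reads config["configurable"], so pass runtime values there.
    result = await graph.ainvoke(
        {"changeme": "hello"},
        config={"configurable": {"my_configurable_param": "demo"}},
    )
    # Expected: {'changeme': 'output from my_node. Configured with demo'}
    print(result)


asyncio.run(main())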

@@ -1,17 +0,0 @@
-"""Define the state structures for the agent."""
-
-from __future__ import annotations
-
-from dataclasses import dataclass
-
-
-@dataclass
-class State:
-    """Defines the input state for the agent, representing a narrower interface to the outside world.
-
-    This class is used to define the initial state and structure of incoming data.
-    See: https://langchain-ai.github.io/langgraph/concepts/low_level/#state
-    for more information.
-    """
-
-    changeme: str = "example"


@@ -1,11 +1,9 @@
 import pytest
-from langsmith import unit
 
 from agent import graph
 
 
-@pytest.mark.asyncio
-@unit
+@pytest.mark.langsmith
 async def test_agent_simple_passthrough() -> None:
     res = await graph.ainvoke({"changeme": "some_val"})
     assert res is not None


@@ -1,5 +1,9 @@
-from agent.configuration import Configuration
+from langgraph.pregel import Pregel
+
+from agent.graph import graph
 
 
-def test_configuration_empty() -> None:
-    Configuration.from_runnable_config({})
+def test_placeholder() -> None:
+    # TODO: You can add actual unit tests
+    # for your graph and other logic here.
+    assert isinstance(graph, Pregel)
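A sketch of what the TODO above might grow into, not part of this commit. It assumes the compiled graph exposes its registered nodes via `graph.nodes` and keeps the name passed to `.compile(name="New Graph")`.

from langgraph.pregel import Pregel

from agent.graph import graph


def test_graph_shape() -> None:
    # Hypothetical follow-up assertions, not included in the commit.
    assert isinstance(graph, Pregel)
    # add_node(my_node) registers the node under the function's name.
    assert "my_node" in graph.nodes
    # The name given to .compile() is what LangSmith displays.
    assert graph.name == "New Graph"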