Simplify the boilerplate
Signed-off-by: William Fu-Hinthorn <13333726+hinthornw@users.noreply.github.com>

.gitignore (vendored)
@@ -161,3 +161,4 @@ cython_debug/
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
 #.idea/
 uv.lock
+.langgraph_api/

pyproject.toml
@@ -54,3 +54,9 @@ lint.ignore = [
 "tests/*" = ["D", "UP"]
 [tool.ruff.lint.pydocstyle]
 convention = "google"
+
+[dependency-groups]
+dev = [
+    "anyio>=4.7.0",
+    "langgraph-cli[inmem]>=0.2.8",
+]

src/agent/configuration.py (deleted)
@@ -1,28 +0,0 @@
-"""Define the configurable parameters for the agent."""
-
-from __future__ import annotations
-
-from dataclasses import dataclass, fields
-from typing import Optional
-
-from langchain_core.runnables import RunnableConfig
-
-
-@dataclass(kw_only=True)
-class Configuration:
-    """The configuration for the agent."""
-
-    # Changeme: Add configurable values here!
-    # these values can be pre-set when you
-    # create assistants (https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/)
-    # and when you invoke the graph
-    my_configurable_param: str = "changeme"
-
-    @classmethod
-    def from_runnable_config(
-        cls, config: Optional[RunnableConfig] = None
-    ) -> Configuration:
-        """Create a Configuration instance from a RunnableConfig object."""
-        configurable = (config.get("configurable") or {}) if config else {}
-        _fields = {f.name for f in fields(cls) if f.init}
-        return cls(**{k: v for k, v in configurable.items() if k in _fields})

src/agent/graph.py
@@ -1,38 +1,52 @@
-"""Define a simple chatbot agent.
+"""LangGraph single-node graph template.
 
-This agent returns a predefined response without using an actual LLM.
+Returns a predefined response. Replace logic and configuration as needed.
 """
 
-from typing import Any, Dict
+from __future__ import annotations
+
+from dataclasses import dataclass
+from typing import Any, Dict, TypedDict
 
 from langchain_core.runnables import RunnableConfig
 from langgraph.graph import StateGraph
 
-from agent.configuration import Configuration
-from agent.state import State
+
+class Configuration(TypedDict):
+    """Configurable parameters for the agent.
+
+    Set these when creating assistants OR when invoking the graph.
+    See: https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/
+    """
+    my_configurable_param: str
+
+
+@dataclass
+class State:
+    """Input state for the agent.
+
+    Defines the initial structure of incoming data.
+    See: https://langchain-ai.github.io/langgraph/concepts/low_level/#state
+    """
+    changeme: str = "example"
 
 
 async def my_node(state: State, config: RunnableConfig) -> Dict[str, Any]:
-    """Each node does work."""
-    configuration = Configuration.from_runnable_config(config)
-    # configuration = Configuration.from_runnable_config(config)
-    # You can use runtime configuration to alter the behavior of your
-    # graph.
+    """Example node: processes input and returns output.
+
+    Can use runtime configuration to alter behavior.
+    """
+    configuration = config["configurable"]
     return {
         "changeme": "output from my_node. "
-        f"Configured with {configuration.my_configurable_param}"
+        f'Configured with {configuration.get("my_configurable_param")}'
     }
 
 
-# Define a new graph
-workflow = StateGraph(State, config_schema=Configuration)
-
-# Add the node to the graph
-workflow.add_node("my_node", my_node)
-
-# Set the entrypoint as `call_model`
-workflow.add_edge("__start__", "my_node")
-
-# Compile the workflow into an executable graph
-graph = workflow.compile()
-graph.name = "New Graph"  # This defines the custom name in LangSmith
+# Define the graph
+graph = (
+    StateGraph(State, config_schema=Configuration)
+    .add_node(my_node)
+    .add_edge("__start__", "my_node")
+    .compile(name="New Graph")
+)

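Usage sketch (not part of this commit): with the template's agent package importable, the rewritten graph can be invoked directly, and the configurable value reaches my_node via config["configurable"]. The input key "changeme" and the parameter name come from the diff above; the demo value and the main() wrapper are illustrative only.

# Minimal invocation sketch; assumes the `agent` package from this template is installed.
import asyncio

from agent.graph import graph


async def main() -> None:
    result = await graph.ainvoke(
        {"changeme": "some input"},
        config={"configurable": {"my_configurable_param": "demo-value"}},
    )
    # my_node echoes the configured value back into the state, e.g.
    # "output from my_node. Configured with demo-value"
    print(result["changeme"])


asyncio.run(main())
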
src/agent/state.py (deleted)
@@ -1,17 +0,0 @@
-"""Define the state structures for the agent."""
-
-from __future__ import annotations
-
-from dataclasses import dataclass
-
-
-@dataclass
-class State:
-    """Defines the input state for the agent, representing a narrower interface to the outside world.
-
-    This class is used to define the initial state and structure of incoming data.
-    See: https://langchain-ai.github.io/langgraph/concepts/low_level/#state
-    for more information.
-    """
-
-    changeme: str = "example"

tests/integration_tests/test_graph.py
@@ -1,11 +1,9 @@
 import pytest
-from langsmith import unit
 
 from agent import graph
 
 
-@pytest.mark.asyncio
-@unit
+@pytest.mark.langsmith
 async def test_agent_simple_passthrough() -> None:
     res = await graph.ainvoke({"changeme": "some_val"})
     assert res is not None

tests/unit_tests/test_configuration.py
@@ -1,5 +1,9 @@
-from agent.configuration import Configuration
+from langgraph.pregel import Pregel
+
+from agent.graph import graph
 
 
-def test_configuration_empty() -> None:
-    Configuration.from_runnable_config({})
+def test_placeholder() -> None:
+    # TODO: You can add actual unit tests
+    # for your graph and other logic here.
+    assert isinstance(graph, Pregel)

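A possible follow-up unit test (not part of this commit, sketched under the same assumptions as the usage example above): it runs the compiled graph with an explicit configurable value and asserts that the value is echoed into "changeme". The test name is made up for illustration.

# Hypothetical extra unit test; not in the diff above.
import asyncio

from agent.graph import graph


def test_configurable_param_is_echoed() -> None:
    result = asyncio.run(
        graph.ainvoke(
            {"changeme": "input"},
            config={"configurable": {"my_configurable_param": "demo"}},
        )
    )
    # my_node formats the configured value into its output string.
    assert "demo" in result["changeme"]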