Merge remote-tracking branch 'origin/main' into sr/template-update
@@ -45,7 +45,7 @@ For more information on getting started with LangGraph Server, [see here](https:
 
 ## How to customize
 
-1. **Define configurable parameters**: Modify the `Configuration` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on configurations in LangGraph, [see here](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=configuration#configuration).
+1. **Define runtime context**: Modify the `Context` class in the `graph.py` file to expose the arguments you want to configure per assistant. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on runtime context in LangGraph, [see here](https://langchain-ai.github.io/langgraph/agents/context/?h=context#static-runtime-context).
 
 2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
 
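To make the new item 1 concrete: customizing the template now means adding fields to the `Context` TypedDict and reading them through `runtime.context` inside nodes, rather than through `config["configurable"]`. A minimal sketch follows; the `system_prompt` field and its default value are hypothetical additions for illustration, while `Runtime`, `Context`, and `my_configurable_param` come from the graph.py hunks further down.

```python
from dataclasses import dataclass
from typing import Any, Dict, TypedDict

from langgraph.runtime import Runtime


class Context(TypedDict):
    """Per-assistant / per-invocation parameters for the agent."""

    my_configurable_param: str  # field already present in the template
    system_prompt: str          # hypothetical extra field added for this sketch


@dataclass
class State:
    changeme: str = "example"


async def call_model(state: State, runtime: Runtime[Context]) -> Dict[str, Any]:
    # Values arrive through runtime.context instead of config["configurable"].
    prompt = runtime.context.get("system_prompt", "You are a helpful assistant.")
    return {"changeme": f"{state.changeme} (prompt: {prompt})"}
```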
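Item 2 is where most customization happens. A hypothetical sketch of adding a second node after `call_model`, reusing the builder pattern already in graph.py; the `post_process` node, its logic, and the `agent.graph` import path are illustrative assumptions, not part of the template.

```python
from typing import Any, Dict

from langgraph.graph import StateGraph

# State, Context, and call_model as defined in src/agent/graph.py
# (import path assumed; adjust to your package layout).
from agent.graph import Context, State, call_model


async def post_process(state: State) -> Dict[str, Any]:
    """Hypothetical extra node that rewrites call_model's output."""
    return {"changeme": state.changeme.upper()}


graph = (
    StateGraph(State, context_schema=Context)
    .add_node(call_model)
    .add_node(post_process)
    .add_edge("__start__", "call_model")
    .add_edge("call_model", "post_process")  # route call_model's output onward
    .compile(name="New Graph")
)
```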
@@ -8,12 +8,12 @@ from __future__ import annotations
 from dataclasses import dataclass
 from typing import Any, Dict, TypedDict
 
-from langchain_core.runnables import RunnableConfig
 from langgraph.graph import StateGraph
+from langgraph.runtime import Runtime
 
 
-class Configuration(TypedDict):
-    """Configurable parameters for the agent.
+class Context(TypedDict):
+    """Context parameters for the agent.
 
     Set these when creating assistants OR when invoking the graph.
     See: https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/
@@ -33,21 +33,20 @@ class State:
     changeme: str = "example"
 
 
-async def call_model(state: State, config: RunnableConfig) -> Dict[str, Any]:
+async def call_model(state: State, runtime: Runtime[Context]) -> Dict[str, Any]:
     """Process input and returns output.
 
-    Can use runtime configuration to alter behavior.
+    Can use runtime context to alter behavior.
     """
-    configuration = config["configurable"]
     return {
         "changeme": "output from call_model. "
-        f'Configured with {configuration.get("my_configurable_param")}'
+        f"Configured with {runtime.context.get('my_configurable_param')}"
     }
 
 
 # Define the graph
 graph = (
-    StateGraph(State, config_schema=Configuration)
+    StateGraph(State, context_schema=Context)
     .add_node(call_model)
     .add_edge("__start__", "call_model")
     .compile(name="New Graph")
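For callers of this template, the change above also shifts how the value is supplied: previously it rode in `config={"configurable": {...}}`, now it is passed with the `context` keyword and read in nodes from `runtime.context`. A minimal sketch of the new calling convention, assuming a langgraph version that supports `context_schema` (0.6+) and that the package is importable as `agent`:

```python
import asyncio

from agent.graph import graph  # compiled graph from src/agent/graph.py (path assumed)


async def main() -> None:
    # Old pattern: config={"configurable": {"my_configurable_param": "example"}}
    # New pattern: the same value is passed as typed runtime context.
    result = await graph.ainvoke(
        {"changeme": "hello"},
        context={"my_configurable_param": "example"},
    )
    print(result["changeme"])  # -> "output from call_model. Configured with example"


asyncio.run(main())
```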