diff --git a/.env.example b/.env.example
index 9157422..ec66fc0 100644
--- a/.env.example
+++ b/.env.example
@@ -1,9 +1,4 @@
# To separate your traces from other application
LANGSMITH_PROJECT=new-agent
-# The following depend on your selected configuration
-
-## LLM choice:
-ANTHROPIC_API_KEY=....
-FIREWORKS_API_KEY=...
-OPENAI_API_KEY=...
+# Add API keys for connecting to LLM providers, data sources, and other integrations here
diff --git a/README.md b/README.md
index 949cf2c..18ebe0a 100644
--- a/README.md
+++ b/README.md
@@ -2,73 +2,68 @@
[](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/unit-tests.yml)
[](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/integration-tests.yml)
-[](https://langgraph-studio.vercel.app/templates/open?githubUrl=https://github.com/langchain-ai/new-langgraph-project)
-This template demonstrates a simple chatbot implemented using [LangGraph](https://github.com/langchain-ai/langgraph), designed for [LangGraph Studio](https://github.com/langchain-ai/langgraph-studio). The chatbot maintains persistent chat memory, allowing for coherent conversations across multiple interactions.
+This template demonstrates a simple application implemented using [LangGraph](https://github.com/langchain-ai/langgraph), designed for showing how to get started with [LangGraph Server](https://langchain-ai.github.io/langgraph/concepts/langgraph_server/#langgraph-server) and using [LangGraph Studio](https://langchain-ai.github.io/langgraph/concepts/langgraph_studio/), a visual debugging IDE.
-
+
+

+
-The core logic, defined in `src/agent/graph.py`, showcases a straightforward chatbot that responds to user queries while maintaining context from previous messages.
+The core logic, defined in `src/agent/graph.py`, showcases a single-step application that responds with a fixed string and the configuration provided.
-## What it does
-
-The simple chatbot:
-
-1. Takes a user **message** as input
-2. Maintains a history of the conversation
-3. Generates a response based on the current message and conversation history
-4. Updates the conversation history with the new interaction
-
-This template provides a foundation that can be easily customized and extended to create more complex conversational agents.
+You can extend this graph to orchestrate more complex agentic workflows that can be visualized and debugged in LangGraph Studio.
## Getting Started
-Assuming you have already [installed LangGraph Studio](https://github.com/langchain-ai/langgraph-studio?tab=readme-ov-file#download), to set up:
-
-1. Create a `.env` file.
-
-```bash
-cp .env.example .env
-```
-
-2. Define required API keys in your `.env` file.
-
-
-
-3. Customize the code as needed.
-4. Open the folder in LangGraph Studio!
+1. Install dependencies, along with the [LangGraph CLI](https://langchain-ai.github.io/langgraph/concepts/langgraph_cli/), which will be used to run the server.
+
+```bash
+cd path/to/your/app
+pip install -e . "langgraph-cli[inmem]"
+```
+
+2. (Optional) Customize the code and project as needed. Create a `.env` file if you need to use secrets.
+
+```bash
+cp .env.example .env
+```
+
+If you want to enable LangSmith tracing, add your LangSmith API key to the `.env` file.
+
+```text
+# .env
+LANGSMITH_API_KEY=lsv2...
+```
+
+3. Start the LangGraph Server.
+
+```shell
+langgraph dev
+```
+
+For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/).
## How to customize
-1. **Modify the system prompt**: The default system prompt is defined in [configuration.py](./src/agent/configuration.py). You can easily update this via configuration in the studio to change the chatbot's personality or behavior.
-2. **Select a different model**: We default to Anthropic's Claude 3 Sonnet. You can select a compatible chat model using `provider/model-name` via configuration. Example: `openai/gpt-4-turbo-preview`.
-3. **Extend the graph**: The core logic of the chatbot is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of the conversation.
+1. **Define configurable parameters**: Modify the `Configuration` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on configurations in LangGraph, [see here](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=configuration#configuration).
-You can also quickly extend this template by:
-
-- Adding custom tools or functions to enhance the chatbot's capabilities.
-- Implementing additional logic for handling specific types of user queries or tasks.
-- Integrating external APIs or databases to provide more dynamic responses.
+2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
## Development
-While iterating on your graph, you can edit past state and rerun your app from previous states to debug specific nodes. Local changes will be automatically applied via hot reload. Try experimenting with:
+While iterating on your graph in LangGraph Studio, you can edit past state and rerun your app from previous states to debug specific nodes. Local changes will be automatically applied via hot reload.
-- Modifying the system prompt to give your chatbot a unique personality.
-- Adding new nodes to the graph for more complex conversation flows.
-- Implementing conditional logic to handle different types of user inputs.
+Follow-up requests extend the same thread. You can create an entirely new thread, clearing previous history, using the `+` button in the top right.
-Follow-up requests will be appended to the same thread. You can create an entirely new thread, clearing previous history, using the `+` button in the top right.
-
-For more advanced features and examples, refer to the [LangGraph documentation](https://github.com/langchain-ai/langgraph). These resources can help you adapt this template for your specific use case and build more sophisticated conversational agents.
+For more advanced features and examples, refer to the [LangGraph documentation](https://langchain-ai.github.io/langgraph/). These resources can help you adapt this template for your specific use case and build more sophisticated conversational agents.
LangGraph Studio also integrates with [LangSmith](https://smith.langchain.com/) for more in-depth tracing and collaboration with teammates, allowing you to analyze and optimize your chatbot's performance.
@@ -82,4 +77,4 @@ Configuration auto-generated by `langgraph template lock`. DO NOT EDIT MANUALLY.
}
}
}
--->
\ No newline at end of file
+-->
diff --git a/pyproject.toml b/pyproject.toml
index 34b8734..a6237d4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -59,4 +59,7 @@ convention = "google"
dev = [
"anyio>=4.7.0",
"langgraph-cli[inmem]>=0.2.8",
+ "mypy>=1.13.0",
+ "pytest>=8.3.5",
+ "ruff>=0.8.2",
]
diff --git a/src/agent/graph.py b/src/agent/graph.py
index f4d03ac..2cecc0b 100644
--- a/src/agent/graph.py
+++ b/src/agent/graph.py
@@ -18,6 +18,7 @@ class Configuration(TypedDict):
Set these when creating assistants OR when invoking the graph.
See: https://langchain-ai.github.io/langgraph/cloud/how-tos/configuration_cloud/
"""
+
my_configurable_param: str
@@ -28,17 +29,18 @@ class State:
Defines the initial structure of incoming data.
See: https://langchain-ai.github.io/langgraph/concepts/low_level/#state
"""
+
changeme: str = "example"
-async def my_node(state: State, config: RunnableConfig) -> Dict[str, Any]:
- """Example node: processes input and returns output.
+async def call_model(state: State, config: RunnableConfig) -> Dict[str, Any]:
+    """Process input and return output.
Can use runtime configuration to alter behavior.
"""
configuration = config["configurable"]
return {
- "changeme": "output from my_node. "
+ "changeme": "output from call_model. "
f'Configured with {configuration.get("my_configurable_param")}'
}
@@ -46,7 +48,7 @@ async def my_node(state: State, config: RunnableConfig) -> Dict[str, Any]:
# Define the graph
graph = (
StateGraph(State, config_schema=Configuration)
- .add_node(my_node)
- .add_edge("__start__", "my_node")
+ .add_node(call_model)
+ .add_edge("__start__", "call_model")
.compile(name="New Graph")
)
diff --git a/static/studio_ui.png b/static/studio_ui.png
index 981343f..9ccba2b 100644
Binary files a/static/studio_ui.png and b/static/studio_ui.png differ
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..26262e4
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,6 @@
+import pytest
+
+
+@pytest.fixture(scope="session")
+def anyio_backend():
+ return "asyncio"
diff --git a/tests/integration_tests/test_graph.py b/tests/integration_tests/test_graph.py
index 1c52b2a..68169be 100644
--- a/tests/integration_tests/test_graph.py
+++ b/tests/integration_tests/test_graph.py
@@ -2,8 +2,11 @@ import pytest
from agent import graph
+pytestmark = pytest.mark.anyio
+
@pytest.mark.langsmith
async def test_agent_simple_passthrough() -> None:
- res = await graph.ainvoke({"changeme": "some_val"})
+ inputs = {"changeme": "some_val"}
+ res = await graph.ainvoke(inputs)
assert res is not None