Skip to content
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 9 additions & 1 deletion langchain_mcp_adapters/client.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,14 @@
from contextlib import AsyncExitStack
from types import TracebackType
from typing import Literal, TypedDict, cast
from typing import Any, Literal, Optional, TypedDict, cast

from langchain_core.messages import AIMessage, HumanMessage
from langchain_core.tools import BaseTool
from mcp import ClientSession, StdioServerParameters
from mcp.client.sse import sse_client
from mcp.client.stdio import stdio_client

from langchain_mcp_adapters.prompts import load_mcp_prompt
from langchain_mcp_adapters.tools import load_mcp_tools

DEFAULT_ENCODING = "utf-8"
Expand Down Expand Up @@ -211,6 +213,12 @@ def get_tools(self) -> list[BaseTool]:
all_tools.extend(server_tools)
return all_tools

async def get_prompt(
    self,
    server_name: str,
    prompt_name: str,
    arguments: Optional[dict[str, Any]] = None,
) -> list[HumanMessage | AIMessage]:
    """Load a prompt from a given MCP server and convert it to LangChain messages.

    Args:
        server_name: name of the server (key into ``self.sessions``) to load
            the prompt from
        prompt_name: name of the prompt to load
        arguments: optional arguments to pass to the prompt; defaults to None
            so callers need not supply it explicitly (matches the
            ``load_mcp_prompt`` default)

    Returns:
        a list of LangChain Human/AI messages converted from the MCP prompt

    Raises:
        KeyError: if ``server_name`` is not a known session
    """
    # Look up the already-initialized session for this server.
    session = self.sessions[server_name]
    return await load_mcp_prompt(session, prompt_name, arguments)

async def __aenter__(self) -> "MultiServerMCPClient":
try:
connections = self.connections or {}
Expand Down
51 changes: 51 additions & 0 deletions langchain_mcp_adapters/prompts.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
from typing import Any, Optional, Union

from langchain_core.messages import AIMessage, HumanMessage
from mcp import ClientSession
from mcp.types import PromptMessage


class UnsupportedContentError(Exception):
    """Raised when a prompt message contains unsupported content."""


class UnsupportedRoleError(Exception):
    """Raised when a prompt message contains an unsupported role."""


def convert_mcp_prompt_message_to_langchain_message(
    message: PromptMessage,
) -> Union[HumanMessage, AIMessage]:
    """Convert an MCP prompt message to a LangChain message.

    Args:
        message: MCP prompt message to convert

    Returns:
        a LangChain message

    Raises:
        UnsupportedContentError: if the message content type is not "text"
        UnsupportedRoleError: if the role is neither "user" nor "assistant"
    """
    content = message.content
    # Only plain-text content can be mapped onto LangChain chat messages.
    if content.type != "text":
        raise UnsupportedContentError(
            f"Unsupported prompt message content type: {content.type}"
        )

    if message.role == "user":
        return HumanMessage(content=content.text)
    if message.role == "assistant":
        return AIMessage(content=content.text)
    raise UnsupportedRoleError(f"Unsupported prompt message role: {message.role}")


async def load_mcp_prompt(
    session: ClientSession, name: str, arguments: Optional[dict[str, Any]] = None
) -> list[Union[HumanMessage, AIMessage]]:
    """Load MCP prompt and convert to LangChain messages."""
    # Fetch the prompt from the server, then convert each MCP message
    # into its LangChain equivalent.
    response = await session.get_prompt(name, arguments)
    return list(map(convert_mcp_prompt_message_to_langchain_message, response.messages))
25 changes: 10 additions & 15 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -7,30 +7,26 @@ name = "langchain-mcp-adapters"
version = "0.0.3"
description = "Make Anthropic Model Context Protocol (MCP) tools compatible with LangChain and LangGraph agents."
authors = [
{name = "Vadym Barda", email = "[email protected] "}
{ name = "Vadym Barda", email = "[email protected] " },
]
readme = "README.md"
requires-python = ">=3.10"
dependencies = [
"langchain-core>=0.3.36",
"mcp>=1.2.1",
]
dependencies = ["langchain-core>=0.3.36", "mcp>=1.2.1"]

[dependency-groups]
test = [
"pytest>=8.0.0",
"ruff>=0.9.4",
"mypy>=1.8.0",
"pytest-socket>=0.7.0",
"pytest-asyncio>=0.25.0",
"types-setuptools>=69.0.0",
]

[tool.pytest.ini_options]
minversion = "8.0"
addopts = "-ra -q -v"
testpaths = [
"tests",
]
testpaths = ["tests"]
python_files = ["test_*.py"]
python_functions = ["test_*"]

Expand All @@ -40,14 +36,14 @@ target-version = "py310"

[tool.ruff.lint]
select = [
"E", # pycodestyle errors
"W", # pycodestyle warnings
"F", # pyflakes
"I", # isort
"B", # flake8-bugbear
"E", # pycodestyle errors
"W", # pycodestyle warnings
"F", # pyflakes
"I", # isort
"B", # flake8-bugbear
]
ignore = [
"E501" # line-length
"E501", # line-length
]


Expand All @@ -57,4 +53,3 @@ warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true
check_untyped_defs = true

2 changes: 1 addition & 1 deletion tests/test_import.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
def test_import() -> None:
    """Test that the code can be imported"""
    import langchain_mcp_adapters.client  # noqa: F401
    import langchain_mcp_adapters.prompts  # noqa: F401
    import langchain_mcp_adapters.tools  # noqa: F401
76 changes: 76 additions & 0 deletions tests/test_prompts.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,76 @@
from unittest.mock import AsyncMock

import pytest
from langchain_core.messages import AIMessage, HumanMessage
from mcp.types import (
EmbeddedResource,
ImageContent,
PromptMessage,
TextContent,
TextResourceContents,
)

from langchain_mcp_adapters.prompts import (
UnsupportedContentError,
convert_mcp_prompt_message_to_langchain_message,
load_mcp_prompt,
)


@pytest.mark.parametrize(
    "role,text,expected_cls",
    [
        ("assistant", "Hello", AIMessage),
        ("user", "Hello", HumanMessage),
    ],
)
def test_convert_mcp_prompt_message_to_langchain_message_with_text_content(
    role: str, text: str, expected_cls: type
):
    # Text content should map directly to the matching LangChain message class.
    prompt_message = PromptMessage(role=role, content=TextContent(type="text", text=text))
    converted = convert_mcp_prompt_message_to_langchain_message(prompt_message)
    assert isinstance(converted, expected_cls)
    assert converted.content == text


@pytest.mark.parametrize("role", ["assistant", "user"])
def test_convert_mcp_prompt_message_to_langchain_message_with_resource_content(role: str):
    # Embedded resources are not convertible, regardless of role.
    resource = TextResourceContents(
        uri="message://greeting", mimeType="text/plain", text="hi"
    )
    prompt_message = PromptMessage(
        role=role,
        content=EmbeddedResource(type="resource", resource=resource),
    )
    with pytest.raises(UnsupportedContentError):
        convert_mcp_prompt_message_to_langchain_message(prompt_message)


@pytest.mark.parametrize("role", ["assistant", "user"])
def test_convert_mcp_prompt_message_to_langchain_message_with_image_content(role: str):
    # Image content is not convertible, regardless of role.
    image = ImageContent(type="image", mimeType="image/png", data="base64data")
    prompt_message = PromptMessage(role=role, content=image)
    with pytest.raises(UnsupportedContentError):
        convert_mcp_prompt_message_to_langchain_message(prompt_message)


@pytest.mark.asyncio
async def test_load_mcp_prompt():
    # The session only needs to expose get_prompt returning an object
    # with a ``messages`` attribute.
    mock_response = AsyncMock(
        messages=[
            PromptMessage(role="user", content=TextContent(type="text", text="Hello")),
            PromptMessage(role="assistant", content=TextContent(type="text", text="Hi")),
        ]
    )
    mock_session = AsyncMock()
    mock_session.get_prompt = AsyncMock(return_value=mock_response)

    messages = await load_mcp_prompt(mock_session, "test_prompt")

    assert len(messages) == 2
    first, second = messages
    assert isinstance(first, HumanMessage)
    assert first.content == "Hello"
    assert isinstance(second, AIMessage)
    assert second.content == "Hi"
15 changes: 15 additions & 0 deletions uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.