Skip to content
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
144 changes: 144 additions & 0 deletions .github/workflows/stress-test-mcp-server.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,144 @@
name: MCP Server Stress Test

on:
  pull_request:
    paths:
      - 'kai_mcp_solution_server/**'
      - '.github/workflows/stress-test-mcp-server.yml'
  push:
    branches:
      - main
    paths:
      - 'kai_mcp_solution_server/**'
      - '.github/workflows/stress-test-mcp-server.yml'
  workflow_dispatch:
    inputs:
      num_clients:
        description: 'Number of concurrent clients to test'
        required: false
        default: '100'

jobs:
  stress-test-postgres:
    name: Stress Test with PostgreSQL
    runs-on: ubuntu-latest

    # Sidecar PostgreSQL the server connects to; the job waits on the
    # health check before steps run.
    services:
      postgres:
        image: postgres:16
        env:
          POSTGRES_USER: kai_user
          POSTGRES_PASSWORD: kai_password
          POSTGRES_DB: kai_test_db
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432

    defaults:
      run:
        shell: bash
        working-directory: ./kai_mcp_solution_server

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v6
        with:
          version: "latest"

      - name: Install dependencies
        run: |
          uv sync
          uv pip install pytest-asyncio psycopg2-binary asyncpg

      - name: Run stress test with PostgreSQL backend
        env:
          KAI_DB_DSN: "postgresql+asyncpg://kai_user:kai_password@localhost:5432/kai_test_db"
          KAI_LLM_PARAMS: '{"model": "fake", "responses": ["Test response"]}'
          MCP_SERVER_URL: "http://localhost:8000"
          NUM_CONCURRENT_CLIENTS: ${{ github.event.inputs.num_clients || '100' }}
        run: |
          echo "Starting MCP server connected to PostgreSQL..."
          uv run python -m kai_mcp_solution_server --transport streamable-http --host 0.0.0.0 --port 8000 &
          SERVER_PID=$!

          # Wait for server to be ready (up to 30s).
          echo "Waiting for server to start..."
          for i in {1..30}; do
            if curl -s http://localhost:8000/ > /dev/null 2>&1; then
              echo "Server is ready!"
              break
            fi
            if [ $i -eq 30 ]; then
              echo "Server failed to start in 30 seconds"
              kill $SERVER_PID || true
              exit 1
            fi
            echo -n "."
            sleep 1
          done

          # Run the stress test.
          # NOTE: GitHub Actions runs `run:` scripts with `bash -e`
          # (errexit), so a plain `pytest ...; TEST_RESULT=$?` would abort
          # the script on test failure before cleanup, leaking the
          # background server. Capture the status with `|| TEST_RESULT=$?`
          # so the server is always stopped and the real result propagated.
          echo ""
          echo "Testing with $NUM_CONCURRENT_CLIENTS concurrent clients against PostgreSQL"
          TEST_RESULT=0
          uv run python -m pytest tests/test_multiple_integration.py::TestMultipleIntegration::test_multiple_users -xvs || TEST_RESULT=$?

          # Stop the server.
          echo "Stopping MCP server..."
          kill $SERVER_PID || true

          exit $TEST_RESULT
        timeout-minutes: 10

      # Ubuntu runners do not guarantee a psql binary; install the client
      # so the diagnostic step below cannot fail on a missing command.
      - name: Install PostgreSQL client
        if: always()
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client

      - name: Check PostgreSQL connection count
        if: always()
        run: |
          PGPASSWORD=kai_password psql -h localhost -U kai_user -d kai_test_db -c \
            "SELECT count(*), state FROM pg_stat_activity GROUP BY state;"

Comment on lines +104 to +109
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

🛠️ Refactor suggestion

Install psql before querying pg_stat_activity

Ubuntu runners may lack psql; this step can fail the job.

Add this step before “Check PostgreSQL connection count”:

      - name: Install PostgreSQL client
        run: |
          sudo apt-get update
          sudo apt-get install -y postgresql-client
🤖 Prompt for AI Agents
In .github/workflows/stress-test-mcp-server.yml around lines 104 to 109, the
workflow runs psql directly which can fail on Ubuntu runners that don't have the
PostgreSQL client installed; add a step immediately before the "Check PostgreSQL
connection count" step that runs "sudo apt-get update" and then "sudo apt-get
install -y postgresql-client" to ensure the psql binary is available for the
subsequent query.

# Second job under `jobs:`: a light sanity check against SQLite.
# SQLite cannot sustain heavy concurrent writes, so this run deliberately
# uses a small client count rather than a true stress load.
  stress-test-sqlite:
    name: Basic Test with SQLite
    runs-on: ubuntu-latest

    defaults:
      run:
        shell: bash
        working-directory: ./kai_mcp_solution_server

    steps:
      - uses: actions/checkout@v4

      - name: Set up Python 3.12
        uses: actions/setup-python@v5
        with:
          python-version: "3.12"

      - name: Install the latest version of uv
        uses: astral-sh/setup-uv@v6
        with:
          version: "latest"

      - name: Install dependencies
        run: |
          uv sync
          uv pip install pytest-asyncio

      - name: Run basic test with SQLite (limited concurrency)
        run: |
          # SQLite has limitations with concurrent writes, so we test with fewer clients
          export KAI_LLM_PARAMS='{"model": "fake", "responses": ["Test response"]}'
          export NUM_CONCURRENT_CLIENTS=5
          echo "Testing with $NUM_CONCURRENT_CLIENTS concurrent clients against SQLite"
          uv run python -m pytest tests/test_multiple_integration.py::TestMultipleIntegration::test_multiple_users -xvs
        timeout-minutes: 5
5 changes: 5 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -206,3 +206,8 @@ lerna-debug.log*

# Package lock files (optionally ignore)
# package-lock.json

# Test database files
tests/test_*.db
kai_mcp_solution_server/tests/test_*.db
*.db
78 changes: 69 additions & 9 deletions kai_mcp_solution_server/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ run-local:
cd $(PROJECT_ROOT) && KAI_DB_DSN='$(KAI_DB_DSN)' KAI_LLM_PARAMS='$(KAI_LLM_PARAMS)' uv run python -m kai_mcp_solution_server --transport streamable-http --host 0.0.0.0 --port 8000 --mount-path=$(MOUNT_PATH)


# Run with Podman for testing
# Run with Podman for testing (flexible - any database via KAI_DB_DSN)
.PHONY: run-podman
run-podman: build
@echo "Running MCP solution server in Podman..."
Expand All @@ -96,7 +96,60 @@ run-podman: build
-e KAI_LLM_PARAMS='$(KAI_LLM_PARAMS)' \
-e KAI_DB_DSN='$(KAI_DB_DSN)' \
$(if $(PODMAN_ARGS),$(PODMAN_ARGS),) \
--name kai-mcp-solution-server $(IMAGE)
--name kai-mcp-solution-server $(IMAGE)

# Convenience target: Run with SQLite.
# Provider credentials are passed through from the host environment with
# bare `-e NAME` so the secret values never appear on the podman command
# line (which would be visible via `ps` and shell history). Podman copies
# the value from the host environment when no `=value` is given.
.PHONY: podman-sqlite
podman-sqlite: build
	@echo "Running MCP solution server with SQLite..."
	@if [ -z "$(KAI_LLM_PARAMS)" ]; then echo "Error: KAI_LLM_PARAMS is required"; exit 1; fi
	podman run --rm -it -p 8000:8000 \
		-e MOUNT_PATH='$(MOUNT_PATH)' \
		-e KAI_LLM_PARAMS='$(KAI_LLM_PARAMS)' \
		-e KAI_DB_DSN='sqlite+aiosqlite:///data/kai.db' \
		$(if $(OPENAI_API_KEY),-e OPENAI_API_KEY,) \
		$(if $(ANTHROPIC_API_KEY),-e ANTHROPIC_API_KEY,) \
		$(if $(AZURE_OPENAI_API_KEY),-e AZURE_OPENAI_API_KEY,) \
		$(if $(AZURE_OPENAI_ENDPOINT),-e AZURE_OPENAI_ENDPOINT,) \
		$(if $(GOOGLE_API_KEY),-e GOOGLE_API_KEY,) \
		$(if $(AWS_ACCESS_KEY_ID),-e AWS_ACCESS_KEY_ID,) \
		$(if $(AWS_SECRET_ACCESS_KEY),-e AWS_SECRET_ACCESS_KEY,) \
		$(if $(AWS_REGION),-e AWS_REGION,) \
		$(if $(OLLAMA_HOST),-e OLLAMA_HOST,) \
		-v kai-sqlite-data:/data:Z \
		--name kai-mcp-sqlite $(IMAGE)

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue

Avoid leaking API keys via process args; pass-through host env instead of inlining values

Using -e KEY='value' can expose secrets via ps/history. Prefer -e KEY to pass through from host.

Apply this diff:

-		$(if $(OPENAI_API_KEY),-e OPENAI_API_KEY='$(OPENAI_API_KEY)',) \
-		$(if $(ANTHROPIC_API_KEY),-e ANTHROPIC_API_KEY='$(ANTHROPIC_API_KEY)',) \
-		$(if $(AZURE_OPENAI_API_KEY),-e AZURE_OPENAI_API_KEY='$(AZURE_OPENAI_API_KEY)',) \
-		$(if $(AZURE_OPENAI_ENDPOINT),-e AZURE_OPENAI_ENDPOINT='$(AZURE_OPENAI_ENDPOINT)',) \
-		$(if $(GOOGLE_API_KEY),-e GOOGLE_API_KEY='$(GOOGLE_API_KEY)',) \
-		$(if $(AWS_ACCESS_KEY_ID),-e AWS_ACCESS_KEY_ID='$(AWS_ACCESS_KEY_ID)',) \
-		$(if $(AWS_SECRET_ACCESS_KEY),-e AWS_SECRET_ACCESS_KEY='$(AWS_SECRET_ACCESS_KEY)',) \
-		$(if $(AWS_REGION),-e AWS_REGION='$(AWS_REGION)',) \
-		$(if $(OLLAMA_HOST),-e OLLAMA_HOST='$(OLLAMA_HOST)',) \
+		$(if $(OPENAI_API_KEY),-e OPENAI_API_KEY,) \
+		$(if $(ANTHROPIC_API_KEY),-e ANTHROPIC_API_KEY,) \
+		$(if $(AZURE_OPENAI_API_KEY),-e AZURE_OPENAI_API_KEY,) \
+		$(if $(AZURE_OPENAI_ENDPOINT),-e AZURE_OPENAI_ENDPOINT,) \
+		$(if $(GOOGLE_API_KEY),-e GOOGLE_API_KEY,) \
+		$(if $(AWS_ACCESS_KEY_ID),-e AWS_ACCESS_KEY_ID,) \
+		$(if $(AWS_SECRET_ACCESS_KEY),-e AWS_SECRET_ACCESS_KEY,) \
+		$(if $(AWS_REGION),-e AWS_REGION,) \
+		$(if $(OLLAMA_HOST),-e OLLAMA_HOST,) \
📝 Committable suggestion

‼️ IMPORTANT
Carefully review the code before committing. Ensure that it accurately replaces the highlighted code, contains no missing lines, and has no issues with indentation. Thoroughly test & benchmark the code to ensure it meets the requirements.

Suggested change
# Convenience target: Run with SQLite
.PHONY: podman-sqlite
podman-sqlite: build
@echo "Running MCP solution server with SQLite..."
@if [ -z "$(KAI_LLM_PARAMS)" ]; then echo "Error: KAI_LLM_PARAMS is required"; exit 1; fi
podman run --rm -it -p 8000:8000 \
-e MOUNT_PATH='$(MOUNT_PATH)' \
-e KAI_LLM_PARAMS='$(KAI_LLM_PARAMS)' \
-e KAI_DB_DSN='sqlite+aiosqlite:///data/kai.db' \
$(if $(OPENAI_API_KEY),-e OPENAI_API_KEY='$(OPENAI_API_KEY)',) \
$(if $(ANTHROPIC_API_KEY),-e ANTHROPIC_API_KEY='$(ANTHROPIC_API_KEY)',) \
$(if $(AZURE_OPENAI_API_KEY),-e AZURE_OPENAI_API_KEY='$(AZURE_OPENAI_API_KEY)',) \
$(if $(AZURE_OPENAI_ENDPOINT),-e AZURE_OPENAI_ENDPOINT='$(AZURE_OPENAI_ENDPOINT)',) \
$(if $(GOOGLE_API_KEY),-e GOOGLE_API_KEY='$(GOOGLE_API_KEY)',) \
$(if $(AWS_ACCESS_KEY_ID),-e AWS_ACCESS_KEY_ID='$(AWS_ACCESS_KEY_ID)',) \
$(if $(AWS_SECRET_ACCESS_KEY),-e AWS_SECRET_ACCESS_KEY='$(AWS_SECRET_ACCESS_KEY)',) \
$(if $(AWS_REGION),-e AWS_REGION='$(AWS_REGION)',) \
$(if $(OLLAMA_HOST),-e OLLAMA_HOST='$(OLLAMA_HOST)',) \
-v kai-sqlite-data:/data:Z \
--name kai-mcp-sqlite $(IMAGE)
# Convenience target: Run with SQLite
.PHONY: podman-sqlite
podman-sqlite: build
@echo "Running MCP solution server with SQLite..."
@if [ -z "$(KAI_LLM_PARAMS)" ]; then echo "Error: KAI_LLM_PARAMS is required"; exit 1; fi
podman run --rm -it -p 8000:8000 \
-e MOUNT_PATH='$(MOUNT_PATH)' \
-e KAI_LLM_PARAMS='$(KAI_LLM_PARAMS)' \
-e KAI_DB_DSN='sqlite+aiosqlite:///data/kai.db' \
$(if $(OPENAI_API_KEY),-e OPENAI_API_KEY,) \
$(if $(ANTHROPIC_API_KEY),-e ANTHROPIC_API_KEY,) \
$(if $(AZURE_OPENAI_API_KEY),-e AZURE_OPENAI_API_KEY,) \
$(if $(AZURE_OPENAI_ENDPOINT),-e AZURE_OPENAI_ENDPOINT,) \
$(if $(GOOGLE_API_KEY),-e GOOGLE_API_KEY,) \
$(if $(AWS_ACCESS_KEY_ID),-e AWS_ACCESS_KEY_ID,) \
$(if $(AWS_SECRET_ACCESS_KEY),-e AWS_SECRET_ACCESS_KEY,) \
$(if $(AWS_REGION),-e AWS_REGION,) \
$(if $(OLLAMA_HOST),-e OLLAMA_HOST,) \
-v kai-sqlite-data:/data:Z \
--name kai-mcp-sqlite $(IMAGE)

# Convenience target: Run with PostgreSQL via podman-compose
# IMAGE, KAI_LLM_PARAMS and MOUNT_PATH are forwarded into the compose
# environment. Requires podman-compose on PATH; presumably a compose file
# lives alongside this Makefile — verify before relying on this target.
.PHONY: podman-postgres
podman-postgres: build
@echo "Starting MCP solution server with PostgreSQL using podman-compose..."
@if [ -z "$(KAI_LLM_PARAMS)" ]; then echo "Error: KAI_LLM_PARAMS is required"; exit 1; fi
IMAGE=$(IMAGE) KAI_LLM_PARAMS='$(KAI_LLM_PARAMS)' MOUNT_PATH='$(MOUNT_PATH)' \
podman-compose up --force-recreate


# Run stress test - self-contained mode (starts its own server)
# This mode starts a test server with SQLite and runs clients against it
# Tunables (host env): NUM_CONCURRENT_CLIENTS (default 30) and KAI_DB_DSN
# (default sqlite+aiosqlite:///test.db). The echo below says the test
# binds port 8087 — confirm against tests/test_multiple_integration.py.
.PHONY: test-stress
test-stress:
@echo "Running self-contained stress test with $${NUM_CONCURRENT_CLIENTS:-30} concurrent clients..."
@echo "Database: $${KAI_DB_DSN:-sqlite+aiosqlite:///test.db}"
@echo "The test will start its own server on port 8087"
NUM_CONCURRENT_CLIENTS=$${NUM_CONCURRENT_CLIENTS:-30} \
KAI_DB_DSN=$${KAI_DB_DSN:-sqlite+aiosqlite:///test.db} \
uv run python -m pytest tests/test_multiple_integration.py::TestMultipleIntegration::test_multiple_users -xvs

# Run stress test against external server (e.g., one started with run-local or podman-postgres)
# First start a server: make run-local or make podman-postgres
# Then run: make test-stress-external
# Tunables (host env): MCP_SERVER_URL (default http://localhost:8000) and
# NUM_CONCURRENT_CLIENTS (default 30). Unlike test-stress, this does NOT
# start a server — it only points the test at an already-running one.
.PHONY: test-stress-external
test-stress-external:
@echo "Running stress test against external server at $${MCP_SERVER_URL:-http://localhost:8000}"
@echo "Testing with $${NUM_CONCURRENT_CLIENTS:-30} concurrent clients..."
@echo "Make sure the server is already running!"
MCP_SERVER_URL=$${MCP_SERVER_URL:-http://localhost:8000} \
NUM_CONCURRENT_CLIENTS=$${NUM_CONCURRENT_CLIENTS:-30} \
uv run python -m pytest tests/test_multiple_integration.py::TestMultipleIntegration::test_multiple_users -xvs

# Test against HTTP server
.PHONY: test-http
Expand Down Expand Up @@ -124,12 +177,6 @@ test-stdio-ts:
@echo "Running TypeScript MCP test client using stdio transport..."
cd $(PROJECT_ROOT)/ts-mcp-client && npm run build && node --es-module-specifier-resolution=node dist/client.js --transport stdio --server-path $(PROJECT_ROOT)

# Run pytest integration tests
.PHONY: pytest
pytest:
@echo "Running MCP integration tests with pytest..."
cd $(PROJECT_ROOT) && python -m pytest $(TESTS_DIR)/test_integration.py -v

# Run with test client in separate pod
.PHONY: run-with-tests
run-with-tests: build
Expand Down Expand Up @@ -178,7 +225,11 @@ help:
@echo " clean : Remove local container images"
@echo " port-forward : Forward port to local machine for testing"
@echo " run-local : Run server locally for testing"
@echo " run-podman : Run server in Podman container for testing"
@echo " run-podman : Run server in Podman (uses KAI_DB_DSN)"
@echo " podman-sqlite : Run server with SQLite in Podman"
@echo " podman-postgres: Run server with PostgreSQL via podman-compose"
@echo " test-stress : Run self-contained stress test (starts own server)"
@echo " test-stress-external: Run stress test against external server"
@echo " test-http : Test server using HTTP transport (Python client)"
@echo " test-http-ts : Test server using HTTP transport (TypeScript client)"
@echo " test-stdio : Test server using STDIO transport (Python client)"
Expand All @@ -193,6 +244,10 @@ help:
@echo " HOST : Server hostname for HTTP tests (default: localhost)"
@echo " PORT : Server port for HTTP tests (default: 8000)"
@echo " BEARER_TOKEN : Bearer token for HTTP authentication (optional)"
@echo " KAI_DB_DSN : Database connection string for server"
@echo " KAI_LLM_PARAMS : LLM configuration as JSON (required)"
@echo " NUM_CONCURRENT_CLIENTS : Number of clients for stress testing (default: 30)"
@echo " MCP_SERVER_URL : URL of external server for test-stress-external (default: http://localhost:8000)"
@echo " EXTRA_VARS : Any additional variables to pass to Ansible"
@echo ""
@echo "Example usage:"
Expand All @@ -204,6 +259,11 @@ help:
@echo " make deploy EXTRA_VARS='route_tls_enabled=true route_tls_termination=edge route_tls_insecure_policy=Allow'"
@echo " make run-local"
@echo " make test-stdio"
@echo " KAI_LLM_PARAMS='{\"model\":\"gpt-4\"}' make podman-postgres"
@echo " NUM_CONCURRENT_CLIENTS=100 make test-stress # Self-contained test"
@echo " # Test against running server:"
@echo " make run-local # In one terminal"
@echo " MCP_SERVER_URL=http://localhost:8000 NUM_CONCURRENT_CLIENTS=100 make test-stress-external # In another"
@echo " make test-http"
@echo " make test-http BEARER_TOKEN='your-jwt-token-here'"
@echo " make test-http HOST=api.example.com PORT=443 BEARER_TOKEN='token'"
Loading
Loading