Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
84 changes: 84 additions & 0 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
# Git
.git
.gitignore
.gitattributes
GIT-GUIDE.md

# Python
__pycache__
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
.pytest_cache/
.coverage
htmlcov/
.tox/
.mypy_cache/
.dmypy.json
dmypy.json

# Virtual environments
venv/
env/
ENV/
.venv

# IDEs
.vscode/
.idea/
*.swp
*.swo
*~
.DS_Store

# Documentation
# Exclude all markdown from the build context except the README.
*.md
!README.md


# Tests
tests/
pytest.ini
conftest.py

# R files
R/
*.R
*.Rproj

# Scripts
scripts/

# Build tools
dodo.py

# Conda
# NOTE(review): a `!` negation only re-includes files matched by an earlier
# exclusion pattern; nothing above excludes environment.yml, so this line
# currently has no effect — confirm intent or remove.
!environment.yml

# ASCII art
# NOTE(review): this also excludes any other .txt files in the repo
# (e.g. a requirements.txt, if one exists) — confirm that is intended.
*.txt

# Docker
Dockerfile*
docker-compose*.yml
.dockerignore

# Demo files (kept for runtime, excluded from context)
# demo/
13 changes: 13 additions & 0 deletions .env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Example environment configuration — copy to `.env` and fill in values.
# See the "Docker quickstart" section of README.md for recommended settings.

# Embedding service (optional; disabled while commented out):
# EMBED_URL=
# EMBED_MODEL=

# LLM chat-completions endpoint — fill these in before running:
LLM_API_URL=
LLM_API_KEY=
LLM_MODEL=
# Optional LLM settings (left commented to use the application defaults):
# LLM_TIMEOUT=240
# LLM_CANDIDATE_LIMIT=10
# LLM_LOG=1
# LLM_DRY_RUN=0
# LLM_USE_RESPONSES=0

# MCP server log level:
# MCP_LOG_LEVEL=INFO
35 changes: 35 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Runtime image for the study-agent MCP server, built on micromamba/Alpine.
FROM mambaorg/micromamba:2.5-alpine3.22

# Root is needed for apk and for creating /data; dropped again below.
USER root
WORKDIR /app

# NOTE(review): curl appears intended for container health checks /
# debugging — confirm against the docker-compose healthcheck configuration.
RUN apk add --no-cache curl

# Install the conda environment into the base env, then drop package
# caches to keep the image small.
COPY --chown=$MAMBA_USER:$MAMBA_USER environment.yml /tmp/environment.yml
RUN micromamba install -y -n base -f /tmp/environment.yml && \
micromamba clean --all --yes

# Copy only the packages and metadata needed at runtime
# (everything else is excluded via .dockerignore).
COPY --chown=$MAMBA_USER:$MAMBA_USER core/ ./core/
COPY --chown=$MAMBA_USER:$MAMBA_USER mcp_server/ ./mcp_server/
COPY --chown=$MAMBA_USER:$MAMBA_USER acp_agent/ ./acp_agent/
COPY --chown=$MAMBA_USER:$MAMBA_USER docs/ ./docs/
COPY --chown=$MAMBA_USER:$MAMBA_USER pyproject.toml ./

# Editable install registers the console entry points (e.g. study-agent-mcp)
# against the sources copied into /app.
RUN micromamba run -n base pip install --no-cache-dir -e .

# Writable data directory for the phenotype index, owned by the runtime user.
RUN mkdir -p /data/phenotype_index && chown -R $MAMBA_USER:$MAMBA_USER /data

# Drop root privileges for the running service.
USER $MAMBA_USER

# Defaults: MCP served over HTTP on 8790, ACP study agent on 8765.
# host.docker.internal presumably lets the containerized agent reach an MCP
# server on the Docker host — confirm against docker-compose networking.
ENV PYTHONUNBUFFERED=1 \
PHENOTYPE_INDEX_DIR=/data/phenotype_index \
MCP_TRANSPORT=http \
MCP_HOST=0.0.0.0 \
MCP_PORT=8790 \
MCP_PATH=/mcp \
STUDY_AGENT_HOST=0.0.0.0 \
STUDY_AGENT_PORT=8765 \
STUDY_AGENT_HOST_GATEWAY=host.docker.internal \
STUDY_AGENT_MCP_URL=http://host.docker.internal:8790/mcp

CMD ["micromamba", "run", "-n", "base", "study-agent-mcp"]
62 changes: 62 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -178,6 +178,68 @@ curl -s -X POST http://127.0.0.1:8765/flows/phenotype_recommendation \
-d '{"study_intent":"Identify clinical risk factors for older adult patients who experience an adverse event of acute gastro-intestinal (GI) bleeding", "top_k":20, "max_results":10,"candidate_limit":10}'
```

### Docker quickstart

Use Docker Compose to run the MCP and ACP services together, with MCP served over HTTP.

NOTE: If you plan to use the phenotype services, you will need to build the phenotype index (see `./docs/PHENOTYPE_INDEXING.md`) with its output written to `data/phenotype_index/`.

1. Prepare environment variables:

```bash
cp .env.example .env
```

Recommended contents of `.env`:
```
EMBED_API_KEY=<your api key>
EMBED_MODEL=<an embedding model>
EMBED_URL=http://172.17.0.1:3000/ollama/api/embed # or equivalent
LLM_API_KEY=<your api key>
LLM_API_URL=http://172.17.0.1:3000/api/chat/completions # or equivalent
LLM_MODEL=<a chat completion model>
LLM_LOG=1
LLM_USE_RESPONSES=0
LLM_TIMEOUT=180
STUDY_AGENT_ALLOW_CORE_FALLBACK=0
STUDY_AGENT_DEBUG=1
```

2. Build and start both services:

```bash
docker compose up --build -d
```

3. Check service health and tool listing:

```bash
curl -s http://127.0.0.1:8765/health | python -m json.tool
```

Expected output:
```
{
"status": "ok",
"mcp": {
"ok": true,
"mode": "http"
},
"mcp_index": {
"skipped": true
}
}
```

The following should list the available services with an empty warnings list:
```bash
curl -s http://127.0.0.1:8765/services | python -m json.tool
```

Notes:
- ACP is exposed on port 8765 and MCP on port 8790.
- The phenotype index is mounted from `./data/phenotype_index` into MCP at `/data/phenotype_index`.

## Planned Services

Below is a set of planned study agent services, organized by category. For each service, document the input, output, and validation approach.
Expand Down
3 changes: 3 additions & 0 deletions acp_agent/study_agent_acp/llm_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
import urllib.request
from typing import Any, Dict, Optional

from study_agent_core.net import rewrite_container_host_url

def build_prompt(
overview: str,
Expand Down Expand Up @@ -219,6 +220,8 @@ def _extract_json_object(text: str) -> Optional[Dict[str, Any]]:

def call_llm(prompt: str) -> Optional[Dict[str, Any]]:
api_url = os.getenv("LLM_API_URL", "http://localhost:3000/api/chat/completions")
api_url = rewrite_container_host_url(api_url)

api_key = os.getenv("LLM_API_KEY")
model = os.getenv("LLM_MODEL", "agentstudyassistant")
timeout = int(os.getenv("LLM_TIMEOUT", "180"))
Expand Down
54 changes: 43 additions & 11 deletions acp_agent/study_agent_acp/mcp_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -135,6 +135,8 @@ def _ensure_session(self) -> None:
with self._lock:
if self._session is not None:
return
self._session = None
self._exit_stack = None
self._portal_cm = start_blocking_portal()
self._portal = self._portal_cm.__enter__()
assert self._portal is not None
Expand All @@ -155,6 +157,24 @@ async def _async_init(self) -> None:
self._session = session

def close(self) -> None:
portal = self._portal
try:
if portal is not None:
try:
portal.call(self._async_close)
except Exception:
pass
finally:
if self._portal_cm is not None:
try:
self._portal_cm.__exit__(None, None, None)
except Exception:
pass
self._portal_cm = None
self._portal = None
self._session = None
self._exit_stack = None

if self._portal is None:
return
try:
Expand Down Expand Up @@ -214,13 +234,15 @@ def __init__(self, config: HttpMCPClientConfig) -> None:

def list_tools(self) -> List[Dict[str, Any]]:
self._ensure_session()
assert self._portal is not None
return self._portal.call(self._list_tools)
with self._lock:
assert self._portal is not None
return self._portal.call(self._list_tools)

def call_tool(self, name: str, arguments: Dict[str, Any]) -> Dict[str, Any]:
self._ensure_session()
assert self._portal is not None
return self._portal.call(self._call_tool, name, arguments)
with self._lock:
assert self._portal is not None
return self._portal.call(self._call_tool, name, arguments)

def health_check(self) -> Dict[str, Any]:
if self._host and self._port:
Expand All @@ -231,8 +253,9 @@ def health_check(self) -> Dict[str, Any]:
return {"ok": False, "error": str(exc)}
try:
self._ensure_session()
assert self._portal is not None
return self._portal.call(self._ping)
with self._lock:
assert self._portal is not None
return self._portal.call(self._ping)
except Exception as exc:
return {"ok": False, "error": str(exc)}

Expand All @@ -259,6 +282,8 @@ def _ensure_session(self) -> None:
with self._lock:
if self._session is not None:
return
self._session = None
self._exit_stack = None
self._portal_cm = start_blocking_portal()
self._portal = self._portal_cm.__enter__()
assert self._portal is not None
Expand All @@ -279,17 +304,24 @@ async def _async_init(self) -> None:
await self._exit_stack.enter_async_context(session)
await session.initialize()
self._session = session

def close(self) -> None:
if self._portal is None:
return
portal = self._portal
try:
self._portal.call(self._async_close)
if portal is not None:
try:
portal.call(self._async_close)
except Exception:
pass
finally:
if self._portal_cm is not None:
self._portal_cm.__exit__(None, None, None)
try:
self._portal_cm.__exit__(None, None, None)
except Exception:
pass
self._portal_cm = None
self._portal = None
self._session = None
self._exit_stack = None

async def _async_close(self) -> None:
if self._exit_stack is not None:
Expand Down
Loading
Loading