# MCP Deep Research
# - FastMCP server with deep_research and deep_research_info tools
# - OpenAI Responses API integration with background polling
# - Configurable model via DEEP_RESEARCH_MODEL env var
# - Default: o4-mini-deep-research (faster/cheaper)
# - Optional FastAPI backend for standalone use
# - Tested successfully: 80s query, 20 web searches, 4 citations
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "mcp-deep-research"
version = "0.1.0"
description = "MCP Server for OpenAI Deep Research API"
readme = "README.md"
requires-python = ">=3.11"
# PEP 508 specifiers, sorted alphabetically.
dependencies = [
    "fastapi>=0.100.0",
    "httpx>=0.24.0",
    "mcp[cli]>=1.0.0",
    "openai>=1.0.0",
    "python-dotenv>=1.0.0",
    "uvicorn>=0.23.0",
]

[project.scripts]
mcp-deep-research = "mcp_server.server:main"

# Hatchling ships both the MCP server and the optional FastAPI backend
# in the wheel.
[tool.hatch.build.targets.wheel]
packages = ["mcp_server", "backend"]