Add OpenAI Deep Research MCP server
- FastMCP server with deep_research and deep_research_info tools
- OpenAI Responses API integration with background polling
- Configurable model via DEEP_RESEARCH_MODEL env var
- Default: o4-mini-deep-research (faster/cheaper)
- Optional FastAPI backend for standalone use
- Tested successfully: 80s query, 20 web searches, 4 citations
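The server module itself is not part of this diff, so the sketch below is an illustration rather than the committed code: the tool names and the DEEP_RESEARCH_MODEL default come from the commit message above, while the module layout, polling interval, and the web_search_preview tool choice are assumptions.

```python
import os
import time

from mcp.server.fastmcp import FastMCP
from openai import OpenAI

# Model is configurable via DEEP_RESEARCH_MODEL; the commit message names
# o4-mini-deep-research as the default.
MODEL = os.getenv("DEEP_RESEARCH_MODEL", "o4-mini-deep-research")

mcp = FastMCP("deep-research")
client = OpenAI()  # reads OPENAI_API_KEY from the environment


@mcp.tool()
def deep_research_info() -> str:
    """Report which deep-research model is configured."""
    return f"Deep research requests are served by the {MODEL} model."


@mcp.tool()
def deep_research(query: str) -> str:
    """Run a deep-research query through the OpenAI Responses API.

    The request is created in background mode and polled until it leaves
    the queued/in_progress states, then the aggregated text is returned.
    """
    response = client.responses.create(
        model=MODEL,
        input=query,
        background=True,
        # Deep-research models need at least one data source; web search
        # is assumed here.
        tools=[{"type": "web_search_preview"}],
    )
    while response.status in ("queued", "in_progress"):
        time.sleep(5)
        response = client.responses.retrieve(response.id)
    return response.output_text


def main() -> None:
    # Entry point wired up via [project.scripts]; defaults to stdio transport.
    mcp.run()


if __name__ == "__main__":
    main()
```

Running the mcp-deep-research console script defined in pyproject.toml would invoke main() and serve the two tools over stdio.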
pyproject.toml (new file, 24 lines)
@@ -0,0 +1,24 @@
[project]
name = "mcp-deep-research"
version = "0.1.0"
description = "MCP Server for OpenAI Deep Research API"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
    "mcp[cli]>=1.0.0",
    "openai>=1.0.0",
    "fastapi>=0.100.0",
    "uvicorn>=0.23.0",
    "httpx>=0.24.0",
    "python-dotenv>=1.0.0",
]

[project.scripts]
mcp-deep-research = "mcp_server.server:main"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["mcp_server", "backend"]
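fastapi and uvicorn appear in the dependency list because of the optional standalone backend mentioned in the commit message. That file is also not in this diff; the following is a rough sketch under assumed names, with the route paths and payload shapes invented for illustration.

```python
import os

from fastapi import FastAPI
from openai import OpenAI
from pydantic import BaseModel

MODEL = os.getenv("DEEP_RESEARCH_MODEL", "o4-mini-deep-research")

app = FastAPI(title="mcp-deep-research backend")
client = OpenAI()


class ResearchRequest(BaseModel):
    query: str


@app.post("/research")
def start_research(req: ResearchRequest) -> dict:
    """Start a background deep-research run and return its ID for polling."""
    response = client.responses.create(
        model=MODEL,
        input=req.query,
        background=True,
        tools=[{"type": "web_search_preview"}],
    )
    return {"id": response.id, "status": response.status}


@app.get("/research/{response_id}")
def get_research(response_id: str) -> dict:
    """Check a run's status; the report text is included once it completes."""
    response = client.responses.retrieve(response_id)
    done = response.status == "completed"
    return {
        "id": response.id,
        "status": response.status,
        "output_text": response.output_text if done else None,
    }
```

A backend like this could be served with uvicorn independently of the MCP stdio transport, which is presumably why both packages are listed as dependencies and why the wheel bundles both the mcp_server and backend packages.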