Update dependencies and add comprehensive examples

stabgan
2025-03-27 16:30:13 +05:30
parent 1fd46839ef
commit 436ac8d07f
6 changed files with 685 additions and 237 deletions

README.md (167 lines changed)

@@ -68,7 +68,7 @@ Add one of the following configurations to your MCP settings file (e.g., `cline_
],
"env": {
"OPENROUTER_API_KEY": "your-api-key-here",
"OPENROUTER_DEFAULT_MODEL": "anthropic/claude-3.5-sonnet"
"DEFAULT_MODEL": "qwen/qwen2.5-vl-32b-instruct:free"
}
}
}
@@ -89,7 +89,7 @@ Add one of the following configurations to your MCP settings file (e.g., `cline_
],
"env": {
"OPENROUTER_API_KEY": "your-api-key-here",
"OPENROUTER_DEFAULT_MODEL": "anthropic/claude-3.5-sonnet"
"DEFAULT_MODEL": "qwen/qwen2.5-vl-32b-instruct:free"
}
}
}
@@ -108,7 +108,7 @@ Add one of the following configurations to your MCP settings file (e.g., `cline_
"--rm",
"-i",
"-e", "OPENROUTER_API_KEY=your-api-key-here",
"-e", "OPENROUTER_DEFAULT_MODEL=anthropic/claude-3.5-sonnet",
"-e", "DEFAULT_MODEL=qwen/qwen2.5-vl-32b-instruct:free",
"stabgandocker/openrouter-mcp-multimodal:latest"
]
}
@@ -129,23 +129,45 @@ Add one of the following configurations to your MCP settings file (e.g., `cline_
],
"env": {
"OPENROUTER_API_KEY": "your-api-key-here",
"OPENROUTER_DEFAULT_MODEL": "anthropic/claude-3.5-sonnet"
"DEFAULT_MODEL": "qwen/qwen2.5-vl-32b-instruct:free"
}
}
}
}
```
## Examples
For comprehensive examples of how to use this MCP server, check out the [examples directory](./examples/). We provide:
- JavaScript examples for Node.js applications
- Python examples with interactive chat capabilities
- Code snippets for integrating with various applications
Each example comes with clear documentation and step-by-step instructions.
## Dependencies
This project uses the following key dependencies:
- `@modelcontextprotocol/sdk`: ^1.8.0 - Latest MCP SDK for tool implementation
- `openai`: ^4.89.1 - OpenAI-compatible API client for OpenRouter
- `sharp`: ^0.33.5 - Fast image processing library
- `axios`: ^1.8.4 - HTTP client for API requests
- `node-fetch`: ^3.3.2 - Modern fetch implementation
Node.js 18 or later is required. All dependencies are regularly updated to ensure compatibility and security.
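As a purely illustrative sketch of the kind of image preprocessing the `sharp` dependency enables (this is not part of the server's public API, and the server's internal pipeline may differ), an image can be downscaled and re-encoded before being passed along as a base64 data URI:
```javascript
import sharp from 'sharp';

// Hypothetical helper: shrink a local image and return it as a JPEG data URI
async function toJpegDataUri(imagePath) {
  const buffer = await sharp(imagePath)
    .resize(1024, 1024, { fit: 'inside', withoutEnlargement: true }) // cap the longest side at 1024px
    .jpeg({ quality: 80 })                                           // re-encode as JPEG
    .toBuffer();
  return `data:image/jpeg;base64,${buffer.toString('base64')}`;
}
```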
## Available Tools
### chat_completion
### mcp_openrouter_chat_completion
Send text or multimodal messages to OpenRouter models:
```javascript
use_mcp_tool({
server_name: "openrouter",
tool_name: "chat_completion",
tool_name: "mcp_openrouter_chat_completion",
arguments: {
model: "google/gemini-2.5-pro-exp-03-25:free", // Optional if default is set
messages: [
@@ -168,7 +190,7 @@ For multimodal messages with images:
```javascript
use_mcp_tool({
server_name: "openrouter",
tool_name: "chat_completion",
tool_name: "mcp_openrouter_chat_completion",
arguments: {
model: "anthropic/claude-3.5-sonnet",
messages: [
@@ -190,133 +212,4 @@ use_mcp_tool({
]
}
});
```
### multi_image_analysis
Analyze multiple images with a single prompt:
```javascript
use_mcp_tool({
server_name: "openrouter",
tool_name: "multi_image_analysis",
arguments: {
images: [
{ url: "https://example.com/image1.jpg" },
{ url: "file:///absolute/path/to/image2.jpg" },
{
url: "https://example.com/image3.jpg",
alt: "Optional description of image 3"
}
],
prompt: "Compare these images and tell me their similarities and differences",
markdown_response: true, // Optional, defaults to true
model: "anthropic/claude-3-opus" // Optional if default is set
}
});
```
### search_models
Search and filter available models:
```javascript
use_mcp_tool({
server_name: "openrouter",
tool_name: "search_models",
arguments: {
query: "claude", // Optional text search
provider: "anthropic", // Optional provider filter
capabilities: {
vision: true // Filter for models with vision capabilities
},
limit: 5 // Optional, defaults to 10
}
});
```
### get_model_info
Get detailed information about a specific model:
```javascript
use_mcp_tool({
server_name: "openrouter",
tool_name: "get_model_info",
arguments: {
model: "anthropic/claude-3.5-sonnet"
}
});
```
### validate_model
Check if a model ID is valid:
```javascript
use_mcp_tool({
server_name: "openrouter",
tool_name: "validate_model",
arguments: {
model: "google/gemini-2.5-pro-exp-03-25:free"
}
});
```
## Error Handling
The server provides detailed error messages for various failure cases:
- Invalid input parameters
- Network errors
- Rate limiting issues
- Invalid image formats
- Authentication problems
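A minimal sketch of how these failures surface when the server is driven through the MCP TypeScript SDK client (as in the examples directory); the exact error text depends on the failure case:
```javascript
try {
  const result = await client.callTool({
    name: 'mcp_openrouter_chat_completion',
    arguments: {
      messages: [{ role: 'user', content: 'Hello' }],
      model: 'not-a-real/model' // deliberately invalid to provoke an error
    }
  });
  // Some failures are reported on the result itself rather than thrown
  console.log(result.content[0].text);
} catch (error) {
  console.error('Tool call failed:', error.message);
}
```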
## Troubleshooting
### Common Issues
- **"fetch is not defined" error**: This often occurs when the Node.js environment doesn't have global fetch. Use Node.js v18+ or add the PATH environment variable to your configuration as shown below:
```json
{
"mcpServers": {
"openrouter": {
"command": "npx",
"args": [
"-y",
"@stabgan/openrouter-mcp-multimodal"
],
"env": {
"OPENROUTER_API_KEY": "your-api-key-here",
"PATH": "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
}
}
}
}
```
- **Image analysis failures**: Make sure your image path is absolute and the file format is supported (see the sketch below).
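A hedged sketch of one way to avoid relative-path problems: resolve the path and convert it to a `file://` URL before passing it to `multi_image_analysis` (adapt the example path to your project):
```javascript
import path from 'path';
import { pathToFileURL } from 'url';

// Turn a relative path into the absolute file:// form shown in the examples above
const imageUrl = pathToFileURL(path.resolve('images/photo.jpg')).href;
// e.g. "file:///home/user/project/images/photo.jpg"
```
Pass `imageUrl` as the `url` field of an entry in the `images` array.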
## Development
To build from source:
```bash
git clone https://github.com/stabgan/openrouter-mcp-multimodal.git
cd openrouter-mcp-multimodal
npm install
npm run build
```
## License
MIT License
## Version 1.2.0 Updates
- Simplified image analysis by consolidating all functionality into the `multi_image_analysis` tool
- Added automatic selection of free models with the largest context window when no model is specified
- Improved handling of various image formats (file://, http://, data:), as illustrated below
- Enhanced error handling and logging for better troubleshooting
- Removed the `analyze_image` tool to eliminate confusion and streamline the interface
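To illustrate the broader format handling and the automatic model selection described above (placeholder URLs; values are illustrative only), a single `multi_image_analysis` call can mix all three source forms and omit `model` entirely:
```javascript
use_mcp_tool({
  server_name: "openrouter",
  tool_name: "multi_image_analysis",
  arguments: {
    images: [
      { url: "https://example.com/photo-a.jpg" },        // remote image
      { url: "file:///absolute/path/to/photo-b.png" },   // local file
      { url: "data:image/jpeg;base64,..." }              // inline data URI (truncated)
    ],
    prompt: "Compare these images"
    // `model` omitted: a free model with the largest context window is selected automatically
  }
});
```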

examples/README.md (new file, 119 lines)

@@ -0,0 +1,119 @@
# OpenRouter MCP Server Examples
This directory contains example scripts demonstrating how to use the OpenRouter MCP Server for various tasks such as text chat, image analysis, and model searching.
## Prerequisites
Before running these examples, ensure you have:
1. Node.js 18 or later installed
2. OpenRouter API key (get one from [OpenRouter](https://openrouter.ai))
3. Set up the environment variable:
```
OPENROUTER_API_KEY=your_api_key_here
```
You can create a `.env` file in the root directory with this variable.
## JavaScript Example
The `index.js` file demonstrates how to use the MCP server from Node.js:
1. Starting the MCP server
2. Connecting to the server
3. Simple text chat
4. Single image analysis
5. Multiple image analysis
6. Model search
### Running the JavaScript Example
```bash
# Install dependencies if you haven't already
npm install
# Run the example
npm run examples
```
## Python Example
The `python_example.py` script demonstrates how to use the MCP server from Python:
1. Connecting to the MCP server
2. Converting MCP tool definitions to OpenAI format
3. Interactive chat loop with tool calling
### Running the Python Example
```bash
# Install required Python packages
pip install mcp openai python-dotenv
# Run the example
python examples/python_example.py
```
## Using the MCP Server in Your Projects
To use the OpenRouter MCP Server in your own projects:
1. Install the package:
```bash
npm install @stabgan/openrouter-mcp-multimodal
```
2. Create a client connection using the MCP client libraries:
```javascript
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
// Configure the server process
const transport = new StdioClientTransport({
  command: 'npx',
  args: ['-y', '@stabgan/openrouter-mcp-multimodal'],
  env: { OPENROUTER_API_KEY: 'your_api_key_here' }
});
// Create the client and connect (this spawns the server over stdio)
const client = new Client({ name: 'my-app', version: '1.0.0' });
await client.connect(transport);
```
3. Call tools:
```javascript
// Get available tools
const { tools } = await client.listTools();
console.log('Available tools:', tools.map(tool => tool.name).join(', '));
// Call a tool
const result = await client.callTool({
  name: 'mcp_openrouter_chat_completion',
  arguments: {
    messages: [
      { role: 'user', content: 'Hello, what can you do?' }
    ],
    model: 'deepseek/deepseek-chat-v3-0324:free'
  }
});
console.log('Response:', result.content[0].text);
```
## Available Tools
The OpenRouter MCP Server provides the following tools:
1. `mcp_openrouter_chat_completion` - Text chat with LLMs
2. `mcp_openrouter_analyze_image` - Analyze a single image
3. `mcp_openrouter_multi_image_analysis` - Analyze multiple images
4. `search_models` - Search for available models
5. `get_model_info` - Get details about a specific model
6. `validate_model` - Check if a model ID is valid
For detailed information about each tool's parameters, see the [main README](../README.md) file.
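As a quick illustration, the single-image tool can be called the same way as in step 3 above (parameter names follow `examples/index.js`; adjust the path for your setup):
```javascript
const analysis = await client.callTool({
  name: 'mcp_openrouter_analyze_image',
  arguments: {
    image_path: '/absolute/path/to/image.png', // absolute path to a local image
    question: 'What is shown in this image?'
  }
});
console.log(analysis.content[0].text);
```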

examples/index.js (new file, 262 lines)

@@ -0,0 +1,262 @@
#!/usr/bin/env node
/**
* OpenRouter MCP Server Examples
*
* This script demonstrates how to use the OpenRouter MCP Server for various tasks:
* 1. Text chat with LLMs
* 2. Single image analysis
* 3. Multiple image analysis
* 4. Model search and selection
*/
import { Client } from '@modelcontextprotocol/sdk/client/index.js';
import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
import OpenAI from 'openai';
import dotenv from 'dotenv';
import path from 'path';
import { fileURLToPath } from 'url';
import fs from 'fs';
// Get the directory name of the current module
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Load environment variables
dotenv.config();
const API_KEY = process.env.OPENROUTER_API_KEY;
if (!API_KEY) {
console.error('Error: OPENROUTER_API_KEY environment variable is missing');
console.error('Please set it in a .env file or in your environment');
process.exit(1);
}
// OpenAI client for direct API calls if needed
const openai = new OpenAI({
apiKey: API_KEY,
baseURL: 'https://openrouter.ai/api/v1',
defaultHeaders: {
'HTTP-Referer': 'https://github.com/stabgan/openrouter-mcp-multimodal',
'X-Title': 'OpenRouter MCP Multimodal Examples',
},
});
// Image file paths for examples
const testImage = path.join(__dirname, '..', 'test.png');
/**
* Convert an image to base64
*/
async function imageToBase64(imagePath) {
try {
const imageBuffer = fs.readFileSync(imagePath);
return imageBuffer.toString('base64');
} catch (error) {
console.error(`Error reading image ${imagePath}: ${error.message}`);
throw error;
}
}
/**
* Example 1: Locate the MCP server build
*
* The server process itself is spawned by the stdio transport in Example 2,
* so this step only verifies that the project has been built.
*/
async function startMcpServer() {
const serverScriptPath = path.join(__dirname, '..', 'dist', 'index.js');
if (!fs.existsSync(serverScriptPath)) {
throw new Error(`Server build not found at ${serverScriptPath} - run "npm run build" first`);
}
console.log('Using MCP server at:', serverScriptPath);
return serverScriptPath;
}
/**
* Example 2: Connect to the MCP server
*/
async function connectToMcpServer(serverPath) {
try {
// Configuration for the MCP server
const serverConfig = {
command: 'node',
args: [serverPath],
env: {
OPENROUTER_API_KEY: API_KEY,
}
};
// Connect to the server
const session = await establishMcpSession(serverConfig);
console.log('Connected to MCP server');
return session;
} catch (error) {
console.error('Failed to connect to MCP server:', error.message);
throw error;
}
}
/**
* Establish an MCP client session
*/
async function establishMcpSession(serverConfig) {
// Spawn the server over stdio; merge in the parent environment so PATH stays available
const transport = new StdioClientTransport({
command: serverConfig.command,
args: serverConfig.args,
env: { ...process.env, ...serverConfig.env }
});
// Create the client and connect it to the server
const client = new Client({ name: 'openrouter-mcp-examples', version: '1.0.0' });
await client.connect(transport);
// List available tools
const { tools } = await client.listTools();
console.log('Available tools:', tools.map(tool => tool.name).join(', '));
return client;
}
/**
* Example 3: Simple text chat using the MCP server
*/
async function textChatExample(session) {
console.log('\n--- Text Chat Example ---');
try {
// Call the text chat tool
const result = await session.callTool({
name: 'mcp_openrouter_chat_completion',
arguments: {
messages: [
{ role: 'user', content: 'What is the Model Context Protocol (MCP) and how is it useful?' }
],
model: 'deepseek/deepseek-chat-v3-0324:free'
}
});
console.log('Response:', result.content[0].text);
} catch (error) {
console.error('Text chat error:', error.message);
}
}
/**
* Example 4: Image analysis using the MCP server
*/
async function imageAnalysisExample(session) {
console.log('\n--- Image Analysis Example ---');
try {
// Convert image to base64
const base64Image = await imageToBase64(testImage);
// Call the image analysis tool
const result = await session.callTool({
name: 'mcp_openrouter_analyze_image',
arguments: {
image_path: testImage,
question: 'What can you see in this image? Please describe it in detail.'
}
});
console.log('Response:', result.content[0].text);
} catch (error) {
console.error('Image analysis error:', error.message);
}
}
/**
* Example 5: Multiple image analysis using the MCP server
*/
async function multiImageAnalysisExample(session) {
console.log('\n--- Multiple Image Analysis Example ---');
try {
// Call the multi-image analysis tool
const result = await session.callTool({
name: 'mcp_openrouter_multi_image_analysis',
arguments: {
images: [
{ url: testImage }
],
prompt: 'What can you see in this image? Please describe it in detail.',
markdown_response: true
}
});
console.log('Response:', result.content[0].text);
} catch (error) {
console.error('Multi-image analysis error:', error.message);
}
}
/**
* Example 6: Search available models
*/
async function searchModelsExample(session) {
console.log('\n--- Search Models Example ---');
try {
// Call the search models tool
const result = await session.callTool({
name: 'search_models',
arguments: {
query: 'free',
capabilities: {
vision: true
},
limit: 5
}
});
console.log('Available free vision models:');
result.content[0].models.forEach((model, index) => {
console.log(`${index + 1}. ${model.id} - Context length: ${model.context_length}`);
});
} catch (error) {
console.error('Search models error:', error.message);
}
}
/**
* Run all examples
*/
async function runExamples() {
try {
// Start the MCP server
const serverPath = await startMcpServer();
// Connect to the MCP server
const session = await connectToMcpServer(serverPath);
// Run the text chat example
await textChatExample(session);
// Run the image analysis example
await imageAnalysisExample(session);
// Run the multi-image analysis example
await multiImageAnalysisExample(session);
// Run the search models example
await searchModelsExample(session);
// Close the client so the spawned server process shuts down and the script can exit
await session.close();
console.log('\nAll examples completed successfully!');
} catch (error) {
console.error('Error running examples:', error.message);
}
}
// Run the examples
runExamples().catch(console.error);

examples/python_example.py (new file, 187 lines)

@@ -0,0 +1,187 @@
#!/usr/bin/env python3
"""
OpenRouter MCP Server - Python Example
This script demonstrates how to use the OpenRouter MCP Server from Python,
for various tasks such as text chat and image analysis.
"""
import os
import sys
import json
import asyncio
import subprocess
from typing import Optional, Dict, Any, List
from contextlib import AsyncExitStack
from dotenv import load_dotenv
# Try to import MCP client libraries, show a helpful error if not available
try:
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
except ImportError:
print("Error: MCP client libraries not found. Please install them with:")
print("pip install python-mcp")
sys.exit(1)
# Try to import OpenAI, show a helpful error if not available
try:
from openai import OpenAI
except ImportError:
print("Error: OpenAI client not found. Please install it with:")
print("pip install openai")
sys.exit(1)
# Load environment variables from .env file
load_dotenv()
# Get API key from environment, or show error
API_KEY = os.getenv("OPENROUTER_API_KEY")
if not API_KEY:
print("Error: OPENROUTER_API_KEY environment variable is missing")
print("Please create a .env file with OPENROUTER_API_KEY=your_key")
sys.exit(1)
# Default model to use
MODEL = "anthropic/claude-3-5-sonnet"
# Configuration for the MCP server
SERVER_CONFIG = {
"command": "npx",
"args": ["-y", "@stabgan/openrouter-mcp-multimodal"],
"env": {"OPENROUTER_API_KEY": API_KEY}
}
def convert_tool_format(tool):
"""Convert MCP tool definition to OpenAI tool format"""
converted_tool = {
"type": "function",
"function": {
"name": tool.name,
"description": tool.description,
"parameters": {
"type": "object",
"properties": tool.inputSchema["properties"],
"required": tool.inputSchema["required"]
}
}
}
return converted_tool
class MCPClient:
"""MCP Client for interacting with the OpenRouter MCP server"""
def __init__(self):
self.session: Optional[ClientSession] = None
self.exit_stack = AsyncExitStack()
self.openai = OpenAI(
base_url="https://openrouter.ai/api/v1",
api_key=API_KEY
)
self.messages = []
async def connect_to_server(self, server_config):
"""Connect to the MCP server"""
server_params = StdioServerParameters(**server_config)
stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
self.stdio, self.write = stdio_transport
self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
await self.session.initialize()
# List available tools from the MCP server
response = await self.session.list_tools()
print("\nConnected to server with tools:", [tool.name for tool in response.tools])
return response.tools
async def process_query(self, query: str) -> str:
"""Process a text query using the MCP server"""
self.messages.append({
"role": "user",
"content": query
})
# Get available tools from the MCP server
response = await self.session.list_tools()
available_tools = [convert_tool_format(tool) for tool in response.tools]
# Make the initial OpenRouter API call with tool definitions
response = self.openai.chat.completions.create(
model=MODEL,
tools=available_tools,
messages=self.messages
)
self.messages.append(response.choices[0].message.model_dump())
final_text = []
content = response.choices[0].message
# Process tool calls if any
if content.tool_calls is not None:
tool_name = content.tool_calls[0].function.name
tool_args = content.tool_calls[0].function.arguments
tool_args = json.loads(tool_args) if tool_args else {}
# Execute tool call
try:
result = await self.session.call_tool(tool_name, tool_args)
final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
except Exception as e:
print(f"Error calling tool {tool_name}: {e}")
result = None
# Add tool result to messages
self.messages.append({
"role": "tool",
"tool_call_id": content.tool_calls[0].id,
"name": tool_name,
"content": result.content if result else "Error executing tool call"
})
# Make a follow-up API call with the tool results
response = self.openai.chat.completions.create(
model=MODEL,
max_tokens=1000,
messages=self.messages,
)
final_text.append(response.choices[0].message.content)
else:
final_text.append(content.content)
return "\n".join(final_text)
async def chat_loop(self):
"""Run an interactive chat loop"""
print("\nMCP Client Started!")
print("Type your queries or 'quit' to exit.")
while True:
try:
query = input("\nQuery: ").strip()
if query.lower() in ['quit', 'exit']:
break
result = await self.process_query(query)
print("Result:")
print(result)
except Exception as e:
print(f"Error: {str(e)}")
async def cleanup(self):
"""Clean up resources"""
await self.exit_stack.aclose()
async def main():
"""Main entry point for the example script"""
client = MCPClient()
try:
await client.connect_to_server(SERVER_CONFIG)
await client.chat_loop()
finally:
await client.cleanup()
if __name__ == "__main__":
asyncio.run(main())

package-lock.json (generated, 173 lines changed)

@@ -1,28 +1,29 @@
{
"name": "@stabgan/openrouter-mcp-multimodal",
"version": "1.0.0",
"version": "1.2.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@stabgan/openrouter-mcp-multimodal",
"version": "1.0.0",
"version": "1.2.0",
"license": "MIT",
"dependencies": {
"@modelcontextprotocol/sdk": "^1.4.1",
"axios": "^1.7.9",
"@modelcontextprotocol/sdk": "^1.8.0",
"axios": "^1.8.4",
"dotenv": "^16.4.7",
"node-fetch": "^3.3.2",
"openai": "^4.83.0",
"sharp": "^0.33.3"
"openai": "^4.89.1",
"sharp": "^0.33.5"
},
"bin": {
"openrouter-multimodal": "dist/index.js"
},
"devDependencies": {
"@types/node": "^22.13.1",
"@types/node": "^22.13.14",
"@types/sharp": "^0.32.0",
"shx": "^0.3.4",
"typescript": "^5.7.3"
"typescript": "^5.8.2"
},
"engines": {
"node": ">=18.0.0"
@@ -421,9 +422,9 @@
}
},
"node_modules/@types/node": {
"version": "22.13.13",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.13.tgz",
"integrity": "sha512-ClsL5nMwKaBRwPcCvH8E7+nU4GxHVx1axNvMZTFHMEfNI7oahimt26P5zjVCRrjiIWj6YFXfE1v3dEp94wLcGQ==",
"version": "22.13.14",
"resolved": "https://registry.npmjs.org/@types/node/-/node-22.13.14.tgz",
"integrity": "sha512-Zs/Ollc1SJ8nKUAgc7ivOEdIBM8JAKgrqqUYi2J997JuKO7/tpQC+WCetQ1sypiKCQWHdvdg9wBNpUPEWZae7w==",
"license": "MIT",
"dependencies": {
"undici-types": "~6.20.0"
@@ -512,16 +513,16 @@
"license": "MIT"
},
"node_modules/body-parser": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.1.0.tgz",
"integrity": "sha512-/hPxh61E+ll0Ujp24Ilm64cykicul1ypfwjVttduAiEdtnJFvLePSrIPk+HMImtNv5270wOGCb1Tns2rybMkoQ==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-2.2.0.tgz",
"integrity": "sha512-02qvAaxv8tp7fBa/mw1ga98OGm+eCbqzJOKoRt70sLmfEEi+jyBYVTDGfCL/k06/4EMk/z01gCe7HoCH/f2LTg==",
"license": "MIT",
"dependencies": {
"bytes": "^3.1.2",
"content-type": "^1.0.5",
"debug": "^4.4.0",
"http-errors": "^2.0.0",
"iconv-lite": "^0.5.2",
"iconv-lite": "^0.6.3",
"on-finished": "^2.4.1",
"qs": "^6.14.0",
"raw-body": "^3.0.0",
@@ -788,16 +789,6 @@
"node": ">= 0.8"
}
},
"node_modules/destroy": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/destroy/-/destroy-1.2.0.tgz",
"integrity": "sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==",
"license": "MIT",
"engines": {
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/detect-libc": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz",
@@ -807,6 +798,18 @@
"node": ">=8"
}
},
"node_modules/dotenv": {
"version": "16.4.7",
"resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.4.7.tgz",
"integrity": "sha512-47qPchRCykZC03FhkYAhrvwU4xDBFIj1QPqaarj6mdM/hgUzfPHcpkHJOn3mJAufFeeAxAzeGsr5X0M4k6fLZQ==",
"license": "BSD-2-Clause",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://dotenvx.com"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
@@ -906,21 +909,21 @@
}
},
"node_modules/eventsource": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.5.tgz",
"integrity": "sha512-LT/5J605bx5SNyE+ITBDiM3FxffBiq9un7Vx0EwMDM3vg8sWKx/tO2zC+LMqZ+smAM0F2hblaDZUVZF0te2pSw==",
"version": "3.0.6",
"resolved": "https://registry.npmjs.org/eventsource/-/eventsource-3.0.6.tgz",
"integrity": "sha512-l19WpE2m9hSuyP06+FbuUUf1G+R0SFLrtQfbRb9PRr+oimOfxQhgGCbVaXg5IvZyyTThJsxh6L/srkMiCeBPDA==",
"license": "MIT",
"dependencies": {
"eventsource-parser": "^3.0.0"
"eventsource-parser": "^3.0.1"
},
"engines": {
"node": ">=18.0.0"
}
},
"node_modules/eventsource-parser": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.0.tgz",
"integrity": "sha512-T1C0XCUimhxVQzW4zFipdx0SficT651NnkR0ZSH3yQwh+mFMdLfgjABVi4YtMTtaL4s168593DaoaRLMqryavA==",
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/eventsource-parser/-/eventsource-parser-3.0.1.tgz",
"integrity": "sha512-VARTJ9CYeuQYb0pZEPbzi740OWFgpHe7AYJ2WFZVnUDUQp5Dk2yJUgF36YsZ81cOyxT0QxmXD2EQpapAouzWVA==",
"license": "MIT",
"engines": {
"node": ">=18.0.0"
@@ -1313,12 +1316,12 @@
}
},
"node_modules/iconv-lite": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.5.2.tgz",
"integrity": "sha512-kERHXvpSaB4aU3eANwidg79K8FlrN77m8G9V+0vOR3HYaRifrlwMEpT7ZBJqLSEIHnEgJTHcWK82wwLwwKwtag==",
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3"
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
@@ -1444,12 +1447,12 @@
}
},
"node_modules/mime-types": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.0.tgz",
"integrity": "sha512-XqoSHeCGjVClAmoGFG3lVFqQFRIrTVw2OH3axRqAcfaw+gHWIfnASS92AV+Rl/mk0MupgZTRHQOjxY6YVnzK5w==",
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-3.0.1.tgz",
"integrity": "sha512-xRc4oEhT6eaBpU1XF7AjpOFD+xQmXNB5OVKwp4tqCuBpHLS/ZbBDrc07mYTDqVMg6PfxUjjNp85O6Cd2Z/5HWA==",
"license": "MIT",
"dependencies": {
"mime-db": "^1.53.0"
"mime-db": "^1.54.0"
},
"engines": {
"node": ">= 0.6"
@@ -1757,18 +1760,6 @@
"node": ">= 0.8"
}
},
"node_modules/raw-body/node_modules/iconv-lite": {
"version": "0.6.3",
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
"license": "MIT",
"dependencies": {
"safer-buffer": ">= 2.1.2 < 3.0.0"
},
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/rechoir": {
"version": "0.6.2",
"resolved": "https://registry.npmjs.org/rechoir/-/rechoir-0.6.2.tgz",
@@ -1803,11 +1794,13 @@
}
},
"node_modules/router": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/router/-/router-2.1.0.tgz",
"integrity": "sha512-/m/NSLxeYEgWNtyC+WtNHCF7jbGxOibVWKnn+1Psff4dJGOfoXP+MuC/f2CwSmyiHdOIzYnYFp4W6GxWfekaLA==",
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/router/-/router-2.2.0.tgz",
"integrity": "sha512-nLTrUKm2UyiL7rlhapu/Zl45FwNgkZGaCpZbIHajDYgwlJCOzLSk+cIPAnsEqV955GjILJnKbdQC1nVPz+gAYQ==",
"license": "MIT",
"dependencies": {
"debug": "^4.4.0",
"depd": "^2.0.0",
"is-promise": "^4.0.0",
"parseurl": "^1.3.3",
"path-to-regexp": "^8.0.0"
@@ -1816,6 +1809,29 @@
"node": ">= 18"
}
},
"node_modules/router/node_modules/debug": {
"version": "4.4.0",
"resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz",
"integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==",
"license": "MIT",
"dependencies": {
"ms": "^2.1.3"
},
"engines": {
"node": ">=6.0"
},
"peerDependenciesMeta": {
"supports-color": {
"optional": true
}
}
},
"node_modules/router/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"license": "MIT"
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
@@ -1855,19 +1871,18 @@
}
},
"node_modules/send": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/send/-/send-1.1.0.tgz",
"integrity": "sha512-v67WcEouB5GxbTWL/4NeToqcZiAWEq90N888fczVArY8A79J0L4FD7vj5hm3eUMua5EpoQ59wa/oovY6TLvRUA==",
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/send/-/send-1.2.0.tgz",
"integrity": "sha512-uaW0WwXKpL9blXE2o0bRhoL2EGXIrZxQ2ZQ4mgcfoBxdFmQold+qWsD2jLrfZ0trjKL6vOw0j//eAwcALFjKSw==",
"license": "MIT",
"dependencies": {
"debug": "^4.3.5",
"destroy": "^1.2.0",
"encodeurl": "^2.0.0",
"escape-html": "^1.0.3",
"etag": "^1.8.1",
"fresh": "^0.5.2",
"fresh": "^2.0.0",
"http-errors": "^2.0.0",
"mime-types": "^2.1.35",
"mime-types": "^3.0.1",
"ms": "^2.1.3",
"on-finished": "^2.4.1",
"range-parser": "^1.2.1",
@@ -1877,36 +1892,6 @@
"node": ">= 18"
}
},
"node_modules/send/node_modules/fresh": {
"version": "0.5.2",
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
"integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/send/node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/send/node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/send/node_modules/ms": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
@@ -2161,9 +2146,9 @@
"optional": true
},
"node_modules/type-is": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.0.tgz",
"integrity": "sha512-gd0sGezQYCbWSbkZr75mln4YBidWUN60+devscpLF5mtRDUpiaTvKpBNrdaCvel1NdR2k6vclXybU5fBd2i+nw==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz",
"integrity": "sha512-OZs6gsjF4vMp32qrCbiVSkrFmXtG/AZhY3t0iAMrMBiAZyV9oALtXO8hsrHbMXF9x6L3grlFuwW2oAz7cav+Gw==",
"license": "MIT",
"dependencies": {
"content-type": "^1.0.5",

package.json

@@ -1,6 +1,6 @@
{
"name": "@stabgan/openrouter-mcp-multimodal",
"version": "1.2.1",
"version": "1.2.0",
"description": "MCP server for OpenRouter providing text chat and image analysis tools",
"type": "module",
"main": "dist/index.js",
@@ -16,7 +16,9 @@
"build": "tsc && shx chmod +x dist/*.js",
"prepare": "npm run build",
"start": "node dist/index.js",
"watch": "tsc --watch"
"watch": "tsc --watch",
"examples": "node examples/index.js",
"audit": "npm audit fix"
},
"keywords": [
"mcp",
@@ -41,18 +43,18 @@
"node": ">=18.0.0"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.4.1",
"@modelcontextprotocol/sdk": "^1.8.0",
"axios": "^1.8.4",
"dotenv": "^16.4.7",
"node-fetch": "^3.3.2",
"openai": "^4.89.1",
"sharp": "^0.33.3"
"sharp": "^0.33.5"
},
"devDependencies": {
"@types/node": "^22.13.1",
"@types/node": "^22.13.14",
"@types/sharp": "^0.32.0",
"shx": "^0.3.4",
"typescript": "^5.7.3"
"typescript": "^5.8.2"
},
"overrides": {
"uri-js": "npm:uri-js-replace",