Configure and manage multiple MCP servers for complex workflows
This guide shows you how to configure and use multiple MCP servers simultaneously with mcp-use, enabling complex workflows that span different domains.
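The examples below load their server definitions from a `multi_server_config.json` file. Here is a minimal sketch of such a file, assuming the same locally installed MCP servers used later in this guide (filesystem, GitHub, and Postgres); the commands, paths, and credentials are placeholders to replace with your own:

```json
{
  "mcpServers": {
    "filesystem": {
      "command": "mcp-server-filesystem",
      "args": ["/workspace"]
    },
    "github": {
      "command": "mcp-server-github",
      "env": { "GITHUB_TOKEN": "your-github-token" }
    },
    "postgres": {
      "command": "mcp-server-postgres",
      "env": { "DATABASE_URL": "postgresql://localhost/mydb" }
    }
  }
}
```

With a configuration like this in place, the client and agent below connect to every server at once.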
```typescript
import { ChatOpenAI } from '@langchain/openai'
import { MCPAgent, MCPClient, loadConfigFile } from 'mcp-use'

async function main() {
  // Load multi-server configuration
  const config = await loadConfigFile('multi_server_config.json')
  const client = new MCPClient(config)

  // Create agent (all servers will be connected)
  const llm = new ChatOpenAI({ model: 'gpt-4' })
  const agent = new MCPAgent({ llm, client })

  // Agent has access to tools from all servers
  const result = await agent.run(
    'Search for Python tutorials online, save the best ones to a file, ' +
    'then create a database table to track my learning progress'
  )
  console.log(result)

  await client.closeAllSessions()
}

main().catch(console.error)
```
Enable the server manager for more efficient resource usage:
```typescript
import { ChatOpenAI } from '@langchain/openai'
import { MCPAgent, MCPClient, loadConfigFile } from 'mcp-use'

async function main() {
  const config = await loadConfigFile('multi_server_config.json')
  const client = new MCPClient(config)
  const llm = new ChatOpenAI({ model: 'gpt-4' })

  // Enable server manager for dynamic server selection
  const agent = new MCPAgent({
    llm,
    client,
    useServerManager: true, // Only connects to servers as needed
    maxSteps: 30
  })

  // The agent will automatically choose appropriate servers
  const result = await agent.run(
    'Research the latest AI papers, summarize them in a markdown file, ' +
    'and commit the file to my research repository on GitHub'
  )
  console.log(result)

  await client.closeAllSessions()
}

main().catch(console.error)
```
A single prompt can now drive a workflow that spans several servers. For example, a data pipeline:

```typescript
const result = await agent.run(
  'Scrape product data from example-store.com, ' +
  'clean and analyze it with pandas, ' +
  'then save the results as CSV and Excel files'
)
```
Or a development workflow:

```typescript
const result = await agent.run(
  'Create a new Python function to calculate fibonacci numbers, ' +
  'write unit tests for it, run the tests, ' +
  'and if they pass, commit the changes to the current git branch'
)
```
You can conditionally include servers based on availability:
```typescript
import { ChatOpenAI } from '@langchain/openai'
import { MCPClient, MCPAgent } from 'mcp-use'

async function createAgentWithAvailableServers() {
  const config: any = { mcpServers: {} }

  // Always include filesystem
  config.mcpServers.filesystem = {
    command: 'mcp-server-filesystem',
    args: ['/workspace']
  }

  // Include GitHub server if token is available
  if (process.env.GITHUB_TOKEN) {
    config.mcpServers.github = {
      command: 'mcp-server-github',
      env: { GITHUB_TOKEN: process.env.GITHUB_TOKEN }
    }
  }

  // Include database server if URL is available
  if (process.env.DATABASE_URL) {
    config.mcpServers.postgres = {
      command: 'mcp-server-postgres',
      env: { DATABASE_URL: process.env.DATABASE_URL }
    }
  }

  const client = new MCPClient(config)
  return new MCPAgent({
    llm: new ChatOpenAI({ model: 'gpt-4' }),
    client
  })
}
```
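A short usage sketch for this helper (the prompt is illustrative). In a real application you would also keep a reference to the client so you can call `closeAllSessions()` when finished, as in the earlier examples:

```typescript
async function main() {
  // Builds an agent with whichever servers are available in this environment
  const agent = await createAgentWithAvailableServers()

  const result = await agent.run(
    'List the files in /workspace and summarize what the project contains'
  )
  console.log(result)
}

main().catch(console.error)
```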
The server manager provides several performance benefits:

- Lazy Loading: servers are started only when the agent actually needs their tools.
- Resource Management: tasks that touch a subset of servers consume fewer processes and connections.
- Error Isolation: a server that fails to start or connect does not block tasks that rely on the others.
```typescript
// Without server manager - all servers start immediately
const agent = new MCPAgent({
  llm,
  client,
  useServerManager: false
})
// Result: every configured server starts, consuming resources

// With server manager - servers start only when needed
const agentOptimized = new MCPAgent({
  llm,
  client,
  useServerManager: true
})
// Result: only the required servers start for each task
```
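Independent of the server manager setting, it helps to guarantee that sessions are released even when a task throws. Here is a minimal sketch using the same `agent.run` and `client.closeAllSessions` calls shown above; the `runTask` helper is hypothetical:

```typescript
import { MCPAgent, MCPClient } from 'mcp-use'

// Hypothetical helper: run a single task and always release server sessions
async function runTask(agent: MCPAgent, client: MCPClient, prompt: string) {
  try {
    return await agent.run(prompt)
  } catch (error) {
    // A failure in one task doesn't leave the other servers' sessions dangling
    console.error('Task failed:', error)
    throw error
  } finally {
    await client.closeAllSessions()
  }
}
```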