How to build an MCP server that triggers an Agent using the AI SDK
This short tutorial walks through building an MCP server whose single tool invokes an AI SDK agent to run a parallel code review, then wiring it into Cursor (or any other MCP host).

1. Set up your project
mkdir code-reviewer
cd code-reviewer
npm init -y
npm pkg set type=module
2. Install dependencies
npm install @modelcontextprotocol/sdk
npm install ai
npm install @ai-sdk/openai
3. Create index.js
import { McpServer } from "@modelcontextprotocol/sdk/server/mcp.js";
import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js";
import { z } from "zod";
// The contents of parallel-code-review.js are copy-pasted from
// https://sdk.vercel.ai/docs/foundations/agents#parallel-processing
import { parallelCodeReview } from "./parallel-code-review.js";
// Create the MCP server; name/version are advertised to the MCP host on connect.
const server = new McpServer({
name: "Code Reviewer",
version: "1.0.0"
});
// Register a "code_review" tool. The zod schema declares one required string
// input ("code"); the handler runs the AI SDK agent and returns its summary
// as plain-text MCP content.
server.tool("code_review",
{ code: z.string() },
async ({ code }) => {
// NOTE(review): assumes parallelCodeReview resolves to an object with a
// `summary` string — confirm against the pasted implementation.
const { summary } = await parallelCodeReview(code);
return {
content: [{ type: "text", text: summary }],
};
}
);
// Serve over stdio so the MCP host (e.g. Cursor) can spawn this script as a
// subprocess and talk to it over stdin/stdout.
const transport = new StdioServerTransport();
await server.connect(transport);
The code in parallel-code-review.js
is copy-pasted from the example at https://sdk.vercel.ai/docs/foundations/agents#parallel-processing.
// Source: https://sdk.vercel.ai/docs/foundations/agents#parallel-processing
import { openai } from '@ai-sdk/openai';
import { generateText, generateObject } from 'ai';
import { z } from 'zod';
// Example: Parallel code review with multiple specialized reviewers
// Placeholder — paste the full implementation from the URL above.
// index.js destructures `{ summary }` from this function's result, so the
// pasted implementation must resolve to an object with a `summary` string.
export async function parallelCodeReview(code) {
// ...
}
The AI SDK defaults to reading the OPENAI_API_KEY environment variable,
which we will provide in the next step.
4. Add to Cursor (or your MCP host of choice)
{
"mcpServers": {
// other MCP servers...
"Code Reviewer": {
"command": "node",
// Use an absolute path — MCP hosts spawn servers from an arbitrary working directory.
"args": [
"/absolute/path/to/your/code-reviewer/index.js"
],
// Replace the placeholder with your real key; the AI SDK reads OPENAI_API_KEY.
"env": {
"OPENAI_API_KEY": "sk-proj-xxxxxxx"
}
}
}
}