MCP 注解示例
本页提供了在 Spring AI 应用中使用 MCP 注解的完整示例。
完整应用示例
简单计算器服务器
一个完整的 MCP 服务器示例——提供计算器工具:
/**
 * Entry point for the calculator MCP server example.
 * All MCP server wiring (transport, capabilities) comes from Spring AI
 * auto-configuration driven by the application configuration shown below.
 */
@SpringBootApplication
public class CalculatorServerApplication {
    public static void main(String[] args) {
        SpringApplication.run(CalculatorServerApplication.class, args);
    }
}
/**
 * MCP tool provider exposing basic arithmetic operations plus a low-level
 * expression tool that works directly with {@code CallToolRequest}.
 *
 * NOTE(review): evaluateExpression(...) is an elided helper defined elsewhere
 * in the example.
 */
@Component
public class CalculatorTools {

    /** Adds two numbers. */
    @McpTool(name = "add", description = "两个数字相加")
    public double add(
            @McpToolParam(description = "第一个数字", required = true) double a,
            @McpToolParam(description = "第二个数字", required = true) double b) {
        return a + b;
    }

    /** Subtracts the second number from the first. */
    @McpTool(name = "subtract", description = "两个数字相减")
    public double subtract(
            @McpToolParam(description = "第一个数字", required = true) double a,
            @McpToolParam(description = "第二个数字", required = true) double b) {
        return a - b;
    }

    /** Multiplies two numbers. */
    @McpTool(name = "multiply", description = "两个数字相乘")
    public double multiply(
            @McpToolParam(description = "第一个数字", required = true) double a,
            @McpToolParam(description = "第二个数字", required = true) double b) {
        return a * b;
    }

    /**
     * Divides dividend by divisor.
     *
     * @throws IllegalArgumentException if the divisor is zero
     */
    @McpTool(name = "divide", description = "两个数字相除")
    public double divide(
            @McpToolParam(description = "被除数", required = true) double dividend,
            @McpToolParam(description = "除数", required = true) double divisor) {
        if (divisor == 0) {
            throw new IllegalArgumentException("除数不能为零");
        }
        return dividend / divisor;
    }

    /**
     * Evaluates a complex mathematical expression taken from the raw tool
     * request. Demonstrates low-level access to request arguments and the
     * request context's convenience logging API.
     */
    @McpTool(name = "calculate-expression",
            description = "计算复杂数学表达式")
    public CallToolResult calculateExpression(
            CallToolRequest request,
            McpSyncRequestContext context) {
        Map<String, Object> args = request.arguments();
        Object rawExpression = args.get("expression");
        // Robustness fix: validate the argument up front instead of relying on
        // a downstream NullPointerException/ClassCastException (the original
        // also logged "计算表达式: null" before failing).
        if (!(rawExpression instanceof String) || ((String) rawExpression).isBlank()) {
            return CallToolResult.builder()
                .isError(true)
                .addTextContent("错误: 缺少或无效的 expression 参数")
                .build();
        }
        String expression = (String) rawExpression;
        // Use the context's convenience logging method
        context.info("计算表达式: " + expression);
        try {
            double result = evaluateExpression(expression);
            return CallToolResult.builder()
                .addTextContent("结果: " + result)
                .build();
        } catch (Exception e) {
            // Report failures as an MCP error result rather than letting the
            // exception escape the tool call.
            return CallToolResult.builder()
                .isError(true)
                .addTextContent("错误: " + e.getMessage())
                .build();
        }
    }
}
配置示例:
spring:
ai:
mcp:
server:
name: calculator-server
version: 1.0.0
type: SYNC
protocol: SSE # 可选:STDIO、STREAMABLE
capabilities:
tool: true
resource: true
prompt: true
completion: true
文档处理服务器
一个带有资源和提示的文档处理服务器示例:
/**
 * MCP server component exposing an in-memory document store as resources,
 * a tool, a prompt, and an argument-completion provider.
 *
 * NOTE(review): Document, performAnalysis(...) and the MCP schema types are
 * defined elsewhere in the example and are not shown here.
 */
@Component
public class DocumentServer {

    // In-memory store; ConcurrentHashMap because MCP requests may arrive on
    // multiple threads.
    private final Map<String, Document> documents = new ConcurrentHashMap<>();

    /**
     * Resolves a document for the {@code document://{id}} URI template.
     * "Not found" and "access denied" are reported as readable text payloads
     * instead of exceptions, so the client always receives a resource result.
     */
    @McpResource(
        uri = "document://{id}",
        name = "Document",
        description = "访问已存储的文档")
    public ReadResourceResult getDocument(String id, McpMeta meta) {
        Document doc = documents.get(id);
        if (doc == null) {
            return new ReadResourceResult(List.of(
                new TextResourceContents("document://" + id,
                    "text/plain", "文档未找到")
            ));
        }
        // Check access permission based on request metadata.
        // Assumes meta carries an "accessLevel" string — confirm with callers.
        String accessLevel = (String) meta.get("accessLevel");
        if ("restricted".equals(doc.getClassification()) &&
                !"admin".equals(accessLevel)) {
            return new ReadResourceResult(List.of(
                new TextResourceContents("document://" + id,
                    "text/plain", "访问被拒绝")
            ));
        }
        return new ReadResourceResult(List.of(
            new TextResourceContents("document://" + id,
                doc.getMimeType(), doc.getContent())
        ));
    }

    /**
     * Analyzes a stored document, defaulting to a "summary" analysis when no
     * type is given. Progress notifications are sent only when the client
     * supplied a progress token.
     */
    @McpTool(name = "analyze-document",
        description = "分析文档内容")
    public String analyzeDocument(
            McpSyncRequestContext context,
            @McpToolParam(description = "文档 ID", required = true) String docId,
            @McpToolParam(description = "分析类型", required = false) String type) {
        Document doc = documents.get(docId);
        if (doc == null) {
            return "文档未找到";
        }
        // Obtain the progress token from the request context.
        String progressToken = context.request().progressToken();
        if (progressToken != null) {
            context.progress(p -> p.progress(0.0).total(1.0).message("开始分析"));
        }
        // Perform the analysis (helper defined elsewhere).
        String analysisType = type != null ? type : "summary";
        String result = performAnalysis(doc, analysisType);
        if (progressToken != null) {
            context.progress(p -> p.progress(1.0).total(1.0).message("分析完成"));
        }
        return result;
    }

    /**
     * Builds a prompt asking an LLM to summarize a document; the optional
     * {@code length} argument controls the requested summary length.
     */
    @McpPrompt(
        name = "document-summary",
        description = "生成文档摘要提示")
    public GetPromptResult documentSummaryPrompt(
            @McpArg(name = "docId", required = true) String docId,
            @McpArg(name = "length", required = false) String length) {
        Document doc = documents.get(docId);
        if (doc == null) {
            return new GetPromptResult("错误",
                List.of(new PromptMessage(Role.SYSTEM,
                    new TextContent("文档未找到"))));
        }
        String promptText = String.format(
            "请将以下文档总结为 %s:\n\n%s",
            length != null ? length : "若干段落",
            doc.getContent()
        );
        return new GetPromptResult("文档摘要",
            List.of(new PromptMessage(Role.USER, new TextContent(promptText))));
    }

    /**
     * Auto-completion for the "document-summary" prompt's docId argument:
     * returns up to 10 stored ids matching the typed prefix, sorted.
     */
    @McpComplete(prompt = "document-summary")
    public List<String> completeDocumentId(String prefix) {
        return documents.keySet().stream()
            .filter(id -> id.startsWith(prefix))
            .sorted()
            .limit(10)
            .toList();
    }
}
带处理器的 MCP 客户端
完整的 MCP 客户端应用示例,包含各种处理器:
/**
 * Entry point for the MCP client example; the handler beans below are picked
 * up by the Spring AI MCP annotation scanner (enabled in the configuration).
 */
@SpringBootApplication
public class McpClientApplication {
    public static void main(String[] args) {
        SpringApplication.run(McpClientApplication.class, args);
    }
}
/**
 * Client-side MCP handlers for the connection named "server1": logging,
 * sampling, elicitation, progress, and tool/resource list-changed events.
 *
 * NOTE(review): toolRegistry, eventBus, resourceCache, broadcastProgress(...)
 * and getDefaultValueForProperty(...) are referenced below but not declared in
 * this snippet — presumably elided fields/helpers of the example; confirm.
 */
@Component
public class ClientHandlers {

    private final Logger logger = LoggerFactory.getLogger(ClientHandlers.class);
    private final ProgressTracker progressTracker = new ProgressTracker();
    private final ChatModel chatModel;

    // @Lazy presumably breaks a startup cycle between the MCP client and the
    // ChatModel auto-configuration — confirm against the client setup.
    public ClientHandlers(@Lazy ChatModel chatModel) {
        this.chatModel = chatModel;
    }

    /** Maps MCP log levels onto the corresponding SLF4J levels. */
    @McpLogging(clients = "server1")
    public void handleLogging(LoggingMessageNotification notification) {
        switch (notification.level()) {
            case ERROR:
                logger.error("[MCP] {} - {}", notification.logger(), notification.data());
                break;
            case WARNING:
                logger.warn("[MCP] {} - {}", notification.logger(), notification.data());
                break;
            case INFO:
                logger.info("[MCP] {} - {}", notification.logger(), notification.data());
                break;
            default:
                logger.debug("[MCP] {} - {}", notification.logger(), notification.data());
        }
    }

    /**
     * Performs sampling with the Spring AI ChatModel: converts the MCP
     * messages to Spring AI messages, calls the model, and wraps the reply.
     */
    @McpSampling(clients = "server1")
    public CreateMessageResult handleSampling(CreateMessageRequest request) {
        // Perform sampling using the Spring AI ChatModel.
        // Assumes every message content is a TextContent — the casts below
        // would fail on image/audio content; confirm against the server.
        List<Message> messages = request.messages().stream()
            .map(msg -> {
                if (msg.role() == Role.USER) {
                    return new UserMessage(((TextContent) msg.content()).text());
                } else {
                    return AssistantMessage.builder()
                        .content(((TextContent) msg.content()).text())
                        .build();
                }
            })
            .toList();
        ChatResponse response = chatModel.call(new Prompt(messages));
        // NOTE(review): getContent() vs getText(), and hints().get(0) assuming
        // at least one model hint, are Spring AI version-dependent — confirm.
        return CreateMessageResult.builder()
            .role(Role.ASSISTANT)
            .content(new TextContent(response.getResult().getOutput().getContent()))
            .model(request.modelPreferences().hints().get(0).name())
            .build();
    }

    /**
     * Answers an elicitation request by filling each schema property with a
     * default value; a real application would prompt the user instead.
     */
    @McpElicitation(clients = "server1")
    public ElicitResult handleElicitation(ElicitRequest request) {
        // A real application could open a UI dialog here.
        Map<String, Object> userData = new HashMap<>();
        logger.info("收到 elicitation 请求: {}", request.message());
        // Simulate user input based on the requested schema.
        Map<String, Object> schema = request.requestedSchema();
        if (schema != null && schema.containsKey("properties")) {
            // Unchecked cast: schema is an untyped map per the MCP schema shape.
            Map<String, Object> properties = (Map<String, Object>) schema.get("properties");
            properties.forEach((key, value) -> {
                // In a real application, prompt the user for each field.
                userData.put(key, getDefaultValueForProperty(key, value));
            });
        }
        return new ElicitResult(ElicitResult.Action.ACCEPT, userData);
    }

    /** Records progress updates and forwards them to interested observers. */
    @McpProgress(clients = "server1")
    public void handleProgress(ProgressNotification notification) {
        progressTracker.update(
            notification.progressToken(),
            notification.progress(),
            notification.total(),
            notification.message()
        );
        // Update the UI or notify listeners (e.g. via websocket).
        broadcastProgress(notification);
    }

    /** Refreshes the local tool registry when server1's tool list changes. */
    @McpToolListChanged(clients = "server1")
    public void handleServer1ToolsChanged(List<McpSchema.Tool> tools) {
        logger.info("Server1 tools updated: {} tools available", tools.size());
        // Update tool registry
        toolRegistry.updateServerTools("server1", tools);
        // Notify UI to refresh tool list
        eventBus.publish(new ToolsUpdatedEvent("server1", tools));
    }

    /** Rebuilds the resource cache when server1's resource list changes. */
    @McpResourceListChanged(clients = "server1")
    public void handleServer1ResourcesChanged(List<McpSchema.Resource> resources) {
        logger.info("Server1 resources updated: {} resources available", resources.size());
        // Clear resource cache for this server
        resourceCache.clearServer("server1");
        // Register new resources
        resources.forEach(resource ->
            resourceCache.register("server1", resource));
    }
}
配置:
spring:
ai:
mcp:
client:
type: SYNC
initialized: true
request-timeout: 30s
annotation-scanner:
enabled: true
sse:
connections:
server1:
url: http://localhost:8080
stdio:
connections:
local-tool:
command: /usr/local/bin/mcp-tool
args:
- --mode=production
异步示例
异步工具服务器
/**
 * Asynchronous (Reactor-based) MCP tool/resource provider.
 *
 * NOTE(review): DataResult and loadDataAsync(...) are defined elsewhere in
 * the example and are not shown here.
 */
@Component
public class AsyncDataProcessor {

    // Perf fix: WebClient is immutable and thread-safe; build it once and
    // reuse it instead of calling WebClient.create() on every invocation.
    private final WebClient webClient = WebClient.create();

    /**
     * Fetches data from an external URL with an optional timeout (default
     * 30 seconds). Errors are mapped to a fallback DataResult instead of
     * propagating, so the tool call never fails outright.
     */
    @McpTool(name = "fetch-data", description = "Fetch data from external source")
    public Mono<DataResult> fetchData(
            @McpToolParam(description = "Data source URL", required = true) String url,
            @McpToolParam(description = "Timeout in seconds", required = false) Integer timeout) {
        Duration timeoutDuration = Duration.ofSeconds(timeout != null ? timeout : 30);
        return webClient
            .get()
            .uri(url)
            .retrieve()
            .bodyToMono(String.class)
            .map(data -> new DataResult(url, data, System.currentTimeMillis()))
            .timeout(timeoutDuration)
            .onErrorReturn(new DataResult(url, "Error fetching data", 0L));
    }

    /**
     * Emits one message per processed item, reporting progress after each
     * item when the client supplied a progress token.
     */
    @McpTool(name = "process-stream", description = "Process data stream")
    public Flux<String> processStream(
            McpAsyncRequestContext context,
            @McpToolParam(description = "Item count", required = true) int count) {
        // Access progress token from context
        String progressToken = context.request().progressToken();
        return Flux.range(1, count)
            .delayElements(Duration.ofMillis(100))
            .flatMap(i -> {
                if (progressToken != null) {
                    double progress = (double) i / count;
                    return context.progress(p -> p.progress(progress).total(1.0).message("Processing item " + i))
                        .thenReturn("Processed item " + i);
                }
                return Mono.just("Processed item " + i);
            });
    }

    /**
     * Loads resource data on boundedElastic, since loadDataAsync is
     * presumably blocking — confirm against its definition.
     */
    @McpResource(uri = "async-data://{id}", name = "Async Data")
    public Mono<ReadResourceResult> getAsyncData(String id) {
        return Mono.fromCallable(() -> loadDataAsync(id))
            .subscribeOn(Schedulers.boundedElastic())
            .map(data -> new ReadResourceResult(List.of(
                new TextResourceContents("async-data://" + id,
                    "application/json", data)
            )));
    }
}
异步客户端处理器
/**
 * Asynchronous (Reactor-based) client handlers for the "async-server"
 * connection: sampling, progress, and elicitation.
 *
 * NOTE(review): extractPrompt, callLLMAsync, updateProgressAsync,
 * broadcastProgressAsync and showUserDialogAsync are elided helpers defined
 * elsewhere in the example.
 */
@Component
public class AsyncClientHandlers {

    /**
     * Handles a sampling request: extracts the prompt, calls the LLM
     * asynchronously, wraps the text reply, and fails after 30 seconds.
     */
    @McpSampling(clients = "async-server")
    public Mono<CreateMessageResult> handleAsyncSampling(CreateMessageRequest request) {
        return Mono.fromCallable(() -> {
            // Prepare request for LLM
            String prompt = extractPrompt(request);
            return prompt;
        })
        .flatMap(prompt -> callLLMAsync(prompt))
        .map(response -> CreateMessageResult.builder()
            .role(Role.ASSISTANT)
            .content(new TextContent(response))
            .model("gpt-4")
            .build())
        .timeout(Duration.ofSeconds(30));
    }

    /**
     * Records and broadcasts a progress update on the parallel scheduler.
     * NOTE(review): broadcastProgressAsync(notification) is invoked eagerly at
     * assembly time; if it performs side effects before subscription, it
     * should be wrapped in Mono.defer — confirm its implementation.
     */
    @McpProgress(clients = "async-server")
    public Mono<Void> handleAsyncProgress(ProgressNotification notification) {
        return Mono.fromRunnable(() -> {
            // Update progress tracking
            updateProgressAsync(notification);
        })
        .then(broadcastProgressAsync(notification))
        .subscribeOn(Schedulers.parallel());
    }

    /**
     * Shows a user dialog for an elicitation request: ACCEPT with the entered
     * data, DECLINE when empty, CANCEL on error or after a 5-minute timeout
     * (the timeout surfaces as an error and is caught by onErrorReturn).
     */
    @McpElicitation(clients = "async-server")
    public Mono<ElicitResult> handleAsyncElicitation(ElicitRequest request) {
        return showUserDialogAsync(request)
            .map(userData -> {
                if (userData != null && !userData.isEmpty()) {
                    return new ElicitResult(ElicitResult.Action.ACCEPT, userData);
                } else {
                    return new ElicitResult(ElicitResult.Action.DECLINE, null);
                }
            })
            .timeout(Duration.ofMinutes(5))
            .onErrorReturn(new ElicitResult(ElicitResult.Action.CANCEL, null));
    }
}
无状态服务器示例
/**
 * Stateless MCP tools, resource and prompt providers.
 *
 * NOTE(review): toTitleCase, loadStaticContent, loadTemplate and
 * substituteVariables are elided helpers defined elsewhere in the example.
 */
@Component
public class StatelessTools {

    // Perf fix: ObjectMapper is expensive to create and thread-safe once
    // configured; share one instance instead of allocating one per call.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    /**
     * Simple stateless text formatter; unknown format types return the
     * input unchanged.
     */
    @McpTool(name = "format-text", description = "Format text")
    public String formatText(
            @McpToolParam(description = "Text to format", required = true) String text,
            @McpToolParam(description = "Format type", required = true) String format) {
        return switch (format.toLowerCase()) {
            case "uppercase" -> text.toUpperCase();
            case "lowercase" -> text.toLowerCase();
            case "title" -> toTitleCase(text);
            case "reverse" -> new StringBuilder(text).reverse().toString();
            default -> text;
        };
    }

    /**
     * Stateless JSON validator demonstrating access to the transport context.
     * Reports validity via a structured result instead of throwing.
     */
    @McpTool(name = "validate-json", description = "Validate JSON")
    public CallToolResult validateJson(
            McpTransportContext context,
            @McpToolParam(description = "JSON string", required = true) String json) {
        try {
            OBJECT_MAPPER.readTree(json);
            return CallToolResult.builder()
                .addTextContent("Valid JSON")
                .structuredContent(Map.of("valid", true))
                .build();
        } catch (Exception e) {
            return CallToolResult.builder()
                .addTextContent("Invalid JSON: " + e.getMessage())
                .structuredContent(Map.of("valid", false, "error", e.getMessage()))
                .build();
        }
    }

    /** Simple stateless resource backed by static content. */
    @McpResource(uri = "static://{path}", name = "Static Resource")
    public String getStaticResource(String path) {
        return loadStaticContent(path);
    }

    /** Renders a named template prompt, optionally substituting variables. */
    @McpPrompt(name = "template", description = "Template prompt")
    public GetPromptResult templatePrompt(
            @McpArg(name = "template", required = true) String templateName,
            @McpArg(name = "variables", required = false) String variables) {
        String template = loadTemplate(templateName);
        if (variables != null) {
            template = substituteVariables(template, variables);
        }
        return new GetPromptResult("Template: " + templateName,
            List.of(new PromptMessage(Role.USER, new TextContent(template))));
    }
}
使用多个 LLM 提供者的 MCP 采样
这个示例演示了如何使用 MCP 采样(MCP Sampling)从多个 LLM 提供者生成创意内容,并展示了基于注解的服务端和客户端实现方法。
采样服务器实现
服务端提供了一个天气工具,该工具使用 MCP 采样(MCP Sampling)从不同的 LLM 提供者生成诗歌:
/**
 * MCP tool that fetches the current temperature from Open-Meteo and, when the
 * client supports sampling, decorates it with poems requested from multiple
 * LLM providers via model hints.
 */
@Service
public class WeatherService {

    private final RestClient restClient = RestClient.create();

    /** Minimal projection of the Open-Meteo current-weather response. */
    public record WeatherResponse(Current current) {
        public record Current(LocalDateTime time, int interval, double temperature_2m) {
        }
    }

    @McpTool(description = "获取指定位置的温度(摄氏度)")
    public String getTemperature2(McpSyncServerExchange exchange,
            @McpToolParam(description = "位置纬度") double latitude,
            @McpToolParam(description = "位置经度") double longitude) {
        // Fetch weather data.
        // Bug fix: the query string was garbled by HTML-entity mojibake
        // ("¤t=" is a mis-rendered "&current="); restore the proper parameter.
        WeatherResponse weatherResponse = restClient
            .get()
            .uri("https://api.open-meteo.com/v1/forecast?latitude={latitude}&longitude={longitude}&current=temperature_2m",
                latitude, longitude)
            .retrieve()
            .body(WeatherResponse.class);
        StringBuilder openAiWeatherPoem = new StringBuilder();
        StringBuilder anthropicWeatherPoem = new StringBuilder();
        // Send a logging notification to the client.
        exchange.loggingNotification(LoggingMessageNotification.builder()
            .level(LoggingLevel.INFO)
            .data("开始采样")
            .build());
        // Only sample when the client advertised the sampling capability.
        if (exchange.getClientCapabilities().sampling() != null) {
            var messageRequestBuilder = McpSchema.CreateMessageRequest.builder()
                .systemPrompt("你是一位诗人!")
                .messages(List.of(new McpSchema.SamplingMessage(McpSchema.Role.USER,
                    new McpSchema.TextContent(
                        "请根据以下天气预报写一首诗(温度单位为摄氏度),使用 Markdown 格式:\n "
                            + ModelOptionsUtils.toJsonStringPrettyPrinter(weatherResponse)))));
            // Request a poem from OpenAI (routed by the client via the hint).
            var openAiLlmMessageRequest = messageRequestBuilder
                .modelPreferences(ModelPreferences.builder().addHint("openai").build())
                .build();
            CreateMessageResult openAiLlmResponse = exchange.createMessage(openAiLlmMessageRequest);
            openAiWeatherPoem.append(((McpSchema.TextContent) openAiLlmResponse.content()).text());
            // Request a poem from Anthropic.
            var anthropicLlmMessageRequest = messageRequestBuilder
                .modelPreferences(ModelPreferences.builder().addHint("anthropic").build())
                .build();
            CreateMessageResult anthropicAiLlmResponse = exchange.createMessage(anthropicLlmMessageRequest);
            anthropicWeatherPoem.append(((McpSchema.TextContent) anthropicAiLlmResponse.content()).text());
        }
        exchange.loggingNotification(LoggingMessageNotification.builder()
            .level(LoggingLevel.INFO)
            .data("采样完成")
            .build());
        // Combine both poems and the raw weather data into one response.
        String responseWithPoems = "OpenAI 关于天气的诗歌:\n" + openAiWeatherPoem.toString() + "\n\n" +
            "Anthropic 关于天气的诗歌:\n" + anthropicWeatherPoem.toString() + "\n"
            + ModelOptionsUtils.toJsonStringPrettyPrinter(weatherResponse);
        return responseWithPoems;
    }
}
采样客户端实现
客户端通过根据模型提示(model hints)将采样请求路由到相应的 LLM 提供者来处理采样请求:
/**
 * Client-side handlers for the "server1" connection: progress, logging, and
 * sampling. Sampling requests are routed to a ChatClient selected by the
 * request's model hint.
 */
@Service
public class McpClientHandlers {

    private static final Logger logger = LoggerFactory.getLogger(McpClientHandlers.class);

    // All configured ChatClients keyed by bean name; the model hint is matched
    // against the key (e.g. "openai" matches "openaiChatClient").
    @Autowired
    Map<String, ChatClient> chatClients;

    /** Logs MCP progress notifications. */
    @McpProgress(clients = "server1")
    public void progressHandler(ProgressNotification progressNotification) {
        logger.info("MCP 进度: [{}] 当前进度: {} 总量: {} 消息: {}",
            progressNotification.progressToken(), progressNotification.progress(),
            progressNotification.total(), progressNotification.message());
    }

    /** Logs MCP logging notifications. */
    @McpLogging(clients = "server1")
    public void loggingHandler(LoggingMessageNotification loggingMessage) {
        logger.info("MCP 日志: [{}] {}", loggingMessage.level(), loggingMessage.data());
    }

    /**
     * Routes a sampling request to the ChatClient matching the model hint.
     *
     * Robustness fix: the original dereferenced
     * {@code modelPreferences().hints().get(0)} unconditionally, which throws
     * when a server sends no preferences/hints, and used a bare orElseThrow().
     * Missing hints now fall back to the first configured client, and a
     * descriptive exception is raised when no client matches.
     */
    @McpSampling(clients = "server1")
    public CreateMessageResult samplingHandler(CreateMessageRequest llmRequest) {
        logger.info("MCP 采样请求: {}", llmRequest);
        // Extract the user prompt from the first message.
        var userPrompt = ((McpSchema.TextContent) llmRequest.messages().get(0).content()).text();
        // Extract the model hint, if any was provided.
        String modelHint = (llmRequest.modelPreferences() != null
                && llmRequest.modelPreferences().hints() != null
                && !llmRequest.modelPreferences().hints().isEmpty())
            ? llmRequest.modelPreferences().hints().get(0).name()
            : null;
        // Select the matching ChatClient (any client when no hint was given).
        ChatClient hintedChatClient = chatClients.entrySet().stream()
            .filter(e -> modelHint == null || e.getKey().contains(modelHint))
            .findFirst()
            .orElseThrow(() -> new IllegalStateException(
                "未找到与模型提示匹配的 ChatClient: " + modelHint))
            .getValue();
        // Generate the response with the selected model.
        String response = hintedChatClient.prompt()
            .system(llmRequest.systemPrompt())
            .user(userPrompt)
            .call()
            .content();
        return CreateMessageResult.builder()
            .content(new McpSchema.TextContent(response))
            .build();
    }
}
客户端应用设置
在客户端应用中注册 MCP 工具和处理器:
/**
 * Client application that asks one predefined question against the MCP
 * server and then exits (note the .close() after run(...)).
 */
@SpringBootApplication
public class McpClientApplication {

    public static void main(String[] args) {
        // close() turns this into a run-once CLI app rather than a server.
        SpringApplication.run(McpClientApplication.class, args).close();
    }

    /**
     * Builds a ChatClient wired with the MCP tool callbacks and runs the
     * predefined weather question at startup.
     */
    @Bean
    public CommandLineRunner predefinedQuestions(OpenAiChatModel openAiChatModel,
            ToolCallbackProvider mcpToolProvider) {
        return args -> {
            ChatClient chatClient = ChatClient.builder(openAiChatModel)
                .defaultToolCallbacks(mcpToolProvider)
                .build();
            String userQuestion = """
现在阿姆斯特丹的天气如何?
请整合来自所有 LLM 提供者的创意回答。
在其他提供者回答之后,再写一首诗,综合所有提供者的诗歌。
""";
            System.out.println("> 用户: " + userQuestion);
            System.out.println("> 助手: " + chatClient.prompt(userQuestion).call().content());
        };
    }
}
配置
服务器配置
# 服务器 application.properties
spring.ai.mcp.server.name=mcp-sampling-server-annotations
spring.ai.mcp.server.version=0.0.1
spring.ai.mcp.server.protocol=STREAMABLE
spring.main.banner-mode=off
客户端配置
# 客户端 application.properties
spring.application.name=mcp
spring.main.web-application-type=none
# 禁用默认聊天客户端的自动配置(适用于多模型场景)
spring.ai.chat.client.enabled=false
# API 密钥
spring.ai.openai.api-key=${OPENAI_API_KEY}
spring.ai.anthropic.api-key=${ANTHROPIC_API_KEY}
# 使用 streamable-http 传输的 MCP 客户端连接
spring.ai.mcp.client.streamable-http.connections.server1.url=http://localhost:8080
# 禁用工具回调以防止循环依赖
spring.ai.mcp.client.toolcallback.enabled=false
展示的关键功能
- 多模型采样:服务器根据模型提示(model hints)向多个 LLM 提供者请求内容
- 基于注解的处理器:客户端使用 @McpSampling、@McpLogging 和 @McpProgress 注解
- 无状态 HTTP 传输:使用可流式(streamable)协议进行通信
- 创意内容生成:根据不同模型的天气数据生成诗歌
- 统一响应处理:将来自多个提供者的响应合并为单一结果
示例输出
运行客户端时,你会看到如下输出:
> USER: What is the weather in Amsterdam right now?
Please incorporate all creative responses from all LLM providers.
After the other providers add a poem that synthesizes the poems from all the other providers.
> ASSISTANT:
OpenAI poem about the weather:
**Amsterdam's Winter Whisper**
*Temperature: 4.2°C*
In Amsterdam's embrace, where canals reflect the sky,
A gentle chill of 4.2 degrees drifts by...
Anthropic poem about the weather:
**Canal-Side Contemplation**
*Current conditions: 4.2°C*
Along the waterways where bicycles rest,
The winter air puts Amsterdam to test...
Weather Data:
{
"current": {
"time": "2025-01-23T11:00",
"interval": 900,
"temperature_2m": 4.2
}
}
与 Spring AI 的集成
示例展示了 MCP 工具如何与 Spring AI 的函数调用集成:
/**
 * REST endpoint showing how MCP tools are exposed to Spring AI's
 * function-calling mechanism through a SyncMcpToolCallbackProvider.
 *
 * NOTE(review): ChatRequest is defined elsewhere in the example.
 */
@RestController
@RequestMapping("/chat")
public class ChatController {

    private final ChatModel chatModel;
    private final SyncMcpToolCallbackProvider toolCallbackProvider;

    public ChatController(ChatModel chatModel,
            SyncMcpToolCallbackProvider toolCallbackProvider) {
        this.chatModel = chatModel;
        this.toolCallbackProvider = toolCallbackProvider;
    }

    /** Handles a chat request with all MCP tools available to the model. */
    @PostMapping
    public ChatResponse chat(@RequestBody ChatRequest request) {
        // Obtain the MCP tools as Spring AI function callbacks
        ToolCallback[] mcpTools = toolCallbackProvider.getToolCallbacks();
        // Build a prompt carrying the MCP tools as chat options.
        // NOTE(review): ChatOptionsBuilder.withTools(...) is version-dependent
        // Spring AI API — confirm it matches the Spring AI version in use.
        Prompt prompt = new Prompt(
            request.getMessage(),
            ChatOptionsBuilder.builder()
                .withTools(mcpTools)
                .build()
        );
        // Call the chat model; it may invoke the MCP tools while responding.
        return chatModel.call(prompt);
    }
}
/**
 * MCP tools for current weather and forecasts.
 *
 * Fix: the original snippet used {@code weatherService} without declaring it;
 * declare the field and constructor-inject it so the example compiles.
 */
@Component
public class WeatherTools {

    private final WeatherService weatherService;

    public WeatherTools(WeatherService weatherService) {
        this.weatherService = weatherService;
    }

    /** Returns the current weather for a city (metric units by default). */
    @McpTool(name = "get-weather", description = "获取当前天气")
    public WeatherInfo getWeather(
            @McpToolParam(description = "城市名称", required = true) String city,
            @McpToolParam(description = "单位(metric/imperial)", required = false) String units) {
        String unit = units != null ? units : "metric";
        // Delegate to the weather API client.
        return weatherService.getCurrentWeather(city, unit);
    }

    /** Returns a forecast for a city, defaulting to 3 days. */
    @McpTool(name = "get-forecast", description = "获取天气预报")
    public ForecastInfo getForecast(
            @McpToolParam(description = "城市名称", required = true) String city,
            @McpToolParam(description = "天数(1-7)", required = false) Integer days) {
        int forecastDays = days != null ? days : 3;
        return weatherService.getForecast(city, forecastDays);
    }
}