Commit

Examples for executing tools over MCP with stdio and HTTP transports (#…
jmartisk authored Dec 22, 2024
1 parent c9379e7 commit 593af27
Showing 7 changed files with 207 additions and 2 deletions.
4 changes: 2 additions & 2 deletions .gitignore
@@ -4,7 +4,7 @@ target/
!**/src/test/**/target/

### IntelliJ IDEA ###
.idea/*
.idea/
.idea/modules.xml
.idea/jarRepositories.xml
.idea/compiler.xml
@@ -36,4 +36,4 @@ build/
.vscode/

### Mac OS ###
.DS_Store
.DS_Store
57 changes: 57 additions & 0 deletions mcp-example/pom.xml
@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>dev.langchain4j</groupId>
<artifactId>mcp-example</artifactId>
<version>0.36.1</version>

<properties>
<maven.compiler.source>17</maven.compiler.source>
<maven.compiler.target>17</maven.compiler.target>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
</properties>

<dependencies>

<dependency>
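            <!-- core langchain4j API (AiServices and tool support) -->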
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j</artifactId>
<version>0.37.0</version>
</dependency>

<dependency>
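            <!-- OpenAI implementation of the chat model used in both examples -->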
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
<version>0.37.0</version>
</dependency>

<dependency>
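            <!-- MCP client support: DefaultMcpClient, McpToolProvider and the stdio/HTTP transports -->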
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-mcp</artifactId>
<version>0.37.0</version>
</dependency>

<dependency>
<groupId>org.junit.jupiter</groupId>
<artifactId>junit-jupiter-engine</artifactId>
<version>5.10.0</version>
</dependency>

<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<version>3.25.3</version>
</dependency>

<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.5.12</version>
</dependency>

</dependencies>

</project>
9 changes: 9 additions & 0 deletions mcp-example/src/main/java/dev/langchain4j/example/mcp/Bot.java
@@ -0,0 +1,9 @@
package dev.langchain4j.example.mcp;

import dev.langchain4j.service.UserMessage;

public interface Bot {
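    // Implemented at runtime by AiServices: the prompt (annotated with @UserMessage)
    // is sent to the chat model, which may call the tools supplied by the configured ToolProvider.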

String chat(@UserMessage String prompt);

}
63 changes: 63 additions & 0 deletions mcp-example/src/main/java/dev/langchain4j/example/mcp/McpToolsExampleOverHttp.java
@@ -0,0 +1,63 @@
package dev.langchain4j.example.mcp;

import dev.langchain4j.mcp.McpToolProvider;
import dev.langchain4j.mcp.client.DefaultMcpClient;
import dev.langchain4j.mcp.client.McpClient;
import dev.langchain4j.mcp.client.transport.http.HttpMcpTransport;
import dev.langchain4j.mcp.client.transport.McpTransport;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.tool.ToolProvider;

import java.time.Duration;
import java.util.List;

public class McpToolsExampleOverHttp {

/**
* This example uses the `server-everything` MCP server that showcases some aspects of the MCP protocol.
* In particular, we use its 'add' tool that adds two numbers.
*
* Before running this example, you need to start the `everything` server in SSE mode on localhost:3001.
* Check out https://github.com/modelcontextprotocol/servers/tree/main/src/everything
* and run `npm install` and `node dist/sse.js`.
*
* Of course, feel free to swap out the server with any other MCP server.
*
* Run the example and check the logs to verify that the model used the tool.
*/
public static void main(String[] args) throws Exception {
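        // the chat model that will decide when to call the MCP-provided tool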
ChatLanguageModel model = OpenAiChatModel.builder()
.modelName("gpt-4o")
.apiKey(System.getenv("OPENAI_API_KEY"))
.logRequests(true)
.logResponses(true)
.build();
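        // HTTP transport that connects to the SSE endpoint of the locally running 'everything' server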
McpTransport transport = new HttpMcpTransport.Builder()
.sseUrl("http://localhost:3001/sse")
.timeout(Duration.ofSeconds(60))
.logRequests(true)
.logResponses(true)
.build();
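        // MCP client that talks to the server over the chosen transport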
McpClient mcpClient = new DefaultMcpClient.Builder()
.transport(transport)
.build();
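        // exposes the tools advertised by the MCP server(s) to the AI service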
ToolProvider toolProvider = McpToolProvider.builder()
.mcpClients(List.of(mcpClient))
.build();
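        // wire the chat model and the MCP-backed tool provider into the Bot interface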
Bot bot = AiServices.builder(Bot.class)
.chatLanguageModel(model)
.toolProvider(toolProvider)
.build();
try {
String response = bot.chat("What is 5+12? Use the provided tool to answer " +
"and always assume that the tool is correct.");
System.out.println(response);
} finally {
mcpClient.close();
}
}


}
74 changes: 74 additions & 0 deletions mcp-example/src/main/java/dev/langchain4j/example/mcp/McpToolsExampleOverStdio.java
@@ -0,0 +1,74 @@
package dev.langchain4j.example.mcp;

import dev.langchain4j.mcp.McpToolProvider;
import dev.langchain4j.mcp.client.DefaultMcpClient;
import dev.langchain4j.mcp.client.McpClient;
import dev.langchain4j.mcp.client.transport.McpTransport;
import dev.langchain4j.mcp.client.transport.stdio.StdioMcpTransport;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.tool.ToolProvider;

import java.io.File;
import java.util.List;

public class McpToolsExampleOverStdio {

// We will let the AI read the contents of this file
public static final String FILE_TO_BE_READ = "src/main/resources/file.txt";

/**
* This example uses the `server-filesystem` MCP server to showcase how
* to allow an LLM to interact with the local filesystem.
*
* Running this example requires npm to be installed on your machine,
* because it spawns the `server-filesystem` as a subprocess via npm:
* `npm exec @modelcontextprotocol/server-filesystem`.
*
* Of course, feel free to swap out the server with any other MCP server.
*
* The communication with the server is done directly via stdin/stdout.
*
* IMPORTANT: when executing this, make sure that the working directory is
* the root directory of the project
* (`langchain4j-examples/mcp-example`); otherwise the program won't be able to find
* the proper file to read. If you're working from another directory,
* adjust the path inside the StdioMcpTransport.Builder() usage in the main method.
*/
public static void main(String[] args) throws Exception {
ChatLanguageModel model = OpenAiChatModel.builder()
.modelName("gpt-4o")
.apiKey(System.getenv("OPENAI_API_KEY"))
// .logRequests(true)
// .logResponses(true)
.build();
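        // stdio transport: spawns the filesystem MCP server as an npm subprocess
        // and communicates with it over stdin/stdout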
McpTransport transport = new StdioMcpTransport.Builder()
.command(List.of("/usr/bin/npm", "exec",
"@modelcontextprotocol/server-filesystem",
// allowed directory for the server to interact with
new File("src/main/resources").getAbsolutePath()
))
.logEvents(true)
.build();
McpClient mcpClient = new DefaultMcpClient.Builder()
.transport(transport)
.build();
ToolProvider toolProvider = McpToolProvider.builder()
.mcpClients(List.of(mcpClient))
.build();
Bot bot = AiServices.builder(Bot.class)
.chatLanguageModel(model)
.toolProvider(toolProvider)
.build();
try {
File file = new File(FILE_TO_BE_READ);
String response = bot.chat("Read the contents of the file " + file.getAbsolutePath());
System.out.println("RESPONSE: " + response);
} finally {
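            // closing the client also shuts down the spawned server subprocess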
mcpClient.close();
}
}


}
1 change: 1 addition & 0 deletions mcp-example/src/main/resources/file.txt
@@ -0,0 +1 @@
Kaboom!
1 change: 1 addition & 0 deletions pom.xml
@@ -22,6 +22,7 @@
<module>infinispan-example</module>
<module>jakartaee-microprofile-example</module>
<module>jlama-examples</module>
<module>mcp-example</module>
<module>milvus-example</module>
<module>mistral-ai-examples</module>
<module>neo4j-example</module>
