5 changes: 5 additions & 0 deletions docs.json
@@ -38,6 +38,10 @@
},
"navbar": {
"links": [
{
"label": "Connect to W&B's MCP Server",
"href": "https://docs.wandb.ai/platform/mcp-server"
},
{
"label": "Log in",
"href": "https://app.wandb.ai/login?_gl=1*8ninq0*_ga*MTE3ODEwNDkyLjE3MzQwMjk0NjA.*_ga_JH1SJHJQXJ*MTczNDA2Mzc4OS4yLjEuMTczNDA2MzkxMC42MC4wLjA.*_ga_GMYDGNGKDT*MTczNDA2Mzc4OS4zLjEuMTczNDA2Mzg2Ny4wLjAuMA..*_gcl_au*MTc0Mjk3ODgzMi4xNzM0MDI5NDYw"
@@ -61,6 +65,7 @@
"pages": [
"index",
"get-started",
"platform/mcp-server",
{
"group": "Deployment options",
"pages": [
37 changes: 37 additions & 0 deletions list_projects.py
@@ -0,0 +1,37 @@
#!/usr/bin/env python3
"""List projects in your W&B account."""

try:
    from wandb.apis.public.api import Api

    # Initialize the API
    api = Api()

    # Get projects for the default entity
    print("Fetching projects from your W&B account...\n")

    projects = api.projects()

    project_list = list(projects)

    if not project_list:
        print("No projects found in your account.")
    else:
        print(f"Found {len(project_list)} project(s):\n")
        for i, project in enumerate(project_list, 1):
            print(f"{i}. {project.name}")
            print(f" Entity: {project.entity}")
            print(f" URL: {project.url}")
            if hasattr(project, 'created_at') and project.created_at:
                print(f" Created: {project.created_at}")
            print()

except ImportError:
    print("Error: wandb package is not installed.")
    print("Install it with: pip install wandb")
except Exception as e:
    print(f"Error: {e}")
    print("\nMake sure you're logged in to W&B:")
    print(" wandb login")
    print("\nOr set your API key:")
    print(" export WANDB_API_KEY=your_api_key")
77 changes: 77 additions & 0 deletions list_projects_mcp.py
@@ -0,0 +1,77 @@
#!/usr/bin/env python3
"""List projects using W&B API directly (fallback if MCP server isn't connected)."""

import os
import json
import urllib.request
import urllib.error

api_key = os.environ.get('WANDB_API_KEY')
if not api_key:
    print("Error: WANDB_API_KEY environment variable is not set.")
    exit(1)

# Use W&B Public API to list projects
headers = {
    'Authorization': f'Bearer {api_key}',
    'Content-Type': 'application/json'
}

def make_request(url):
    """Make an HTTP GET request with headers."""
    req = urllib.request.Request(url, headers=headers)
    with urllib.request.urlopen(req) as response:
        return json.loads(response.read().decode())

# Get the default entity (user) first
print("Fetching your W&B account information...\n")
try:
    user_data = make_request('https://api.wandb.ai/api/v1/user')
    entity = user_data.get('username') or user_data.get('entity', {}).get('username')

    if not entity:
        print("Could not determine entity. Trying to list projects...")
        entity = None
    else:
        print(f"Entity: {entity}\n")

    # List projects
    print("Fetching projects...\n")
    if entity:
        projects_url = f'https://api.wandb.ai/api/v1/projects?entity={entity}'
    else:
        projects_url = 'https://api.wandb.ai/api/v1/projects'

    projects_data = make_request(projects_url)
    projects = projects_data.get('projects', [])

    if not projects:
        print("No projects found in your account.")
    else:
        print(f"Found {len(projects)} project(s):\n")
        for i, project in enumerate(projects, 1):
            project_name = project.get('name', 'Unknown')
            project_entity = project.get('entity', {}).get('name', entity or 'Unknown')
            project_id = project.get('id', 'Unknown')

            print(f"{i}. {project_name}")
            print(f" Entity: {project_entity}")
            print(f" Project ID: {project_id}")
            if project.get('createdAt'):
                print(f" Created: {project['createdAt']}")
            print()

except urllib.error.HTTPError as e:
    print(f"Error connecting to W&B API: HTTP {e.code}")
    if e.code == 401:
        print("Authentication failed. Check your WANDB_API_KEY.")
    else:
        print(f"Response: {e.read().decode()}")
except urllib.error.URLError as e:
    print(f"Error connecting to W&B API: {e}")
    print("\nMake sure:")
    print("1. Your WANDB_API_KEY is valid")
    print("2. You have internet connectivity")
    print("3. The W&B API is accessible")
except Exception as e:
    print(f"Unexpected error: {e}")
203 changes: 203 additions & 0 deletions platform/mcp-server.mdx
@@ -0,0 +1,203 @@
---
title: Access W&B docs programmatically
description: Connect your IDE or LLM application to W&B's Model Context Protocol (MCP) server to give your agent more context about W&B products when handling requests.
---

import McpConfig from '/snippets/en/_includes/mcp-config.mdx';

The W&B MCP (Model Context Protocol) server lets you query and analyze your W&B data from your IDE or MCP client. It also gives your client programmatic access to W&B's documentation, adding context and accuracy to responses to W&B-related queries. You can use it to analyze experiments, debug traces, create reports, and get help integrating W&B features into your applications.

## Prerequisites

* Get your W&B API key from [wandb.ai/authorize](https://wandb.ai/authorize).
* Set your key as an environment variable named `WANDB_API_KEY`.
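
Before configuring a client, you can confirm that the key is set and valid. The following is a minimal sketch, assuming the `wandb` Python package is installed (`pip install wandb`); it only checks authentication and is not required by the MCP server itself.

```python
import os

import wandb

# Confirm the key is visible to processes launched from this environment.
assert os.environ.get("WANDB_API_KEY"), (
    "WANDB_API_KEY is not set; get a key from https://wandb.ai/authorize"
)

# wandb.Api() reads WANDB_API_KEY from the environment and authenticates with W&B.
api = wandb.Api()
print(f"Authenticated as: {api.default_entity}")
```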

## Configure your MCP client to use W&B's hosted MCP server

W&B provides a hosted MCP server at `https://mcp.withwandb.com` that requires no installation. The following instructions show how to configure the hosted server with various AI assistants and IDEs.

<Tabs>
<Tab title="Cursor">
1. On macOS, open the **Cursor** menu, select **Settings**, and then select **Cursor Settings**. On Windows or Linux, open the **Preferences** menu, select **Settings**, and then select **Cursor Settings**.
2. From the Cursor Settings menu, select **Tools and MCP**. This opens the Tools menu.
3. In the Installed MCP Servers section, select **Add Custom MCP**. This opens the `mcp.json` configuration file.
4. In the configuration file, add the following `wandb` object to the `mcpServers` JSON object:

<McpConfig/>

5. Restart Cursor for the changes to take effect.
6. Verify that the chat agent has access to the W&B MCP server by asking it to "List the projects in my W&B account."

For more detailed information, see [Cursor's documentation](https://cursor.com/docs/context/mcp).

</Tab>
<Tab title="Claude Desktop">

1. Locate the Claude Desktop configuration file:
* **macOS**: `~/Library/Application Support/Claude/claude_desktop_config.json`
* **Windows**: `%APPDATA%\Claude\claude_desktop_config.json`

2. Open the file in a text editor and add the following JSON:
<McpConfig/>

3. Save changes to the file and then restart Claude Desktop.
4. Verify that the chat agent has access to the W&B MCP server by asking it to "List the projects in my W&B account."

For more detailed instructions, see [Claude Desktop's documentation](https://support.claude.com/en/articles/11175166-getting-started-with-custom-connectors-using-remote-mcp).
</Tab>
<Tab title="VS Code">
1. Open the Command Palette by pressing **Ctrl+Shift+P** (Windows/Linux) or **Cmd+Shift+P** (macOS).
2. Type **"MCP: Open User Configuration"** and select it. This opens the MCP configuration file.
3. Add the following configuration to the file:
<McpConfig/>
4. Save changes to the file and then restart VS Code.
5. Verify that the chat agent has access to the W&B MCP server by asking it to "List the projects in my W&B account."

For detailed instructions, see [VS Code's documentation](https://code.visualstudio.com/docs/copilot/customization/mcp-servers).

</Tab>
<Tab title="OpenAI">
Add the MCP server to the `tools` field of your OpenAI Responses API request, like this:

```python
from openai import OpenAI
import os

client = OpenAI()

resp = client.responses.create(
    model="gpt-4o",
    tools=[{
        "type": "mcp",
        "server_label": "wandb",
        "server_description": "Query W&B data",
        "server_url": "https://mcp.withwandb.com/mcp",
        "authorization": os.getenv("WANDB_API_KEY"),
        "require_approval": "never",
    }],
    input="List the projects in my W&B account.",
)

print(resp.output_text)
```

<Note>
OpenAI uses a server-side MCP integration, which does not work with localhost URLs. We recommend using the hosted server URL.
</Note>

</Tab>
<Tab title="Gemini CLI">
1. Install the W&B MCP extension with a single command:

```bash
# Install the extension
gemini extensions install https://github.com/wandb/wandb-mcp-server
```
2. Once installed, restart the Gemini CLI.
3. Verify that the chat agent has access to the W&B MCP server by asking it to "List the projects in my W&B account."

For more detailed information, see [Gemini's documentation](https://geminicli.com/docs/tools/mcp-server/).
</Tab>
<Tab title="Mistral LeChat">
1. Open the **Intelligence** menu, then select **Add Connector**. This opens the Connector window.
2. Select the **Custom MCP Connector** tab, and then configure the fields using the following values:

- **Connector Server**: `https://mcp.withwandb.com/mcp`
- **Description**: (Optional) A brief description of the connection.
- **Authentication Method**: Select **API Token Authentication**. This opens additional fields.
- **Header name**: Leave the default value, **Authorization**.
- **Header type**: Select **Bearer**.
- **Header value**: Enter your W&B API key.

3. Once you have configured all of the fields, select **Create**. LeChat adds the MCP server to your configuration.
4. Verify that the chat agent has access to the W&B MCP server by asking it to "List the projects in my W&B account."

For more detailed information, see [LeChat's documentation](https://mistral.ai/news/le-chat-mcp-connectors-memories).
</Tab>
</Tabs>

## Set up a local version of the W&B MCP server

If you need to run the MCP server locally for development, testing, or air-gapped environments, you can install and run it on your machine.

### Prerequisites

- Python 3.10 or higher
- [uv](https://docs.astral.sh/uv/) (recommended) or pip

To install uv, run the following cURL command:

```bash
curl -LsSf https://astral.sh/uv/install.sh | sh
```

### Install the W&B MCP server on your local machine

To install the W&B MCP server on your local machine:

1. Run one of the following commands:

<Tabs>
<Tab title="uv">
```bash
uv tool install wandb-mcp-server
```
</Tab>
<Tab title="pip">
```bash
pip install wandb-mcp-server
```
</Tab>
<Tab title="Install directly from GitHub">
```bash
pip install git+https://github.com/wandb/wandb-mcp-server
```
</Tab>
</Tabs>

2. Once you have successfully installed the server, add the following JSON object to your MCP client configuration:

```json
{
  "mcpServers": {
    "wandb": {
      "command": "uvx",
      "args": [
        "--from",
        "git+https://github.com/wandb/wandb-mcp-server",
        "wandb_mcp_server"
      ],
      "env": {
        "WANDB_API_KEY": "YOUR_API_KEY"
      }
    }
  }
}
```

This configuration tells your MCP client, such as Cursor, to run the W&B MCP server locally instead of using the server hosted by W&B at `https://mcp.withwandb.com/mcp`.

3. For web-based clients or testing, run the server with HTTP transport:

```bash
uvx wandb_mcp_server --transport http --host 0.0.0.0 --port 8080
```
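
To confirm the local server is listening, you can send it a request, for example with a short Python check. This is a sketch: the port and the `/mcp` path are assumptions based on the command above, and any HTTP response (even an error status) indicates the process is up.

```python
import urllib.error
import urllib.request

try:
    # The MCP HTTP transport may reject a plain GET, but receiving any HTTP
    # response still confirms the server is reachable on this host and port.
    urllib.request.urlopen("http://localhost:8080/mcp", timeout=5)
    print("Server responded.")
except urllib.error.HTTPError as e:
    print(f"Server is listening (HTTP {e.code}).")
except urllib.error.URLError as e:
    print(f"Could not reach the server: {e.reason}")
```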

To expose the local server to external clients like OpenAI, use ngrok:

```bash
# Start the HTTP server
uvx wandb_mcp_server --transport http --port 8080

# In another terminal, expose with ngrok
ngrok http 8080
```

If you expose the server using `ngrok`, update your MCP client configuration to use the `ngrok` URL.
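
For example, you could point the OpenAI configuration shown earlier at the tunnel instead of the hosted server. This is a sketch: the URL below is a placeholder for the forwarding URL that ngrok prints, and the `/mcp` path is assumed to match the hosted server.

```python
import os

from openai import OpenAI

client = OpenAI()

resp = client.responses.create(
    model="gpt-4o",
    tools=[{
        "type": "mcp",
        "server_label": "wandb-local",
        "server_description": "Query W&B data via a locally run MCP server",
        "server_url": "https://YOUR-NGROK-SUBDOMAIN.ngrok-free.app/mcp",  # placeholder URL
        "authorization": os.getenv("WANDB_API_KEY"),
        "require_approval": "never",
    }],
    input="List the projects in my W&B account.",
)

print(resp.output_text)
```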


## Usage tips

- **Provide your W&B entity and project names**: Include the entity and project you want to query so the assistant retrieves the right data.
- **Avoid overly broad questions**: Instead of "what is my best evaluation?", ask "what eval had the highest f1 score?"
- **Verify data retrieval**: When asking broad questions like "what are my best performing runs?", ask the assistant to confirm it retrieved all available runs.
32 changes: 32 additions & 0 deletions snippets/button-links.css
Expand Up @@ -75,3 +75,35 @@
background-color: rgba(39, 39, 42, 0.8);
border-color: rgba(63, 63, 70, 1);
}

/* Style MCP Server navbar link as a button */
nav a[href="https://docs.wandb.ai/platform/mcp-server"] {
  display: inline-flex;
  align-items: center;
  justify-content: center;
  padding: 8px 16px;
  border: 1px solid rgba(255, 182, 51, 0.3);
  border-radius: 6px;
  font-weight: 500;
  background-color: rgba(255, 182, 51, 0.1);
  color: rgb(255, 182, 51);
  text-decoration: none;
  transition: all 0.2s ease;
}

nav a[href="https://docs.wandb.ai/platform/mcp-server"]:hover {
  background-color: rgba(255, 182, 51, 0.2);
  border-color: rgba(255, 182, 51, 0.5);
  text-decoration: none;
}

.dark nav a[href="https://docs.wandb.ai/platform/mcp-server"] {
  border-color: rgba(255, 182, 51, 0.4);
  background-color: rgba(255, 182, 51, 0.15);
  color: rgb(255, 182, 51);
}

.dark nav a[href="https://docs.wandb.ai/platform/mcp-server"]:hover {
  background-color: rgba(255, 182, 51, 0.25);
  border-color: rgba(255, 182, 51, 0.6);
}
17 changes: 17 additions & 0 deletions snippets/en/_includes/mcp-config.mdx
@@ -0,0 +1,17 @@
```json
{
  "mcpServers": {
    "wandb": {
      "command": "uvx",
      "args": [
        "--from",
        "git+https://github.com/wandb/wandb-mcp-server",
        "wandb_mcp_server"
      ],
      "env": {
        "WANDB_API_KEY": "${WANDB_API_KEY}"
      }
    }
  }
}
```