WORLDBOOK

Worldbooks | WebMCP | Search | Submit WebMCP

reddit WebMCP

Browser tool configuration for reddit

URL Pattern: ^https?://([a-z0-9-]+\.)?reddit\.com(/.*)?$
Allowed Extra Domains: reddit.com

Tools (8)

reddit_context()

Get a comment's ancestor chain (the full path from the root post to the target comment)

Parameters

url string required - Reddit comment URL (utm params are stripped automatically)

JavaScript Handler

(params) => {
  const run = async function(args) {

      // Fetch a comment's ancestor chain: the full path from the root post
      // down to the target comment, via Reddit's public JSON API.
      if (!args.url) return {error: 'Missing argument: url', hint: 'Provide a Reddit comment URL'};
      // Strip origin and query string, force exactly one trailing slash.
      const path = args.url.replace(/https?:\/\/[^/]*/, '').replace(/\?.*/, '').replace(/\/*$/, '/');

      // Extract comment_id from various URL formats:
      //   /r/sub/comments/POST_ID/comment/COMMENT_ID/    (new shreddit style)
      //   /r/sub/comments/POST_ID/slug/COMMENT_ID/       (old style)
      let comment_id = (path.match(/\/comment\/([^/]+)/) || [])[1];
      if (!comment_id) {
        const parts = path.match(/\/comments\/[^/]*\/[^/]*\/([^/]*)/);
        comment_id = parts ? parts[1] : null;
      }
      if (!comment_id) return {error: 'Cannot extract comment_id from URL', hint: 'Expected: .../comment/<id>/'};

      // Build API URL: /r/sub/comments/POST_ID/COMMENT_ID/.json (the only format that works since 2025)
      const postMatch = path.match(/(\/r\/[^/]+\/comments\/[^/]+\/)/);
      if (!postMatch) return {error: 'Cannot extract post path from URL', hint: 'Expected: /r/sub/comments/POST_ID/...'};
      const apiPath = postMatch[1] + comment_id + '/';

      const resp = await fetch(apiPath + '.json?context=10&raw_json=1', {credentials: 'include'});
      if (!resp.ok) return {error: 'HTTP ' + resp.status};
      const d = await resp.json();
      if (!d[0]?.data?.children?.[0]?.data) return {error: 'Unexpected response', hint: 'Post may be deleted or URL is incorrect'};
      const post = d[0].data.children[0].data;

      // Depth-first flatten of the nested reply tree; keeps only t1 (comment)
      // nodes, silently skipping "more" stubs and other kinds.
      function flatten(children, depth) {
        let result = [];
        for (const child of children) {
          if (child.kind !== 't1') continue;
          const c = child.data;
          result.push({id: c.name, parent_id: c.parent_id, author: c.author, score: c.score, body: c.body, depth});
          if (c.replies?.data?.children)
            result = result.concat(flatten(c.replies.data.children, depth + 1));
        }
        return result;
      }

      const comments = flatten(d[1]?.data?.children || [], 0);
      // Index comments by fullname once, so the ancestor walk below costs
      // O(chain length) instead of a repeated O(n) Array.find per hop.
      const byId = new Map(comments.map(c => [c.id, c]));
      const target = byId.get('t1_' + comment_id);
      if (!target) return {error: 'Comment t1_' + comment_id + ' not found', hint: 'Comment may be deleted or URL is incorrect'};

      // Walk parent_id links upward; the walk stops naturally when the parent
      // is the post itself (t3_...), which is not in the comment index.
      // unshift keeps the chain in root-first order.
      let chain = [], cur = target;
      while (cur) { chain.unshift(cur); cur = byId.get(cur.parent_id); }

      return {
        post: {id: post.name, title: post.title, author: post.author, url: 'https://www.reddit.com' + (post.permalink || '')},
        target_comment: target,
        ancestor_chain: chain
      };
  };
  return run(params || {});
}

reddit_hot()

Get hot posts from a subreddit (or the Reddit front page)

Parameters

subreddit string - Subreddit name without r/ prefix (omit for front page)
count string - Number of posts (default 25, max 100)
after string - Fullname of the last item for pagination (e.g. t3_abc123)

JavaScript Handler

(params) => {
  const run = async function(args) {

      // Fetch hot posts from a subreddit, or the Reddit front page when no
      // subreddit is given. Supports cursor-based pagination via "after".
      // Clamp the page size to Reddit's API maximum of 100 (default 25);
      // radix 10 guards against surprising string inputs.
      const count = Math.min(parseInt(args.count, 10) || 25, 100);
      const base = args.subreddit ? '/r/' + encodeURIComponent(args.subreddit) : '';
      let url = base + '/hot.json?limit=' + count + '&raw_json=1';
      // "after" is a listing fullname cursor (e.g. t3_abc123); encode it so
      // caller input cannot inject additional query parameters.
      if (args.after) url += '&after=' + encodeURIComponent(args.after);

      const resp = await fetch(url, {credentials: 'include'});
      if (!resp.ok) return {error: 'HTTP ' + resp.status, hint: resp.status === 404 ? 'Subreddit not found' : 'API error'};
      const d = await resp.json();
      if (!d.data?.children) return {error: 'Unexpected response', hint: 'Reddit may be rate-limiting or returning a login page'};

      // Project each listing child into a compact, tool-friendly record.
      const posts = d.data.children.map((c, i) => ({
        rank: i + 1,
        id: c.data.name,
        title: c.data.title,
        author: c.data.author,
        subreddit: c.data.subreddit_name_prefixed,
        score: c.data.score,
        upvote_ratio: c.data.upvote_ratio,
        num_comments: c.data.num_comments,
        created_utc: c.data.created_utc,
        url: c.data.url,
        permalink: 'https://www.reddit.com' + c.data.permalink,
        selftext_preview: (c.data.selftext || '').substring(0, 200),
        is_self: c.data.is_self,
        link_flair_text: c.data.link_flair_text || null
      }));

      return {
        subreddit: args.subreddit || 'front page',
        count: posts.length,
        after: d.data.after || null,   // cursor for the next page, if any
        posts
      };
  };
  return run(params || {});
}

reddit_search()

Search Reddit posts

Parameters

query string required - Search query
subreddit string - Limit search to a subreddit (without r/ prefix)
sort string - Sort order: relevance (default), hot, top, new, comments
time string - Time filter: all (default), hour, day, week, month, year
count string - Number of results (default 25, max 100)
after string - Fullname of the last item for pagination

JavaScript Handler

reddit_subreddit()

Get posts from a specific Subreddit

Parameters

name string required
sort string - Sorting method: hot, new, top, rising, controversial
time string - Time filter for top/controversial: hour, day, week, month, year, all
limit number

JavaScript Handler

// Generated WebMCP handler: fetches a subreddit listing via the JSON API.
(params) => {
  // Merge caller params over the generator's declared defaults.
  const args = Object.assign({"sort": "hot", "time": "all", "limit": 15}, params || {});
  let data = null;
  // Context carried between pipeline steps: last fetched URL + parsed document.
  let __wbContextUrl = globalThis.location?.href || '';
  let __wbContextDocument = globalThis.document || null;
    // Generator-emitted helper library (several helpers are unused by this tool).
    const __wbDefault = (value, fallback) => (value === undefined || value === null || value === '' ? fallback : value);
    const __wbJoin = (value, separator) => Array.isArray(value) ? value.join(separator) : (value ?? '');
    // Null-safe dotted-path getter, e.g. __wbGet(obj, 'a.b.c').
    const __wbGet = (value, path) => {
      if (!path) return value;
      return String(path).split('.').reduce((acc, part) => (acc == null ? undefined : acc[part]), value);
    };
    const __wbBaseUrl = () => (__wbContextUrl || globalThis.location?.href || '');
    // Resolve a possibly-relative URL against the current context URL;
    // falls back to the raw text if URL construction fails.
    const __wbResolve = (target, base) => {
      if (target == null) return '';
      const text = String(target);
      try {
        if (/^https?:/i.test(text)) return text;
        const resolvedBase = base || __wbBaseUrl();
        return resolvedBase ? new URL(text, resolvedBase).toString() : text;
      } catch (_error) {
        return text;
      }
    };
    // Fetch with cookies. Returns {url, text, data} where data is parsed JSON
    // when possible, otherwise the raw text. Throws on non-2xx responses.
    const __wbFetch = async (target, options) => {
      const url = __wbResolve(target, __wbBaseUrl());
      const resp = await globalThis.fetch(url, Object.assign({ credentials: 'include' }, options || {}));
      if (!resp.ok) {
        throw new Error(`HTTP ${resp.status} for ${url}`);
      }
      const text = await resp.text();
      try {
        return { url, text, data: JSON.parse(text) };
      } catch (_error) {
        return { url, text, data: text };
      }
    };
  return (async () => {
    {
      // Step 1: load the Reddit homepage to establish the base URL context.
      const __wbResp = await __wbFetch("https://www.reddit.com");
      __wbContextUrl = __wbResp.url;
      __wbContextDocument = new DOMParser().parseFromString(__wbResp.text, 'text/html');
      data = __wbContextDocument;
    }
    {
      // Step 2: run the tool script with document/location/window/fetch
      // shadowed so relative URLs resolve against the fetched context.
      const document = __wbContextDocument || globalThis.document;
      const location = new URL(__wbBaseUrl());
      const window = { document, location };
      const fetch = (target, options) => globalThis.fetch(__wbResolve(target, __wbBaseUrl()), Object.assign({ credentials: 'include' }, options || {}));
      data = await (((async () => {
  // Accept both "name" and "r/name" subreddit forms.
  let sub = ((args.name));
  if (sub.startsWith('r/')) sub = sub.slice(2);
  const sort = ((args.sort));
  const time = ((args.time));
  const limit = (args.limit);
  let url = '/r/' + sub + '/' + sort + '.json?limit=' + limit + '&raw_json=1';
  // The time filter only applies to top/controversial listings.
  if ((sort === 'top' || sort === 'controversial') && time) {
    url += '&t=' + time;
  }
  const res = await fetch(url, { credentials: 'include' });
  const j = await res.json();
  return j?.data?.children || [];
})())());
    }
    {
      // Step 3: project each listing child into the declared output columns.
      const source = Array.isArray(data) ? data : (data == null ? [] : [data]);
      data = source.map((item, index) => ({
        "title": item.data.title,
        "author": item.data.author,
        "upvotes": item.data.score,
        "comments": item.data.num_comments,
        "url": `https://www.reddit.com${item.data.permalink}`,
      }));
    }
    // Final truncation to the requested limit (defaults to 15 via args).
    if (Array.isArray(data)) data = data.slice(0, Number(args.limit) || 0);
    return data;
  })();
}

reddit_thread()

Get the full discussion tree of a Reddit post

Parameters

url string required - Reddit post URL

JavaScript Handler

(params) => {
  const run = async function(args) {

      // Fetch a Reddit post plus its full discussion tree, flattened into a
      // depth-annotated comment list.
      if (!args.url) return {error: 'Missing argument: url', hint: 'Provide a Reddit post URL'};
      let path = args.url.replace(/https?:\/\/[^/]*/, '').replace(/\?.*/, '').replace(/\/*$/, '/');
      // Normalize to /r/sub/comments/POST_ID/ — strip slug and any comment suffixes
      // Handles: .../comments/ID/slug/, .../comments/ID/comment/CID/, .../comments/ID/slug/CID/
      const m = path.match(/(\/r\/[^/]+\/comments\/[^/]+\/)/);
      if (m) path = m[1];
      // Optional tuning args (backward compatible): callers may override
      // "limit" (max comments, default 500) and "depth" (max nesting, default 10).
      const maxComments = parseInt(args.limit, 10) || 500;
      const maxDepth = parseInt(args.depth, 10) || 10;
      const resp = await fetch(path + '.json?limit=' + maxComments + '&depth=' + maxDepth + '&raw_json=1', {credentials: 'include'});
      if (!resp.ok) return {error: 'HTTP ' + resp.status};
      const d = await resp.json();
      if (!d[0]?.data?.children?.[0]?.data) return {error: 'Unexpected response', hint: 'Post may be deleted or URL is incorrect'};
      const post = d[0].data.children[0].data;

      // Depth-first flatten of the nested reply tree; keeps only t1 (comment)
      // nodes, silently skipping "more" stubs and other kinds.
      function flatten(children, depth) {
        let result = [];
        for (const child of children) {
          if (child.kind !== 't1') continue;
          const c = child.data;
          result.push({id: c.name, parent_id: c.parent_id, author: c.author, score: c.score, body: c.body, depth});
          if (c.replies?.data?.children)
            result = result.concat(flatten(c.replies.data.children, depth + 1));
        }
        return result;
      }

      const comments = flatten(d[1]?.data?.children || [], 0);
      return {
        post: {id: post.name, title: post.title, author: post.author, subreddit: post.subreddit_name_prefixed,
          score: post.score, num_comments: post.num_comments, selftext: post.selftext, url: post.url, created_utc: post.created_utc},
        comments_total: comments.length,
        comments
      };
  };
  return run(params || {});
}

reddit_user()

View a Reddit user profile

Parameters

username string required

JavaScript Handler

// Generated WebMCP handler: fetches a user's profile via /user/<name>/about.json.
(params) => {
  // No declared defaults for this tool; just copy caller params.
  const args = Object.assign({}, params || {});
  let data = null;
  // Context carried between pipeline steps: last fetched URL + parsed document.
  let __wbContextUrl = globalThis.location?.href || '';
  let __wbContextDocument = globalThis.document || null;
    // Generator-emitted helper library (several helpers are unused by this tool).
    const __wbDefault = (value, fallback) => (value === undefined || value === null || value === '' ? fallback : value);
    const __wbJoin = (value, separator) => Array.isArray(value) ? value.join(separator) : (value ?? '');
    // Null-safe dotted-path getter, e.g. __wbGet(obj, 'a.b.c').
    const __wbGet = (value, path) => {
      if (!path) return value;
      return String(path).split('.').reduce((acc, part) => (acc == null ? undefined : acc[part]), value);
    };
    const __wbBaseUrl = () => (__wbContextUrl || globalThis.location?.href || '');
    // Resolve a possibly-relative URL against the current context URL;
    // falls back to the raw text if URL construction fails.
    const __wbResolve = (target, base) => {
      if (target == null) return '';
      const text = String(target);
      try {
        if (/^https?:/i.test(text)) return text;
        const resolvedBase = base || __wbBaseUrl();
        return resolvedBase ? new URL(text, resolvedBase).toString() : text;
      } catch (_error) {
        return text;
      }
    };
    // Fetch with cookies. Returns {url, text, data} where data is parsed JSON
    // when possible, otherwise the raw text. Throws on non-2xx responses.
    const __wbFetch = async (target, options) => {
      const url = __wbResolve(target, __wbBaseUrl());
      const resp = await globalThis.fetch(url, Object.assign({ credentials: 'include' }, options || {}));
      if (!resp.ok) {
        throw new Error(`HTTP ${resp.status} for ${url}`);
      }
      const text = await resp.text();
      try {
        return { url, text, data: JSON.parse(text) };
      } catch (_error) {
        return { url, text, data: text };
      }
    };
  return (async () => {
    {
      // Step 1: load the Reddit homepage to establish the base URL context.
      const __wbResp = await __wbFetch("https://www.reddit.com");
      __wbContextUrl = __wbResp.url;
      __wbContextDocument = new DOMParser().parseFromString(__wbResp.text, 'text/html');
      data = __wbContextDocument;
    }
    {
      // Step 2: run the tool script with document/location/window/fetch
      // shadowed so relative URLs resolve against the fetched context.
      const document = __wbContextDocument || globalThis.document;
      const location = new URL(__wbBaseUrl());
      const window = { document, location };
      const fetch = (target, options) => globalThis.fetch(__wbResolve(target, __wbBaseUrl()), Object.assign({ credentials: 'include' }, options || {}));
      data = await (((async () => {
  // Accept both "name" and "u/name" username forms.
  const username = ((args.username));
  const name = username.startsWith('u/') ? username.slice(2) : username;
  const res = await fetch('/user/' + name + '/about.json?raw_json=1', {
    credentials: 'include'
  });
  const d = await res.json();
  const u = d?.data || d || {};
  // created_utc is epoch seconds; render as a YYYY-MM-DD date (or '-').
  const created = u.created_utc ? new Date(u.created_utc * 1000).toISOString().split('T')[0] : '-';
  // Returned as field/value rows for tabular display.
  return [
    { field: 'Username', value: 'u/' + (u.name || name) },
    { field: 'Post Karma', value: String(u.link_karma || 0) },
    { field: 'Comment Karma', value: String(u.comment_karma || 0) },
    { field: 'Total Karma', value: String(u.total_karma || (u.link_karma||0) + (u.comment_karma||0)) },
    { field: 'Account Created', value: created },
    { field: 'Gold', value: u.is_gold ? '⭐ Yes' : 'No' },
    { field: 'Verified', value: u.verified ? '✅ Yes' : 'No' },
  ];
})())());
    }
    {
      // Step 3: project each row into the declared output columns.
      const source = Array.isArray(data) ? data : (data == null ? [] : [data]);
      data = source.map((item, index) => ({
        "field": item.field,
        "value": item.value,
      }));
    }
    return data;
  })();
}

reddit_user_posts()

View a Reddit user's submitted posts

Parameters

username string required
limit number

JavaScript Handler

// Generated WebMCP handler: fetches a user's submitted posts via /user/<name>/submitted.json.
(params) => {
  // Merge caller params over the generator's declared defaults.
  const args = Object.assign({"limit": 15}, params || {});
  let data = null;
  // Context carried between pipeline steps: last fetched URL + parsed document.
  let __wbContextUrl = globalThis.location?.href || '';
  let __wbContextDocument = globalThis.document || null;
    // Generator-emitted helper library (several helpers are unused by this tool).
    const __wbDefault = (value, fallback) => (value === undefined || value === null || value === '' ? fallback : value);
    const __wbJoin = (value, separator) => Array.isArray(value) ? value.join(separator) : (value ?? '');
    // Null-safe dotted-path getter, e.g. __wbGet(obj, 'a.b.c').
    const __wbGet = (value, path) => {
      if (!path) return value;
      return String(path).split('.').reduce((acc, part) => (acc == null ? undefined : acc[part]), value);
    };
    const __wbBaseUrl = () => (__wbContextUrl || globalThis.location?.href || '');
    // Resolve a possibly-relative URL against the current context URL;
    // falls back to the raw text if URL construction fails.
    const __wbResolve = (target, base) => {
      if (target == null) return '';
      const text = String(target);
      try {
        if (/^https?:/i.test(text)) return text;
        const resolvedBase = base || __wbBaseUrl();
        return resolvedBase ? new URL(text, resolvedBase).toString() : text;
      } catch (_error) {
        return text;
      }
    };
    // Fetch with cookies. Returns {url, text, data} where data is parsed JSON
    // when possible, otherwise the raw text. Throws on non-2xx responses.
    const __wbFetch = async (target, options) => {
      const url = __wbResolve(target, __wbBaseUrl());
      const resp = await globalThis.fetch(url, Object.assign({ credentials: 'include' }, options || {}));
      if (!resp.ok) {
        throw new Error(`HTTP ${resp.status} for ${url}`);
      }
      const text = await resp.text();
      try {
        return { url, text, data: JSON.parse(text) };
      } catch (_error) {
        return { url, text, data: text };
      }
    };
  return (async () => {
    {
      // Step 1: load the Reddit homepage to establish the base URL context.
      const __wbResp = await __wbFetch("https://www.reddit.com");
      __wbContextUrl = __wbResp.url;
      __wbContextDocument = new DOMParser().parseFromString(__wbResp.text, 'text/html');
      data = __wbContextDocument;
    }
    {
      // Step 2: run the tool script with document/location/window/fetch
      // shadowed so relative URLs resolve against the fetched context.
      const document = __wbContextDocument || globalThis.document;
      const location = new URL(__wbBaseUrl());
      const window = { document, location };
      const fetch = (target, options) => globalThis.fetch(__wbResolve(target, __wbBaseUrl()), Object.assign({ credentials: 'include' }, options || {}));
      data = await (((async () => {
  // Accept both "name" and "u/name" username forms.
  const username = ((args.username));
  const name = username.startsWith('u/') ? username.slice(2) : username;
  const limit = (args.limit);
  const res = await fetch('/user/' + name + '/submitted.json?limit=' + limit + '&raw_json=1', {
    credentials: 'include'
  });
  const d = await res.json();
  // Project each submitted post into a compact record.
  return (d?.data?.children || []).map(c => ({
    title: c.data.title,
    subreddit: c.data.subreddit_name_prefixed,
    score: c.data.score,
    comments: c.data.num_comments,
    url: 'https://www.reddit.com' + c.data.permalink,
  }));
})())());
    }
    {
      // Step 3: project each row into the declared output columns.
      const source = Array.isArray(data) ? data : (data == null ? [] : [data]);
      data = source.map((item, index) => ({
        "title": item.title,
        "subreddit": item.subreddit,
        "score": item.score,
        "comments": item.comments,
        "url": item.url,
      }));
    }
    // Final truncation to the requested limit (defaults to 15 via args).
    if (Array.isArray(data)) data = data.slice(0, Number(args.limit) || 0);
    return data;
  })();
}

reddit_user_comments()

View a Reddit user's comment history

Parameters

username string required
limit number

JavaScript Handler

// Generated WebMCP handler: fetches a user's comment history via /user/<name>/comments.json.
(params) => {
  // Merge caller params over the generator's declared defaults.
  const args = Object.assign({"limit": 15}, params || {});
  let data = null;
  // Context carried between pipeline steps: last fetched URL + parsed document.
  let __wbContextUrl = globalThis.location?.href || '';
  let __wbContextDocument = globalThis.document || null;
    // Generator-emitted helper library (several helpers are unused by this tool).
    const __wbDefault = (value, fallback) => (value === undefined || value === null || value === '' ? fallback : value);
    const __wbJoin = (value, separator) => Array.isArray(value) ? value.join(separator) : (value ?? '');
    // Null-safe dotted-path getter, e.g. __wbGet(obj, 'a.b.c').
    const __wbGet = (value, path) => {
      if (!path) return value;
      return String(path).split('.').reduce((acc, part) => (acc == null ? undefined : acc[part]), value);
    };
    const __wbBaseUrl = () => (__wbContextUrl || globalThis.location?.href || '');
    // Resolve a possibly-relative URL against the current context URL;
    // falls back to the raw text if URL construction fails.
    const __wbResolve = (target, base) => {
      if (target == null) return '';
      const text = String(target);
      try {
        if (/^https?:/i.test(text)) return text;
        const resolvedBase = base || __wbBaseUrl();
        return resolvedBase ? new URL(text, resolvedBase).toString() : text;
      } catch (_error) {
        return text;
      }
    };
    // Fetch with cookies. Returns {url, text, data} where data is parsed JSON
    // when possible, otherwise the raw text. Throws on non-2xx responses.
    const __wbFetch = async (target, options) => {
      const url = __wbResolve(target, __wbBaseUrl());
      const resp = await globalThis.fetch(url, Object.assign({ credentials: 'include' }, options || {}));
      if (!resp.ok) {
        throw new Error(`HTTP ${resp.status} for ${url}`);
      }
      const text = await resp.text();
      try {
        return { url, text, data: JSON.parse(text) };
      } catch (_error) {
        return { url, text, data: text };
      }
    };
  return (async () => {
    {
      // Step 1: load the Reddit homepage to establish the base URL context.
      const __wbResp = await __wbFetch("https://www.reddit.com");
      __wbContextUrl = __wbResp.url;
      __wbContextDocument = new DOMParser().parseFromString(__wbResp.text, 'text/html');
      data = __wbContextDocument;
    }
    {
      // Step 2: run the tool script with document/location/window/fetch
      // shadowed so relative URLs resolve against the fetched context.
      const document = __wbContextDocument || globalThis.document;
      const location = new URL(__wbBaseUrl());
      const window = { document, location };
      const fetch = (target, options) => globalThis.fetch(__wbResolve(target, __wbBaseUrl()), Object.assign({ credentials: 'include' }, options || {}));
      data = await (((async () => {
  // Accept both "name" and "u/name" username forms.
  const username = ((args.username));
  const name = username.startsWith('u/') ? username.slice(2) : username;
  const limit = (args.limit);
  const res = await fetch('/user/' + name + '/comments.json?limit=' + limit + '&raw_json=1', {
    credentials: 'include'
  });
  const d = await res.json();
  return (d?.data?.children || []).map(c => {
    // Truncate long comment bodies to keep tool output compact.
    let body = c.data.body || '';
    if (body.length > 300) body = body.slice(0, 300) + '...';
    return {
      subreddit: c.data.subreddit_name_prefixed,
      score: c.data.score,
      body: body,
      url: 'https://www.reddit.com' + c.data.permalink,
    };
  });
})())());
    }
    {
      // Step 3: project each row into the declared output columns.
      const source = Array.isArray(data) ? data : (data == null ? [] : [data]);
      data = source.map((item, index) => ({
        "subreddit": item.subreddit,
        "score": item.score,
        "body": item.body,
        "url": item.url,
      }));
    }
    // Final truncation to the requested limit (defaults to 15 via args).
    if (Array.isArray(data)) data = data.slice(0, Number(args.limit) || 0);
    return data;
  })();
}

🔌 Chrome MCP Server Extension

Use these tools with Claude, ChatGPT, and other AI assistants.

Get Extension →

How to Use WebMCP

WebMCP tools are designed for browser extensions or automation frameworks. The browser extension matches the current URL against the pattern and executes the JavaScript handler when the tool is invoked.

API Endpoint:

GET /api/webmcp/match?url=https://www.reddit.com/...