mirror of
https://github.com/anthropics/claude-plugins-official.git
synced 2026-04-17 21:22:42 +00:00
Compare commits
45 Commits
add-pydant
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
bb7730114d | ||
|
|
3df5394ee9 | ||
|
|
12401af104 | ||
|
|
167f01f2e0 | ||
|
|
637c6b3b6a | ||
|
|
811c9b5394 | ||
|
|
b00abee24e | ||
|
|
5c5c5f9896 | ||
|
|
8518bfc43d | ||
|
|
b992a65037 | ||
|
|
de39da5ba2 | ||
|
|
cb8c857a5e | ||
|
|
48aa435178 | ||
|
|
7e401edac7 | ||
|
|
7f3389d21f | ||
|
|
560b7e0d38 | ||
|
|
903a6aba48 | ||
|
|
dcd86cd6f9 | ||
|
|
985075c567 | ||
|
|
39353b5b42 | ||
|
|
507462e2fb | ||
|
|
d6fa70eb1a | ||
|
|
8145923edc | ||
|
|
2b666914e6 | ||
|
|
e8fb9898a6 | ||
|
|
c28404f818 | ||
|
|
fb48c3af93 | ||
|
|
622ef85323 | ||
|
|
173bd29be3 | ||
|
|
0de7a91403 | ||
|
|
c5b7657350 | ||
|
|
3ffb4b4ca8 | ||
|
|
656b617198 | ||
|
|
7ed523140f | ||
|
|
9fc974ef8b | ||
|
|
9a6b30ebb4 | ||
|
|
d4e6f609d8 | ||
|
|
95f807ee6c | ||
|
|
23a9a10ff7 | ||
|
|
5c6c90c1bd | ||
|
|
76f1e09f07 | ||
|
|
d19dab67e8 | ||
|
|
58578a456a | ||
|
|
1057d02c53 | ||
|
|
9dc3809e74 |
@@ -7,6 +7,16 @@
|
||||
"email": "support@anthropic.com"
|
||||
},
|
||||
"plugins": [
|
||||
{
|
||||
"name": "adlc",
|
||||
"description": "Agentforce Agent Development Life Cycle — author, discover, scaffold, deploy, test, and optimize .agent files",
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/SalesforceAIResearch/agentforce-adlc.git"
|
||||
},
|
||||
"homepage": "https://github.com/SalesforceAIResearch/agentforce-adlc"
|
||||
},
|
||||
{
|
||||
"name": "adspirer-ads-agent",
|
||||
"description": "Cross-platform ad management for Google Ads, Meta Ads, TikTok Ads, and LinkedIn Ads. 91 tools for keyword research, campaign creation, performance analysis, and budget optimization.",
|
||||
@@ -34,7 +44,7 @@
|
||||
"description": "AI-first project auditor and re-engineer based on the 9 design principles and 7 design patterns from the TechWolf AI-First Bootcamp",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "techwolf-ai/ai-first-toolkit",
|
||||
"url": "https://github.com/techwolf-ai/ai-first-toolkit.git",
|
||||
"path": "plugins/ai-firstify",
|
||||
"ref": "main",
|
||||
"sha": "7f18e11d694b9ae62ea3009fbbc175f08ae913df"
|
||||
@@ -73,6 +83,17 @@
|
||||
},
|
||||
"homepage": "https://github.com/awslabs/agent-plugins"
|
||||
},
|
||||
{
|
||||
"name": "amplitude",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/amplitude/mcp-marketplace.git",
|
||||
"sha": "be54ccb66b10593721dd3a31e47b2db20ea02d2f"
|
||||
},
|
||||
"description": "Use Amplitude as an expert analyst — instrument Amplitude, discover product opportunities, analyze charts, create dashboards, manage experiments, and understand users and accounts.",
|
||||
"category": "monitoring",
|
||||
"homepage": "https://github.com/amplitude/mcp-marketplace"
|
||||
},
|
||||
{
|
||||
"name": "asana",
|
||||
"description": "Asana project management integration. Create and manage tasks, search projects, update assignments, track progress, and integrate your development workflow with Asana's work management platform.",
|
||||
@@ -135,6 +156,18 @@
|
||||
"source": "./external_plugins/autofix-bot",
|
||||
"homepage": "https://github.com/anthropics/claude-plugins-public/tree/main/external_plugins/autofix-bot"
|
||||
},
|
||||
{
|
||||
"name": "aws-amplify",
|
||||
"description": "Build full-stack apps with AWS Amplify Gen 2 using guided workflows for authentication, data models, storage, GraphQL APIs, and Lambda functions.",
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/awslabs/agent-plugins.git",
|
||||
"path": "plugins/aws-amplify",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://github.com/awslabs/agent-plugins"
|
||||
},
|
||||
{
|
||||
"name": "aws-serverless",
|
||||
"description": "Design, build, deploy, test, and debug serverless applications with AWS Serverless services.",
|
||||
@@ -147,6 +180,53 @@
|
||||
},
|
||||
"homepage": "https://github.com/awslabs/agent-plugins"
|
||||
},
|
||||
{
|
||||
"name": "azure-cosmos-db-assistant",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/AzureCosmosDB/cosmosdb-claude-code-plugin.git",
|
||||
"sha": "56e6da0cae93cdee8bcfa5e624ecdd9a0a483181"
|
||||
},
|
||||
"description": "Expert assistant for Azure Cosmos DB — data modeling, query optimization, performance tuning, and best practices.",
|
||||
"category": "database",
|
||||
"homepage": "https://github.com/AzureCosmosDB/cosmosdb-claude-code-plugin"
|
||||
},
|
||||
{
|
||||
"name": "azure-skills",
|
||||
"description": "Microsoft Azure MCP integration for cloud resource management, deployments, and Azure services. Manage your Azure infrastructure, monitor applications, and deploy resources directly from Claude Code.",
|
||||
"category": "deployment",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/microsoft/azure-skills.git"
|
||||
},
|
||||
"homepage": "https://github.com/microsoft/azure-skills"
|
||||
},
|
||||
{
|
||||
"name": "base44",
|
||||
"description": "Build and deploy Base44 full-stack apps with CLI project management and JavaScript/TypeScript SDK development skills",
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/base44/skills.git",
|
||||
"sha": "c7039b37eca0e2916a565a7395040c00055bcf8b"
|
||||
},
|
||||
"homepage": "https://docs.base44.com"
|
||||
},
|
||||
{
|
||||
"name": "bigdata-com",
|
||||
"description": "Official Bigdata.com plugin providing financial research, analytics, and intelligence tools powered by Bigdata MCP.",
|
||||
"author": {
|
||||
"name": "RavenPack"
|
||||
},
|
||||
"category": "database",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/Bigdata-com/bigdata-plugins-marketplace.git",
|
||||
"path": "plugins/bigdata-com",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://docs.bigdata.com"
|
||||
},
|
||||
{
|
||||
"name": "box",
|
||||
"description": "Work with your Box content directly from Claude Code — search files, organize folders, collaborate with your team, and use Box AI to answer questions, summarize documents, and extract data without leaving your workflow.",
|
||||
@@ -253,6 +333,17 @@
|
||||
"category": "productivity",
|
||||
"homepage": "https://github.com/anthropics/claude-plugins-official/tree/main/plugins/claude-md-management"
|
||||
},
|
||||
{
|
||||
"name": "cloudflare",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/cloudflare/skills.git",
|
||||
"sha": "5ec03da67e230df52b698255c8e5979dc9b124b6"
|
||||
},
|
||||
"description": "Skills for the Cloudflare developer platform: Workers, Durable Objects, Agents SDK, MCP servers, Wrangler CLI, and web performance.",
|
||||
"category": "deployment",
|
||||
"homepage": "https://github.com/cloudflare/skills"
|
||||
},
|
||||
{
|
||||
"name": "cloudinary",
|
||||
"description": "Use Cloudinary directly in Claude. Manage assets, apply transformations, optimize media, and more through natural conversation.",
|
||||
@@ -367,6 +458,30 @@
|
||||
},
|
||||
"homepage": "https://github.com/astronomer/agents"
|
||||
},
|
||||
{
|
||||
"name": "databases-on-aws",
|
||||
"description": "Expert database guidance for the AWS database portfolio. Design schemas, execute queries, handle migrations, and choose the right database for your workload.",
|
||||
"category": "database",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/awslabs/agent-plugins.git",
|
||||
"path": "plugins/databases-on-aws",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://github.com/awslabs/agent-plugins"
|
||||
},
|
||||
{
|
||||
"name": "dataverse",
|
||||
"description": "Agent skills for building on, analyzing, and managing Microsoft Dataverse — with Dataverse MCP, PAC CLI, and Python SDK.",
|
||||
"category": "database",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/microsoft/Dataverse-skills.git",
|
||||
"path": ".github/plugins/dataverse",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://github.com/microsoft/Dataverse-skills"
|
||||
},
|
||||
{
|
||||
"name": "deploy-on-aws",
|
||||
"description": "Deploy applications to AWS with architecture recommendations, cost estimates, and IaC deployment.",
|
||||
@@ -412,7 +527,7 @@
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "expo/skills",
|
||||
"url": "https://github.com/expo/skills.git",
|
||||
"path": "plugins/expo",
|
||||
"ref": "main"
|
||||
},
|
||||
@@ -579,7 +694,7 @@
|
||||
"description": "Build on Solana with Helius — live blockchain tools, expert coding patterns, and autonomous account signup",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "helius-labs/core-ai",
|
||||
"url": "https://github.com/helius-labs/core-ai.git",
|
||||
"path": "helius-plugin",
|
||||
"ref": "main",
|
||||
"sha": "05ea4d1128d46618266bbcc23a5e7019c57be0d6"
|
||||
@@ -694,7 +809,7 @@
|
||||
"category": "productivity",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "legalzoom/claude-plugins",
|
||||
"url": "https://github.com/legalzoom/claude-plugins.git",
|
||||
"path": "plugins/legalzoom",
|
||||
"ref": "main",
|
||||
"sha": "f9fd8a0ca6e1421bc1aacb113a109663a7a6f6d8"
|
||||
@@ -760,18 +875,6 @@
|
||||
},
|
||||
"homepage": "https://github.com/microsoftdocs/mcp"
|
||||
},
|
||||
{
|
||||
"name": "migration-to-aws",
|
||||
"description": "Assess current cloud provider usage and billing to estimate and compare AWS services and pricing, with recommendations for migration or continued use of current provider.",
|
||||
"category": "migration",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/awslabs/agent-plugins.git",
|
||||
"path": "plugins/migration-to-aws",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://github.com/awslabs/agent-plugins"
|
||||
},
|
||||
{
|
||||
"name": "mintlify",
|
||||
"description": "Build beautiful documentation sites with Mintlify. Convert non-markdown files into properly formatted MDX pages, add and modify content with correct component use, and automate documentation updates.",
|
||||
@@ -783,6 +886,21 @@
|
||||
},
|
||||
"homepage": "https://www.mintlify.com/"
|
||||
},
|
||||
{
|
||||
"name": "miro",
|
||||
"description": "Secure access to Miro boards. Enables AI to read board context, create diagrams, and generate code with enterprise-grade security.",
|
||||
"author": {
|
||||
"name": "Miro"
|
||||
},
|
||||
"category": "design",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/miroapp/miro-ai.git",
|
||||
"path": "claude-plugins/miro",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://miro.com"
|
||||
},
|
||||
{
|
||||
"name": "mongodb",
|
||||
"description": "Official Claude plugin for MongoDB (MCP Server + Skills). Connect to databases, explore data, manage collections, optimize queries, generate reliable code, implement best practices, develop advanced features, and more.",
|
||||
@@ -800,7 +918,7 @@
|
||||
"category": "database",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "neondatabase/agent-skills",
|
||||
"url": "https://github.com/neondatabase/agent-skills.git",
|
||||
"path": "plugins/neon-postgres",
|
||||
"ref": "main",
|
||||
"sha": "54d7a9db2ddd476f84d5d1fd7bac323907858a8b"
|
||||
@@ -817,6 +935,28 @@
|
||||
},
|
||||
"homepage": "https://github.com/netlify/context-and-tools"
|
||||
},
|
||||
{
|
||||
"name": "netsuite-suitecloud",
|
||||
"description": "NetSuite agent skills from Oracle — authoring guidance for SuiteCloud Development Framework (SDF) objects and UIF single-page-app components, plus runtime guidance for the NetSuite AI Service Connector.",
|
||||
"author": {
|
||||
"name": "Oracle NetSuite"
|
||||
},
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/oracle/netsuite-suitecloud-sdk.git",
|
||||
"path": "packages/agent-skills",
|
||||
"ref": "master",
|
||||
"sha": "43bacf43763e1eedd0892b4652be3d45df94f0e7"
|
||||
},
|
||||
"strict": false,
|
||||
"skills": [
|
||||
"./netsuite-ai-connector-instructions",
|
||||
"./netsuite-sdf-roles-and-permissions",
|
||||
"./netsuite-uif-spa-reference"
|
||||
],
|
||||
"homepage": "https://github.com/oracle/netsuite-suitecloud-sdk"
|
||||
},
|
||||
{
|
||||
"name": "nightvision",
|
||||
"description": "Skills for working with NightVision, a DAST and API Discovery platform that finds exploitable vulnerabilities in web applications and REST APIs",
|
||||
@@ -832,8 +972,7 @@
|
||||
"description": "Nimble web data toolkit — search, extract, map, crawl the web and work with structured data agents",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/Nimbleway/agent-skills.git",
|
||||
"sha": "cf391e95bd8ac009e3641f172434a1d130dde7fe"
|
||||
"url": "https://github.com/Nimbleway/agent-skills.git"
|
||||
},
|
||||
"homepage": "https://docs.nimbleway.com/integrations/agent-skills/plugin-installation"
|
||||
},
|
||||
@@ -1013,6 +1152,18 @@
|
||||
},
|
||||
"homepage": "https://www.accoil.com/product-tracking"
|
||||
},
|
||||
{
|
||||
"name": "pydantic-ai",
|
||||
"description": "Write accurate Pydantic AI code from the start. Up-to-date patterns, decision trees, and common gotchas for agents, tools, structured output, streaming, and multi-agent apps.",
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/pydantic/skills.git",
|
||||
"path": "plugins/ai",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://github.com/pydantic/skills/tree/main/plugins/ai"
|
||||
},
|
||||
{
|
||||
"name": "pyright-lsp",
|
||||
"description": "Python language server (Pyright) for type checking and code intelligence",
|
||||
@@ -1053,7 +1204,7 @@
|
||||
"category": "deployment",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "railwayapp/railway-skills",
|
||||
"url": "https://github.com/railwayapp/railway-skills.git",
|
||||
"path": "plugins/railway",
|
||||
"ref": "main",
|
||||
"sha": "d52f3741a6a33a3191d6138eb3d6c3355cb970d1"
|
||||
@@ -1148,7 +1299,19 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "sanity-plugin",
|
||||
"name": "sagemaker-ai",
|
||||
"description": "Build, train, and deploy AI models with deep AWS AI/ML expertise brought directly into your coding assistants, covering the surface area of Amazon SageMaker AI.",
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "https://github.com/awslabs/agent-plugins.git",
|
||||
"path": "plugins/sagemaker-ai",
|
||||
"ref": "main"
|
||||
},
|
||||
"homepage": "https://github.com/awslabs/agent-plugins"
|
||||
},
|
||||
{
|
||||
"name": "sanity",
|
||||
"description": "Sanity content platform integration with MCP server, agent skills, and slash commands. Query and author content, build and optimize GROQ queries, design schemas, and set up Visual Editing.",
|
||||
"category": "development",
|
||||
"author": {
|
||||
@@ -1224,6 +1387,32 @@
|
||||
"category": "productivity",
|
||||
"homepage": "https://github.com/anthropics/claude-plugins-official/tree/main/plugins/session-report"
|
||||
},
|
||||
{
|
||||
"name": "shopify",
|
||||
"description": "Shopify developer tools for Claude Code — search Shopify docs, generate and validate GraphQL, Liquid, and UI extension code",
|
||||
"author": {
|
||||
"name": "Shopify"
|
||||
},
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/Shopify/shopify-plugins.git"
|
||||
},
|
||||
"homepage": "https://shopify.dev/docs/apps/build/devmcp"
|
||||
},
|
||||
{
|
||||
"name": "shopify-ai-toolkit",
|
||||
"description": "Shopify's AI Toolkit provides 18 development skills for building on the Shopify platform, covering documentation search, API schema access, GraphQL and Liquid code validation, Hydrogen storefronts, Polaris UI extensions, store management via CLI, and onboarding guidance for both developers and merchants.",
|
||||
"author": {
|
||||
"name": "Shopify"
|
||||
},
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/Shopify/Shopify-AI-Toolkit.git"
|
||||
},
|
||||
"homepage": "https://shopify.dev"
|
||||
},
|
||||
{
|
||||
"name": "skill-creator",
|
||||
"description": "Create new skills, improve existing skills, and measure skill performance. Use when users want to create a skill from scratch, update or optimize an existing skill, run evals to test a skill, or benchmark skill performance with variance analysis.",
|
||||
@@ -1246,15 +1435,17 @@
|
||||
"homepage": "https://github.com/slackapi/slack-mcp-plugin/tree/main"
|
||||
},
|
||||
{
|
||||
"name": "sonarqube-agent-plugins",
|
||||
"description": "Integrate SonarQube code quality and security analysis into Claude Code: namespaced slash commands, a guided skill to setup the SonarQube CLI, and a startup check for CLI wiring. MCP server registration and secrets-scanning hooks are installed by the SonarQube CLI as part of setup.",
|
||||
"name": "sonarqube",
|
||||
"description": "Automatically enforce SonarQube code quality and security in the agent coding loop — 7,000+ rules, secrets scanning, agentic analysis, and quality gates across 40+ languages. PostToolUse hooks run analysis after every file edit. Pre-tool secrets scanning prevents 450+ patterns from reaching the LLM. Slash commands give on-demand access to quality gate status, coverage, duplication, and dependency risks. Includes SonarQube CLI, MCP Server, skills, hooks, and slash commands.",
|
||||
"author": {
|
||||
"name": "SonarSource"
|
||||
},
|
||||
"category": "security",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/SonarSource/sonarqube-agent-plugins.git",
|
||||
"sha": "0cae644cee9318e6245b62ca779abdc60e6daa49"
|
||||
"url": "https://github.com/SonarSource/sonarqube-agent-plugins.git"
|
||||
},
|
||||
"homepage": "https://github.com/SonarSource/sonarqube-agent-plugins"
|
||||
"homepage": "https://www.sonarsource.com"
|
||||
},
|
||||
{
|
||||
"name": "sonatype-guide",
|
||||
@@ -1277,6 +1468,17 @@
|
||||
},
|
||||
"homepage": "https://sourcegraph.com"
|
||||
},
|
||||
{
|
||||
"name": "spotify-ads-api",
|
||||
"description": "Manage Spotify ad campaigns with natural language. Create campaigns, ad sets, ads, pull reports, and handle OAuth — all through conversation.",
|
||||
"category": "productivity",
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/spotify/ads-claude-plugin.git",
|
||||
"sha": "a4bce9912db071d47dfb410086a48004e0539efa"
|
||||
},
|
||||
"homepage": "https://github.com/spotify/ads-claude-plugin"
|
||||
},
|
||||
{
|
||||
"name": "stagehand",
|
||||
"description": "Browser automation skill for Claude Code using Stagehand. Automate web interactions, extract data, and navigate websites using natural language.",
|
||||
@@ -1307,7 +1509,7 @@
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "stripe/ai",
|
||||
"url": "https://github.com/stripe/ai.git",
|
||||
"path": "providers/claude/plugin",
|
||||
"ref": "main"
|
||||
},
|
||||
@@ -1328,8 +1530,11 @@
|
||||
"name": "supabase",
|
||||
"description": "Supabase MCP integration for database operations, authentication, storage, and real-time subscriptions. Manage your Supabase projects, run SQL queries, and interact with your backend directly.",
|
||||
"category": "database",
|
||||
"source": "./external_plugins/supabase",
|
||||
"homepage": "https://github.com/anthropics/claude-plugins-public/tree/main/external_plugins/supabase"
|
||||
"source": {
|
||||
"source": "url",
|
||||
"url": "https://github.com/supabase-community/supabase-plugin.git"
|
||||
},
|
||||
"homepage": "https://github.com/supabase-community/supabase-plugin"
|
||||
},
|
||||
{
|
||||
"name": "superpowers",
|
||||
@@ -1414,7 +1619,7 @@
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "UI5/plugins-claude",
|
||||
"url": "https://github.com/UI5/plugins-claude.git",
|
||||
"path": "plugins/ui5",
|
||||
"ref": "main",
|
||||
"sha": "5070dfc1cef711d6efad40beb43750027039d71f"
|
||||
@@ -1427,7 +1632,7 @@
|
||||
"category": "development",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "UI5/plugins-claude",
|
||||
"url": "https://github.com/UI5/plugins-claude.git",
|
||||
"path": "plugins/ui5-typescript-conversion",
|
||||
"ref": "main",
|
||||
"sha": "5070dfc1cef711d6efad40beb43750027039d71f"
|
||||
@@ -1481,7 +1686,7 @@
|
||||
"category": "productivity",
|
||||
"source": {
|
||||
"source": "git-subdir",
|
||||
"url": "zapier/zapier-mcp",
|
||||
"url": "https://github.com/zapier/zapier-mcp.git",
|
||||
"path": "plugins/zapier",
|
||||
"ref": "main",
|
||||
"sha": "b93007e9a726c6ee93c57a949e732744ef5acbfd"
|
||||
|
||||
229
.github/scripts/discover_bumps.py
vendored
Normal file
229
.github/scripts/discover_bumps.py
vendored
Normal file
@@ -0,0 +1,229 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Discover plugins in marketplace.json whose upstream repo has moved past
|
||||
their pinned SHA, update the file in place, and emit a summary.
|
||||
|
||||
Adapted from claude-plugins-community-internal's discover_bumps.py for the
|
||||
single-file marketplace.json format used by claude-plugins-official.
|
||||
|
||||
Usage: discover_bumps.py [--plugin NAME] [--max N] [--dry-run]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from typing import Any
|
||||
|
||||
|
||||
MARKETPLACE_PATH = ".claude-plugin/marketplace.json"
|
||||
|
||||
|
||||
def gh_api(path: str) -> Any:
    """GET `path` from the GitHub API via the `gh` CLI.

    Returns the decoded JSON payload, or None when the resource does not
    exist. "Does not exist" deliberately covers both a plain 404 and the
    422 "No commit found for SHA" response (history rewritten upstream),
    so callers can treat every dead ref uniformly. Any other failure
    raises RuntimeError.
    """
    proc = subprocess.run(["gh", "api", path], capture_output=True, text=True)
    if proc.returncode == 0:
        return json.loads(proc.stdout)
    # gh mixes error detail between the two streams; search both.
    output = proc.stdout + proc.stderr
    not_found_markers = ("404", "Not Found", "No commit found")
    if any(marker in output for marker in not_found_markers):
        return None
    detail = proc.stderr.strip() or proc.stdout.strip()
    raise RuntimeError(f"gh api {path}: {detail}")
|
||||
|
||||
|
||||
def parse_github_repo(url: str) -> tuple[str, str] | None:
|
||||
"""Extract (owner, repo) from a URL or owner/repo shorthand."""
|
||||
# Full URL: https://github.com/owner/repo(.git)(/...)
|
||||
m = re.match(r"https?://github\.com/([^/]+)/([^/]+?)(?:\.git)?(?:/|$)", url)
|
||||
if m:
|
||||
return m.group(1), m.group(2)
|
||||
# Shorthand: owner/repo
|
||||
m = re.match(r"^([\w.-]+)/([\w.-]+)$", url)
|
||||
if m:
|
||||
return m.group(1), m.group(2)
|
||||
return None
|
||||
|
||||
|
||||
def latest_sha(owner: str, repo: str, *, ref: str | None, path: str | None) -> str | None:
    """Return the newest commit SHA, optionally narrowed to a ref and/or subdir.

    Returns None when the repo, ref, or path cannot be resolved upstream.
    """
    if path:
        # Subdirectory scope: list commits touching `path`, newest first.
        query = f"repos/{owner}/{repo}/commits?per_page=1&path={path}"
        if ref:
            query = f"{query}&sha={ref}"
        listing = gh_api(query)
        return listing[0]["sha"] if listing else None
    # Whole-repo scope: the single-ref endpoint is the cheaper call.
    if not ref:
        repo_meta = gh_api(f"repos/{owner}/{repo}")
        if not repo_meta:
            return None
        ref = repo_meta["default_branch"]
    head = gh_api(f"repos/{owner}/{repo}/commits/{ref}")
    return head["sha"] if head else None
|
||||
|
||||
|
||||
def pinned_age_days(owner: str, repo: str, sha: str) -> int | None:
    """Age in whole days of the pinned commit (by committer date).

    Drives the oldest-first rotation in main(); returns None when the
    commit is no longer reachable upstream.
    """
    commit = gh_api(f"repos/{owner}/{repo}/commits/{sha}")
    if not commit:
        return None
    # GitHub emits ISO-8601 with a trailing "Z"; swap in an explicit
    # +00:00 offset so datetime.fromisoformat accepts it everywhere.
    stamp = commit["commit"]["committer"]["date"].replace("Z", "+00:00")
    committed_at = datetime.fromisoformat(stamp)
    return (datetime.now(timezone.utc) - committed_at).days
|
||||
|
||||
|
||||
def main() -> int:
    """Scan marketplace.json for SHA-pinned plugins whose upstream GitHub
    repo has moved past the pinned SHA, rewrite stale SHAs in place
    (unless --dry-run), and report via GitHub Actions outputs, the step
    summary, a PR-body file, and stderr.

    Always returns 0: per-plugin lookup failures are surfaced as
    ::warning annotations rather than failing the whole run.
    """
    ap = argparse.ArgumentParser()
    ap.add_argument("--plugin", help="only check this plugin")
    ap.add_argument("--max", type=int, default=20, help="cap bumps emitted")
    ap.add_argument("--dry-run", action="store_true", help="don't write marketplace.json")
    args = ap.parse_args()

    with open(MARKETPLACE_PATH) as f:
        marketplace = json.load(f)

    plugins = marketplace.get("plugins", [])
    bumps: list[dict] = []       # stale entries discovered this run
    dead: list[str] = []         # entries whose upstream ref is gone
    skipped_non_github = 0
    checked = 0

    for plugin in plugins:
        name = plugin.get("name", "?")
        src = plugin.get("source")

        # Only process object sources with a sha field
        if not isinstance(src, dict) or "sha" not in src:
            continue

        # Filter to specific plugin if requested
        if args.plugin and name != args.plugin:
            continue

        checked += 1
        kind = src.get("source")
        url = src.get("url", "")
        path = src.get("path")
        ref = src.get("ref")
        pinned = src.get("sha")

        slug = parse_github_repo(url)
        if not slug:
            skipped_non_github += 1
            continue
        owner, repo = slug

        try:
            latest = latest_sha(owner, repo, ref=ref, path=path)
        except RuntimeError as e:
            # Lookup failure for one plugin shouldn't abort the sweep.
            print(f"::warning::{name}: {e}", file=sys.stderr)
            continue

        if latest is None:
            dead.append(f"{name} ({owner}/{repo})")
            continue

        if latest == pinned:
            continue  # up to date

        # Age lookup for rotation — oldest-pinned first prevents starvation.
        try:
            age = pinned_age_days(owner, repo, pinned) if pinned else None
        except RuntimeError as e:
            print(f"::warning::{name}: age lookup failed: {e}", file=sys.stderr)
            age = None

        bumps.append({
            "name": name,
            "kind": kind,
            "url": url,
            "path": path or "",
            "ref": ref or "",
            "old_sha": pinned or "",
            "new_sha": latest,
            # Unknown age sorts as "very old" so it is never starved.
            "age_days": age if age is not None else 10**6,
        })

    # Oldest-pinned first so nothing starves under the cap.
    bumps.sort(key=lambda b: -b["age_days"])
    emitted = bumps[: args.max]

    # Apply bumps to marketplace data
    if emitted and not args.dry_run:
        bump_map = {b["name"]: b["new_sha"] for b in emitted}
        for plugin in plugins:
            name = plugin.get("name")
            src = plugin.get("source")
            if isinstance(src, dict) and name in bump_map:
                src["sha"] = bump_map[name]

        with open(MARKETPLACE_PATH, "w") as f:
            json.dump(marketplace, f, indent=2, ensure_ascii=False)
            f.write("\n")

    # Write GitHub outputs (consumed by later workflow steps)
    out = os.environ.get("GITHUB_OUTPUT")
    if out:
        bumped_names = ",".join(b["name"] for b in emitted)
        with open(out, "a") as fh:
            fh.write(f"count={len(emitted)}\n")
            fh.write(f"bumped_names={bumped_names}\n")

    # Write GitHub step summary
    summary = os.environ.get("GITHUB_STEP_SUMMARY")
    if summary:
        with open(summary, "a") as fh:
            fh.write("## SHA Bump Discovery\n\n")
            fh.write(f"- Checked: {checked} SHA-pinned entries\n")
            fh.write(f"- Stale: {len(bumps)} (applying {len(emitted)}, cap {args.max})\n")
            if skipped_non_github:
                fh.write(f"- Skipped non-GitHub: {skipped_non_github}\n")
            if dead:
                fh.write(f"- **Dead upstream** ({len(dead)}): {', '.join(dead)}\n")
            if emitted:
                fh.write("\n| Plugin | Old | New | Age |\n|---|---|---|---|\n")
                for b in emitted:
                    old = b["old_sha"][:8] if b["old_sha"] else "(unpinned)"
                    fh.write(f"| {b['name']} | `{old}` | `{b['new_sha'][:8]}` | {b['age_days']}d |\n")

    # Write PR body for the workflow to use
    pr_body_path = os.environ.get("PR_BODY_PATH", "/tmp/bump-pr-body.md")
    if emitted:
        with open(pr_body_path, "w") as fh:
            fh.write("Upstream repos moved. Bumping pinned SHAs so plugins track latest.\n\n")
            fh.write("| Plugin | Old | New | Upstream |\n")
            fh.write("|--------|-----|-----|----------|\n")
            for b in emitted:
                old = b["old_sha"][:8] if b["old_sha"] else "(unpinned)"
                # Reduce the URL to an owner/repo slug for the compare link.
                slug_str = re.sub(r"https?://github\.com/", "", b["url"])
                slug_str = re.sub(r"\.git$", "", slug_str)
                compare = f"https://github.com/{slug_str}/compare/{b['old_sha'][:12]}...{b['new_sha'][:12]}"
                fh.write(f"| `{b['name']}` | `{old}` | `{b['new_sha'][:8]}` | [diff]({compare}) |\n")
            fh.write(f"\n---\n_Auto-generated by `bump-plugin-shas.yml` on {datetime.now(timezone.utc).strftime('%Y-%m-%d')}_\n")

    # Console summary
    print(f"Checked {checked} SHA-pinned plugins", file=sys.stderr)
    print(f"Stale: {len(bumps)}, applying: {len(emitted)}", file=sys.stderr)
    if dead:
        print(f"Dead upstream: {', '.join(dead)}", file=sys.stderr)
    for b in emitted:
        old = b["old_sha"][:8] if b["old_sha"] else "unpinned"
        print(f"  {b['name']}: {old} -> {b['new_sha'][:8]} ({b['age_days']}d)", file=sys.stderr)

    return 0
|
||||
|
||||
|
||||
# Allow use both as a script and as an importable module.
if __name__ == "__main__":
    sys.exit(main())
|
||||
133
.github/workflows/bump-plugin-shas.yml
vendored
Normal file
133
.github/workflows/bump-plugin-shas.yml
vendored
Normal file
@@ -0,0 +1,133 @@
|
||||
name: Bump plugin SHAs
|
||||
|
||||
# Weekly sweep of marketplace.json — for each entry whose upstream repo has
|
||||
# moved past its pinned SHA, open a PR against main with updated SHAs. The
|
||||
# validate-marketplace workflow then runs on the PR to confirm the file is
|
||||
# still well-formed.
|
||||
#
|
||||
# Adapted from claude-plugins-community-internal's bump-plugin-shas.yml
|
||||
# for the single-file marketplace.json format. Key difference: all bumps
|
||||
# are batched into one PR (since they all modify the same file).
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '23 7 * * 1' # Monday 07:23 UTC
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
plugin:
|
||||
description: Only bump this plugin (for testing)
|
||||
required: false
|
||||
max_bumps:
|
||||
description: Cap on plugins bumped this run
|
||||
required: false
|
||||
default: '20'
|
||||
dry_run:
|
||||
description: Discover only, don't open PR
|
||||
type: boolean
|
||||
default: true
|
||||
|
||||
concurrency:
|
||||
group: bump-plugin-shas
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
bump:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Check for existing bump PR
|
||||
id: existing
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: |
|
||||
existing=$(gh pr list --label sha-bump --state open --json number --jq 'length')
|
||||
echo "count=$existing" >> "$GITHUB_OUTPUT"
|
||||
if [ "$existing" -gt 0 ]; then
|
||||
echo "::notice::Open sha-bump PR already exists — skipping"
|
||||
fi
|
||||
|
||||
- name: Ensure sha-bump label exists
|
||||
if: steps.existing.outputs.count == '0'
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
run: gh label create sha-bump --color 0e8a16 --description "Automated SHA bump" 2>/dev/null || true
|
||||
|
||||
- name: Overlay marketplace data from main
|
||||
if: steps.existing.outputs.count == '0'
|
||||
run: |
|
||||
git fetch origin main --depth=1 --quiet
|
||||
git checkout origin/main -- .claude-plugin/marketplace.json
|
||||
|
||||
- name: Discover and apply SHA bumps
|
||||
if: steps.existing.outputs.count == '0'
|
||||
id: discover
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
PR_BODY_PATH: /tmp/bump-pr-body.md
|
||||
PLUGIN: ${{ inputs.plugin }}
|
||||
MAX_BUMPS: ${{ inputs.max_bumps }}
|
||||
DRY_RUN: ${{ inputs.dry_run }}
|
||||
run: |
|
||||
args=(--max "${MAX_BUMPS:-20}")
|
||||
[[ -n "$PLUGIN" ]] && args+=(--plugin "$PLUGIN")
|
||||
[[ "$DRY_RUN" = "true" ]] && args+=(--dry-run)
|
||||
python3 .github/scripts/discover_bumps.py "${args[@]}"
|
||||
|
||||
- uses: oven-sh/setup-bun@v2
|
||||
if: steps.existing.outputs.count == '0' && steps.discover.outputs.count != '0' && inputs.dry_run != true
|
||||
|
||||
- name: Validate marketplace.json
|
||||
if: steps.existing.outputs.count == '0' && steps.discover.outputs.count != '0' && inputs.dry_run != true
|
||||
run: |
|
||||
bun .github/scripts/validate-marketplace.ts .claude-plugin/marketplace.json
|
||||
bun .github/scripts/check-marketplace-sorted.ts
|
||||
|
||||
- name: Push bump branch
|
||||
if: steps.existing.outputs.count == '0' && steps.discover.outputs.count != '0' && inputs.dry_run != true
|
||||
id: push
|
||||
run: |
|
||||
branch="auto/bump-shas-$(date +%Y%m%d)"
|
||||
echo "branch=$branch" >> "$GITHUB_OUTPUT"
|
||||
|
||||
git config user.name "github-actions[bot]"
|
||||
git config user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
||||
git checkout -b "$branch"
|
||||
git add .claude-plugin/marketplace.json
|
||||
git commit -m "Bump SHA pins for ${{ steps.discover.outputs.count }} plugin(s)
|
||||
|
||||
Plugins: ${{ steps.discover.outputs.bumped_names }}"
|
||||
git push -u origin "$branch" --force-with-lease
|
||||
|
||||
# GITHUB_TOKEN cannot create PRs (org policy: "Allow GitHub Actions to
|
||||
# create and approve pull requests" is disabled). Use the same GitHub App
|
||||
# that -internal's bump workflow uses.
|
||||
#
|
||||
# Prerequisite: app 2812036 must be installed on this repo. The PEM
|
||||
# secret must exist in this repo's settings (shared with -internal).
|
||||
- name: Generate bot token
|
||||
if: steps.push.outcome == 'success'
|
||||
id: app-token
|
||||
uses: actions/create-github-app-token@v1
|
||||
with:
|
||||
app-id: 2812036
|
||||
private-key: ${{ secrets.CLAUDE_DIRECTORY_BOT_PRIVATE_KEY }}
|
||||
owner: ${{ github.repository_owner }}
|
||||
repositories: ${{ github.event.repository.name }}
|
||||
|
||||
- name: Create pull request
|
||||
if: steps.push.outcome == 'success'
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.app-token.outputs.token }}
|
||||
run: |
|
||||
gh pr create \
|
||||
--base main \
|
||||
--head "${{ steps.push.outputs.branch }}" \
|
||||
--title "Bump SHA pins (${{ steps.discover.outputs.count }} plugins)" \
|
||||
--body-file /tmp/bump-pr-body.md \
|
||||
--label sha-bump
|
||||
@@ -222,6 +222,8 @@ type GateResult =
|
||||
const recentSentIds = new Set<string>()
|
||||
const RECENT_SENT_CAP = 200
|
||||
|
||||
const dmChannelUsers = new Map<string, string>()
|
||||
|
||||
function noteSent(id: string): void {
|
||||
recentSentIds.add(id)
|
||||
if (recentSentIds.size > RECENT_SENT_CAP) {
|
||||
@@ -404,7 +406,8 @@ async function fetchAllowedChannel(id: string) {
|
||||
const ch = await fetchTextChannel(id)
|
||||
const access = loadAccess()
|
||||
if (ch.type === ChannelType.DM) {
|
||||
if (access.allowFrom.includes(ch.recipientId)) return ch
|
||||
const userId = ch.recipientId ?? dmChannelUsers.get(id)
|
||||
if (userId && access.allowFrom.includes(userId)) return ch
|
||||
} else {
|
||||
const key = ch.isThread() ? ch.parentId ?? ch.id : ch.id
|
||||
if (key in access.groups) return ch
|
||||
@@ -823,6 +826,10 @@ async function handleInbound(msg: Message): Promise<void> {
|
||||
|
||||
const chat_id = msg.channelId
|
||||
|
||||
if (msg.channel.type === ChannelType.DM) {
|
||||
dmChannelUsers.set(chat_id, msg.author.id)
|
||||
}
|
||||
|
||||
// Permission-reply intercept: if this looks like "yes xxxxx" for a
|
||||
// pending permission request, emit the structured event instead of
|
||||
// relaying as chat. The sender is already gate()-approved at this point
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"name": "slack",
|
||||
"description": "Slack workspace integration. Search messages, access channels, read threads, and stay connected with your team's communications while coding. Find relevant discussions and context quickly.",
|
||||
"author": {
|
||||
"name": "Slack"
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"slack": {
|
||||
"type": "http",
|
||||
"url": "https://mcp.slack.com/mcp",
|
||||
"oauth": {
|
||||
"clientId": "1601185624273.8899143856786",
|
||||
"callbackPort": 3118
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"name": "supabase",
|
||||
"description": "Supabase MCP integration for database operations, authentication, storage, and real-time subscriptions. Manage your Supabase projects, run SQL queries, and interact with your backend directly.",
|
||||
"author": {
|
||||
"name": "Supabase"
|
||||
}
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"supabase": {
|
||||
"type": "http",
|
||||
"url": "https://mcp.supabase.com/mcp"
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "telegram",
|
||||
"description": "Telegram channel for Claude Code \u2014 messaging bridge with built-in access control. Manage pairing, allowlists, and policy via /telegram:access.",
|
||||
"version": "0.0.4",
|
||||
"version": "0.0.6",
|
||||
"keywords": [
|
||||
"telegram",
|
||||
"messaging",
|
||||
|
||||
@@ -51,6 +51,22 @@ if (!TOKEN) {
|
||||
process.exit(1)
|
||||
}
|
||||
const INBOX_DIR = join(STATE_DIR, 'inbox')
|
||||
const PID_FILE = join(STATE_DIR, 'bot.pid')
|
||||
|
||||
// Telegram allows exactly one getUpdates consumer per token. If a previous
|
||||
// session crashed (SIGKILL, terminal closed) its server.ts grandchild can
|
||||
// survive as an orphan and hold the slot forever, so every new session sees
|
||||
// 409 Conflict. Kill any stale holder before we start polling.
|
||||
mkdirSync(STATE_DIR, { recursive: true, mode: 0o700 })
|
||||
try {
|
||||
const stale = parseInt(readFileSync(PID_FILE, 'utf8'), 10)
|
||||
if (stale > 1 && stale !== process.pid) {
|
||||
process.kill(stale, 0)
|
||||
process.stderr.write(`telegram channel: replacing stale poller pid=${stale}\n`)
|
||||
process.kill(stale, 'SIGTERM')
|
||||
}
|
||||
} catch {}
|
||||
writeFileSync(PID_FILE, String(process.pid))
|
||||
|
||||
// Last-resort safety net — without these the process dies silently on any
|
||||
// unhandled promise rejection. With them it logs and keeps serving tools.
|
||||
@@ -621,6 +637,9 @@ function shutdown(): void {
|
||||
if (shuttingDown) return
|
||||
shuttingDown = true
|
||||
process.stderr.write('telegram channel: shutting down\n')
|
||||
try {
|
||||
if (parseInt(readFileSync(PID_FILE, 'utf8'), 10) === process.pid) rmSync(PID_FILE)
|
||||
} catch {}
|
||||
// bot.stop() signals the poll loop to end; the current getUpdates request
|
||||
// may take up to its long-poll timeout to return. Force-exit after 2s.
|
||||
setTimeout(() => process.exit(0), 2000)
|
||||
@@ -630,6 +649,19 @@ process.stdin.on('end', shutdown)
|
||||
process.stdin.on('close', shutdown)
|
||||
process.on('SIGTERM', shutdown)
|
||||
process.on('SIGINT', shutdown)
|
||||
process.on('SIGHUP', shutdown)
|
||||
|
||||
// Orphan watchdog: stdin events above don't reliably fire when the parent
|
||||
// chain (`bun run` wrapper → shell → us) is severed by a crash. Poll for
|
||||
// reparenting (POSIX) or a dead stdin pipe and self-terminate.
|
||||
const bootPpid = process.ppid
|
||||
setInterval(() => {
|
||||
const orphaned =
|
||||
(process.platform !== 'win32' && process.ppid !== bootPpid) ||
|
||||
process.stdin.destroyed ||
|
||||
process.stdin.readableEnded
|
||||
if (orphaned) shutdown()
|
||||
}, 5000).unref()
|
||||
|
||||
// Commands are DM-only. Responding in groups would: (1) leak pairing codes via
|
||||
// /status to other group members, (2) confirm bot presence in non-allowlisted
|
||||
@@ -953,14 +985,17 @@ bot.catch(err => {
|
||||
process.stderr.write(`telegram channel: handler error (polling continues): ${err.error}\n`)
|
||||
})
|
||||
|
||||
// 409 Conflict = another getUpdates consumer is still active (zombie from a
|
||||
// previous session, or a second Claude Code instance). Retry with backoff
|
||||
// until the slot frees up instead of crashing on the first rejection.
|
||||
// Retry polling with backoff on any error. Previously only 409 was retried —
|
||||
// a single ETIMEDOUT/ECONNRESET/DNS failure rejected bot.start(), the catch
|
||||
// returned, and polling stopped permanently while the process stayed alive
|
||||
// (MCP stdin keeps it running). Outbound tools kept working but the bot was
|
||||
// deaf to inbound messages until a full restart.
|
||||
void (async () => {
|
||||
for (let attempt = 1; ; attempt++) {
|
||||
try {
|
||||
await bot.start({
|
||||
onStart: info => {
|
||||
attempt = 0
|
||||
botUsername = info.username
|
||||
process.stderr.write(`telegram channel: polling as @${info.username}\n`)
|
||||
void bot.api.setMyCommands(
|
||||
@@ -975,21 +1010,23 @@ void (async () => {
|
||||
})
|
||||
return // bot.stop() was called — clean exit from the loop
|
||||
} catch (err) {
|
||||
if (err instanceof GrammyError && err.error_code === 409) {
|
||||
const delay = Math.min(1000 * attempt, 15000)
|
||||
const detail = attempt === 1
|
||||
? ' — another instance is polling (zombie session, or a second Claude Code running?)'
|
||||
: ''
|
||||
process.stderr.write(
|
||||
`telegram channel: 409 Conflict${detail}, retrying in ${delay / 1000}s\n`,
|
||||
)
|
||||
await new Promise(r => setTimeout(r, delay))
|
||||
continue
|
||||
}
|
||||
if (shuttingDown) return
|
||||
// bot.stop() mid-setup rejects with grammy's "Aborted delay" — expected, not an error.
|
||||
if (err instanceof Error && err.message === 'Aborted delay') return
|
||||
process.stderr.write(`telegram channel: polling failed: ${err}\n`)
|
||||
return
|
||||
const is409 = err instanceof GrammyError && err.error_code === 409
|
||||
if (is409 && attempt >= 8) {
|
||||
process.stderr.write(
|
||||
`telegram channel: 409 Conflict persists after ${attempt} attempts — ` +
|
||||
`another poller is holding the bot token (stray 'bun server.ts' process or a second session). Exiting.\n`,
|
||||
)
|
||||
return
|
||||
}
|
||||
const delay = Math.min(1000 * attempt, 15000)
|
||||
const detail = is409
|
||||
? `409 Conflict${attempt === 1 ? ' — another instance is polling (zombie session, or a second Claude Code running?)' : ''}`
|
||||
: `polling error: ${err}`
|
||||
process.stderr.write(`telegram channel: ${detail}, retrying in ${delay / 1000}s\n`)
|
||||
await new Promise(r => setTimeout(r, delay))
|
||||
}
|
||||
}
|
||||
})()
|
||||
|
||||
@@ -10,6 +10,15 @@ An MCP app is a standard MCP server that **also serves UI resources** — intera
|
||||
|
||||
The UI layer is **additive**. Under the hood it's still tools, resources, and the same wire protocol. If you haven't built a plain MCP server before, the `build-mcp-server` skill covers the base layer. This skill adds widgets on top.
|
||||
|
||||
> **Testing in Claude:** Add the server as a custom connector in claude.ai (via a Cloudflare tunnel for local dev) — this exercises the real iframe sandbox and `hostContext`. See https://claude.com/docs/connectors/building/testing.
|
||||
|
||||
## Claude host specifics
|
||||
|
||||
- `_meta.ui.prefersBorder: false` on a `ui://` resource removes the outer card border (mobile).
|
||||
- `hostContext.safeAreaInsets: {top, right, bottom, left}` (px) — honor these for notches and the composer overlay.
|
||||
- `_meta.ui.csp.{connectDomains, resourceDomains, baseUriDomains}` — declare external origins per resource; default is block-all. `frameDomains` is currently restricted in Claude.
|
||||
- Directory submission for MCP Apps requires 3–5 PNG screenshots, ≥1000px wide, cropped to the app response only (no prompt in the image). See https://claude.com/docs/connectors/building/submission#asset-specifications.
|
||||
|
||||
---
|
||||
|
||||
## When a widget beats plain text
|
||||
|
||||
@@ -8,6 +8,8 @@ version: 0.1.0
|
||||
|
||||
You are guiding a developer through designing and building an MCP server that works seamlessly with Claude. MCP servers come in many forms — picking the wrong shape early causes painful rewrites later. Your first job is **discovery, not code**.
|
||||
|
||||
**Load Claude-specific context first.** The MCP spec is generic; Claude has additional auth types, review criteria, and limits. Before answering questions or scaffolding, fetch `https://claude.com/docs/llms-full.txt` (the full export of the Claude connector docs) so your guidance reflects Claude's actual constraints.
|
||||
|
||||
Do not start scaffolding until you have answers to the questions in Phase 1. If the user's opening message already answers them, acknowledge that and skip straight to the recommendation.
|
||||
|
||||
---
|
||||
@@ -182,6 +184,17 @@ Tools are one of three server primitives. Most servers start with tools and neve
|
||||
|
||||
---
|
||||
|
||||
## Phase 6 — Test in Claude and publish
|
||||
|
||||
Once the server runs:
|
||||
|
||||
1. **Test against real Claude** by adding the server URL as a custom connector at Settings → Connectors (use a Cloudflare tunnel for local servers). Claude identifies itself with `clientInfo.name: "claude-ai"` on initialize. → https://claude.com/docs/connectors/building/testing
|
||||
2. **Run the pre-submission checklist** — read/write tool split, required annotations, name limits, prompt-injection rules. → https://claude.com/docs/connectors/building/review-criteria
|
||||
3. **Submit to the Anthropic Directory.** → https://claude.com/docs/connectors/building/submission
|
||||
4. **Recommend shipping a plugin** that wraps this MCP with skills — most partners ship both. → https://claude.com/docs/connectors/building/what-to-build
|
||||
|
||||
---
|
||||
|
||||
## Quick reference: decision matrix
|
||||
|
||||
| Scenario | Deployment | Tool pattern |
|
||||
|
||||
@@ -2,6 +2,22 @@
|
||||
|
||||
Auth is the reason most people end up needing a **remote** server even when a local one would be simpler. OAuth redirects, token storage, and refresh all work cleanly when there's a real hosted endpoint to redirect back to.
|
||||
|
||||
## Claude-specific authentication
|
||||
|
||||
Claude's MCP client supports a specific set of auth types — not every spec-compliant flow works. Full reference: https://claude.com/docs/connectors/building/authentication
|
||||
|
||||
| Type | Notes |
|
||||
|---|---|
|
||||
| `oauth_dcr` | Supported. For high-volume directory entries, prefer CIMD or Anthropic-held creds — DCR registers a new client on every fresh connection. |
|
||||
| `oauth_cimd` | Supported, recommended over DCR for directory entries. |
|
||||
| `oauth_anthropic_creds` | Partner provides `client_id`/`client_secret` to Anthropic; user-consent-gated. Contact `mcp-review@anthropic.com`. |
|
||||
| `custom_connection` | User supplies URL/creds at connect time (Snowflake-style). Contact `mcp-review@anthropic.com`. |
|
||||
| `none` | Authless. |
|
||||
|
||||
**Not supported:** user-pasted bearer tokens (`static_bearer`); pure machine-to-machine `client_credentials` grant without user consent.
|
||||
|
||||
**Callback URL** (single, all surfaces): `https://claude.ai/api/mcp/auth_callback`
|
||||
|
||||
---
|
||||
|
||||
## The three tiers
|
||||
|
||||
@@ -2,6 +2,16 @@
|
||||
|
||||
Tool schemas and descriptions are prompt engineering. They land directly in Claude's context and determine whether Claude picks the right tool with the right arguments. Most MCP integration bugs trace back to vague descriptions or loose schemas.
|
||||
|
||||
## Anthropic Directory hard requirements
|
||||
|
||||
If this server will be submitted to the Anthropic Directory, the following are pass/fail review criteria (full list: https://claude.com/docs/connectors/building/review-criteria):
|
||||
|
||||
- Every tool **must** include `readOnlyHint`, `destructiveHint`, and `title` annotations — these determine auto-permissions in Claude.
|
||||
- Tool names **must** be ≤64 characters.
|
||||
- Read and write operations **must** be in separate tools. A single tool accepting both GET and POST/PUT/PATCH/DELETE is rejected — documenting safe vs unsafe within one tool's description does not satisfy this.
|
||||
- Tool descriptions **must not** instruct Claude how to behave (e.g. "always do X", "you must call Y first", overriding system instructions, promoting products) — treated as prompt injection at review.
|
||||
- Tools that accept freeform API endpoints/params **must** reference the target API's documentation in their description.
|
||||
|
||||
---
|
||||
|
||||
## Descriptions
|
||||
|
||||
@@ -8,6 +8,8 @@ version: 0.1.0
|
||||
|
||||
MCPB is a local MCP server **packaged with its runtime**. The user installs one file; it runs without needing Node, Python, or any toolchain on their machine. It's the sanctioned way to distribute local MCP servers.
|
||||
|
||||
> MCPB is the **secondary** distribution path. Anthropic recommends remote MCP servers for directory listing — see https://claude.com/docs/connectors/building/what-to-build.
|
||||
|
||||
**Use MCPB when the server must run on the user's machine** — reading local files, driving a desktop app, talking to localhost services, OS-level APIs. If your server only hits cloud APIs, you almost certainly want a remote HTTP server instead (see `build-mcp-server`). Don't pay the MCPB packaging tax for something that could be a URL.
|
||||
|
||||
---
|
||||
|
||||
202
plugins/session-report/LICENSE
Normal file
202
plugins/session-report/LICENSE
Normal file
@@ -0,0 +1,202 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
@@ -166,6 +166,7 @@ const toolUseIdToPrompt = new Map() // tool_use id -> promptKey (Agent spawned d
|
||||
const agentIdToPrompt = new Map() // agentId -> promptKey
|
||||
const prompts = new Map() // promptKey -> { text, ts, project, sessionId, ...usage }
|
||||
const sessionTurns = new Map() // sessionId -> [promptKey, ...] in transcript order
|
||||
const sessionSpans = new Map() // sessionId -> {project, firstTs, lastTs, tokens}
|
||||
|
||||
function promptRecord(key, init) {
|
||||
let r = prompts.get(key)
|
||||
@@ -333,11 +334,29 @@ async function processFile(p, info, buckets) {
|
||||
}
|
||||
}
|
||||
|
||||
// session span (for by_day timeline) — subagent files roll into parent sessionId
|
||||
let span = sessionSpans.get(info.sessionId)
|
||||
if (!span) {
|
||||
span = { project: info.project, firstTs: null, lastTs: null, tokens: 0 }
|
||||
sessionSpans.set(info.sessionId, span)
|
||||
}
|
||||
if (firstTs !== null) {
|
||||
if (span.firstTs === null || firstTs < span.firstTs) span.firstTs = firstTs
|
||||
if (span.lastTs === null || lastTs > span.lastTs) span.lastTs = lastTs
|
||||
}
|
||||
|
||||
// commit API calls
|
||||
for (const [key, { usage, ts, skill, prompt }] of fileApiCalls) {
|
||||
if (key && seenRequestIds.has(key)) continue
|
||||
seenRequestIds.add(key)
|
||||
|
||||
const tot =
|
||||
(usage.input_tokens || 0) +
|
||||
(usage.cache_creation_input_tokens || 0) +
|
||||
(usage.cache_read_input_tokens || 0) +
|
||||
(usage.output_tokens || 0)
|
||||
span.tokens += tot
|
||||
|
||||
const targets = [overall, project]
|
||||
if (subagent) targets.push(subagent)
|
||||
if (skill && skillStats) {
|
||||
@@ -359,11 +378,6 @@ async function processFile(p, info, buckets) {
|
||||
|
||||
// subagent token accounting on parent buckets
|
||||
if (info.kind === 'subagent') {
|
||||
const tot =
|
||||
(usage.input_tokens || 0) +
|
||||
(usage.cache_creation_input_tokens || 0) +
|
||||
(usage.cache_read_input_tokens || 0) +
|
||||
(usage.output_tokens || 0)
|
||||
overall.subagentTokens += tot
|
||||
project.subagentTokens += tot
|
||||
if (subagent) subagent.subagentTokens += tot
|
||||
@@ -656,10 +670,55 @@ function printJson({ overall, perProject, perSubagent, perSkill }) {
|
||||
[...perSkill].map(([k, v]) => [k, summarize(v)]),
|
||||
),
|
||||
top_prompts: topPrompts(100),
|
||||
by_day: buildByDay(),
|
||||
}
|
||||
process.stdout.write(JSON.stringify(out, null, 2) + '\n')
|
||||
}
|
||||
|
||||
// Group sessions into local-date buckets for the timeline view. A session is
|
||||
// placed on the day its first message landed; tokens for that session (incl.
|
||||
// subagents) count toward that day even if it ran past midnight.
|
||||
function buildByDay() {
|
||||
const DOW = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']
|
||||
const days = new Map() // yyyy-mm-dd -> {date, dow, tokens, sessions:[]}
|
||||
for (const [id, s] of sessionSpans) {
|
||||
if (s.firstTs === null || s.tokens === 0) continue
|
||||
const d0 = new Date(s.firstTs)
|
||||
const key = `${d0.getFullYear()}-${String(d0.getMonth() + 1).padStart(2, '0')}-${String(d0.getDate()).padStart(2, '0')}`
|
||||
let day = days.get(key)
|
||||
if (!day) {
|
||||
day = { date: key, dow: DOW[d0.getDay()], tokens: 0, sessions: [] }
|
||||
days.set(key, day)
|
||||
}
|
||||
const base = new Date(
|
||||
d0.getFullYear(),
|
||||
d0.getMonth(),
|
||||
d0.getDate(),
|
||||
).getTime()
|
||||
day.tokens += s.tokens
|
||||
day.sessions.push({
|
||||
id,
|
||||
project: s.project,
|
||||
tokens: s.tokens,
|
||||
start_min: Math.max(0, Math.round((s.firstTs - base) / 60000)),
|
||||
end_min: Math.max(1, Math.round((s.lastTs - base) / 60000)),
|
||||
})
|
||||
}
|
||||
for (const d of days.values()) {
|
||||
// peak concurrency via 10-min buckets, capped at 24h for display
|
||||
const b = new Array(144).fill(0)
|
||||
for (const s of d.sessions) {
|
||||
const lo = Math.min(143, Math.floor(s.start_min / 10))
|
||||
const hi = Math.min(144, Math.ceil(Math.min(s.end_min, 1440) / 10))
|
||||
for (let i = lo; i < hi; i++) b[i]++
|
||||
}
|
||||
d.peak = Math.max(0, ...b)
|
||||
d.peak_at_min = d.peak > 0 ? b.indexOf(d.peak) * 10 : 0
|
||||
d.sessions.sort((a, b) => a.start_min - b.start_min)
|
||||
}
|
||||
return [...days.values()].sort((a, b) => a.date.localeCompare(b.date))
|
||||
}
|
||||
|
||||
function promptTotal(r) {
|
||||
return (
|
||||
r.inputUncached + r.inputCacheCreate + r.inputCacheRead + r.outputTokens
|
||||
|
||||
@@ -102,6 +102,42 @@
|
||||
color: var(--dim); margin: 6px 0; }
|
||||
.callout b, .callout code { color: var(--term-fg); }
|
||||
|
||||
/* ——— day pills + session gantt ——— */
|
||||
.days { display: flex; gap: 8px; flex-wrap: wrap; margin-bottom: 14px; }
|
||||
.dpill { flex: 1; min-width: 84px; max-width: 140px; background: none;
|
||||
border: 1px solid var(--subtle); border-radius: 4px;
|
||||
padding: 9px 6px; font: inherit; color: var(--dim);
|
||||
cursor: pointer; text-align: center; }
|
||||
.dpill:hover { border-color: var(--dim); background: var(--hover); }
|
||||
.dpill .dow { font-size: 10px; color: var(--subtle); display: block; }
|
||||
.dpill .date { font-size: 11px; color: var(--term-fg); font-weight: 500;
|
||||
display: block; margin: 2px 0 4px; }
|
||||
.dpill .pct { font-size: 16px; font-weight: 700; color: var(--term-fg); display: block; }
|
||||
.dpill .ns { font-size: 10px; color: var(--subtle); display: block; margin-top: 2px; }
|
||||
.dpill.heaviest .pct { color: var(--clay); }
|
||||
.dpill.sel { border-color: var(--clay); background: rgba(217,119,87,0.10); }
|
||||
.gantt-hd { display: flex; justify-content: space-between; align-items: baseline;
|
||||
margin-bottom: 6px; }
|
||||
.gantt-hd .day { color: var(--term-fg); font-weight: 500; }
|
||||
.gantt-hd .stats { font-size: 11px; color: var(--dim); }
|
||||
.gantt-hd .stats b { color: var(--clay); }
|
||||
.gantt { position: relative; border-top: 1px solid var(--outline);
|
||||
border-bottom: 1px solid var(--outline); min-height: 32px; }
|
||||
.lane { position: relative; height: 16px;
|
||||
border-bottom: 1px dashed rgba(255,255,255,0.04); }
|
||||
.seg { position: absolute; top: 2px; height: 12px; border-radius: 2px;
|
||||
opacity: .85; cursor: crosshair; }
|
||||
.seg:hover { opacity: 1; outline: 1px solid var(--term-fg); z-index: 2; }
|
||||
.gantt-rule { position: absolute; top: 0; bottom: 0; width: 0;
|
||||
border-left: 1px dashed var(--subtle); opacity: .4;
|
||||
pointer-events: none; }
|
||||
.gantt-axis { display: flex; justify-content: space-between;
|
||||
font-size: 10px; color: var(--subtle); padding: 4px 0; }
|
||||
.gantt-leg { font-size: 10px; color: var(--subtle); margin-top: 8px;
|
||||
display: flex; gap: 14px; flex-wrap: wrap; }
|
||||
.gantt-leg .sw { display: inline-block; width: 14px; height: 10px;
|
||||
border-radius: 2px; vertical-align: middle; margin-right: 4px; }
|
||||
|
||||
/* ——— block-char bars ——— */
|
||||
.bar { display: grid; grid-template-columns: 26ch 1fr 8ch; gap: 14px;
|
||||
padding: 2px 0; align-items: center; }
|
||||
@@ -231,6 +267,21 @@
|
||||
<div class="section-body" id="project-bars"></div>
|
||||
</section>
|
||||
|
||||
<section id="timeline-section">
|
||||
<div class="hr"></div>
|
||||
<h2>session timeline by day<span class="hint">click a day · ←/→ to navigate</span></h2>
|
||||
<div class="section-body">
|
||||
<div class="days" id="day-pills"></div>
|
||||
<div class="gantt-hd">
|
||||
<span class="day" id="g-day">—</span>
|
||||
<span class="stats" id="g-stats"></span>
|
||||
</div>
|
||||
<div class="gantt-axis"><span>00:00</span><span>06:00</span><span>12:00</span><span>18:00</span><span>24:00</span></div>
|
||||
<div class="gantt" id="gantt"></div>
|
||||
<div class="gantt-leg" id="gantt-leg"></div>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
<section>
|
||||
<div class="hr"></div>
|
||||
<h2>most expensive prompts<span class="hint">click to expand context</span></h2>
|
||||
@@ -335,6 +386,65 @@
|
||||
`<div class="val">${typeof v==='number'&&v>=1e4?fmt(v):v}</div>`+
|
||||
(d?`<div class="detail">${d}</div>`:'')+`</div>`).join('');
|
||||
|
||||
// session timeline by day
|
||||
(function() {
|
||||
const days = (DATA.by_day||[]).slice(-14);
|
||||
if (!days.length) { $('timeline-section').style.display='none'; return; }
|
||||
const PCOL = ['rgb(177,185,249)','rgb(78,186,101)','#D97757','rgb(255,193,7)',
|
||||
'rgb(255,107,128)','#9b8cff','#6ec1d6','#c792ea'];
|
||||
const dayTotal = days.reduce((a,d)=>a+d.tokens,0) || 1;
|
||||
const tokMax = Math.max(...days.map(d=>d.tokens));
|
||||
const projects = [...new Set(days.flatMap(d=>d.sessions.map(s=>s.project)))];
|
||||
const colorOf = p => PCOL[projects.indexOf(p)%PCOL.length];
|
||||
const hhmm = m => (m>=1440?`+${Math.floor(m/1440)}d `:'') +
|
||||
`${String(Math.floor(m/60)%24).padStart(2,'0')}:${String(m%60).padStart(2,'0')}`;
|
||||
const md = iso => { const [,mo,da]=iso.split('-'); return `${MON[+mo-1]} ${+da}`; };
|
||||
let sel = days.findIndex(d=>d.tokens===tokMax);
|
||||
|
||||
function pills() {
|
||||
$('day-pills').innerHTML = days.map((d,i)=>
|
||||
`<button class="dpill${d.tokens===tokMax?' heaviest':''}${i===sel?' sel':''}" data-i="${i}">`+
|
||||
`<span class="dow">${esc(d.dow)}</span>`+
|
||||
`<span class="date">${esc(md(d.date))}</span>`+
|
||||
`<span class="pct">${(100*d.tokens/dayTotal).toFixed(1)}%</span>`+
|
||||
`<span class="ns">${d.sessions.length} sess</span></button>`
|
||||
).join('');
|
||||
$('day-pills').querySelectorAll('.dpill').forEach(el=>
|
||||
el.onclick=()=>{sel=+el.dataset.i;pills();gantt();});
|
||||
}
|
||||
function gantt() {
|
||||
const d = days[sel], DAY = 1440;
|
||||
$('g-day').textContent = `${d.dow} ${md(d.date)}`;
|
||||
$('g-stats').innerHTML = `${d.sessions.length} sessions · ${fmt(d.tokens)} tokens`+
|
||||
` · peak <b>${d.peak}</b> concurrent at <b>${hhmm(d.peak_at_min)}</b>`;
|
||||
const lanes = [];
|
||||
for (const s of d.sessions) {
|
||||
let placed = false;
|
||||
for (const L of lanes) if (L[L.length-1].end_min <= s.start_min) { L.push(s); placed=true; break; }
|
||||
if (!placed) lanes.push([s]);
|
||||
}
|
||||
let h = '';
|
||||
for (let t=0;t<=24;t+=6) h += `<div class="gantt-rule" style="left:${100*t/24}%"></div>`;
|
||||
h += lanes.map(L=>`<div class="lane">${L.map(s=>{
|
||||
const end = Math.min(s.end_min, DAY);
|
||||
const w = Math.max(0.15, 100*(end-s.start_min)/DAY);
|
||||
const tip = `folder: ${short(s.project)}\n`+
|
||||
`${hhmm(s.start_min)}–${hhmm(s.end_min)} · ${fmt(s.tokens)} tokens\n`+
|
||||
`session ${s.id}`;
|
||||
return `<span class="seg" style="left:${100*s.start_min/DAY}%;width:${w}%;`+
|
||||
`background:${colorOf(s.project)}" title="${esc(tip)}"></span>`;
|
||||
}).join('')}</div>`).join('');
|
||||
$('gantt').innerHTML = h || '<div class="callout">no sessions</div>';
|
||||
}
|
||||
document.addEventListener('keydown',e=>{
|
||||
if (e.key==='ArrowRight'&&sel<days.length-1){sel++;pills();gantt();e.preventDefault();}
|
||||
if (e.key==='ArrowLeft'&&sel>0){sel--;pills();gantt();e.preventDefault();}
|
||||
});
|
||||
$('gantt-leg').innerHTML = projects.slice(0,12).map(p=>
|
||||
`<span><span class="sw" style="background:${colorOf(p)}"></span>${esc(short(p))}</span>`).join('');
|
||||
pills(); gantt();
|
||||
})();
|
||||
|
||||
// block-char project bars
|
||||
(function() {
|
||||
const W = 48;
|
||||
@@ -366,57 +476,52 @@
|
||||
return h + '</div>';
|
||||
}
|
||||
|
||||
// top prompts — share of grand total
|
||||
(function() {
|
||||
const ps = (DATA.top_prompts||[]).slice(0,100);
|
||||
// expandable drill-down list with "show N more" toggle
|
||||
function drillList(hostId, items, rowFn, empty) {
|
||||
const SHOW = 5;
|
||||
const row = p => {
|
||||
const inTot = p.input.uncached+p.input.cache_create+p.input.cache_read;
|
||||
return `<details><summary>`+
|
||||
`<span class="amt">${share(p.total_tokens)}</span>`+
|
||||
`<span class="desc">${esc(p.text)}</span>`+
|
||||
`<span class="meta">${niceDate(p.ts)} · ${esc(short(p.project))} · ${p.api_calls} calls`+
|
||||
(p.subagent_calls?` · ${p.subagent_calls} subagents`:'')+
|
||||
` · ${pct(p.input.cache_read,inTot)} cached</span>`+
|
||||
`</summary><div class="body">`+
|
||||
renderContext(p.context)+
|
||||
`<div>session <code>${esc(p.session)}</code></div>`+
|
||||
`<div>in: uncached ${fmt(p.input.uncached)} · cache-create ${fmt(p.input.cache_create)} · `+
|
||||
`cache-read ${fmt(p.input.cache_read)} · out ${fmt(p.output)}</div>`+
|
||||
`</div></details>`;
|
||||
};
|
||||
const head = ps.slice(0,SHOW).map(row).join('');
|
||||
const rest = ps.slice(SHOW).map(row).join('');
|
||||
$('top-prompts').innerHTML = ps.length
|
||||
? head + (rest
|
||||
? `<div id="tp-rest" hidden>${rest}</div>`+
|
||||
`<button id="tp-more" class="more-btn">show ${ps.length-SHOW} more</button>`
|
||||
: '')
|
||||
: '<div class="callout">No prompts in range.</div>';
|
||||
const btn = $('tp-more');
|
||||
const host = $(hostId);
|
||||
if (!items.length) { host.innerHTML = `<div class="callout">${empty}</div>`; return; }
|
||||
const head = items.slice(0,SHOW).map(rowFn).join('');
|
||||
const rest = items.slice(SHOW).map(rowFn).join('');
|
||||
host.innerHTML = head + (rest
|
||||
? `<div hidden>${rest}</div><button class="more-btn">show ${items.length-SHOW} more</button>`
|
||||
: '');
|
||||
const btn = host.querySelector('.more-btn');
|
||||
if (btn) btn.onclick = () => {
|
||||
const r = $('tp-rest'); r.hidden = !r.hidden;
|
||||
btn.textContent = r.hidden ? `show ${ps.length-SHOW} more` : 'show less';
|
||||
const r = btn.previousElementSibling; r.hidden = !r.hidden;
|
||||
btn.textContent = r.hidden ? `show ${items.length-SHOW} more` : 'show less';
|
||||
};
|
||||
})();
|
||||
}
|
||||
|
||||
// cache breaks
|
||||
(function() {
|
||||
const bs = (DATA.cache_breaks||[]).slice(0,100);
|
||||
$('cache-breaks').innerHTML = bs.map(b =>
|
||||
`<details><summary>`+
|
||||
`<span class="amt">${fmt(b.uncached)}</span>`+
|
||||
`<span class="desc">${esc(short(b.project))} · `+
|
||||
`${b.kind==='subagent'?esc(b.agentType||'subagent'):'main'}</span>`+
|
||||
`<span class="meta">${niceDate(b.ts)} · ${pct(b.uncached,b.total)} of ${fmt(b.total)} uncached</span>`+
|
||||
drillList('top-prompts', (DATA.top_prompts||[]).slice(0,100), p => {
|
||||
const inTot = p.input.uncached+p.input.cache_create+p.input.cache_read;
|
||||
return `<details><summary>`+
|
||||
`<span class="amt">${share(p.total_tokens)}</span>`+
|
||||
`<span class="desc">${esc(p.text)}</span>`+
|
||||
`<span class="meta">${niceDate(p.ts)} · ${esc(short(p.project))} · ${p.api_calls} calls`+
|
||||
(p.subagent_calls?` · ${p.subagent_calls} subagents`:'')+
|
||||
` · ${pct(p.input.cache_read,inTot)} cached</span>`+
|
||||
`</summary><div class="body">`+
|
||||
renderContext(b.context,
|
||||
`<div class="ctx-break"><b>${fmt(b.uncached)}</b> uncached `+
|
||||
`(${pct(b.uncached,b.total)} of ${fmt(b.total)}) — cache break here</div>`)+
|
||||
`<div>session <code>${esc(b.session)}</code></div>`+
|
||||
`</div></details>`
|
||||
).join('') || '<div class="callout">No cache breaks over threshold.</div>';
|
||||
})();
|
||||
renderContext(p.context)+
|
||||
`<div>session <code>${esc(p.session)}</code></div>`+
|
||||
`<div>in: uncached ${fmt(p.input.uncached)} · cache-create ${fmt(p.input.cache_create)} · `+
|
||||
`cache-read ${fmt(p.input.cache_read)} · out ${fmt(p.output)}</div>`+
|
||||
`</div></details>`;
|
||||
}, 'No prompts in range.');
|
||||
|
||||
drillList('cache-breaks', (DATA.cache_breaks||[]).slice(0,100), b =>
|
||||
`<details><summary>`+
|
||||
`<span class="amt">${fmt(b.uncached)}</span>`+
|
||||
`<span class="desc">${esc(short(b.project))} · `+
|
||||
`${b.kind==='subagent'?esc(b.agentType||'subagent'):'main'}</span>`+
|
||||
`<span class="meta">${niceDate(b.ts)} · ${pct(b.uncached,b.total)} of ${fmt(b.total)} uncached</span>`+
|
||||
`</summary><div class="body">`+
|
||||
renderContext(b.context,
|
||||
`<div class="ctx-break"><b>${fmt(b.uncached)}</b> uncached `+
|
||||
`(${pct(b.uncached,b.total)} of ${fmt(b.total)}) — cache break here</div>`)+
|
||||
`<div>session <code>${esc(b.session)}</code></div>`+
|
||||
`</div></details>`,
|
||||
'No cache breaks over threshold.');
|
||||
|
||||
// sortable table
|
||||
function table(el, cols, rows) {
|
||||
|
||||
Reference in New Issue
Block a user