mirror of
https://github.com/Postman-Devrel/postman-claude-code-plugin.git
synced 2026-04-16 10:29:58 +00:00
Add checks
This commit is contained in:
14
.github/.markdownlint.json
vendored
Normal file
14
.github/.markdownlint.json
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"MD009": false,
|
||||
"MD013": false,
|
||||
"MD022": false,
|
||||
"MD026": false,
|
||||
"MD029": false,
|
||||
"MD031": false,
|
||||
"MD032": false,
|
||||
"MD033": false,
|
||||
"MD034": false,
|
||||
"MD040": false,
|
||||
"MD041": false,
|
||||
"MD060": false
|
||||
}
|
||||
29
.github/labeler.yml
vendored
Normal file
29
.github/labeler.yml
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
commands:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "commands/**"
|
||||
|
||||
skills:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "skills/**"
|
||||
|
||||
agents:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "agents/**"
|
||||
|
||||
docs:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- "README.md"
|
||||
- "CLAUDE.md"
|
||||
- "examples/**"
|
||||
|
||||
config:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file:
|
||||
- ".claude-plugin/**"
|
||||
- ".mcp.json"
|
||||
- ".github/**"
|
||||
|
||||
examples:
|
||||
- changed-files:
|
||||
- any-glob-to-any-file: "examples/**"
|
||||
63
.github/scripts/check-internal-links.py
vendored
Executable file
63
.github/scripts/check-internal-links.py
vendored
Executable file
@@ -0,0 +1,63 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Check that internal markdown links resolve to existing files."""
|
||||
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Matches inline links and images: [text](target) / ![alt](target).
# External URLs are filtered out later by scheme prefix.
LINK_RE = re.compile(r"!?\[([^\]]*)\]\(([^)]+)\)")


def check_file_links(file_path: Path, root: Path) -> list[str]:
    """Return error strings for internal links in *file_path* that do not
    resolve to an existing file.

    Only inline links/images of the form [text](target) are checked.
    External URLs (http/https/mailto) and anchor-only links are skipped.
    """
    errors = []
    # Explicit encoding so the runner's locale cannot affect parsing.
    text = file_path.read_text(encoding="utf-8")
    file_dir = file_path.parent

    for match in LINK_RE.finditer(text):
        target = match.group(2).strip()

        # Markdown allows an optional quoted title: [t](path "Title").
        # Drop the title so it is not treated as part of the path,
        # which would produce a false "broken link" report.
        if '"' in target:
            target = target.split('"', 1)[0].strip()

        # Skip external URLs
        if target.startswith(("http://", "https://", "mailto:")):
            continue

        # Skip anchor-only links
        if target.startswith("#"):
            continue

        # Strip anchor fragments from file paths
        target_path = target.split("#")[0]
        if not target_path:
            continue

        # Resolve relative to the directory containing the markdown file.
        resolved = (file_dir / target_path).resolve()
        if not resolved.exists():
            rel = file_path.relative_to(root)
            errors.append(f"{rel}: Broken link '{target}' — file not found")

    return errors
|
||||
|
||||
|
||||
def main():
    """Scan every markdown file in the repo and report broken internal links."""
    root = Path(__file__).resolve().parent.parent.parent

    problems = []
    for md_file in sorted(root.rglob("*.md")):
        # Ignore anything under .git* directories or node_modules.
        rel_parts = md_file.relative_to(root).parts
        if any(part == "node_modules" or part.startswith(".git") for part in rel_parts):
            continue
        problems += check_file_links(md_file, root)

    if not problems:
        print("✓ Link check passed")
        return
    print("Link check failed:")
    for problem in problems:
        print(f" ✗ {problem}")
    sys.exit(1)


if __name__ == "__main__":
    main()
|
||||
125
.github/scripts/validate-frontmatter.py
vendored
Executable file
125
.github/scripts/validate-frontmatter.py
vendored
Executable file
@@ -0,0 +1,125 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Validate YAML frontmatter in commands, skills, and agents."""
|
||||
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
# PyYAML is not guaranteed on all runners, so parse simple YAML manually
FRONTMATTER_RE = re.compile(r"^---\s*\n(.*?)\n---", re.DOTALL)

KNOWN_TOOLS = {"Bash", "Read", "Write", "Glob", "Grep", "mcp__postman__*"}


def parse_frontmatter(text: str) -> Optional[dict]:
    """Extract the leading YAML frontmatter of *text* as a flat str->str dict.

    Returns None when the document does not start with a ``---``-delimited
    block. Blank lines, comment lines, and lines without a colon are
    ignored; surrounding single or double quotes are stripped from values.
    """
    m = FRONTMATTER_RE.match(text)
    if m is None:
        return None

    fields: dict = {}
    for raw in m.group(1).splitlines():
        entry = raw.strip()
        if not entry or entry[0] == "#":
            continue
        if ":" not in entry:
            continue
        # Split on the first colon only so values may themselves contain ':'.
        key, _, raw_value = entry.partition(":")
        fields[key.strip()] = raw_value.strip().strip('"').strip("'")
    return fields
|
||||
|
||||
|
||||
def validate_commands(root: Path) -> list[str]:
    """Validate frontmatter of every ``commands/*.md`` file.

    Each command must have YAML frontmatter with a non-empty 'description'.
    If 'allowed-tools' is present, every comma-separated tool must match an
    entry in KNOWN_TOOLS; entries ending in '*' act as prefix wildcards.
    Returns human-readable error strings (empty list when valid).
    """
    errors = []
    commands_dir = root / "commands"
    if not commands_dir.is_dir():
        return [f"{commands_dir}: Directory not found"]

    for f in sorted(commands_dir.glob("*.md")):
        text = f.read_text()
        fm = parse_frontmatter(text)
        if fm is None:
            errors.append(f"{f.name}: Missing YAML frontmatter")
            continue
        if "description" not in fm or not fm["description"]:
            errors.append(f"{f.name}: Missing required field 'description'")
        if "allowed-tools" in fm and fm["allowed-tools"]:
            tools = [t.strip() for t in fm["allowed-tools"].split(",")]
            for tool in tools:
                # Wildcard entries like 'mcp__postman__*' must match by
                # prefix; exact string comparison would reject every real
                # MCP tool name (e.g. 'mcp__postman__search').
                allowed = any(
                    tool == known
                    or (known.endswith("*") and tool.startswith(known[:-1]))
                    for known in KNOWN_TOOLS
                )
                if not allowed:
                    errors.append(f"{f.name}: Unknown tool '{tool}' in allowed-tools")

    return errors
|
||||
|
||||
|
||||
def validate_skills(root: Path) -> list[str]:
    """Validate the SKILL.md frontmatter of every directory under skills/.

    Each skill directory must contain a SKILL.md whose frontmatter declares
    non-empty 'name' and 'description' fields.
    """
    errors = []
    skills_dir = root / "skills"
    if not skills_dir.is_dir():
        return [f"{skills_dir}: Directory not found"]

    for entry in sorted(skills_dir.iterdir()):
        if not entry.is_dir():
            continue
        skill_file = entry / "SKILL.md"
        if not skill_file.exists():
            errors.append(f"skills/{entry.name}/: Missing SKILL.md")
            continue
        fm = parse_frontmatter(skill_file.read_text())
        if fm is None:
            errors.append(f"skills/{entry.name}/SKILL.md: Missing YAML frontmatter")
            continue
        # Both identity fields are mandatory and must be non-empty.
        for field in ("name", "description"):
            if not fm.get(field):
                errors.append(
                    f"skills/{entry.name}/SKILL.md: Missing required field '{field}'"
                )

    return errors
|
||||
|
||||
|
||||
def validate_agents(root: Path) -> list[str]:
    """Validate frontmatter of every ``agents/*.md`` file.

    Each agent must declare non-empty 'name', 'description', 'model' and
    'allowed-tools'. Tool names are checked against KNOWN_TOOLS; entries
    ending in '*' act as prefix wildcards. Returns error strings.
    """
    errors = []
    agents_dir = root / "agents"
    if not agents_dir.is_dir():
        return [f"{agents_dir}: Directory not found"]

    required_fields = ["name", "description", "model", "allowed-tools"]

    for f in sorted(agents_dir.glob("*.md")):
        text = f.read_text()
        fm = parse_frontmatter(text)
        if fm is None:
            errors.append(f"agents/{f.name}: Missing YAML frontmatter")
            continue
        for field in required_fields:
            if field not in fm or not fm[field]:
                errors.append(f"agents/{f.name}: Missing required field '{field}'")
        if "allowed-tools" in fm and fm["allowed-tools"]:
            tools = [t.strip() for t in fm["allowed-tools"].split(",")]
            for tool in tools:
                # '*'-suffixed KNOWN_TOOLS entries are prefix wildcards;
                # exact comparison would reject e.g. 'mcp__postman__search'.
                allowed = any(
                    tool == known
                    or (known.endswith("*") and tool.startswith(known[:-1]))
                    for known in KNOWN_TOOLS
                )
                if not allowed:
                    errors.append(f"agents/{f.name}: Unknown tool '{tool}' in allowed-tools")

    return errors
|
||||
|
||||
|
||||
def main():
    """Run all frontmatter validators and exit non-zero on any error."""
    root = Path(__file__).resolve().parent.parent.parent

    errors = []
    for validator in (validate_commands, validate_skills, validate_agents):
        errors.extend(validator(root))

    if not errors:
        print("✓ Frontmatter validation passed")
        return
    print("Frontmatter validation failed:")
    for e in errors:
        print(f" ✗ {e}")
    sys.exit(1)


if __name__ == "__main__":
    main()
|
||||
80
.github/scripts/validate-json.py
vendored
Executable file
80
.github/scripts/validate-json.py
vendored
Executable file
@@ -0,0 +1,80 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Validate JSON config files for the Claude Code plugin."""
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def validate_plugin_json(path: Path) -> list[str]:
    """Validate ``.claude-plugin/plugin.json``.

    The file must be a JSON object with non-empty string fields 'name',
    'version' and 'description'; 'version' must be x.y.z semver.
    Returns human-readable error strings (empty list when valid).
    """
    try:
        # Explicit encoding: JSON config files are UTF-8 by convention.
        with open(path, encoding="utf-8") as f:
            data = json.load(f)
    except json.JSONDecodeError as e:
        return [f"{path}: Invalid JSON — {e}"]

    # Guard the shape first: a top-level array/string would make the
    # membership tests below misbehave or raise TypeError.
    if not isinstance(data, dict):
        return [f"{path}: Top-level value must be a JSON object"]

    errors = []
    required_fields = ["name", "version", "description"]
    for field in required_fields:
        if field not in data:
            errors.append(f"{path}: Missing required field '{field}'")
        elif not isinstance(data[field], str) or not data[field].strip():
            errors.append(f"{path}: Field '{field}' must be a non-empty string")

    if "version" in data and isinstance(data["version"], str):
        parts = data["version"].split(".")
        if len(parts) != 3 or not all(p.isdigit() for p in parts):
            errors.append(f"{path}: Field 'version' must be semver (e.g. 1.0.0)")

    return errors
|
||||
|
||||
|
||||
def validate_mcp_json(path: Path) -> list[str]:
    """Validate ``.mcp.json``.

    'mcpServers' must map server names to objects that each carry 'type'
    and 'url'. Returns human-readable error strings (empty list when valid).
    """
    try:
        # Explicit encoding: JSON config files are UTF-8 by convention.
        with open(path, encoding="utf-8") as f:
            data = json.load(f)
    except json.JSONDecodeError as e:
        return [f"{path}: Invalid JSON — {e}"]

    # Guard the shape (consistent with validate_plugin_json): a top-level
    # array/string would make the membership test below misbehave or raise.
    if not isinstance(data, dict):
        return [f"{path}: Top-level value must be a JSON object"]

    errors = []
    if "mcpServers" not in data:
        errors.append(f"{path}: Missing required key 'mcpServers'")
    elif not isinstance(data["mcpServers"], dict):
        errors.append(f"{path}: 'mcpServers' must be an object")
    else:
        for name, config in data["mcpServers"].items():
            # A non-object entry would turn the membership tests below into
            # substring checks (str) or raise TypeError (int/None).
            if not isinstance(config, dict):
                errors.append(f"{path}: Server '{name}' must be an object")
                continue
            if "type" not in config:
                errors.append(f"{path}: Server '{name}' missing 'type'")
            if "url" not in config:
                errors.append(f"{path}: Server '{name}' missing 'url'")

    return errors
|
||||
|
||||
|
||||
def main():
    """Validate both plugin config files; exit 1 if any check fails."""
    root = Path(__file__).resolve().parent.parent.parent

    # Each config file is required; a missing file is itself an error.
    checks = [
        (root / ".claude-plugin" / "plugin.json", validate_plugin_json),
        (root / ".mcp.json", validate_mcp_json),
    ]

    errors = []
    for config_path, validator in checks:
        if config_path.exists():
            errors.extend(validator(config_path))
        else:
            errors.append(f"{config_path}: File not found")

    if errors:
        print("JSON validation failed:")
        for e in errors:
            print(f" ✗ {e}")
        sys.exit(1)
    else:
        print("✓ JSON validation passed")


if __name__ == "__main__":
    main()
|
||||
91
.github/scripts/validate-structure.py
vendored
Executable file
91
.github/scripts/validate-structure.py
vendored
Executable file
@@ -0,0 +1,91 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Validate plugin structure integrity — cross-reference components."""
|
||||
|
||||
import json
|
||||
import re
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
FRONTMATTER_RE = re.compile(r"^---\s*\n(.*?)\n---", re.DOTALL)


def has_frontmatter(path: Path) -> bool:
    """Return True when the file at *path* opens with a ----delimited frontmatter block."""
    return FRONTMATTER_RE.match(path.read_text()) is not None
|
||||
|
||||
|
||||
def main():
    """Cross-check the plugin's directory layout and required files."""
    root = Path(__file__).resolve().parent.parent.parent
    errors = []

    # 1. plugin.json must exist and be valid
    plugin_json = root / ".claude-plugin" / "plugin.json"
    if plugin_json.exists():
        try:
            json.loads(plugin_json.read_text())
        except json.JSONDecodeError:
            errors.append(".claude-plugin/plugin.json: Invalid JSON")
    else:
        errors.append(".claude-plugin/plugin.json: File not found")

    # 2. .mcp.json must exist
    if not (root / ".mcp.json").exists():
        errors.append(".mcp.json: File not found")

    # 3. Every commands/*.md must have frontmatter
    commands_dir = root / "commands"
    if not commands_dir.is_dir():
        errors.append("commands/: Directory not found")
    else:
        command_files = sorted(commands_dir.glob("*.md"))
        if not command_files:
            errors.append("commands/: No command files found")
        errors.extend(
            f"commands/{f.name}: Missing YAML frontmatter"
            for f in command_files
            if not has_frontmatter(f)
        )

    # 4. Every skills/*/ directory must have a SKILL.md
    skills_dir = root / "skills"
    if not skills_dir.is_dir():
        errors.append("skills/: Directory not found")
    else:
        skill_dirs = sorted(d for d in skills_dir.iterdir() if d.is_dir())
        if not skill_dirs:
            errors.append("skills/: No skill directories found")
        for d in skill_dirs:
            skill_file = d / "SKILL.md"
            if not skill_file.exists():
                errors.append(f"skills/{d.name}/: Missing SKILL.md")
            elif not has_frontmatter(skill_file):
                errors.append(f"skills/{d.name}/SKILL.md: Missing YAML frontmatter")

    # 5. Every agents/*.md must have frontmatter
    agents_dir = root / "agents"
    if not agents_dir.is_dir():
        errors.append("agents/: Directory not found")
    else:
        agent_files = sorted(agents_dir.glob("*.md"))
        if not agent_files:
            errors.append("agents/: No agent files found")
        errors.extend(
            f"agents/{f.name}: Missing YAML frontmatter"
            for f in agent_files
            if not has_frontmatter(f)
        )

    # 6. Only README.md, CLAUDE.md and LICENSE belong in the repo root
    expected_root_md = {"README.md", "CLAUDE.md", "LICENSE"}
    errors.extend(
        f"{f.name}: Unexpected markdown file in repo root"
        for f in sorted(root.glob("*.md"))
        if f.name not in expected_root_md
    )

    if errors:
        print("Structure validation failed:")
        for e in errors:
            print(f" ✗ {e}")
        sys.exit(1)
    else:
        print("✓ Structure validation passed")


if __name__ == "__main__":
    main()
|
||||
59
.github/workflows/plugin-checks.yml
vendored
Normal file
59
.github/workflows/plugin-checks.yml
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
name: Plugin Checks
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
json-validation:
|
||||
name: JSON Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: python .github/scripts/validate-json.py
|
||||
|
||||
frontmatter-validation:
|
||||
name: Frontmatter Validation
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: python .github/scripts/validate-frontmatter.py
|
||||
|
||||
markdown-lint:
|
||||
name: Markdown Lint
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
- run: npm install -g markdownlint-cli
|
||||
- run: markdownlint "**/*.md" --config .github/.markdownlint.json
|
||||
|
||||
link-check:
|
||||
name: Link Check
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: python .github/scripts/check-internal-links.py
|
||||
|
||||
structure-integrity:
|
||||
name: Structure Integrity
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: python .github/scripts/validate-structure.py
|
||||
18
.github/workflows/pr-labeler.yml
vendored
Normal file
18
.github/workflows/pr-labeler.yml
vendored
Normal file
@@ -0,0 +1,18 @@
|
||||
name: PR Labeler
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
label:
|
||||
name: Label PR
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/labeler@v5
|
||||
with:
|
||||
configuration-path: .github/labeler.yml
|
||||
Reference in New Issue
Block a user