1016 lines
33 KiB
Python
1016 lines
33 KiB
Python
#!/usr/bin/env python3
|
||
import sys
|
||
from datetime import datetime, timezone
|
||
from pathlib import Path
|
||
from shutil import copy2, copytree, which
|
||
import subprocess
|
||
|
||
try:
    # tomllib ships with the standard library from Python 3.11; on older
    # interpreters we fall back to the minimal hand-rolled parser below.
    import tomllib
except ModuleNotFoundError:  # Python < 3.11
    tomllib = None
|
||
|
||
# Actions run in this fixed order when their section is present in the config.
ORDER = ["vendor", "sync_templates", "sync_standards", "install_skills", "format_md"]
# Directory containing this script; the playbook repository root is one level up.
SCRIPT_DIR = Path(__file__).resolve().parent
PLAYBOOK_ROOT = SCRIPT_DIR.parent
|
||
|
||
|
||
def usage() -> str:
    """Return the command-line usage text."""
    parts = (
        "Usage:",
        " python scripts/playbook.py -config <path>",
        " python scripts/playbook.py -h",
    )
    return "\n".join(parts)
|
||
|
||
|
||
def strip_inline_comment(value: str) -> str:
    """Remove a trailing ``#`` comment from a TOML value, respecting quotes.

    Tracks single/double quote state so a ``#`` inside a quoted string is
    preserved; only a ``#`` outside any string starts a comment. Backslash
    escapes are honoured inside double quotes only (TOML basic strings).
    """
    in_single = False  # inside a '...' literal string
    in_double = False  # inside a "..." basic string
    escape = False  # previous char was a backslash inside double quotes
    for idx, ch in enumerate(value):
        if escape:
            # The escaped character never toggles quote state.
            escape = False
            continue
        if in_double and ch == "\\":
            escape = True
            continue
        if ch == "'" and not in_double:
            in_single = not in_single
            continue
        if ch == '"' and not in_single:
            in_double = not in_double
            continue
        if ch == "#" and not in_single and not in_double:
            # Comment starts here: drop it and any trailing whitespace before it.
            return value[:idx].rstrip()
    return value
|
||
|
||
|
||
def split_list_items(raw: str) -> list[str]:
    """Split the inner text of a TOML array on top-level commas.

    Commas inside quoted strings are kept as part of the item; each
    resulting item is stripped. An empty trailing segment (e.g. from a
    trailing comma) is dropped.
    """
    items: list[str] = []
    buf: list[str] = []  # characters of the item currently being collected
    in_single = False  # inside a '...' literal string
    in_double = False  # inside a "..." basic string
    escape = False  # previous char was a backslash inside double quotes
    for ch in raw:
        if escape:
            buf.append(ch)
            escape = False
            continue
        if in_double and ch == "\\":
            buf.append(ch)
            escape = True
            continue
        if ch == "'" and not in_double:
            in_single = not in_single
            buf.append(ch)
            continue
        if ch == '"' and not in_single:
            in_double = not in_double
            buf.append(ch)
            continue
        if ch == "," and not in_single and not in_double:
            # Top-level comma terminates the current item.
            items.append("".join(buf).strip())
            buf = []
            continue
        buf.append(ch)
    tail = "".join(buf).strip()
    if tail:
        items.append(tail)
    return items
|
||
|
||
|
||
def parse_toml_value(raw: str) -> object:
    """Parse a single TOML value (minimal fallback for Python < 3.11).

    Supports arrays, booleans, quoted strings, ints and floats; anything
    unrecognised is returned as the raw string.
    """
    value = raw.strip()
    if not value:
        return ""
    if value.startswith("[") and value.endswith("]"):
        inner = value[1:-1].strip()
        if not inner:
            return []
        # Recurse on each comma-separated element.
        return [parse_toml_value(item) for item in split_list_items(inner)]
    lowered = value.lower()
    if lowered == "true":
        return True
    if lowered == "false":
        return False
    if value[0] in ("'", '"') and value[-1] == value[0]:
        if value[0] == "'":
            # Literal string: no escape processing, just drop the quotes.
            return value[1:-1]
        import ast

        try:
            # Basic string: let ast decode backslash escapes.
            return ast.literal_eval(value)
        except (ValueError, SyntaxError):
            # Malformed escapes: fall back to the unquoted raw text.
            return value[1:-1]
    try:
        if "." in value:
            return float(value)
        return int(value)
    except ValueError:
        # Unquoted, non-numeric token: pass through unchanged.
        return value
|
||
|
||
|
||
def loads_toml_minimal(raw: str) -> dict:
    """Parse a flat TOML document without the stdlib tomllib (Python < 3.11).

    Supports top-level keys and single-level ``[section]`` tables only.
    Raises ValueError on malformed lines.
    """
    data: dict[str, dict] = {}
    current = None  # the table currently receiving keys; None = top level
    for line in raw.splitlines():
        stripped = line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        if stripped.startswith("[") and stripped.endswith("]"):
            section = stripped[1:-1].strip()
            if not section:
                raise ValueError("empty section header")
            current = data.setdefault(section, {})
            if not isinstance(current, dict):
                # A plain key of the same name was defined earlier.
                raise ValueError(f"invalid section: {section}")
            continue
        if "=" not in stripped:
            raise ValueError(f"invalid line: {line}")
        key, value = stripped.split("=", 1)
        key = key.strip()
        if not key:
            raise ValueError("missing key")
        value = strip_inline_comment(value.strip())
        # Keys before any section header land in the top-level dict.
        target = current if current is not None else data
        target[key] = parse_toml_value(value)
    return data
|
||
|
||
|
||
def load_config(path: Path) -> dict:
    """Parse the TOML config at *path*, preferring the stdlib parser."""
    text = path.read_text(encoding="utf-8")
    if tomllib is None:
        # Python < 3.11: use the bundled minimal parser.
        return loads_toml_minimal(text)
    return tomllib.loads(text)
|
||
|
||
|
||
def log(message: str) -> None:
    """Emit a progress message on stdout."""
    sys.stdout.write(f"{message}\n")
|
||
|
||
|
||
def ensure_dir(path: Path) -> None:
    """Create *path* and any missing parents; existing directories are fine."""
    if not path.is_dir():
        path.mkdir(parents=True, exist_ok=True)
|
||
|
||
|
||
def normalize_langs(raw: object) -> list[str]:
    """Coerce the ``langs`` config value into a validated list of names.

    Accepts None (defaults to ["tsl"]), a single string, or an iterable of
    strings. Blank entries are dropped. Raises ValueError for path-like
    entries or when nothing remains after cleaning.
    """
    if raw is None:
        return ["tsl"]
    candidates = [raw] if isinstance(raw, str) else list(raw)
    result: list[str] = []
    for candidate in candidates:
        name = str(candidate).strip()
        if not name:
            continue
        # Reject anything that could escape the expected directory layout.
        if any(token in name for token in ("/", "\\", "..")):
            raise ValueError(f"invalid lang: {name}")
        result.append(name)
    if not result:
        raise ValueError("langs is empty")
    return result
|
||
|
||
|
||
def resolve_main_language(config: dict, context: dict) -> str:
    """Pick {{MAIN_LANGUAGE}}: explicit setting, first sync_standards lang, or "tsl"."""
    explicit = config.get("main_language")
    if explicit is not None:
        text = str(explicit).strip()
        if text:
            return text

    # Fall back to the first language configured for sync_standards.
    full_config = context.get("config", {})
    sync_conf = full_config.get("sync_standards") if isinstance(full_config, dict) else None
    if isinstance(sync_conf, dict):
        langs_raw = sync_conf.get("langs")
        if langs_raw is not None:
            try:
                langs = normalize_langs(langs_raw)
            except ValueError:
                langs = []
            if langs:
                return langs[0]

    return "tsl"
|
||
|
||
|
||
def resolve_playbook_scripts(project_root: Path, context: dict) -> str:
    """Compute the project-relative POSIX path to the playbook scripts dir.

    When the playbook lives outside the project tree, fall back to the
    configured vendor target_dir (or the conventional default location).
    """
    scripts_dir = (PLAYBOOK_ROOT / "scripts").resolve()
    try:
        return scripts_dir.relative_to(project_root.resolve()).as_posix()
    except ValueError:
        pass  # playbook is not vendored inside this project

    full_config = context.get("config", {})
    vendor_conf = full_config.get("vendor") if isinstance(full_config, dict) else None
    if isinstance(vendor_conf, dict):
        target_dir = vendor_conf.get("target_dir")
        if target_dir:
            trimmed = str(target_dir).strip().rstrip("/").rstrip("\\")
            if trimmed:
                return f"{trimmed}/scripts"
    return "docs/standards/playbook/scripts"
|
||
|
||
|
||
def read_git_commit(root: Path) -> str:
    """Return the HEAD commit hash of the repository at *root*, or "N/A"."""
    cmd = ["git", "-C", str(root), "rev-parse", "HEAD"]
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, check=True)
    except (OSError, subprocess.CalledProcessError):
        # git missing, or root is not inside a repository.
        return "N/A"
    commit = proc.stdout.strip()
    return commit if commit else "N/A"
|
||
|
||
|
||
def write_docs_index(dest_prefix: Path, langs: list[str]) -> None:
    """Write docs/index.md for the vendored snapshot, covering *langs*.

    Languages without a known section entry contribute no lines.
    """
    sections: dict[str, list[str]] = {
        "tsl": [
            "",
            "## TSL(tsl)",
            "",
            "- 代码风格:`tsl/code_style.md`",
            "- 命名规范:`tsl/naming.md`",
            "- 语法手册:`tsl/syntax_book/index.md`",
            "- 工具链与验证命令(模板):`tsl/toolchain.md`",
        ],
        "cpp": [
            "",
            "## C++(cpp)",
            "",
            "- 代码风格:`cpp/code_style.md`",
            "- 命名规范:`cpp/naming.md`",
            "- 工具链与验证命令(模板):`cpp/toolchain.md`",
            "- 第三方依赖(Conan):`cpp/dependencies_conan.md`",
            "- clangd 配置:`cpp/clangd.md`",
        ],
        "python": [
            "",
            "## Python(python)",
            "",
            "- 代码风格:`python/style_guide.md`",
            "- 工具链:`python/tooling.md`",
            "- 配置清单:`python/configuration.md`",
        ],
        "markdown": [
            "",
            "## Markdown(markdown)",
            "",
            "- 代码块与行内代码格式:`markdown/index.md`",
        ],
    }
    lines = [
        "# 文档导航(Docs Index)",
        "",
        f"本快照为裁剪版 Playbook(langs: {','.join(langs)})。",
        "",
        "## 跨语言(common)",
        "",
        "- 提交信息与版本号:`common/commit_message.md`",
    ]
    for lang in langs:
        lines.extend(sections.get(lang, []))
    docs_index = dest_prefix / "docs/index.md"
    ensure_dir(docs_index.parent)
    docs_index.write_text("\n".join(lines) + "\n", encoding="utf-8")
|
||
|
||
|
||
def write_snapshot_readme(dest_prefix: Path, langs: list[str]) -> None:
    """Write the snapshot README.md describing how to use the vendored copy."""
    content = (
        "# Playbook(裁剪快照)\n"
        "\n"
        f"本目录为从 Playbook vendoring 的裁剪快照(langs: {','.join(langs)})。\n"
        "\n"
        "## 使用\n"
        "\n"
        "在目标项目根目录执行:\n"
        "\n"
        "```sh\n"
        "python docs/standards/playbook/scripts/playbook.py -config playbook.toml\n"
        "```\n"
        "\n"
        "配置示例:`docs/standards/playbook/playbook.toml.example`\n"
        "\n"
        "文档入口:\n"
        "\n"
        "- `docs/standards/playbook/docs/index.md`\n"
        "- `.agents/index.md`\n"
    )
    (dest_prefix / "README.md").write_text(content, encoding="utf-8")
|
||
|
||
|
||
def write_source_file(dest_prefix: Path, langs: list[str]) -> None:
    """Record snapshot provenance (source path, commit, UTC time, langs) in SOURCE.md."""
    commit = read_git_commit(PLAYBOOK_ROOT)
    stamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    body = "\n".join(
        [
            "# SOURCE",
            "",
            f"- Source: {PLAYBOOK_ROOT}",
            f"- Commit: {commit}",
            f"- Date: {stamp}",
            f"- Langs: {','.join(langs)}",
            "- Generated-by: scripts/playbook.py",
        ]
    )
    (dest_prefix / "SOURCE.md").write_text(body + "\n", encoding="utf-8")
|
||
|
||
|
||
def vendor_action(config: dict, context: dict) -> int:
    """Copy a trimmed playbook snapshot into the project's ``target_dir``.

    Backs up any existing snapshot, then copies the always-included assets
    (scripts, codex, SKILLS.md, common docs, CI templates) plus per-language
    docs/rulesets/templates, and writes the generated index/README/SOURCE
    files. Returns 0 on success, 2 on configuration or layout errors.
    """
    try:
        langs = normalize_langs(config.get("langs"))
    except ValueError as exc:
        print(f"ERROR: {exc}", file=sys.stderr)
        return 2

    target_dir = config.get("target_dir", "docs/standards/playbook")
    target_path = Path(target_dir)
    # Refuse absolute paths and parent traversal so the snapshot stays
    # inside the project tree.
    if target_path.is_absolute() or ".." in target_path.parts:
        print(f"ERROR: invalid target_dir: {target_dir}", file=sys.stderr)
        return 2

    project_root: Path = context["project_root"]
    dest_prefix = project_root / target_path
    dest_standards = dest_prefix.parent

    ensure_dir(dest_standards)

    # Keep the previous snapshot as a timestamped sibling rather than
    # deleting it.
    if dest_prefix.exists():
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
        backup = dest_standards / f"{dest_prefix.name}.bak.{timestamp}"
        dest_prefix.rename(backup)
        log(f"Backed up existing snapshot -> {backup}")

    ensure_dir(dest_prefix)

    gitattributes_src = PLAYBOOK_ROOT / ".gitattributes"
    if gitattributes_src.is_file():
        copy2(gitattributes_src, dest_prefix / ".gitattributes")

    # Assets vendored regardless of language selection.
    copytree(PLAYBOOK_ROOT / "scripts", dest_prefix / "scripts")
    copytree(PLAYBOOK_ROOT / "codex", dest_prefix / "codex")
    copy2(PLAYBOOK_ROOT / "SKILLS.md", dest_prefix / "SKILLS.md")

    common_docs = PLAYBOOK_ROOT / "docs/common"
    if common_docs.is_dir():
        copytree(common_docs, dest_prefix / "docs/common")

    rulesets_root = PLAYBOOK_ROOT / "rulesets"
    ensure_dir(dest_prefix / "rulesets")
    if (rulesets_root / "index.md").is_file():
        copy2(rulesets_root / "index.md", dest_prefix / "rulesets/index.md")

    templates_ci = PLAYBOOK_ROOT / "templates/ci"
    if templates_ci.is_dir():
        copytree(templates_ci, dest_prefix / "templates/ci")

    # Per-language material: docs and rulesets are required, templates
    # are optional.
    for lang in langs:
        docs_src = PLAYBOOK_ROOT / "docs" / lang
        rules_src = PLAYBOOK_ROOT / "rulesets" / lang
        if not docs_src.is_dir():
            print(f"ERROR: docs not found for lang={lang}", file=sys.stderr)
            return 2
        if not rules_src.is_dir():
            print(f"ERROR: rulesets not found for lang={lang}", file=sys.stderr)
            return 2
        copytree(docs_src, dest_prefix / "docs" / lang)
        copytree(rules_src, dest_prefix / "rulesets" / lang)
        templates_src = PLAYBOOK_ROOT / "templates" / lang
        if templates_src.is_dir():
            copytree(templates_src, dest_prefix / "templates" / lang)

    example_config = PLAYBOOK_ROOT / "playbook.toml.example"
    if example_config.is_file():
        copy2(example_config, dest_prefix / "playbook.toml.example")

    # Generated snapshot metadata.
    write_docs_index(dest_prefix, langs)
    write_snapshot_readme(dest_prefix, langs)
    write_source_file(dest_prefix, langs)

    log(f"Vendored snapshot -> {dest_prefix}")
    return 0
|
||
|
||
|
||
def replace_placeholders(
|
||
text: str,
|
||
project_name: str | None,
|
||
date_value: str,
|
||
main_language: str | None,
|
||
playbook_scripts: str | None,
|
||
) -> str:
|
||
result = text.replace("{{DATE}}", date_value)
|
||
if project_name:
|
||
result = result.replace("{{PROJECT_NAME}}", project_name)
|
||
if main_language:
|
||
result = result.replace("{{MAIN_LANGUAGE}}", main_language)
|
||
if playbook_scripts:
|
||
result = result.replace("{{PLAYBOOK_SCRIPTS}}", playbook_scripts)
|
||
return result
|
||
|
||
|
||
def backup_path(path: Path, no_backup: bool) -> None:
    """Rename *path* to a timestamped ``.bak`` sibling unless disabled or absent."""
    if no_backup or not path.exists():
        return
    stamp = datetime.now().strftime("%Y%m%d%H%M%S")
    backup = path.with_name(f"{path.name}.bak.{stamp}")
    path.rename(backup)
    log(f"Backed up: {path} -> {backup}")
|
||
|
||
|
||
def rename_template_files(root: Path) -> None:
    """Strip the ``.template`` infix from every ``*.template.md`` under *root*."""
    for source in root.rglob("*.template.md"):
        renamed = source.name.replace(".template.md", ".md")
        source.rename(source.with_name(renamed))
|
||
|
||
|
||
def replace_placeholders_in_dir(
    root: Path,
    project_name: str | None,
    date_value: str,
    main_language: str | None,
    playbook_scripts: str | None,
) -> None:
    """Apply replace_placeholders to every Markdown file under *root* in place."""
    for md_file in root.rglob("*.md"):
        original = md_file.read_text(encoding="utf-8")
        rendered = replace_placeholders(
            original, project_name, date_value, main_language, playbook_scripts
        )
        # Only rewrite files that actually changed.
        if rendered != original:
            md_file.write_text(rendered, encoding="utf-8")
|
||
|
||
|
||
def extract_block_lines(text: str, start: str, end: str) -> list[str]:
    """Return the lines from the *start* marker through the *end* marker, inclusive.

    Markers are compared against stripped lines. Returns [] when the start
    marker is missing or the end marker never follows it.
    """
    collected: list[str] = []
    capturing = False
    for line in text.splitlines():
        stripped = line.strip()
        if not capturing and stripped == start:
            capturing = True
        if capturing:
            collected.append(line)
            if stripped == end:
                break
    # An unterminated block is treated as not found.
    if collected and collected[-1].strip() == end:
        return collected
    return []
|
||
|
||
|
||
def update_agents_section(
    agents_path: Path,
    template_path: Path,
    start_marker: str,
    end_marker: str,
    project_name: str | None,
    date_value: str,
    main_language: str | None,
    playbook_scripts: str | None,
) -> None:
    """Create or update the marker-delimited section of AGENTS.md.

    Renders placeholders in the template, extracts the marked block, then:
    creates AGENTS.md from the full rendered template when missing,
    replaces the first existing marked section in place, or appends the
    block (unless the file already references ``.agents/index.md``).
    """
    template_text = template_path.read_text(encoding="utf-8")
    template_text = replace_placeholders(
        template_text, project_name, date_value, main_language, playbook_scripts
    )
    block = extract_block_lines(template_text, start_marker, end_marker)
    if not block:
        log("Skip: markers not found in template")
        return

    if not agents_path.exists():
        # No AGENTS.md yet: write the whole rendered template.
        agents_path.write_text(template_text + "\n", encoding="utf-8")
        log("Created: AGENTS.md")
        return

    agents_text = agents_path.read_text(encoding="utf-8")
    if start_marker in agents_text:
        # Replace only the first marked section; keep everything else.
        lines = agents_text.splitlines()
        updated: list[str] = []
        in_block = False
        replaced = False
        for line in lines:
            if not replaced and line.strip() == start_marker:
                updated.extend(block)
                in_block = True
                replaced = True
                continue
            if in_block:
                # Drop stale section content up to and including the end marker.
                if line.strip() == end_marker:
                    in_block = False
                continue
            updated.append(line)
        agents_path.write_text("\n".join(updated) + "\n", encoding="utf-8")
        log("Updated: AGENTS.md (section)")
    else:
        if ".agents/index.md" in agents_text:
            # Avoid duplicating guidance that is already present.
            log("Skip: AGENTS.md already references .agents/index.md")
            return
        updated = agents_text.rstrip("\n") + "\n\n" + "\n".join(block) + "\n"
        agents_path.write_text(updated, encoding="utf-8")
        log("Appended: AGENTS.md (section)")
|
||
|
||
|
||
def sync_templates_action(config: dict, context: dict) -> int:
    """Copy project templates (memory-bank, prompts, AGENTS, AGENT_RULES).

    Renders placeholders, renames ``*.template.md`` files, and respects the
    ``force`` / ``no_backup`` / ``full`` options. Skipped entirely when the
    project root is the playbook itself. Returns 0 on success, 2 when the
    templates directory is missing.
    """
    project_root: Path = context["project_root"]
    if project_root.resolve() == PLAYBOOK_ROOT.resolve():
        # Don't template the playbook repository onto itself.
        log("Skip: playbook root equals project root.")
        return 0

    templates_dir = PLAYBOOK_ROOT / "templates"
    if not templates_dir.is_dir():
        print(f"ERROR: templates not found: {templates_dir}", file=sys.stderr)
        return 2

    project_name = config.get("project_name")
    main_language = resolve_main_language(config, context)
    playbook_scripts = resolve_playbook_scripts(project_root, context)
    date_value = config.get("date") or datetime.now().strftime("%Y-%m-%d")
    force = bool(config.get("force", False))
    no_backup = bool(config.get("no_backup", False))
    full = bool(config.get("full", False))

    memory_src = templates_dir / "memory-bank"
    prompts_src = templates_dir / "prompts"
    agents_src = templates_dir / "AGENTS.template.md"
    rules_src = templates_dir / "AGENT_RULES.template.md"

    # memory-bank/: copied wholesale, never overwritten without force.
    if memory_src.is_dir():
        memory_dst = project_root / "memory-bank"
        if memory_dst.exists() and not force:
            log("memory-bank/ already exists. Use force to overwrite.")
        else:
            backup_path(memory_dst, no_backup)
            copytree(memory_src, memory_dst)
            rename_template_files(memory_dst)
            replace_placeholders_in_dir(
                memory_dst,
                project_name,
                date_value,
                main_language,
                playbook_scripts,
            )
            log("Synced: memory-bank/")

    # docs/prompts/: same overwrite semantics as memory-bank.
    if prompts_src.is_dir():
        prompts_dst = project_root / "docs/prompts"
        if prompts_dst.exists() and not force:
            log("docs/prompts/ already exists. Use force to overwrite.")
        else:
            backup_path(prompts_dst, no_backup)
            ensure_dir(prompts_dst.parent)
            copytree(prompts_src, prompts_dst)
            rename_template_files(prompts_dst)
            replace_placeholders_in_dir(
                prompts_dst,
                project_name,
                date_value,
                main_language,
                playbook_scripts,
            )
            log("Synced: docs/prompts/")

    # AGENTS.md: only the marker-delimited section is managed; ``full``
    # selects the framework markers instead of the templates markers.
    if agents_src.is_file():
        agents_dst = project_root / "AGENTS.md"
        if full:
            start_marker = "<!-- playbook:framework:start -->"
            end_marker = "<!-- playbook:framework:end -->"
        else:
            start_marker = "<!-- playbook:templates:start -->"
            end_marker = "<!-- playbook:templates:end -->"
        update_agents_section(
            agents_dst,
            agents_src,
            start_marker,
            end_marker,
            project_name,
            date_value,
            main_language,
            playbook_scripts,
        )

    # AGENT_RULES.md: rendered copy of the template file.
    if rules_src.is_file():
        rules_dst = project_root / "AGENT_RULES.md"
        if rules_dst.exists() and not force:
            log("AGENT_RULES.md already exists. Use force to overwrite.")
        else:
            backup_path(rules_dst, no_backup)
            text = rules_src.read_text(encoding="utf-8")
            text = replace_placeholders(
                text, project_name, date_value, main_language, playbook_scripts
            )
            rules_dst.write_text(text + "\n", encoding="utf-8")
            log("Synced: AGENT_RULES.md")

    return 0
|
||
|
||
|
||
def render_agents_block(langs: list[str]) -> list[str]:
    """Build the marker-delimited AGENTS.md block pointing at .agents rules."""
    refs = "、".join(f"`.agents/{lang}/index.md`" for lang in langs)
    return [
        "<!-- playbook:agents:start -->",
        "",
        "请以 `.agents/` 下的规则为准:",
        "",
        "- 入口:`.agents/index.md`",
        f"- 语言规则:{refs}",
        "<!-- playbook:agents:end -->",
    ]
|
||
|
||
|
||
def update_agents_block(agents_md: Path, block_lines: list[str]) -> None:
    """Create or update the playbook agents block in AGENTS.md.

    Creates the file with a default heading when missing; otherwise
    replaces the first marker-delimited block in place, or appends the
    block (unless the file already references ``.agents/index.md``).
    """
    start = "<!-- playbook:agents:start -->"
    end = "<!-- playbook:agents:end -->"
    if not agents_md.exists():
        content = "# Agent Instructions\n\n" + "\n".join(block_lines) + "\n"
        agents_md.write_text(content, encoding="utf-8")
        log("Created AGENTS.md")
        return

    text = agents_md.read_text(encoding="utf-8")
    if start in text:
        # Replace only the first marked block; preserve everything else.
        lines = text.splitlines()
        updated: list[str] = []
        in_block = False
        replaced = False
        for line in lines:
            if not replaced and line.strip() == start:
                updated.extend(block_lines)
                in_block = True
                replaced = True
                continue
            if in_block:
                # Skip the stale block body up to and including the end marker.
                if line.strip() == end:
                    in_block = False
                continue
            updated.append(line)
        agents_md.write_text("\n".join(updated) + "\n", encoding="utf-8")
        log("Updated AGENTS.md (playbook block).")
    else:
        if ".agents/index.md" in text:
            # Guidance already present in some other form.
            log("Skip: AGENTS.md already references .agents/index.md")
            return
        updated = text.rstrip("\n") + "\n\n" + "\n".join(block_lines) + "\n"
        agents_md.write_text(updated, encoding="utf-8")
        log("Appended playbook block to AGENTS.md")
|
||
|
||
|
||
def create_agents_index(agents_root: Path, langs: list[str], docs_prefix: str | None) -> None:
    """Write .agents/index.md once; an existing file is never overwritten."""
    agents_index = agents_root / "index.md"
    if agents_index.exists():
        return
    header = [
        "# .agents(多语言)",
        "",
        "本目录用于存放仓库级/语言级的代理规则集。",
        "",
        "建议约定:",
        "",
        "- `.agents/tsl/`:TSL 相关规则集(由 playbook 同步;适用于 `.tsl`/`.tsf`)",
        "- `.agents/cpp/`:C++ 相关规则集(由 playbook 同步;适用于 C++23/Modules)",
        "- `.agents/python/`:Python 相关规则集(由 playbook 同步)",
        "- `.agents/markdown/`:Markdown 相关规则集(仅代码格式化)",
        "",
        "规则发生冲突时,建议以“更靠近代码的目录规则更具体”为准。",
        "",
        "入口建议从:",
        "",
    ]
    entries = [f"- `.agents/{lang}/index.md`" for lang in langs]
    footer = [
        "",
        "标准快照文档入口:",
        "",
        f"- {docs_prefix or 'docs/standards/playbook/docs/'}",
    ]
    agents_index.write_text("\n".join(header + entries + footer) + "\n", encoding="utf-8")
    log("Created .agents/index.md")
|
||
|
||
|
||
def rewrite_agents_docs_links(agents_dir: Path, docs_prefix: str) -> None:
    """Repoint ``docs/<category>/`` links in top-level .md files to the snapshot prefix."""
    categories = ("tsl", "cpp", "python", "markdown", "common")
    for md_path in agents_dir.glob("*.md"):
        if not md_path.is_file():
            continue
        text = md_path.read_text(encoding="utf-8")
        updated = text
        for category in categories:
            updated = updated.replace(f"`docs/{category}/", f"`{docs_prefix}/{category}/")
        # Only rewrite files that actually changed.
        if updated != text:
            md_path.write_text(updated, encoding="utf-8")
|
||
|
||
|
||
def read_gitattributes_entries(path: Path) -> list[str]:
    """Return the non-empty, non-comment lines of a .gitattributes file, stripped."""
    lines = path.read_text(encoding="utf-8").splitlines()
    return [
        stripped
        for stripped in (line.strip() for line in lines)
        if stripped and not stripped.startswith("#")
    ]
|
||
|
||
|
||
def sync_gitattributes_overwrite(src: Path, dst: Path) -> None:
    """Replace *dst* with *src* wholesale, backing up any existing file first."""
    if src.resolve() != dst.resolve():
        backup_path(dst, False)
        copy2(src, dst)
        log("Synced .gitattributes from standards (overwrite).")
        return
    # Same file on disk: nothing to do.
    log("Skip: .gitattributes source equals destination.")
|
||
|
||
|
||
def sync_gitattributes_append(src: Path, dst: Path, source_note: str) -> None:
    """Append rules from *src* that are missing in *dst*, under a provenance header.

    Existing content is preserved verbatim; *dst* is backed up before the
    rewrite. Does nothing (beyond a log line) when no rules are missing.
    """
    src_entries = read_gitattributes_entries(src)
    dst_entries = read_gitattributes_entries(dst) if dst.exists() else []
    # Build the membership set once; the original rebuilt set(dst_entries)
    # for every source entry inside the comprehension (O(n*m)).
    existing = set(dst_entries)
    missing = [entry for entry in src_entries if entry not in existing]
    if not missing:
        log("No missing .gitattributes rules to append.")
        return

    original = dst.read_text(encoding="utf-8") if dst.exists() else ""
    backup_path(dst, False)
    header = f"# Added from playbook .gitattributes (source: {source_note})"
    content = original.rstrip("\n")
    if content:
        # Separate the appended section from existing content.
        content += "\n\n"
    content += header + "\n" + "\n".join(missing) + "\n"
    dst.write_text(content, encoding="utf-8")
    log("Appended missing .gitattributes rules from standards.")
|
||
|
||
|
||
def sync_gitattributes_block(src: Path, dst: Path) -> None:
    """Maintain a marker-delimited copy of the playbook .gitattributes in *dst*.

    Replaces the first existing block (recognising both the current and the
    legacy ``tsl-playbook`` markers, so old blocks are upgraded in place) or
    appends a new one; creates *dst* when missing. The existing file is
    backed up before rewriting.
    """
    begin = "# BEGIN playbook .gitattributes"
    end = "# END playbook .gitattributes"
    # Marker names from an earlier release, still recognised for migration.
    begin_old = "# BEGIN tsl-playbook .gitattributes"
    end_old = "# END tsl-playbook .gitattributes"

    src_lines = src.read_text(encoding="utf-8").splitlines()
    block_lines = [begin] + src_lines + [end]

    if dst.exists():
        original = dst.read_text(encoding="utf-8").splitlines()
        updated: list[str] = []
        in_block = False
        replaced = False
        for line in original:
            if line == begin or line == begin_old:
                # Only the first block is replaced; later duplicates are dropped.
                if not replaced:
                    updated.extend(block_lines)
                    replaced = True
                in_block = True
                continue
            if in_block:
                if line == end or line == end_old:
                    in_block = False
                continue
            updated.append(line)
        if not replaced:
            # No existing block: append one, separated by a blank line.
            if updated and updated[-1].strip():
                updated.append("")
            updated.extend(block_lines)
        backup_path(dst, False)
        dst.write_text("\n".join(updated) + "\n", encoding="utf-8")
    else:
        dst.write_text("\n".join(block_lines) + "\n", encoding="utf-8")
    log("Synced .gitattributes from standards (block).")
|
||
|
||
|
||
def sync_standards_action(config: dict, context: dict) -> int:
    """Sync per-language rulesets into ``.agents/`` and wire up AGENTS.md.

    Requires ``langs`` in the action config. Copies each ruleset (backing
    up any existing copy), rewrites doc links to the snapshot location when
    the playbook lives inside the project, updates the AGENTS.md block,
    creates .agents/index.md, and syncs .gitattributes according to
    ``gitattr_mode`` (append | overwrite | block | skip). Returns 0 on
    success, 2 on configuration or layout errors.
    """
    if "langs" not in config:
        print("ERROR: langs is required for sync_standards", file=sys.stderr)
        return 2
    try:
        langs = normalize_langs(config.get("langs"))
    except ValueError as exc:
        print(f"ERROR: {exc}", file=sys.stderr)
        return 2

    project_root: Path = context["project_root"]
    agents_root = project_root / ".agents"
    ensure_dir(agents_root)

    # One timestamp for the whole run so all backups line up.
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    for lang in langs:
        src = PLAYBOOK_ROOT / "rulesets" / lang
        if not src.is_dir():
            print(f"ERROR: agents ruleset not found: {src}", file=sys.stderr)
            return 2
        dst = agents_root / lang
        if dst.exists():
            backup = agents_root / f"{lang}.bak.{timestamp}"
            dst.rename(backup)
            log(f"Backed up existing {lang} agents -> {backup.name}")
        copytree(src, dst)
        log(f"Synced .agents/{lang} from standards.")

    # Compute a docs prefix only when the playbook is vendored inside the
    # project (relative_to succeeds and is not the project root itself).
    docs_prefix = None
    try:
        rel_snapshot = PLAYBOOK_ROOT.resolve().relative_to(project_root.resolve())
        if str(rel_snapshot) != ".":
            docs_prefix = f"{rel_snapshot.as_posix()}/docs"
    except ValueError:
        docs_prefix = None

    if docs_prefix:
        for lang in langs:
            rewrite_agents_docs_links(agents_root / lang, docs_prefix)

    agents_md = project_root / "AGENTS.md"
    block_lines = render_agents_block(langs)
    update_agents_block(agents_md, block_lines)

    create_agents_index(agents_root, langs, docs_prefix)

    gitattributes_src = PLAYBOOK_ROOT / ".gitattributes"
    if gitattributes_src.is_file():
        mode = str(config.get("gitattr_mode", "append")).lower()
        gitattributes_dst = project_root / ".gitattributes"
        # Prefer a project-relative path in the provenance note.
        source_note = str(gitattributes_src)
        try:
            source_note = str(gitattributes_src.resolve().relative_to(project_root.resolve()))
        except ValueError:
            source_note = str(gitattributes_src)

        if mode == "skip":
            log("Skip: .gitattributes sync (mode=skip).")
        elif mode == "overwrite":
            sync_gitattributes_overwrite(gitattributes_src, gitattributes_dst)
        elif mode == "block":
            sync_gitattributes_block(gitattributes_src, gitattributes_dst)
        else:
            # Default, and the fallback for unknown modes: append missing rules.
            sync_gitattributes_append(gitattributes_src, gitattributes_dst, source_note)

    return 0
|
||
|
||
|
||
def normalize_names(raw: object, label: str) -> list[str]:
    """Validate a required name list (string or iterable of strings).

    Blank entries are dropped. Raises ValueError when *raw* is None,
    contains path-like values, or nothing remains after cleaning; *label*
    names the config key in error messages.
    """
    if raw is None:
        raise ValueError(f"{label} is required")
    candidates = [raw] if isinstance(raw, str) else list(raw)
    result: list[str] = []
    for candidate in candidates:
        name = str(candidate).strip()
        if not name:
            continue
        # Reject anything that could escape the expected directory layout.
        if any(token in name for token in ("/", "\\", "..")):
            raise ValueError(f"invalid {label}: {name}")
        result.append(name)
    if not result:
        raise ValueError(f"{label} is empty")
    return result
|
||
|
||
|
||
def normalize_globs(raw: object) -> list[str]:
    """Coerce the globs config value to a non-empty list, defaulting to ``**/*.md``."""
    default = ["**/*.md"]
    if raw is None:
        return default
    values = [raw] if isinstance(raw, str) else list(raw)
    cleaned = [text for text in (str(value).strip() for value in values) if text]
    return cleaned if cleaned else default
|
||
|
||
|
||
def install_skills_action(config: dict, context: dict) -> int:
    """Install codex skills into ``<codex_home>/skills``.

    ``mode`` is "list" (install the skills named in the config) or "all"
    (every visible skill directory). Existing skills are backed up with a
    timestamp suffix before being replaced. Returns 0 on success, 2 on
    configuration errors or missing skills.
    """
    mode = str(config.get("mode", "list")).lower()
    codex_home = Path(config.get("codex_home", "~/.codex")).expanduser()
    if not codex_home.is_absolute():
        # A relative codex_home is resolved against the project root.
        codex_home = (context["project_root"] / codex_home).resolve()

    skills_src_root = PLAYBOOK_ROOT / "codex/skills"
    if not skills_src_root.is_dir():
        print(f"ERROR: skills source not found: {skills_src_root}", file=sys.stderr)
        return 2

    skills_dst_root = codex_home / "skills"
    ensure_dir(skills_dst_root)

    if mode == "all":
        # Every visible skill directory (dot-dirs excluded).
        skills = [
            path.name
            for path in skills_src_root.iterdir()
            if path.is_dir() and not path.name.startswith(".")
        ]
    elif mode == "list":
        try:
            skills = normalize_names(config.get("skills"), "skills")
        except ValueError as exc:
            print(f"ERROR: {exc}", file=sys.stderr)
            return 2
    else:
        print("ERROR: mode must be list or all", file=sys.stderr)
        return 2

    # Single timestamp so all backups from this run match.
    timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
    for name in skills:
        src = skills_src_root / name
        if not src.is_dir():
            print(f"ERROR: skill not found: {name}", file=sys.stderr)
            return 2
        dst = skills_dst_root / name
        if dst.exists():
            backup = skills_dst_root / f"{name}.bak.{timestamp}"
            dst.rename(backup)
            log(f"Backed up existing skill: {name} -> {backup.name}")
        copytree(src, dst)
        log(f"Installed: {name}")

    return 0
|
||
|
||
|
||
def format_md_action(config: dict, context: dict) -> int:
    """Run ``prettier -w`` over the configured globs; skip quietly if unavailable.

    Looks for prettier in the project's node_modules, then the playbook's,
    then on PATH. Returns 0 on success or skip, 2 on a config error, and
    otherwise prettier's non-zero exit code.
    """
    tool = str(config.get("tool", "prettier")).lower()
    if tool != "prettier":
        print("ERROR: format_md.tool only supports prettier", file=sys.stderr)
        return 2

    project_root: Path = context["project_root"]
    # Prefer project-local prettier, then the playbook's copy, then PATH.
    prettier = project_root / "node_modules/.bin/prettier"
    if not prettier.is_file():
        prettier = PLAYBOOK_ROOT / "node_modules/.bin/prettier"
    if not prettier.is_file():
        resolved = which("prettier")
        if resolved:
            prettier = Path(resolved)
        else:
            # Formatting is best-effort: absence of the tool is not an error.
            log("Skip: prettier not found.")
            return 0

    globs_raw = config.get("globs", ["**/*.md"])
    globs = normalize_globs(globs_raw)
    result = subprocess.run(
        [str(prettier), "-w", *globs],
        cwd=project_root,
        capture_output=True,
        text=True,
    )
    if result.returncode != 0:
        sys.stderr.write(result.stderr)
        return result.returncode
    # BUG FIX: previously the function fell off the end on success and
    # returned None, which main() treated as a failure (None != 0) and
    # propagated as a bogus exit status. Return 0 explicitly.
    return 0
|
||
|
||
|
||
def run_action(name: str, config: dict, context: dict) -> int:
    """Dispatch a named action; unknown names are ignored and return 0."""
    print(f"[action] {name}")
    handlers = {
        "vendor": vendor_action,
        "sync_templates": sync_templates_action,
        "sync_standards": sync_standards_action,
        "install_skills": install_skills_action,
        "format_md": format_md_action,
    }
    handler = handlers.get(name)
    if handler is None:
        return 0
    return handler(config, context)
|
||
|
||
|
||
def main(argv: list[str]) -> int:
    """CLI entry point: parse ``-config``, load it, and run configured actions.

    Actions execute in the fixed ORDER; the first non-zero action result
    aborts the run and becomes the exit code. Returns 0 on success, 2 on
    usage errors.
    """
    if "-h" in argv or "-help" in argv:
        print(usage())
        return 0
    if "-config" not in argv:
        print("ERROR: -config is required.\n" + usage(), file=sys.stderr)
        return 2
    idx = argv.index("-config")
    if idx + 1 >= len(argv) or not argv[idx + 1]:
        print("ERROR: -config requires a path.\n" + usage(), file=sys.stderr)
        return 2

    config_path = Path(argv[idx + 1]).expanduser()
    if not config_path.is_file():
        print(f"ERROR: config not found: {config_path}", file=sys.stderr)
        return 2

    config = load_config(config_path)
    playbook_config = config.get("playbook", {})
    project_root = playbook_config.get("project_root")
    if project_root:
        root = Path(project_root).expanduser()
        if not root.is_absolute():
            # A relative project_root is resolved against the config file.
            root = (config_path.parent / root).resolve()
    else:
        # Default: the directory containing the config file.
        root = config_path.parent
    context = {
        "project_root": root.resolve(),
        "config_path": config_path.resolve(),
        "config": config,
    }

    # Run only the actions whose section appears in the config, in ORDER.
    for name in ORDER:
        if name in config:
            result = run_action(name, config[name], context)
            if result != 0:
                return result

    return 0
|
||
|
||
|
||
if __name__ == "__main__":
    # Propagate main()'s return code as the process exit status.
    raise SystemExit(main(sys.argv[1:]))
|