diff --git a/.coderabbit.yaml b/.coderabbit.yaml new file mode 100644 index 000000000..8c1c38680 --- /dev/null +++ b/.coderabbit.yaml @@ -0,0 +1,78 @@ +# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json +language: en-US +reviews: + profile: "chill" + high_level_summary: false + changed_files_summary: false + sequence_diagrams: false + poem: false + review_status: false + collapse_walkthrough: true + estimate_code_review_effort: false + assess_linked_issues: false + related_issues: false + related_prs: false + suggested_labels: false + suggested_reviewers: false + in_progress_fortune: false + enable_prompt_for_ai_agents: false + auto_review: + drafts: true + pre_merge_checks: + description: + mode: "off" + title: + mode: "off" + issue_assessment: + mode: "off" + path_instructions: + - path: "payjoin/src/**/*.rs" + instructions: | + Check for these Rust API guideline violations: + + C-CALLER-CONTROL (critical): Flag any pub fn taking &T, &str, + or &[u8] that internally clones, copies, or calls .to_owned() + / .to_string() / .to_vec() on the parameter. The fix is to + take T, String, or Vec by value and let the caller decide + whether to clone or move. + + Also check the Rust API guidelines checklist: + - C-COMMON-TRAITS: pub types should impl Send, Sync, Debug, + Display, Default, serde traits where appropriate + - C-CONV: use From/Into and AsRef for conversions rather than + ad-hoc methods + - C-GETTER: prefer foo() / set_foo() naming for getters and + setters, not get_foo() + - C-ITER: collections should impl IntoIterator and expose + iter(), iter_mut() where appropriate + - C-SERDE: pub types should impl Serialize/Deserialize when + they represent data that crosses process boundaries + - C-SEND-SYNC: err on the side of Send + Sync for pub types + unless there is a specific reason not to + + If no violations are found, say "No findings." Do not invent + issues. Format: file:line - pattern - suggested fix. 
+ - path: "payjoin-cli/src/**/*.rs" + instructions: | + Check for these Rust API guideline violations: + + C-CALLER-CONTROL (critical): Flag any pub fn taking &T, &str, + or &[u8] that internally clones, copies, or calls .to_owned() + / .to_string() / .to_vec() on the parameter. The fix is to + take T, String, or Vec by value and let the caller decide + whether to clone or move. + + Also check the Rust API guidelines checklist: + - C-COMMON-TRAITS: pub types should impl Send, Sync, Debug, + Display, Default, serde traits where appropriate + - C-CONV: use From/Into and AsRef for conversions rather than + ad-hoc methods + - C-GETTER: prefer foo() / set_foo() naming for getters and + setters, not get_foo() + - C-ITER: collections should impl IntoIterator and expose + iter(), iter_mut() where appropriate + - C-SEND-SYNC: err on the side of Send + Sync for pub types + unless there is a specific reason not to + + If no violations are found, say "No findings." Do not invent + issues. Format: file:line - pattern - suggested fix. 
diff --git a/.github/scripts/compile_standup.py b/.github/scripts/compile_standup.py index cd023d8be..957263baa 100644 --- a/.github/scripts/compile_standup.py +++ b/.github/scripts/compile_standup.py @@ -1,153 +1,159 @@ #!/usr/bin/env python3 -"""Compile standup responses into a GitHub Discussion and close the issue.""" +"""Update the latest Weekly Check-in Discussion body with participation summary.""" import os -from datetime import datetime, timezone, timedelta +from pathlib import Path import requests +import yaml REPO = os.environ["GITHUB_REPOSITORY"] TOKEN = os.environ["STANDUP_TOKEN"] -CATEGORY_NODE_ID = os.environ["DISCUSSION_CATEGORY_NODE_ID"] -API = "https://api.github.com" GRAPHQL = "https://api.github.com/graphql" HEADERS = { "Authorization": f"token {TOKEN}", "Accept": "application/vnd.github+json", } +_CONFIG_PATH = Path(__file__).resolve().parent.parent / "standup-contributors.yml" +with open(_CONFIG_PATH) as _f: + CONTRIBUTORS = [c["username"] for c in yaml.safe_load(_f)["contributors"]] -def find_standup_issue(): - """Find the most recent open standup-input issue from the last 7 days.""" - since = (datetime.now(timezone.utc) - timedelta(days=7)).isoformat() - resp = requests.get( - f"{API}/repos/{REPO}/issues", - headers=HEADERS, - params={ - "labels": "standup-input", - "state": "open", - "since": since, - "sort": "created", - "direction": "desc", - "per_page": 1, - }, - ) - resp.raise_for_status() - issues = resp.json() - if not issues: - print("No standup-input issue found in the last 7 days.") - return None - return issues[0] - - -def fetch_comments(issue_number): - """Fetch all comments on an issue.""" - comments = [] - page = 1 - while True: - resp = requests.get( - f"{API}/repos/{REPO}/issues/{issue_number}/comments", - headers=HEADERS, - params={"per_page": 100, "page": page}, - ) - resp.raise_for_status() - batch = resp.json() - if not batch: - break - comments.extend(batch) - page += 1 - return comments - - -def get_repo_node_id(): - 
"""Get the repository node ID for the GraphQL mutation.""" - resp = requests.get(f"{API}/repos/{REPO}", headers=HEADERS) - resp.raise_for_status() - return resp.json()["node_id"] - - -def create_discussion(title, body, repo_node_id): - """Create a GitHub Discussion via GraphQL.""" - mutation = """ - mutation($repoId: ID!, $categoryId: ID!, $title: String!, $body: String!) { - createDiscussion(input: { - repositoryId: $repoId, - categoryId: $categoryId, - title: $title, - body: $body - }) { - discussion { - url - } - } - } - """ + +def graphql(query, variables=None): + """Run a GraphQL query and return the data.""" resp = requests.post( GRAPHQL, headers=HEADERS, - json={ - "query": mutation, - "variables": { - "repoId": repo_node_id, - "categoryId": CATEGORY_NODE_ID, - "title": title, - "body": body, - }, - }, + json={"query": query, "variables": variables or {}}, ) resp.raise_for_status() data = resp.json() if "errors" in data: raise RuntimeError(f"GraphQL errors: {data['errors']}") - return data["data"]["createDiscussion"]["discussion"]["url"] + return data["data"] + + +def find_latest_checkin(): + """Find the most recent Weekly Check-in Discussion.""" + owner, name = REPO.split("/") + data = graphql( + """ + query($owner: String!, $name: String!) { + repository(owner: $owner, name: $name) { + discussions(first: 10, orderBy: {field: CREATED_AT, direction: DESC}) { + nodes { + id + title + url + body + } + } + } + } + """, + {"owner": owner, "name": name}, + ) + for d in data["repository"]["discussions"]["nodes"]: + if d["title"].startswith("Weekly Check-in:"): + return d + return None + + +def get_discussion_comments(discussion_id): + """Fetch top-level comments and their replies for a Discussion.""" + data = graphql( + """ + query($id: ID!) { + node(id: $id) { + ... 
on Discussion { + comments(first: 50) { + nodes { + body + replies(first: 50) { + nodes { + author { login } + } + } + } + } + } + } + } + """, + {"id": discussion_id}, + ) + return data["node"]["comments"]["nodes"] -def close_issue(issue_number, discussion_url): - """Close the standup issue with a link to the compiled discussion.""" - requests.post( - f"{API}/repos/{REPO}/issues/{issue_number}/comments", - headers=HEADERS, - json={"body": f"Compiled into discussion: {discussion_url}"}, - ) - requests.patch( - f"{API}/repos/{REPO}/issues/{issue_number}", - headers=HEADERS, - json={"state": "closed"}, +def check_participation(comments): + """Return list of contributors who replied to their thread.""" + participated = [] + for comment in comments: + body = comment["body"] + for user in CONTRIBUTORS: + if f"@{user}" not in body: + continue + reply_authors = { + r["author"]["login"] for r in comment["replies"]["nodes"] if r["author"] + } + if user in reply_authors: + participated.append(user) + return participated + + +def update_discussion_body(discussion_id, new_body): + """Edit the Discussion body via GraphQL.""" + graphql( + """ + mutation($discussionId: ID!, $body: String!) 
{ +        updateDiscussion(input: { +            discussionId: $discussionId, +            body: $body +        }) { +            discussion { id } +        } +    } +    """, +        {"discussionId": discussion_id, "body": new_body}, ) +PARTICIPATION_MARKER = "<!-- participation -->" + + +def main(): -    issue = find_standup_issue() -    if not issue: +    dry_run = os.environ.get("DRY_RUN") + +    discussion = find_latest_checkin() +    if not discussion: +        print("No Weekly Check-in discussion found.") return - issue_number = issue["number"] - # Extract the week label from the issue title - title_suffix = issue["title"].removeprefix("Standup Input: ") - week_label = title_suffix or datetime.now(timezone.utc).strftime("Week of %Y-%m-%d") + print(f"Found: {discussion['url']}") - comments = fetch_comments(issue_number) + comments = get_discussion_comments(discussion["id"]) + participated = check_participation(comments) - # Build sections per contributor - sections = [] - for comment in comments: - user = comment["user"]["login"] - if comment["user"]["type"] == "Bot": - continue - body = comment["body"].strip() - sections.append(f"### @{user}\n{body}") + if participated: + names = ", ".join(f"@{u}" for u in participated) + participation_line = f"**Participated:** {names}" + else: + participation_line = "**Participated:** _(none yet)_" - updates = "\n\n".join(sections) if sections else "_No responses._" + # Strip any previous participation section, then append + body = discussion["body"] + if PARTICIPATION_MARKER in body: + body = body[: body.index(PARTICIPATION_MARKER)].rstrip() - discussion_title = f"Weekly Check-in: {week_label}" - discussion_body = updates + new_body = f"{body}\n\n{PARTICIPATION_MARKER}\n{participation_line}" - repo_node_id = get_repo_node_id() - discussion_url = create_discussion(discussion_title, discussion_body, repo_node_id) - print(f"Created discussion: {discussion_url}") + if dry_run: + print(f"Would update body to:\n---\n{new_body}\n---") + return - close_issue(issue_number, discussion_url) - print(f"Closed issue #{issue_number}") + 
update_discussion_body(discussion["id"], new_body) + print(f"Updated discussion: {participation_line}") if __name__ == "__main__": diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py new file mode 100644 index 000000000..c6a04c203 --- /dev/null +++ b/.github/scripts/create_standup_discussion.py @@ -0,0 +1,286 @@ +#!/usr/bin/env python3 +"""Create a weekly standup Discussion with auto-gathered activity per contributor.""" + +import os +from datetime import datetime, timezone, timedelta +from pathlib import Path + +import requests +import yaml + +REPO = os.environ["GITHUB_REPOSITORY"] +TOKEN = os.environ["STANDUP_TOKEN"] +CATEGORY_NODE_ID = os.environ["DISCUSSION_CATEGORY_NODE_ID"] +API = "https://api.github.com" +GRAPHQL = "https://api.github.com/graphql" +HEADERS = { + "Authorization": f"token {TOKEN}", + "Accept": "application/vnd.github+json", +} + +_CONFIG_PATH = Path(__file__).resolve().parent.parent / "standup-contributors.yml" +with open(_CONFIG_PATH) as _f: + CONTRIBUTORS = [c["username"] for c in yaml.safe_load(_f)["contributors"]] + +# Public repos to search — explicit list avoids leaking private repo +# data when the bot token has org membership. 
+REPOS = [ + "payjoin/rust-payjoin", + "payjoin/payjoin.org", + "payjoin/payjoindevkit.org", + "payjoin/cja", + "payjoin/cja-2", + "payjoin/bitcoin-hpke", + "payjoin/ohttp", + "payjoin/bitcoin_uri", + "payjoin/bitcoin-uri-ffi", + "payjoin/research-docs", + "payjoin/multiparty-protocol-docs", + "payjoin/btsim", + "payjoin/tx-indexer", +] + +REPO_BATCH_SIZE = 5 + + +def get_repo_node_id(): + """Get the repository node ID for GraphQL mutations.""" + resp = requests.get(f"{API}/repos/{REPO}", headers=HEADERS) + resp.raise_for_status() + return resp.json()["node_id"] + + +def create_discussion(title, body, repo_node_id): + """Create a GitHub Discussion via GraphQL and return its node ID and URL.""" + mutation = """ + mutation($repoId: ID!, $categoryId: ID!, $title: String!, $body: String!) { + createDiscussion(input: { + repositoryId: $repoId, + categoryId: $categoryId, + title: $title, + body: $body + }) { + discussion { + id + url + } + } + } + """ + resp = requests.post( + GRAPHQL, + headers=HEADERS, + json={ + "query": mutation, + "variables": { + "repoId": repo_node_id, + "categoryId": CATEGORY_NODE_ID, + "title": title, + "body": body, + }, + }, + ) + resp.raise_for_status() + data = resp.json() + if "errors" in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + discussion = data["data"]["createDiscussion"]["discussion"] + return discussion["id"], discussion["url"] + + +def add_discussion_comment(discussion_id, body): + """Add a threaded comment to a Discussion via GraphQL.""" + mutation = """ + mutation($discussionId: ID!, $body: String!) 
{ +    addDiscussionComment(input: { +      discussionId: $discussionId, +      body: $body +    }) { +      comment { +        id +      } +    } +  } +  """ +    resp = requests.post( +        GRAPHQL, +        headers=HEADERS, +        json={ +            "query": mutation, +            "variables": { +                "discussionId": discussion_id, +                "body": body, +            }, +        }, +    ) +    resp.raise_for_status() +    data = resp.json() +    if "errors" in data: +        raise RuntimeError(f"GraphQL errors: {data['errors']}") + + +def search_issues(query): +    """Run a GitHub search/issues query across REPOS in batches.""" +    seen = set() +    items = [] +    for i in range(0, len(REPOS), REPO_BATCH_SIZE): +        batch = REPOS[i : i + REPO_BATCH_SIZE] +        repo_filter = " ".join(f"repo:{r}" for r in batch) +        resp = requests.get( +            f"{API}/search/issues", +            headers=HEADERS, +            params={"q": f"{query} {repo_filter}", "per_page": 30}, +        ) +        resp.raise_for_status() +        for item in resp.json().get("items", []): +            if item["id"] not in seen: +                seen.add(item["id"]) +                items.append(item) +    return items + + +def gather_activity(user, since_date): +    """Gather a contributor's past-week activity across the org.""" +    since = since_date.strftime("%Y-%m-%d") + +    # PRs merged (authored) +    merged_prs = search_issues(f"author:{user} type:pr merged:>{since}") + +    # PRs reviewed +    reviewed_prs = search_issues(f"reviewed-by:{user} type:pr updated:>{since}") + +    # Exclude PRs the user authored (already counted above) +    reviewed_prs = [pr for pr in reviewed_prs if pr["user"]["login"] != user] + +    # Issues opened +    issues_opened = search_issues(f"author:{user} type:issue created:>{since}") + +    return merged_prs, reviewed_prs, issues_opened + + +def gather_potential_bottlenecks(user, since_date): +    """Identify potential bottlenecks for a contributor.""" +    since = since_date.strftime("%Y-%m-%d") +    bottlenecks = [] + +    # Open PRs with no reviews +    open_prs = search_issues( +        f"author:{user} type:pr state:open review:none created:>{since}" +    ) +    for pr in open_prs: +        bottlenecks.append(f"- PR awaiting review: 
[{pr['title']}]({pr['html_url']})") + + # PRs with requested changes + changes_requested = search_issues( + f"author:{user} type:pr state:open review:changes_requested" + ) + for pr in changes_requested: + bottlenecks.append( + f"- PR has requested changes: [{pr['title']}]({pr['html_url']})" + ) + + return bottlenecks + + +def format_contributor_comment( + user, merged_prs, reviewed_prs, issues_opened, bottlenecks +): + """Format the threaded reply for a contributor.""" + lines = [f"## @{user}", ""] + + # SHIPPED section + lines.append("### Shipped") + if merged_prs or reviewed_prs or issues_opened: + if merged_prs: + lines.append("") + lines.append("**PRs merged:**") + for pr in merged_prs: + lines.append(f"- [{pr['title']}]({pr['html_url']})") + + if reviewed_prs: + lines.append("") + lines.append("**PRs reviewed:**") + for pr in reviewed_prs: + lines.append(f"- [{pr['title']}]({pr['html_url']})") + + if issues_opened: + lines.append("") + lines.append("**Issues opened:**") + for issue in issues_opened: + lines.append(f"- [{issue['title']}]({issue['html_url']})") + else: + lines.append("_No activity found — please edit to add yours._") + + # Fenced template for contributor to copy-paste and fill in + lines.append("") + lines.append("```") + lines.append("### Focus") + lines.append("What are you working on this week? (please edit)") + lines.append("") + lines.append("### Bottleneck") + lines.append("") + lines.append( + "What is the single biggest bottleneck in progress toward your greater goal?" + ) + lines.append( + "Name your goal. Name the constraint. Name who or what can unblock it." + ) + lines.append("") + lines.append( + '(There\'s always one. Not just "waiting on review." Example: ' + '"Goal: ship mailroom to production. 
Bottleneck: I need 30 min ' + "with @X to align on the ohttp-relay migration plan before I can " + 'write the PR.")' + ) + lines.append("```") + if bottlenecks: + lines.append("") + lines.append("_Auto-detected signals:_") + lines.extend(bottlenecks) + + return "\n".join(lines) + + +def main(): + today = datetime.now(timezone.utc) + week_label = today.strftime("Week of %Y-%m-%d") + since_date = today - timedelta(days=7) + + dry_run = os.environ.get("DRY_RUN") + + # Gather all comments first + comments = [] + for user in CONTRIBUTORS: + merged_prs, reviewed_prs, issues_opened = gather_activity(user, since_date) + bottlenecks = gather_potential_bottlenecks(user, since_date) + comment_body = format_contributor_comment( + user, merged_prs, reviewed_prs, issues_opened, bottlenecks + ) + comments.append((user, comment_body)) + + if dry_run: + for user, comment_body in comments: + print(f"--- {user} ---\n{comment_body}\n") + print("Dry run complete — nothing was created.") + return + + repo_node_id = get_repo_node_id() + title = f"Weekly Check-in: {week_label}" + body = ( + "Weekly standup — each contributor has a thread below " + "with auto-gathered activity.\n\n" + "**Please review your thread and edit to add Focus and Bottleneck " + "by end-of-day Monday (your timezone).**" + ) + discussion_id, discussion_url = create_discussion(title, body, repo_node_id) + print(f"Created discussion: {discussion_url}") + + for user, comment_body in comments: + add_discussion_comment(discussion_id, comment_body) + print(f" Added thread for @{user}") + + print("Done.") + + +if __name__ == "__main__": + main() diff --git a/.github/scripts/create_standup_issue.py b/.github/scripts/create_standup_issue.py deleted file mode 100644 index 13773d5d1..000000000 --- a/.github/scripts/create_standup_issue.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python3 -"""Create a weekly standup input issue and ping contributors.""" - -import os -from datetime import datetime, timezone - -import 
requests - -REPO = os.environ["GITHUB_REPOSITORY"] -TOKEN = os.environ["STANDUP_TOKEN"] -API = "https://api.github.com" -HEADERS = { - "Authorization": f"token {TOKEN}", - "Accept": "application/vnd.github+json", -} - -CONTRIBUTORS = [ - "DanGould", - "spacebear21", - "arminsabouri", - "benalleng", - "chavic", - "zealsham", - "Mshehu5", -] - - -def main(): - today = datetime.now(timezone.utc) - week_label = today.strftime("%Y-%m-%d") - title = f"Standup Input: Week of {week_label}" - - cc_line = " ".join(f"@{u}" for u in CONTRIBUTORS) - body = ( - "Please reply by **Monday end-of-day** (your timezone).\n\n" - "Format:\n" - "- **Shipped**: What you landed last week (PR/issue links)\n" - "- **Focus**: What you're working on this week\n" - "- **Blockers**: Anything stopping you — name who can help\n\n" - f"cc {cc_line}" - ) - - # Ensure the label exists - label_url = f"{API}/repos/{REPO}/labels/standup-input" - resp = requests.get(label_url, headers=HEADERS) - if resp.status_code == 404: - requests.post( - f"{API}/repos/{REPO}/labels", - headers=HEADERS, - json={ - "name": "standup-input", - "color": "0E8A16", - "description": "Weekly standup input issue", - }, - ) - - # Create the issue - resp = requests.post( - f"{API}/repos/{REPO}/issues", - headers=HEADERS, - json={"title": title, "body": body, "labels": ["standup-input"]}, - ) - resp.raise_for_status() - issue = resp.json() - print(f"Created issue #{issue['number']}: {issue['html_url']}") - - -if __name__ == "__main__": - main() diff --git a/.github/standup-contributors.yml b/.github/standup-contributors.yml new file mode 100644 index 000000000..6a5a0d1b9 --- /dev/null +++ b/.github/standup-contributors.yml @@ -0,0 +1,9 @@ +contributors: + - username: DanGould + - username: spacebear21 + - username: arminsabouri + - username: benalleng + - username: chavic + - username: zealsham + - username: Mshehu5 + - username: 0xZaddyy diff --git a/.github/workflows/standup-compile.yml b/.github/workflows/standup-compile.yml 
index bdb0b4bed..90e0d5354 100644 --- a/.github/workflows/standup-compile.yml +++ b/.github/workflows/standup-compile.yml @@ -2,7 +2,7 @@ name: Standup Compile on: schedule: - # Tuesday 06:00 UTC = Tuesday 14:00 Taipei + # Tuesday 06:00 UTC = Tuesday 14:00 Taipei (before the meeting) - cron: "0 6 * * 2" workflow_dispatch: @@ -14,9 +14,8 @@ jobs: - uses: actions/setup-python@v5 with: python-version: "3.12" - - run: pip install requests + - run: pip install requests pyyaml - run: python .github/scripts/compile_standup.py env: GITHUB_REPOSITORY: ${{ github.repository }} - STANDUP_TOKEN: ${{ secrets.STANDUP_TOKEN }} - DISCUSSION_CATEGORY_NODE_ID: ${{ secrets.DISCUSSION_CATEGORY_NODE_ID }} + STANDUP_TOKEN: ${{ secrets.STANDUP_BOT_TOKEN || secrets.STANDUP_TOKEN }} diff --git a/.github/workflows/standup-prompt.yml b/.github/workflows/standup-prompt.yml index 80aa84b38..466f4d583 100644 --- a/.github/workflows/standup-prompt.yml +++ b/.github/workflows/standup-prompt.yml @@ -1,4 +1,4 @@ -name: Standup Prompt +name: Weekly Standup on: schedule: @@ -7,15 +7,16 @@ on: workflow_dispatch: jobs: - create-issue: + create-discussion: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - uses: actions/setup-python@v5 with: python-version: "3.12" - - run: pip install requests - - run: python .github/scripts/create_standup_issue.py + - run: pip install requests pyyaml + - run: python .github/scripts/create_standup_discussion.py env: GITHUB_REPOSITORY: ${{ github.repository }} - STANDUP_TOKEN: ${{ secrets.STANDUP_TOKEN }} + STANDUP_TOKEN: ${{ secrets.STANDUP_BOT_TOKEN || secrets.STANDUP_TOKEN }} + DISCUSSION_CATEGORY_NODE_ID: ${{ secrets.DISCUSSION_CATEGORY_NODE_ID }}