From f9cbf5faa98b4fea1e91304fcf1a6684109fb5ec Mon Sep 17 00:00:00 2001 From: agentbox Date: Mon, 2 Mar 2026 17:49:51 +0800 Subject: [PATCH 01/12] Add CodeRabbit config for AI-powered API review Enable advisory (non-blocking) AI review on PRs touching Rust source in payjoin/ and payjoin-cli/. Uses the "chill" profile with noisy features disabled. Path instructions focus on C-CALLER-CONTROL violations (pub fn taking &T but internally cloning) and broader Rust API guidelines from the checklist at rust-lang.github.io/api-guidelines. Closes #1374 --- .coderabbit.yaml | 61 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 .coderabbit.yaml diff --git a/.coderabbit.yaml b/.coderabbit.yaml new file mode 100644 index 000000000..0d7377450 --- /dev/null +++ b/.coderabbit.yaml @@ -0,0 +1,61 @@ +# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json +language: en-US +reviews: + profile: "chill" + high_level_summary: false + changed_files_summary: false + sequence_diagrams: false + estimate_code_review_effort: false + suggested_labels: false + suggested_reviewers: false + path_instructions: + - path: "payjoin/src/**/*.rs" + instructions: | + Check for these Rust API guideline violations: + + C-CALLER-CONTROL (critical): Flag any pub fn taking &T, &str, + or &[u8] that internally clones, copies, or calls .to_owned() + / .to_string() / .to_vec() on the parameter. The fix is to + take T, String, or Vec by value and let the caller decide + whether to clone or move. 
+ + Also check the Rust API guidelines checklist: + - C-COMMON-TRAITS: pub types should impl Send, Sync, Debug, + Display, Default, serde traits where appropriate + - C-CONV: use From/Into and AsRef for conversions rather than + ad-hoc methods + - C-GETTER: prefer foo() / set_foo() naming for getters and + setters, not get_foo() + - C-ITER: collections should impl IntoIterator and expose + iter(), iter_mut() where appropriate + - C-SERDE: pub types should impl Serialize/Deserialize when + they represent data that crosses process boundaries + - C-SEND-SYNC: err on the side of Send + Sync for pub types + unless there is a specific reason not to + + If no violations are found, say "No findings." Do not invent + issues. Format: file:line - pattern - suggested fix. + - path: "payjoin-cli/src/**/*.rs" + instructions: | + Check for these Rust API guideline violations: + + C-CALLER-CONTROL (critical): Flag any pub fn taking &T, &str, + or &[u8] that internally clones, copies, or calls .to_owned() + / .to_string() / .to_vec() on the parameter. The fix is to + take T, String, or Vec by value and let the caller decide + whether to clone or move. + + Also check the Rust API guidelines checklist: + - C-COMMON-TRAITS: pub types should impl Send, Sync, Debug, + Display, Default, serde traits where appropriate + - C-CONV: use From/Into and AsRef for conversions rather than + ad-hoc methods + - C-GETTER: prefer foo() / set_foo() naming for getters and + setters, not get_foo() + - C-ITER: collections should impl IntoIterator and expose + iter(), iter_mut() where appropriate + - C-SEND-SYNC: err on the side of Send + Sync for pub types + unless there is a specific reason not to + + If no violations are found, say "No findings." Do not invent + issues. Format: file:line - pattern - suggested fix. 
From 326262aac4a3ccc630a72bd90b5f5f976f7f5309 Mon Sep 17 00:00:00 2001 From: DanGould Date: Mon, 2 Mar 2026 19:16:25 +0800 Subject: [PATCH 02/12] Enable CodeRabbit reviews on draft PRs Enable auto_review.drafts so CodeRabbit reviews draft PRs instead of skipping them. Disable review_status since CodeRabbit already reports via commit status checks (pending/success), making the walkthrough status messages redundant. --- .coderabbit.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 0d7377450..56c60e228 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -5,9 +5,14 @@ reviews: high_level_summary: false changed_files_summary: false sequence_diagrams: false + poem: false + review_status: false + collapse_walkthrough: true estimate_code_review_effort: false suggested_labels: false suggested_reviewers: false + auto_review: + drafts: true path_instructions: - path: "payjoin/src/**/*.rs" instructions: | From 19d18e4c4e448fc6acc4d1a722d1a8a47305b156 Mon Sep 17 00:00:00 2001 From: DanGould Date: Mon, 2 Mar 2026 21:43:59 +0800 Subject: [PATCH 03/12] Disable all CodeRabbit noise, keep only path reviews --- .coderabbit.yaml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.coderabbit.yaml b/.coderabbit.yaml index 56c60e228..8c1c38680 100644 --- a/.coderabbit.yaml +++ b/.coderabbit.yaml @@ -9,10 +9,22 @@ reviews: review_status: false collapse_walkthrough: true estimate_code_review_effort: false + assess_linked_issues: false + related_issues: false + related_prs: false suggested_labels: false suggested_reviewers: false + in_progress_fortune: false + enable_prompt_for_ai_agents: false auto_review: drafts: true + pre_merge_checks: + description: + mode: "off" + title: + mode: "off" + issue_assessment: + mode: "off" path_instructions: - path: "payjoin/src/**/*.rs" instructions: | From 32c12297c15ef8623f61f5d63a2cc970e846fa8d Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 12:50:31 +0800 Subject: 
[PATCH 04/12] Replace issue-based standup with Discussion system MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Replace the two-step Issue→Discussion standup flow with a single Discussion-based system that auto-gathers each contributor's weekly activity (merged PRs, reviews, issues) and creates pre-populated threaded replies. Changes: - Add create_standup_discussion.py with GitHub API activity gathering and GraphQL Discussion/comment creation - Update standup-prompt.yml to call new script and pass DISCUSSION_CATEGORY_NODE_ID - Remove compile_standup.py and standup-compile.yml since the Discussion IS the standup (no compile step needed) - Remove create_standup_issue.py (superseded) --- .github/scripts/compile_standup.py | 154 ------------ .github/scripts/create_standup_discussion.py | 241 +++++++++++++++++++ .github/scripts/create_standup_issue.py | 69 ------ .github/workflows/standup-compile.yml | 22 -- .github/workflows/standup-prompt.yml | 7 +- 5 files changed, 245 insertions(+), 248 deletions(-) delete mode 100644 .github/scripts/compile_standup.py create mode 100644 .github/scripts/create_standup_discussion.py delete mode 100644 .github/scripts/create_standup_issue.py delete mode 100644 .github/workflows/standup-compile.yml diff --git a/.github/scripts/compile_standup.py b/.github/scripts/compile_standup.py deleted file mode 100644 index cd023d8be..000000000 --- a/.github/scripts/compile_standup.py +++ /dev/null @@ -1,154 +0,0 @@ -#!/usr/bin/env python3 -"""Compile standup responses into a GitHub Discussion and close the issue.""" - -import os -from datetime import datetime, timezone, timedelta - -import requests - -REPO = os.environ["GITHUB_REPOSITORY"] -TOKEN = os.environ["STANDUP_TOKEN"] -CATEGORY_NODE_ID = os.environ["DISCUSSION_CATEGORY_NODE_ID"] -API = "https://api.github.com" -GRAPHQL = "https://api.github.com/graphql" -HEADERS = { - "Authorization": f"token {TOKEN}", - "Accept": "application/vnd.github+json", 
-} - - -def find_standup_issue(): - """Find the most recent open standup-input issue from the last 7 days.""" - since = (datetime.now(timezone.utc) - timedelta(days=7)).isoformat() - resp = requests.get( - f"{API}/repos/{REPO}/issues", - headers=HEADERS, - params={ - "labels": "standup-input", - "state": "open", - "since": since, - "sort": "created", - "direction": "desc", - "per_page": 1, - }, - ) - resp.raise_for_status() - issues = resp.json() - if not issues: - print("No standup-input issue found in the last 7 days.") - return None - return issues[0] - - -def fetch_comments(issue_number): - """Fetch all comments on an issue.""" - comments = [] - page = 1 - while True: - resp = requests.get( - f"{API}/repos/{REPO}/issues/{issue_number}/comments", - headers=HEADERS, - params={"per_page": 100, "page": page}, - ) - resp.raise_for_status() - batch = resp.json() - if not batch: - break - comments.extend(batch) - page += 1 - return comments - - -def get_repo_node_id(): - """Get the repository node ID for the GraphQL mutation.""" - resp = requests.get(f"{API}/repos/{REPO}", headers=HEADERS) - resp.raise_for_status() - return resp.json()["node_id"] - - -def create_discussion(title, body, repo_node_id): - """Create a GitHub Discussion via GraphQL.""" - mutation = """ - mutation($repoId: ID!, $categoryId: ID!, $title: String!, $body: String!) 
{ - createDiscussion(input: { - repositoryId: $repoId, - categoryId: $categoryId, - title: $title, - body: $body - }) { - discussion { - url - } - } - } - """ - resp = requests.post( - GRAPHQL, - headers=HEADERS, - json={ - "query": mutation, - "variables": { - "repoId": repo_node_id, - "categoryId": CATEGORY_NODE_ID, - "title": title, - "body": body, - }, - }, - ) - resp.raise_for_status() - data = resp.json() - if "errors" in data: - raise RuntimeError(f"GraphQL errors: {data['errors']}") - return data["data"]["createDiscussion"]["discussion"]["url"] - - -def close_issue(issue_number, discussion_url): - """Close the standup issue with a link to the compiled discussion.""" - requests.post( - f"{API}/repos/{REPO}/issues/{issue_number}/comments", - headers=HEADERS, - json={"body": f"Compiled into discussion: {discussion_url}"}, - ) - requests.patch( - f"{API}/repos/{REPO}/issues/{issue_number}", - headers=HEADERS, - json={"state": "closed"}, - ) - - -def main(): - issue = find_standup_issue() - if not issue: - return - - issue_number = issue["number"] - # Extract the week label from the issue title - title_suffix = issue["title"].removeprefix("Standup Input: ") - week_label = title_suffix or datetime.now(timezone.utc).strftime("Week of %Y-%m-%d") - - comments = fetch_comments(issue_number) - - # Build sections per contributor - sections = [] - for comment in comments: - user = comment["user"]["login"] - if comment["user"]["type"] == "Bot": - continue - body = comment["body"].strip() - sections.append(f"### @{user}\n{body}") - - updates = "\n\n".join(sections) if sections else "_No responses._" - - discussion_title = f"Weekly Check-in: {week_label}" - discussion_body = updates - - repo_node_id = get_repo_node_id() - discussion_url = create_discussion(discussion_title, discussion_body, repo_node_id) - print(f"Created discussion: {discussion_url}") - - close_issue(issue_number, discussion_url) - print(f"Closed issue #{issue_number}") - - -if __name__ == "__main__": - 
main() diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py new file mode 100644 index 000000000..bfc15cd50 --- /dev/null +++ b/.github/scripts/create_standup_discussion.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python3 +"""Create a weekly standup Discussion with auto-gathered activity per contributor.""" + +import os +from datetime import datetime, timezone, timedelta + +import requests + +REPO = os.environ["GITHUB_REPOSITORY"] +TOKEN = os.environ["STANDUP_TOKEN"] +CATEGORY_NODE_ID = os.environ["DISCUSSION_CATEGORY_NODE_ID"] +API = "https://api.github.com" +GRAPHQL = "https://api.github.com/graphql" +HEADERS = { + "Authorization": f"token {TOKEN}", + "Accept": "application/vnd.github+json", +} + +CONTRIBUTORS = [ + "DanGould", + "spacebear21", + "arminsabouri", + "benalleng", + "chavic", + "zealsham", + "Mshehu5", +] + +ORG = "payjoin" + + +def get_repo_node_id(): + """Get the repository node ID for GraphQL mutations.""" + resp = requests.get(f"{API}/repos/{REPO}", headers=HEADERS) + resp.raise_for_status() + return resp.json()["node_id"] + + +def create_discussion(title, body, repo_node_id): + """Create a GitHub Discussion via GraphQL and return its node ID and URL.""" + mutation = """ + mutation($repoId: ID!, $categoryId: ID!, $title: String!, $body: String!) 
{ + createDiscussion(input: { + repositoryId: $repoId, + categoryId: $categoryId, + title: $title, + body: $body + }) { + discussion { + id + url + } + } + } + """ + resp = requests.post( + GRAPHQL, + headers=HEADERS, + json={ + "query": mutation, + "variables": { + "repoId": repo_node_id, + "categoryId": CATEGORY_NODE_ID, + "title": title, + "body": body, + }, + }, + ) + resp.raise_for_status() + data = resp.json() + if "errors" in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + discussion = data["data"]["createDiscussion"]["discussion"] + return discussion["id"], discussion["url"] + + +def add_discussion_comment(discussion_id, body): + """Add a threaded comment to a Discussion via GraphQL.""" + mutation = """ + mutation($discussionId: ID!, $body: String!) { + addDiscussionComment(input: { + discussionId: $discussionId, + body: $body + }) { + comment { + id + } + } + } + """ + resp = requests.post( + GRAPHQL, + headers=HEADERS, + json={ + "query": mutation, + "variables": { + "discussionId": discussion_id, + "body": body, + }, + }, + ) + resp.raise_for_status() + data = resp.json() + if "errors" in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + + +def search_issues(query): + """Run a GitHub search/issues query and return the items.""" + resp = requests.get( + f"{API}/search/issues", + headers=HEADERS, + params={"q": query, "per_page": 30}, + ) + resp.raise_for_status() + return resp.json().get("items", []) + + +def gather_activity(user, since_date): + """Gather a contributor's past-week activity across the org.""" + since = since_date.strftime("%Y-%m-%d") + + # PRs merged (authored) + merged_prs = search_issues(f"author:{user} org:{ORG} type:pr merged:>{since}") + + # PRs reviewed + reviewed_prs = search_issues( + f"reviewed-by:{user} org:{ORG} type:pr updated:>{since}" + ) + # Exclude PRs the user authored (already counted above) + reviewed_prs = [pr for pr in reviewed_prs if pr["user"]["login"] != user] + + # Issues opened + 
issues_opened = search_issues( + f"author:{user} org:{ORG} type:issue created:>{since}" + ) + + return merged_prs, reviewed_prs, issues_opened + + +def gather_potential_blockers(user, since_date): + """Identify potential blockers for a contributor (stretch goal).""" + since = since_date.strftime("%Y-%m-%d") + blockers = [] + + # Open PRs with no reviews + open_prs = search_issues( + f"author:{user} org:{ORG} type:pr state:open review:none created:>{since}" + ) + for pr in open_prs: + blockers.append(f"- PR awaiting review: [{pr['title']}]({pr['html_url']})") + + # PRs with requested changes + changes_requested = search_issues( + f"author:{user} org:{ORG} type:pr state:open review:changes_requested" + ) + for pr in changes_requested: + blockers.append( + f"- PR has requested changes: [{pr['title']}]({pr['html_url']})" + ) + + return blockers + + +def format_contributor_comment(user, merged_prs, reviewed_prs, issues_opened, blockers): + """Format the threaded reply for a contributor.""" + lines = [f"## @{user}", ""] + + # SHIPPED section + lines.append("### Shipped") + if merged_prs or reviewed_prs or issues_opened: + if merged_prs: + lines.append("") + lines.append("**PRs merged:**") + for pr in merged_prs: + lines.append(f"- [{pr['title']}]({pr['html_url']})") + + if reviewed_prs: + lines.append("") + lines.append("**PRs reviewed:**") + for pr in reviewed_prs: + lines.append(f"- [{pr['title']}]({pr['html_url']})") + + if issues_opened: + lines.append("") + lines.append("**Issues opened:**") + for issue in issues_opened: + lines.append(f"- [{issue['title']}]({issue['html_url']})") + else: + lines.append("_No activity found — please edit to add yours._") + + # Focus section (for contributor to fill in) + lines.append("") + lines.append("### Focus") + lines.append("_What are you working on this week? 
(please edit)_") + + # Blockers section + lines.append("") + lines.append("### Blockers") + if blockers: + lines.append("_Auto-detected (please edit/confirm):_") + lines.extend(blockers) + else: + lines.append("_Any blockers? Name who can help. (please edit)_") + + return "\n".join(lines) + + +def main(): + today = datetime.now(timezone.utc) + week_label = today.strftime("Week of %Y-%m-%d") + since_date = today - timedelta(days=7) + + repo_node_id = get_repo_node_id() + + # Create the Discussion + title = f"Weekly Check-in: {week_label}" + body = ( + "Weekly standup — each contributor has a thread below " + "with auto-gathered activity.\n\n" + "**Please review your thread and edit to add Focus and Blockers " + "by end-of-day Monday (your timezone).**" + ) + discussion_id, discussion_url = create_discussion(title, body, repo_node_id) + print(f"Created discussion: {discussion_url}") + + # Create a threaded reply for each contributor + for user in CONTRIBUTORS: + merged_prs, reviewed_prs, issues_opened = gather_activity(user, since_date) + blockers = gather_potential_blockers(user, since_date) + comment_body = format_contributor_comment( + user, merged_prs, reviewed_prs, issues_opened, blockers + ) + add_discussion_comment(discussion_id, comment_body) + print(f" Added thread for @{user}") + + print("Done.") + + +if __name__ == "__main__": + main() diff --git a/.github/scripts/create_standup_issue.py b/.github/scripts/create_standup_issue.py deleted file mode 100644 index 13773d5d1..000000000 --- a/.github/scripts/create_standup_issue.py +++ /dev/null @@ -1,69 +0,0 @@ -#!/usr/bin/env python3 -"""Create a weekly standup input issue and ping contributors.""" - -import os -from datetime import datetime, timezone - -import requests - -REPO = os.environ["GITHUB_REPOSITORY"] -TOKEN = os.environ["STANDUP_TOKEN"] -API = "https://api.github.com" -HEADERS = { - "Authorization": f"token {TOKEN}", - "Accept": "application/vnd.github+json", -} - -CONTRIBUTORS = [ - "DanGould", - 
"spacebear21", - "arminsabouri", - "benalleng", - "chavic", - "zealsham", - "Mshehu5", -] - - -def main(): - today = datetime.now(timezone.utc) - week_label = today.strftime("%Y-%m-%d") - title = f"Standup Input: Week of {week_label}" - - cc_line = " ".join(f"@{u}" for u in CONTRIBUTORS) - body = ( - "Please reply by **Monday end-of-day** (your timezone).\n\n" - "Format:\n" - "- **Shipped**: What you landed last week (PR/issue links)\n" - "- **Focus**: What you're working on this week\n" - "- **Blockers**: Anything stopping you — name who can help\n\n" - f"cc {cc_line}" - ) - - # Ensure the label exists - label_url = f"{API}/repos/{REPO}/labels/standup-input" - resp = requests.get(label_url, headers=HEADERS) - if resp.status_code == 404: - requests.post( - f"{API}/repos/{REPO}/labels", - headers=HEADERS, - json={ - "name": "standup-input", - "color": "0E8A16", - "description": "Weekly standup input issue", - }, - ) - - # Create the issue - resp = requests.post( - f"{API}/repos/{REPO}/issues", - headers=HEADERS, - json={"title": title, "body": body, "labels": ["standup-input"]}, - ) - resp.raise_for_status() - issue = resp.json() - print(f"Created issue #{issue['number']}: {issue['html_url']}") - - -if __name__ == "__main__": - main() diff --git a/.github/workflows/standup-compile.yml b/.github/workflows/standup-compile.yml deleted file mode 100644 index bdb0b4bed..000000000 --- a/.github/workflows/standup-compile.yml +++ /dev/null @@ -1,22 +0,0 @@ -name: Standup Compile - -on: - schedule: - # Tuesday 06:00 UTC = Tuesday 14:00 Taipei - - cron: "0 6 * * 2" - workflow_dispatch: - -jobs: - compile: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 - with: - python-version: "3.12" - - run: pip install requests - - run: python .github/scripts/compile_standup.py - env: - GITHUB_REPOSITORY: ${{ github.repository }} - STANDUP_TOKEN: ${{ secrets.STANDUP_TOKEN }} - DISCUSSION_CATEGORY_NODE_ID: ${{ 
secrets.DISCUSSION_CATEGORY_NODE_ID }} diff --git a/.github/workflows/standup-prompt.yml b/.github/workflows/standup-prompt.yml index 80aa84b38..059e6f591 100644 --- a/.github/workflows/standup-prompt.yml +++ b/.github/workflows/standup-prompt.yml @@ -1,4 +1,4 @@ -name: Standup Prompt +name: Weekly Standup on: schedule: @@ -7,7 +7,7 @@ on: workflow_dispatch: jobs: - create-issue: + create-discussion: runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 @@ -15,7 +15,8 @@ jobs: with: python-version: "3.12" - run: pip install requests - - run: python .github/scripts/create_standup_issue.py + - run: python .github/scripts/create_standup_discussion.py env: GITHUB_REPOSITORY: ${{ github.repository }} STANDUP_TOKEN: ${{ secrets.STANDUP_TOKEN }} + DISCUSSION_CATEGORY_NODE_ID: ${{ secrets.DISCUSSION_CATEGORY_NODE_ID }} From 5f4e719bb237d3de49e6bb708a542cc38f267f74 Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 12:50:52 +0800 Subject: [PATCH 05/12] Reframe Blockers as singular Bottleneck --- .github/scripts/create_standup_discussion.py | 49 +++++++++++++------- 1 file changed, 32 insertions(+), 17 deletions(-) diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py index bfc15cd50..338d5bbe6 100644 --- a/.github/scripts/create_standup_discussion.py +++ b/.github/scripts/create_standup_discussion.py @@ -138,31 +138,33 @@ def gather_activity(user, since_date): return merged_prs, reviewed_prs, issues_opened -def gather_potential_blockers(user, since_date): - """Identify potential blockers for a contributor (stretch goal).""" +def gather_potential_bottlenecks(user, since_date): + """Identify potential bottlenecks for a contributor.""" since = since_date.strftime("%Y-%m-%d") - blockers = [] + bottlenecks = [] # Open PRs with no reviews open_prs = search_issues( f"author:{user} org:{ORG} type:pr state:open review:none created:>{since}" ) for pr in open_prs: - blockers.append(f"- PR awaiting review: 
[{pr['title']}]({pr['html_url']})") + bottlenecks.append(f"- PR awaiting review: [{pr['title']}]({pr['html_url']})") # PRs with requested changes changes_requested = search_issues( f"author:{user} org:{ORG} type:pr state:open review:changes_requested" ) for pr in changes_requested: - blockers.append( + bottlenecks.append( f"- PR has requested changes: [{pr['title']}]({pr['html_url']})" ) - return blockers + return bottlenecks -def format_contributor_comment(user, merged_prs, reviewed_prs, issues_opened, blockers): +def format_contributor_comment( + user, merged_prs, reviewed_prs, issues_opened, bottlenecks +): """Format the threaded reply for a contributor.""" lines = [f"## @{user}", ""] @@ -194,14 +196,27 @@ def format_contributor_comment(user, merged_prs, reviewed_prs, issues_opened, bl lines.append("### Focus") lines.append("_What are you working on this week? (please edit)_") - # Blockers section + # Bottleneck section lines.append("") - lines.append("### Blockers") - if blockers: - lines.append("_Auto-detected (please edit/confirm):_") - lines.extend(blockers) - else: - lines.append("_Any blockers? Name who can help. (please edit)_") + lines.append("### Bottleneck") + lines.append("") + lines.append( + "What is the single biggest bottleneck in progress toward your greater goal?" + ) + lines.append( + "Name your goal. Name the constraint. Name who or what can unblock it." + ) + lines.append("") + lines.append( + '*(There\'s always one. Not just "waiting on review." Example: ' + '"Goal: ship mailroom to production. 
Bottleneck: I need 30 min ' + "with @X to align on the ohttp-relay migration plan before I can " + 'write the PR.")*' + ) + if bottlenecks: + lines.append("") + lines.append("_Auto-detected signals:_") + lines.extend(bottlenecks) return "\n".join(lines) @@ -218,7 +233,7 @@ def main(): body = ( "Weekly standup — each contributor has a thread below " "with auto-gathered activity.\n\n" - "**Please review your thread and edit to add Focus and Blockers " + "**Please review your thread and edit to add Focus and Bottleneck " "by end-of-day Monday (your timezone).**" ) discussion_id, discussion_url = create_discussion(title, body, repo_node_id) @@ -227,9 +242,9 @@ def main(): # Create a threaded reply for each contributor for user in CONTRIBUTORS: merged_prs, reviewed_prs, issues_opened = gather_activity(user, since_date) - blockers = gather_potential_blockers(user, since_date) + bottlenecks = gather_potential_bottlenecks(user, since_date) comment_body = format_contributor_comment( - user, merged_prs, reviewed_prs, issues_opened, blockers + user, merged_prs, reviewed_prs, issues_opened, bottlenecks ) add_discussion_comment(discussion_id, comment_body) print(f" Added thread for @{user}") From 0950c04d43f9fc7855176ac6a7b376154b04680c Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 12:51:06 +0800 Subject: [PATCH 06/12] Fence contributor template for copy-paste --- .github/scripts/create_standup_discussion.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py index 338d5bbe6..bc2a0ebcb 100644 --- a/.github/scripts/create_standup_discussion.py +++ b/.github/scripts/create_standup_discussion.py @@ -191,12 +191,11 @@ def format_contributor_comment( else: lines.append("_No activity found — please edit to add yours._") - # Focus section (for contributor to fill in) + # Fenced template for contributor to copy-paste and fill in lines.append("") + 
lines.append("```") lines.append("### Focus") - lines.append("_What are you working on this week? (please edit)_") - - # Bottleneck section + lines.append("What are you working on this week? (please edit)") lines.append("") lines.append("### Bottleneck") lines.append("") @@ -208,11 +207,12 @@ def format_contributor_comment( ) lines.append("") lines.append( - '*(There\'s always one. Not just "waiting on review." Example: ' + '(There\'s always one. Not just "waiting on review." Example: ' '"Goal: ship mailroom to production. Bottleneck: I need 30 min ' "with @X to align on the ohttp-relay migration plan before I can " - 'write the PR.")*' + 'write the PR.")' ) + lines.append("```") if bottlenecks: lines.append("") lines.append("_Auto-detected signals:_") From 0de6f6307f9036abe047c22755ee347ae889c7c4 Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 12:51:10 +0800 Subject: [PATCH 07/12] Extract contributor list to config file --- .github/scripts/create_standup_discussion.py | 14 +++++--------- .github/standup-contributors.yml | 9 +++++++++ .github/workflows/standup-prompt.yml | 2 +- 3 files changed, 15 insertions(+), 10 deletions(-) create mode 100644 .github/standup-contributors.yml diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py index bc2a0ebcb..618173096 100644 --- a/.github/scripts/create_standup_discussion.py +++ b/.github/scripts/create_standup_discussion.py @@ -3,8 +3,10 @@ import os from datetime import datetime, timezone, timedelta +from pathlib import Path import requests +import yaml REPO = os.environ["GITHUB_REPOSITORY"] TOKEN = os.environ["STANDUP_TOKEN"] @@ -16,15 +18,9 @@ "Accept": "application/vnd.github+json", } -CONTRIBUTORS = [ - "DanGould", - "spacebear21", - "arminsabouri", - "benalleng", - "chavic", - "zealsham", - "Mshehu5", -] +_CONFIG_PATH = Path(__file__).resolve().parent.parent / "standup-contributors.yml" +with open(_CONFIG_PATH) as _f: + CONTRIBUTORS = [c["username"] for 
c in yaml.safe_load(_f)["contributors"]] ORG = "payjoin" diff --git a/.github/standup-contributors.yml b/.github/standup-contributors.yml new file mode 100644 index 000000000..6a5a0d1b9 --- /dev/null +++ b/.github/standup-contributors.yml @@ -0,0 +1,9 @@ +contributors: + - username: DanGould + - username: spacebear21 + - username: arminsabouri + - username: benalleng + - username: chavic + - username: zealsham + - username: Mshehu5 + - username: 0xZaddyy diff --git a/.github/workflows/standup-prompt.yml b/.github/workflows/standup-prompt.yml index 059e6f591..40a799b60 100644 --- a/.github/workflows/standup-prompt.yml +++ b/.github/workflows/standup-prompt.yml @@ -14,7 +14,7 @@ jobs: - uses: actions/setup-python@v5 with: python-version: "3.12" - - run: pip install requests + - run: pip install requests pyyaml - run: python .github/scripts/create_standup_discussion.py env: GITHUB_REPOSITORY: ${{ github.repository }} From c98f9dd2da662c193ccbe6417d2ed57a45778e79 Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 12:51:24 +0800 Subject: [PATCH 08/12] Update workflow for bot token --- .github/workflows/standup-prompt.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/standup-prompt.yml b/.github/workflows/standup-prompt.yml index 40a799b60..466f4d583 100644 --- a/.github/workflows/standup-prompt.yml +++ b/.github/workflows/standup-prompt.yml @@ -18,5 +18,5 @@ jobs: - run: python .github/scripts/create_standup_discussion.py env: GITHUB_REPOSITORY: ${{ github.repository }} - STANDUP_TOKEN: ${{ secrets.STANDUP_TOKEN }} + STANDUP_TOKEN: ${{ secrets.STANDUP_BOT_TOKEN || secrets.STANDUP_TOKEN }} DISCUSSION_CATEGORY_NODE_ID: ${{ secrets.DISCUSSION_CATEGORY_NODE_ID }} From 2e335bebfde8195f16c092f8519946ee03a3e25a Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 12:57:58 +0800 Subject: [PATCH 09/12] Add DRY_RUN mode to standup script When DRY_RUN is set, gather activity and print what would be posted without creating a 
Discussion or sending notifications. --- .github/scripts/create_standup_discussion.py | 28 +++++++++++++------- 1 file changed, 19 insertions(+), 9 deletions(-) diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py index 618173096..c41849a29 100644 --- a/.github/scripts/create_standup_discussion.py +++ b/.github/scripts/create_standup_discussion.py @@ -222,9 +222,25 @@ def main(): week_label = today.strftime("Week of %Y-%m-%d") since_date = today - timedelta(days=7) - repo_node_id = get_repo_node_id() + dry_run = os.environ.get("DRY_RUN") + + # Gather all comments first + comments = [] + for user in CONTRIBUTORS: + merged_prs, reviewed_prs, issues_opened = gather_activity(user, since_date) + bottlenecks = gather_potential_bottlenecks(user, since_date) + comment_body = format_contributor_comment( + user, merged_prs, reviewed_prs, issues_opened, bottlenecks + ) + comments.append((user, comment_body)) - # Create the Discussion + if dry_run: + for user, comment_body in comments: + print(f"--- {user} ---\n{comment_body}\n") + print("Dry run complete — nothing was created.") + return + + repo_node_id = get_repo_node_id() title = f"Weekly Check-in: {week_label}" body = ( "Weekly standup — each contributor has a thread below " @@ -235,13 +251,7 @@ def main(): discussion_id, discussion_url = create_discussion(title, body, repo_node_id) print(f"Created discussion: {discussion_url}") - # Create a threaded reply for each contributor - for user in CONTRIBUTORS: - merged_prs, reviewed_prs, issues_opened = gather_activity(user, since_date) - bottlenecks = gather_potential_bottlenecks(user, since_date) - comment_body = format_contributor_comment( - user, merged_prs, reviewed_prs, issues_opened, bottlenecks - ) + for user, comment_body in comments: add_discussion_comment(discussion_id, comment_body) print(f" Added thread for @{user}") From 2f82c569c6caab2e65bb4fc3a0ed92d11849472e Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 
Mar 2026 13:03:21 +0800 Subject: [PATCH 10/12] Compile s'up participation before Tuesday meeting Runs Tuesday 06:00 UTC before the standup meeting. Edits the Discussion body to show which contributors replied to their thread. Uses an HTML comment marker so re-runs update in place rather than appending. --- .github/scripts/compile_standup.py | 160 ++++++++++++++++++++++++++ .github/workflows/standup-compile.yml | 21 ++++ 2 files changed, 181 insertions(+) create mode 100644 .github/scripts/compile_standup.py create mode 100644 .github/workflows/standup-compile.yml diff --git a/.github/scripts/compile_standup.py b/.github/scripts/compile_standup.py new file mode 100644 index 000000000..957263baa --- /dev/null +++ b/.github/scripts/compile_standup.py @@ -0,0 +1,160 @@ +#!/usr/bin/env python3 +"""Update the latest Weekly Check-in Discussion body with participation summary.""" + +import os +from pathlib import Path + +import requests +import yaml + +REPO = os.environ["GITHUB_REPOSITORY"] +TOKEN = os.environ["STANDUP_TOKEN"] +GRAPHQL = "https://api.github.com/graphql" +HEADERS = { + "Authorization": f"token {TOKEN}", + "Accept": "application/vnd.github+json", +} + +_CONFIG_PATH = Path(__file__).resolve().parent.parent / "standup-contributors.yml" +with open(_CONFIG_PATH) as _f: + CONTRIBUTORS = [c["username"] for c in yaml.safe_load(_f)["contributors"]] + + +def graphql(query, variables=None): + """Run a GraphQL query and return the data.""" + resp = requests.post( + GRAPHQL, + headers=HEADERS, + json={"query": query, "variables": variables or {}}, + ) + resp.raise_for_status() + data = resp.json() + if "errors" in data: + raise RuntimeError(f"GraphQL errors: {data['errors']}") + return data["data"] + + +def find_latest_checkin(): + """Find the most recent Weekly Check-in Discussion.""" + owner, name = REPO.split("/") + data = graphql( + """ + query($owner: String!, $name: String!) 
{ + repository(owner: $owner, name: $name) { + discussions(first: 10, orderBy: {field: CREATED_AT, direction: DESC}) { + nodes { + id + title + url + body + } + } + } + } + """, + {"owner": owner, "name": name}, + ) + for d in data["repository"]["discussions"]["nodes"]: + if d["title"].startswith("Weekly Check-in:"): + return d + return None + + +def get_discussion_comments(discussion_id): + """Fetch top-level comments and their replies for a Discussion.""" + data = graphql( + """ + query($id: ID!) { + node(id: $id) { + ... on Discussion { + comments(first: 50) { + nodes { + body + replies(first: 50) { + nodes { + author { login } + } + } + } + } + } + } + } + """, + {"id": discussion_id}, + ) + return data["node"]["comments"]["nodes"] + + +def check_participation(comments): + """Return list of contributors who replied to their thread.""" + participated = [] + for comment in comments: + body = comment["body"] + for user in CONTRIBUTORS: + if f"@{user}" not in body: + continue + reply_authors = { + r["author"]["login"] for r in comment["replies"]["nodes"] if r["author"] + } + if user in reply_authors: + participated.append(user) + return participated + + +def update_discussion_body(discussion_id, new_body): + """Edit the Discussion body via GraphQL.""" + graphql( + """ + mutation($discussionId: ID!, $body: String!) 
{ + updateDiscussion(input: { + discussionId: $discussionId, + body: $body + }) { + discussion { id } + } + } + """, + {"discussionId": discussion_id, "body": new_body}, + ) + + +PARTICIPATION_MARKER = "<!-- standup-participation -->" + + +def main(): + dry_run = os.environ.get("DRY_RUN") + + discussion = find_latest_checkin() + if not discussion: + print("No Weekly Check-in discussion found.") + return + + print(f"Found: {discussion['url']}") + + comments = get_discussion_comments(discussion["id"]) + participated = check_participation(comments) + + if participated: + names = ", ".join(f"@{u}" for u in participated) + participation_line = f"**Participated:** {names}" + else: + participation_line = "**Participated:** _(none yet)_" + + # Strip any previous participation section, then append + body = discussion["body"] + if PARTICIPATION_MARKER in body: + body = body[: body.index(PARTICIPATION_MARKER)].rstrip() + + new_body = f"{body}\n\n{PARTICIPATION_MARKER}\n{participation_line}" + + if dry_run: + print(f"Would update body to:\n---\n{new_body}\n---") + return + + update_discussion_body(discussion["id"], new_body) + print(f"Updated discussion: {participation_line}") + + +if __name__ == "__main__": + main() diff --git a/.github/workflows/standup-compile.yml b/.github/workflows/standup-compile.yml new file mode 100644 index 000000000..90e0d5354 --- /dev/null +++ b/.github/workflows/standup-compile.yml @@ -0,0 +1,21 @@ +name: Standup Compile + +on: + schedule: + # Tuesday 06:00 UTC = Tuesday 14:00 Taipei (before the meeting) + - cron: "0 6 * * 2" + workflow_dispatch: + +jobs: + compile: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - run: pip install requests pyyaml + - run: python .github/scripts/compile_standup.py + env: + GITHUB_REPOSITORY: ${{ github.repository }} + STANDUP_TOKEN: ${{ secrets.STANDUP_BOT_TOKEN || secrets.STANDUP_TOKEN }} From b342adcde31955f81da599974d84af49737144a4 Mon Sep 17 00:00:00 2001
From: DanGould Date: Fri, 6 Mar 2026 13:52:51 +0800 Subject: [PATCH 11/12] Search explicit public repos instead of org The bot token is an org member, so org:payjoin searches return results from private repos. Those titles and URLs would be published into the public Discussion, leaking private work. Replace the org: search qualifier with explicit repo: qualifiers for each public repo to ensure only public activity is surfaced. --- .github/scripts/create_standup_discussion.py | 30 ++++++++++++++++---- 1 file changed, 24 insertions(+), 6 deletions(-) diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py index c41849a29..a60fc3e1d 100644 --- a/.github/scripts/create_standup_discussion.py +++ b/.github/scripts/create_standup_discussion.py @@ -22,7 +22,25 @@ with open(_CONFIG_PATH) as _f: CONTRIBUTORS = [c["username"] for c in yaml.safe_load(_f)["contributors"]] -ORG = "payjoin" +# Public repos to search — explicit list avoids leaking private repo +# data when the bot token has org membership. 
+REPOS = [ + "payjoin/rust-payjoin", + "payjoin/payjoin.org", + "payjoin/payjoindevkit.org", + "payjoin/cja", + "payjoin/cja-2", + "payjoin/bitcoin-hpke", + "payjoin/ohttp", + "payjoin/bitcoin_uri", + "payjoin/bitcoin-uri-ffi", + "payjoin/research-docs", + "payjoin/multiparty-protocol-docs", + "payjoin/btsim", + "payjoin/tx-indexer", +] + +REPO_FILTER = " ".join(f"repo:{r}" for r in REPOS) def get_repo_node_id(): @@ -117,18 +135,18 @@ def gather_activity(user, since_date): since = since_date.strftime("%Y-%m-%d") # PRs merged (authored) - merged_prs = search_issues(f"author:{user} org:{ORG} type:pr merged:>{since}") + merged_prs = search_issues(f"author:{user} {REPO_FILTER} type:pr merged:>{since}") # PRs reviewed reviewed_prs = search_issues( - f"reviewed-by:{user} org:{ORG} type:pr updated:>{since}" + f"reviewed-by:{user} {REPO_FILTER} type:pr updated:>{since}" ) # Exclude PRs the user authored (already counted above) reviewed_prs = [pr for pr in reviewed_prs if pr["user"]["login"] != user] # Issues opened issues_opened = search_issues( - f"author:{user} org:{ORG} type:issue created:>{since}" + f"author:{user} {REPO_FILTER} type:issue created:>{since}" ) return merged_prs, reviewed_prs, issues_opened @@ -141,14 +159,14 @@ def gather_potential_bottlenecks(user, since_date): # Open PRs with no reviews open_prs = search_issues( - f"author:{user} org:{ORG} type:pr state:open review:none created:>{since}" + f"author:{user} {REPO_FILTER} type:pr state:open review:none created:>{since}" ) for pr in open_prs: bottlenecks.append(f"- PR awaiting review: [{pr['title']}]({pr['html_url']})") # PRs with requested changes changes_requested = search_issues( - f"author:{user} org:{ORG} type:pr state:open review:changes_requested" + f"author:{user} {REPO_FILTER} type:pr state:open review:changes_requested" ) for pr in changes_requested: bottlenecks.append( From fab2bfc023a496b9014378ea90eaa034b8cc12cf Mon Sep 17 00:00:00 2001 From: DanGould Date: Fri, 6 Mar 2026 13:52:51 +0800 
Subject: [PATCH 12/12] Search explicit public repos instead of org The bot token is an org member, so org:payjoin searches return results from private repos. Those titles and URLs would be published into the public Discussion, leaking private work. Replace the org: search qualifier with explicit repo: qualifiers for each public repo to ensure only public activity is surfaced. --- .github/scripts/create_standup_discussion.py | 59 ++++++++++++++------ 1 file changed, 41 insertions(+), 18 deletions(-) diff --git a/.github/scripts/create_standup_discussion.py b/.github/scripts/create_standup_discussion.py index c41849a29..136a6a9a1 100644 --- a/.github/scripts/create_standup_discussion.py +++ b/.github/scripts/create_standup_discussion.py @@ -22,7 +22,25 @@ with open(_CONFIG_PATH) as _f: CONTRIBUTORS = [c["username"] for c in yaml.safe_load(_f)["contributors"]] -ORG = "payjoin" +# Public repos to search — explicit list avoids leaking private repo +# data when the bot token has org membership. 
+REPOS = [ + "payjoin/rust-payjoin", + "payjoin/payjoin.org", + "payjoin/payjoindevkit.org", + "payjoin/cja", + "payjoin/cja-2", + "payjoin/bitcoin-hpke", + "payjoin/ohttp", + "payjoin/bitcoin_uri", + "payjoin/bitcoin-uri-ffi", + "payjoin/research-docs", + "payjoin/multiparty-protocol-docs", + "payjoin/btsim", + "payjoin/tx-indexer", +] + +REPO_BATCH_SIZE = 5 def get_repo_node_id(): @@ -102,14 +120,23 @@ def add_discussion_comment(discussion_id, body): def search_issues(query): - """Run a GitHub search/issues query and return the items.""" - resp = requests.get( - f"{API}/search/issues", - headers=HEADERS, - params={"q": query, "per_page": 30}, - ) - resp.raise_for_status() - return resp.json().get("items", []) + """Run a GitHub search/issues query across REPOS in batches.""" + seen = set() + items = [] + for i in range(0, len(REPOS), REPO_BATCH_SIZE): + batch = REPOS[i : i + REPO_BATCH_SIZE] + repo_filter = " ".join(f"repo:{r}" for r in batch) + resp = requests.get( + f"{API}/search/issues", + headers=HEADERS, + params={"q": f"{query} {repo_filter}", "per_page": 30}, + ) + resp.raise_for_status() + for item in resp.json().get("items", []): + if item["id"] not in seen: + seen.add(item["id"]) + items.append(item) + return items def gather_activity(user, since_date): @@ -117,19 +144,15 @@ def gather_activity(user, since_date): since = since_date.strftime("%Y-%m-%d") # PRs merged (authored) - merged_prs = search_issues(f"author:{user} org:{ORG} type:pr merged:>{since}") + merged_prs = search_issues(f"author:{user} type:pr merged:>{since}") # PRs reviewed - reviewed_prs = search_issues( - f"reviewed-by:{user} org:{ORG} type:pr updated:>{since}" - ) + reviewed_prs = search_issues(f"reviewed-by:{user} type:pr updated:>{since}") # Exclude PRs the user authored (already counted above) reviewed_prs = [pr for pr in reviewed_prs if pr["user"]["login"] != user] # Issues opened - issues_opened = search_issues( - f"author:{user} org:{ORG} type:issue created:>{since}" - ) + 
issues_opened = search_issues(f"author:{user} type:issue created:>{since}") return merged_prs, reviewed_prs, issues_opened @@ -141,14 +164,14 @@ def gather_potential_bottlenecks(user, since_date): # Open PRs with no reviews open_prs = search_issues( - f"author:{user} org:{ORG} type:pr state:open review:none created:>{since}" + f"author:{user} type:pr state:open review:none created:>{since}" ) for pr in open_prs: bottlenecks.append(f"- PR awaiting review: [{pr['title']}]({pr['html_url']})") # PRs with requested changes changes_requested = search_issues( - f"author:{user} org:{ORG} type:pr state:open review:changes_requested" + f"author:{user} type:pr state:open review:changes_requested" ) for pr in changes_requested: bottlenecks.append(