A lightweight Python script that, given a Chrome bug ID, finds the Stable Channel Update post that mentions it in the Chrome Releases feed, lists the related commits on GitHub and Gerrit, and reports whether the issue is publicly accessible on the Chromium Issue Tracker
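A minimal sketch of how to run it, assuming the file is saved as chrome-bug-commit-tracker.py (the name used in its own usage message): install the two third-party dependencies it imports, then pass a numeric Chromium bug ID. The ID below is only a placeholder, not a real issue:

    pip install requests feedparser
    python3 chrome-bug-commit-tracker.py 123456789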
#!/usr/bin/env python3
import sys
import json
import re

import requests
import feedparser

# Require a numeric Chromium bug ID as the only command-line argument.
BUG_ID = sys.argv[1] if len(sys.argv) > 1 else None
if not BUG_ID or not BUG_ID.isdigit():
    print("Usage: python3 chrome-bug-commit-tracker.py <bug_id>")
    sys.exit(1)

# Data sources: the Chrome Releases "Stable updates" feed, the GitHub commit
# search API (for the v8 and chromium mirrors), the Chromium Gerrit REST API,
# and the public Chromium Issue Tracker.
FEED_URL = "https://chromereleases.googleblog.com/feeds/posts/default/-/Stable%20updates"
GITHUB_API = "https://api.github.com/search/commits"
GITHUB_HEADERS = {"Accept": "application/vnd.github.cloak-preview"}
GITHUB_REPOS = ["v8/v8", "chromium/chromium"]
GERRIT_BASE = "https://chromium-review.googlesource.com/changes/"
ISSUES_URL = "https://issues.chromium.org/issues/"

def find_in_stable_updates(bug_id):
    """Return (published, title, link) for every Stable Channel Update post mentioning the bug ID."""
    feed = feedparser.parse(FEED_URL)
    return [
        (e.published, e.title, e.link)
        for e in feed.entries
        if bug_id in ((e.title or "") + "\n" + (e.summary or ""))
    ]


def find_github_commits(bug_id, repo):
    """Search GitHub's commit index of a mirror repo for commits whose message mentions the bug ID."""
    params = {"q": f"repo:{repo} {bug_id}", "sort": "indexed", "order": "desc"}
    r = requests.get(GITHUB_API, headers=GITHUB_HEADERS, params=params, timeout=10)
    if r.status_code != 200:
        return []
    return [item["html_url"] for item in r.json().get("items", [])]


def find_gerrit_changes(bug_id, project):
    """Query Gerrit for merged changes in the given project whose message mentions the bug ID."""
    q = f"message:{bug_id} project:{project} status:merged"
    r = requests.get(GERRIT_BASE, params={"q": q, "o": "CURRENT_REVISION"}, timeout=10)
    # Gerrit prefixes its JSON responses with ")]}'" to prevent XSSI; strip it before parsing.
    payload = r.text.lstrip(")]}'\n")
    try:
        changes = json.loads(payload)
    except json.JSONDecodeError:
        return []
    return [
        f"https://chromium.googlesource.com/{ch['project']}/+/{ch['current_revision']}"
        for ch in changes
    ]


def extract_issue_title(html, bug_id):
    """Pull the issue title out of the IssueFetchResponse blob embedded in the tracker page."""
    pattern = r'b\.IssueFetchResponse",\[.*?\[null,' + bug_id + r',\[.*?,.*?,.*?,.*?,.*?,"(.*?)"'
    match = re.search(pattern, html, re.DOTALL)
    if match:
        return match.group(1)
    return None

def check_chromium_issue(bug_id):
    """Report whether the issue is publicly visible on the Chromium Issue Tracker."""
    url = f"{ISSUES_URL}{bug_id}"
    r = requests.get(url, timeout=10)
    if r.status_code == 200:
        if "b.IssueFetchResponse" in r.text:
            title = extract_issue_title(r.text, bug_id)
            if title:
                return f"Issue is accessible: {title}"
            else:
                return "Issue is accessible, but title not found"
        else:
            return "Access to this issue is restricted"
    else:
        return "Failed to retrieve issue information"


def extract_sha_from_github_url(url):
    """A GitHub commit URL ends in the commit SHA, which also identifies the Gerrit revision."""
    return url.rstrip("/").split("/")[-1]

def main(bug_id):
    print(f"\n🔍 Stable Channel Updates for {bug_id}:")
    hits = find_in_stable_updates(bug_id)
    if hits:
        for date, title, link in hits:
            print(f"- {date}: {title}\n {link}")
    else:
        print("- No Stable Channel Update entry found.")

    print(f"\n🔍 GitHub commits for {bug_id}:")
    all_github = []
    for repo in GITHUB_REPOS:
        commits = find_github_commits(bug_id, repo)
        if commits:
            print(f"\n- {repo}:")
            for c in commits:
                print(f" {c}")
        all_github += commits
    if not all_github:
        print("- No GitHub commits found.")

    print(f"\n🔍 Gerrit (googlesource) for {bug_id}:")
    if all_github:
        # Keep only Gerrit changes whose merged revision SHA matches a commit already found on GitHub.
        allowed_shas = {extract_sha_from_github_url(u) for u in all_github}
        filtered = []
        for proj in ("v8/v8", "chromium/src"):
            urls = find_gerrit_changes(bug_id, proj)
            filtered += [u for u in urls if u.split("/")[-1] in allowed_shas]
        if filtered:
            for u in sorted(set(filtered)):
                print(f" {u}")
        else:
            print("- No matching Gerrit revisions found.")
    else:
        print("- Skipping Gerrit (no GitHub commits to match).")

    print(f"\n🔍 Chromium Issue Tracker for {bug_id}:")
    issue_status = check_chromium_issue(bug_id)
    print(f"- {issue_status}")


if __name__ == "__main__":
    main(BUG_ID)