|
#!/bin/bash

# List PRs merged/authored by user in a GitHub repo, grouped by day
# Usage: ./list-merged-prs.sh [--force] [pages]

# --- Argument parsing ---
#   --force / -f : ignore cached gist state and refetch everything
#   <pages>      : maximum number of GraphQL pages to fetch (default 10)
FORCE=false
MAX_PAGES=10
for arg in "$@"; do
  case "$arg" in
    --force|-f) FORCE=true ;;
    # Ignore anything that is not a pure digit string. The previous
    # pattern ([0-9]*) also matched values like "5abc", which later
    # broke the numeric -lt comparison in the pagination loop.
    ''|*[!0-9]*) ;;
    *) MAX_PAGES=$arg ;;
  esac
done
|
|
|
# --- Configuration ---
REPO_OWNER="langgenius"                  # GitHub org/owner of the repo to scan
REPO_NAME="dify"                         # repository name
USER="hyoban"                            # GitHub login whose activity is tracked
OUTPUT_FILE="1-merged-prs.md"            # PRs merged/closed by $USER (authored by others)
OUTPUT_FILE_AUTHORED="2-authored-prs.md" # PRs authored by $USER and merged
OUTPUT_FILE_ISSUES="3-issues.md"         # issues created by $USER
GIST_ID_FILE=".gist-id"                  # local cache of the gist id used for uploads
GIST_DESCRIPTION="$USER's activity in $REPO_OWNER/$REPO_NAME"
|
|
|
# === Utility Functions === |
|
|
|
# Join a batch of new result lines onto previously accumulated ones and
# print the combined list. Either argument may be empty; an empty batch
# leaves the accumulated list unchanged.
append_results() {
  local addition="$1" accumulated="$2"
  if [ -z "$addition" ]; then
    echo "$accumulated"
    return
  fi
  if [ -z "$accumulated" ]; then
    echo "$addition"
    return
  fi
  echo "$accumulated"$'\n'"$addition"
}
|
|
|
# Filter pipe-delimited result lines (date is the first field), keeping only
# entries strictly newer than $last_date. Input is assumed newest-first, so
# the first line at or before $last_date means all remaining lines were
# fetched on a previous run.
# Prints "STOP|<kept lines>" when that boundary was hit, otherwise
# "CONTINUE|<kept lines>".
filter_by_date() {
  local results="$1" last_date="$2"
  # No previous date to compare against (or nothing to filter): pass through.
  [ -z "$last_date" ] || [ -z "$results" ] && { echo "CONTINUE|$results"; return; }

  local filtered="" line date stopped="CONTINUE"
  while IFS= read -r line; do
    date="${line%%|*}"
    if [ "$date" \< "$last_date" ] || [ "$date" = "$last_date" ]; then
      stopped="STOP"
      break
    fi
    # Append in place instead of calling append_results: avoids spawning a
    # subshell per line (accidental O(n^2) copying) and keeps this function
    # free of sibling dependencies.
    if [ -n "$filtered" ]; then
      filtered="$filtered"$'\n'"$line"
    else
      filtered="$line"
    fi
  done <<< "$results"
  echo "$stopped|$filtered"
}
|
|
|
# Render the "handled PRs" markdown file: title, timestamp, then one
# "## <date>" section per day with a state icon, PR link, title, and author.
#   $1 title, $2 output path
#   $3 new_data lines: date|datetime|number|state|author|title|url
#   $4 old_data: previously rendered sections, appended verbatim
write_handled_pr_file() {
  local title="$1" output_file="$2" new_data="$3" old_data="$4"
  {
    printf '# %s\n\n' "$title"
    printf 'Last updated: %s\n\n' "$(date '+%Y-%m-%d %H:%M:%S')"

    if [ -n "$new_data" ]; then
      local day_open="" date datetime pr_num state author item_title url state_icon
      while IFS='|' read -r date datetime pr_num state author item_title url; do
        # Start a new day section whenever the date changes.
        if [ "$date" != "$day_open" ]; then
          [ -n "$day_open" ] && echo ""
          printf '## %s\n\n' "$date"
          day_open="$date"
        fi
        case "$state" in
          MERGED) state_icon="🟢" ;;
          *)      state_icon="🔴" ;;
        esac
        echo "- $state_icon [#$pr_num]($url): $item_title (by [@$author](https://github.com/$author))"
      done <<< "$new_data"
    fi

    if [ -n "$old_data" ]; then
      echo ""
      echo "$old_data"
    fi
  } > "$output_file"
}
|
|
|
# Render the "authored PRs" markdown file: title, timestamp, then one
# "## <date>" section per day with PR link and title.
#   $1 title, $2 output path
#   $3 new_data lines: date|datetime|number|title|url
#   $4 old_data: previously rendered sections, appended verbatim
write_authored_pr_file() {
  local title="$1" output_file="$2" new_data="$3" old_data="$4"
  {
    printf '# %s\n\n' "$title"
    printf 'Last updated: %s\n\n' "$(date '+%Y-%m-%d %H:%M:%S')"

    if [ -n "$new_data" ]; then
      local day_open="" date datetime pr_num item_title url
      while IFS='|' read -r date datetime pr_num item_title url; do
        # Start a new day section whenever the date changes.
        if [ "$date" != "$day_open" ]; then
          [ -n "$day_open" ] && echo ""
          printf '## %s\n\n' "$date"
          day_open="$date"
        fi
        echo "- [#$pr_num]($url): $item_title"
      done <<< "$new_data"
    fi

    if [ -n "$old_data" ]; then
      echo ""
      echo "$old_data"
    fi
  } > "$output_file"
}
|
|
|
# Download one file from the gist into $output_file and print the first
# "## YYYY-MM-DD" day header found in it (files are newest-first, so this is
# the most recent date), skipping the literal "## Open" section header.
# Prints nothing when the gist or file cannot be fetched.
fetch_gist_file() {
  local gist_id="$1" filename="$2" output_file="$3"
  local content
  content=$(gh gist view "$gist_id" --filename "$filename" --raw 2>/dev/null)
  [ -n "$content" ] || return 0
  echo "$content" > "$output_file"
  echo "$content" | grep -m1 '^## ' | sed 's/^## //' | grep -v '^Open$'
}
|
|
|
# === Main Script ===

# Load the cached gist id from a previous run, if any.
GIST_ID=""
[ -f "$GIST_ID_FILE" ] && GIST_ID=$(<"$GIST_ID_FILE")

# Pull the previously uploaded files so we only fetch newer activity.
LAST_DATE=""
LAST_DATE_AUTHORED=""
if [ "$FORCE" = true ]; then
  echo "Force mode: ignoring cache..."
elif [ -n "$GIST_ID" ]; then
  echo "Fetching previous data from gist..."
  LAST_DATE=$(fetch_gist_file "$GIST_ID" "$OUTPUT_FILE" "$OUTPUT_FILE")
  LAST_DATE_AUTHORED=$(fetch_gist_file "$GIST_ID" "$OUTPUT_FILE_AUTHORED" "$OUTPUT_FILE_AUTHORED")
  # Issues file has no usable last-date (its first header is "## Open").
  fetch_gist_file "$GIST_ID" "$OUTPUT_FILE_ISSUES" "$OUTPUT_FILE_ISSUES" >/dev/null

  [ -n "$LAST_DATE" ] && echo "Last fetched date (merged): $LAST_DATE"
  [ -n "$LAST_DATE_AUTHORED" ] && echo "Last fetched date (authored): $LAST_DATE_AUTHORED"
fi
|
|
|
echo "Querying PRs by $USER in $REPO_OWNER/$REPO_NAME (max $MAX_PAGES pages)..."

# GraphQL query for PRs (MERGED and CLOSED), newest-updated first.
# mergedBy tells us who merged a MERGED PR; for a CLOSED PR, the actor of
# the last CLOSED_EVENT timeline item tells us who closed it.
pr_query='
query($owner: String!, $name: String!, $cursor: String) {
repository(owner: $owner, name: $name) {
pullRequests(states: [MERGED, CLOSED], first: 100, after: $cursor, orderBy: {field: UPDATED_AT, direction: DESC}) {
pageInfo { hasNextPage endCursor }
nodes {
number title mergedAt closedAt state url
mergedBy { login }
author { login }
timelineItems(itemTypes: [CLOSED_EVENT], last: 1) {
nodes {
... on ClosedEvent {
actor { login }
}
}
}
}
}
}
}'
|
|
|
# Fetch PRs with pagination (newest-updated first). Stops early once both
# result streams have reached dates that were already uploaded.
all_merged=""
all_authored=""
cursor=""
page=0
should_stop_merged=false
should_stop_authored=false

while [ $page -lt $MAX_PAGES ]; do
  [ "$should_stop_merged" = true ] && [ "$should_stop_authored" = true ] && {
    echo -ne "\rReached previously fetched dates, stopping early..."
    break
  }

  page=$((page + 1))
  echo -ne "\rFetching page $page..."

  response=$(gh api graphql -f query="$pr_query" -F owner="$REPO_OWNER" -F name="$REPO_NAME" -F cursor="$cursor" 2>/dev/null)

  # gh's stderr is suppressed above; without this guard a failed request
  # would silently feed an empty document into every jq call below.
  if [ -z "$response" ]; then
    echo -e "\nWarning: GitHub API request failed on page $page" >&2
    break
  fi

  # Extract PRs handled by user (merged or closed by user, exclude self-authored)
  # Format: date|datetime|number|state|author|title|url
  page_merged=$(echo "$response" | jq -r --arg user "$USER" '
.data.repository.pullRequests.nodes[] |
select(.author.login != $user) |
select(
(.state == "MERGED" and .mergedBy.login == $user) or
(.state == "CLOSED" and .timelineItems.nodes[0].actor.login == $user)
) |
(if .state == "MERGED" then .mergedAt else .closedAt end) as $date |
"\($date | split("T")[0])|\($date)|\(.number)|\(.state)|\(.author.login)|\(.title)|\(.url)"
')

  # Extract authored PRs (only merged ones)
  # Format: date|datetime|number|title|url
  page_authored=$(echo "$response" | jq -r --arg user "$USER" '
.data.repository.pullRequests.nodes[] |
select(.author.login == $user and .state == "MERGED") |
"\(.mergedAt | split("T")[0])|\(.mergedAt)|\(.number)|\(.title)|\(.url)"
')

  # Drop entries at or before the last uploaded date, then accumulate.
  filter_result=$(filter_by_date "$page_merged" "$LAST_DATE")
  [ "${filter_result%%|*}" = "STOP" ] && should_stop_merged=true
  page_merged="${filter_result#*|}"
  all_merged=$(append_results "$page_merged" "$all_merged")

  filter_result=$(filter_by_date "$page_authored" "$LAST_DATE_AUTHORED")
  [ "${filter_result%%|*}" = "STOP" ] && should_stop_authored=true
  page_authored="${filter_result#*|}"
  all_authored=$(append_results "$page_authored" "$all_authored")

  # Advance the cursor, or finish when GitHub reports no further pages.
  has_next=$(echo "$response" | jq -r '.data.repository.pullRequests.pageInfo.hasNextPage')
  [ "$has_next" != "true" ] && break
  cursor=$(echo "$response" | jq -r '.data.repository.pullRequests.pageInfo.endCursor')
done

# Clear the single-line progress indicator.
echo -e "\r                                                  "
|
|
|
# Fetch issues created by $USER (up to 5 pages of 100, newest first).
echo "Fetching issues..."
issues_query='
query($owner: String!, $name: String!, $author: String!, $cursor: String) {
repository(owner: $owner, name: $name) {
issues(first: 100, after: $cursor, filterBy: {createdBy: $author}, orderBy: {field: CREATED_AT, direction: DESC}) {
pageInfo { hasNextPage endCursor }
nodes { number title state closedAt url }
}
}
}'

all_issues=""
cursor=""
page=0
while [ $page -lt 5 ]; do
  page=$((page + 1))
  response=$(gh api graphql -f query="$issues_query" -F owner="$REPO_OWNER" -F name="$REPO_NAME" -F author="$USER" -F cursor="$cursor" 2>/dev/null)

  # gh's stderr is suppressed above; stop explicitly on a failed/empty
  # response instead of letting jq parse nothing.
  if [ -z "$response" ]; then
    echo "Warning: GitHub API request failed while fetching issues (page $page)" >&2
    break
  fi

  # Format: state|closedAt(empty if open)|number|title|url
  page_issues=$(echo "$response" | jq -r '.data.repository.issues.nodes[] | "\(.state)|\(.closedAt // "")|\(.number)|\(.title)|\(.url)"')
  all_issues=$(append_results "$page_issues" "$all_issues")

  has_next=$(echo "$response" | jq -r '.data.repository.issues.pageInfo.hasNextPage')
  [ "$has_next" != "true" ] && break
  cursor=$(echo "$response" | jq -r '.data.repository.issues.pageInfo.endCursor')
done
|
|
|
# Carry over the previously rendered day sections (everything from the first
# "## " header onward) so incremental runs keep history; force mode rebuilds
# the files from scratch.
old_merged=""
old_authored=""
if [ "$FORCE" != true ]; then
  if [ -f "$OUTPUT_FILE" ] && [ -n "$LAST_DATE" ]; then
    old_merged=$(sed -n '/^## /,$p' "$OUTPUT_FILE")
  fi
  if [ -f "$OUTPUT_FILE_AUTHORED" ] && [ -n "$LAST_DATE_AUTHORED" ]; then
    old_authored=$(sed -n '/^## /,$p' "$OUTPUT_FILE_AUTHORED")
  fi
fi

# Sort newest-first by full timestamp (field 2) and keep only the first
# occurrence of each PR number (field 3).
if [ -n "$all_merged" ]; then
  all_merged=$(echo "$all_merged" | sort -t'|' -k2,2r | awk -F'|' '!seen[$3]++')
fi
if [ -n "$all_authored" ]; then
  all_authored=$(echo "$all_authored" | sort -t'|' -k2,2r | awk -F'|' '!seen[$3]++')
fi

# Render the two PR markdown files.
write_handled_pr_file "PRs handled by $USER in $REPO_OWNER/$REPO_NAME" "$OUTPUT_FILE" "$all_merged" "$old_merged"
write_authored_pr_file "PRs authored by $USER in $REPO_OWNER/$REPO_NAME" "$OUTPUT_FILE_AUTHORED" "$all_authored" "$old_authored"
|
|
|
# Render the issues markdown file: an "## Open" section first, then closed
# issues grouped under one "## <date>" section per close date (newest first).
{
  printf '# Issues created by %s in %s/%s\n\n' "$USER" "$REPO_OWNER" "$REPO_NAME"
  printf 'Last updated: %s\n\n' "$(date '+%Y-%m-%d %H:%M:%S')"

  if [ -n "$all_issues" ]; then
    open_issues=$(echo "$all_issues" | grep '^OPEN|')
    closed_issues=$(echo "$all_issues" | grep '^CLOSED|' | sort -t'|' -k2,2r)

    if [ -n "$open_issues" ]; then
      printf '## Open\n\n'
      while IFS='|' read -r _ _ num title url; do
        printf -- '- [#%s](%s): %s\n' "$num" "$url" "$title"
      done <<< "$open_issues"
      echo ""
    fi

    if [ -n "$closed_issues" ]; then
      current_date=""
      while IFS='|' read -r _ closed_at num title url; do
        # Group by the calendar day of the close timestamp.
        date_only=${closed_at%%T*}
        if [ "$date_only" != "$current_date" ]; then
          [ -n "$current_date" ] && echo ""
          printf '## %s\n\n' "$date_only"
          current_date="$date_only"
        fi
        printf -- '- [#%s](%s): %s\n' "$num" "$url" "$title"
      done <<< "$closed_issues"
    fi
  fi
} > "$OUTPUT_FILE_ISSUES"
|
|
|
# --- Statistics ---
if [ -n "$all_merged" ]; then
  new_count=$(echo "$all_merged" | wc -l | tr -d ' ')
else
  new_count=0
fi
total_count=$(grep -c '^\- \[#' "$OUTPUT_FILE")

# --- Upload to gist ---
SCRIPT_NAME="$(basename "$0")"
echo "Uploading to gist..."
if [ -z "$GIST_ID" ]; then
  # First run: create a public gist and remember its id for next time.
  # (2>&1 keeps gh's error text in $gist_url for the failure message.)
  if gist_url=$(gh gist create "$OUTPUT_FILE" "$OUTPUT_FILE_AUTHORED" "$OUTPUT_FILE_ISSUES" "$0" --desc "$GIST_DESCRIPTION" --public 2>&1); then
    GIST_ID=$(echo "$gist_url" | grep -oE '[a-f0-9]{32}')
    echo "$GIST_ID" > "$GIST_ID_FILE"
    echo "Created new gist: $gist_url"
  else
    echo "Failed to create gist: $gist_url"
  fi
else
  # Subsequent runs: PATCH all files (including this script) in one call.
  if gh api --method PATCH "gists/$GIST_ID" \
       -f "files[$OUTPUT_FILE][content]=$(<"$OUTPUT_FILE")" \
       -f "files[$OUTPUT_FILE_AUTHORED][content]=$(<"$OUTPUT_FILE_AUTHORED")" \
       -f "files[$OUTPUT_FILE_ISSUES][content]=$(<"$OUTPUT_FILE_ISSUES")" \
       -f "files[$SCRIPT_NAME][content]=$(<"$0")" \
       >/dev/null 2>&1; then
    echo "Updated gist: https://gist.github.com/$GIST_ID"
  else
    echo "Failed to update gist"
  fi
fi

echo ""
echo "New PRs: $new_count"
echo "Total: $total_count PRs"