Map logs from a support bundle to a GitHub load test profile.
#!/bin/bash

# Path to an extracted support bundle (expects babeld-logs/ and system-logs/ inside it).
BUNDLE_PATH=$1

# Git clone and push rates from the babeld log: count completed git-upload-pack /
# git-receive-pack operations per hour (timestamps truncated at the first colon),
# take the busiest hour, and scale to per-second (clones) or per-minute (pushes).
git_ssh_clone=$(zgrep 'proto=ssh.*cmd=git-upload-pack.*op done' $BUNDLE_PATH/babeld-logs/babeld.log.1.gz | grep -o 'ts=[^:]*' | uniq -c | sort -nr | head -n1 | awk '{printf "%.2f", $1/3600}')
git_http_clone=$(zgrep 'proto=http.*cmd=git-upload-pack.*op done' $BUNDLE_PATH/babeld-logs/babeld.log.1.gz | grep -o 'ts=[^:]*' | uniq -c | sort -nr | head -n1 | awk '{printf "%.2f", $1/3600}')
git_ssh_push=$(zgrep 'proto=ssh.*cmd=git-receive-pack.*op done' $BUNDLE_PATH/babeld-logs/babeld.log.1.gz | grep -o 'ts=[^:]*' | uniq -c | sort -nr | head -n1 | awk '{printf "%.2f", $1/60}')
git_http_push=$(zgrep 'proto=http.*cmd=git-receive-pack.*op done' $BUNDLE_PATH/babeld-logs/babeld.log.1.gz | grep -o 'ts=[^:]*' | uniq -c | sort -nr | head -n1 | awk '{printf "%.2f", $1/60}')
# Web UI request rates from the haproxy log: count requests per 10-minute window
# (first 11 characters of the syslog timestamp), take the busiest window, and
# scale to per-minute.
code_zip=$(grep -E 'GET .*/archive/.*.zip' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
code_load_raw=$(grep -E 'GET .*/raw/.*' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
comment_on_pr=$(grep -E 'POST .*/.*(/pull/[^/]+/comment)' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
direct_commit=$(grep -P 'POST .*/(?!pull/)[^/]+/create' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
create_branch_and_pr=$(grep -E 'POST .*/pull/create' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
webhook_get=$(grep -E 'GET .*/settings/hooks' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
webhook_post=$(grep -E 'POST .*/settings/hooks' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
issue_create=$(grep -E 'POST .*/issues' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
issue_comment=$(grep -E 'POST .*/issue_comments' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
issue_get=$(grep -E 'GET .*/issues' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
search_get=$(grep -E 'GET .*/search' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
create_repo=$(grep -E 'POST .*/repositories ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
create_fork=$(grep -E 'POST .*/fork ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
create_project=$(grep -E 'POST .*/projects ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
get_commit=$(grep -E 'GET .*/commit' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
get_tree_commit=$(grep -E 'GET .*/tree-commit' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
get_file_list=$(grep -E 'GET .*/file-list' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
get_status=$(grep -E 'GET /status ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
# Git LFS operation rates from the babeld log (hourly buckets, scaled to per-minute).
git_lfs_ssh=$(zgrep -E 'proto=ssh.*cmd="git-lfs-.*op done' $BUNDLE_PATH/babeld-logs/babeld.log.1.gz | grep -o 'ts=[^:]*' | uniq -c | sort -nr | head -n1 | awk '{printf "%.2f", $1/60}')
git_lfs_http=$(zgrep -E 'proto=http.*cmd="git-lfs-.*op done' $BUNDLE_PATH/babeld-logs/babeld.log.1.gz | grep -o 'ts=[^:]*' | uniq -c | sort -nr | head -n1 | awk '{printf "%.2f", $1/60}')
# REST API (v3) request rates, using the same busiest-10-minute-window approach.
api_create_deployment=$(grep -E 'POST .*/api/v3/repos/.*/deployments' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_get_deployment=$(grep -E 'GET .*/api/v3/repos/.*/deployments' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_create_issue=$(grep -E 'POST .*/api/v3/repos/.*/issues ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_comment_issue=$(grep -E 'POST .*/api/v3/repos/.*/issues/.*/comments ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_update_issue=$(grep -E 'PATCH .*/api/v3/repos/.*/issues' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_get_issues=$(grep -E 'GET .*/api/v3/repos/.*/issues ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_create_pr=$(grep -E 'POST .*/api/v3/repos/.*/pulls ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_update_pr=$(grep -E 'PATCH .*/api/v3/repos/.*/pulls' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_merge_pr=$(grep -E 'PUT .*/api/v3/repos/.*/pulls/.*/merge ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_get_repo=$(grep -E 'GET .*/api/v3/repos/.*/.* ' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/600}')
api_search_issue=$(grep -E 'GET .*/api/v3/search/issues' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_search_user=$(grep -E 'GET .*/api/v3/search/users' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_search_repo=$(grep -E 'GET .*/api/v3/search/repositories' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
api_search_code=$(grep -E 'GET .*/api/v3/search/code' $BUNDLE_PATH/system-logs/haproxy.log.1 | cut -c 1-11 | uniq -c | sort -rn -k1 | head -n1 | awk '{printf "%.2f", $1/10}')
# Print the load test profile; the '#' label lines below document each field
# and are stripped from the final output by grep.
cat << EOF | grep -v '^#'
# noofhttpcloneusers:
$git_http_clone/s
# noofsshcloneusers:
$git_ssh_clone/s
# noofhttppushusers:
$git_http_push/m
# noofsshpushusers:
$git_ssh_push/m
# noofCLZusers:
$code_zip/m
# noofCLRusers:
$code_load_raw/m
# noofCPRusers:
$comment_on_pr/m
# noofCMRusers:
$direct_commit/m
# noofCBCPusers:
$create_branch_and_pr/m
# noofWHusers:
create: $webhook_post/m + get: $webhook_get/m
# noofCIusers:
create: $issue_create/m + comment: $issue_comment/m + get: $issue_get/m
# noofSRCHusers:
$search_get/m
# noofNRusers:
$create_repo/m
# noofFRusers:
$create_fork/m
# noofPRusers:
$create_project/m
# noofCHusers:
commit: $get_commit/m + tree: $get_tree_commit/m + file: $get_file_list/m
# noofStatususers:
$get_status/m
# nooflfshttpusers:
$git_lfs_http/m
# nooflfssshusers:
$git_lfs_ssh/m
# noofapideploymentssusers:
create: $api_create_deployment/m + get: $api_get_deployment/m
# noofapiissuesusers:
create: $api_create_issue/m + comment: $api_comment_issue/m + update: $api_update_issue/m + get: $api_get_issues/m
# noofapipullrequestsusers:
create: $api_create_pr/m + update: $api_update_pr/m + merge: $api_merge_pr/m
# noofapireposusers:
$api_get_repo/s
# noofapisearchusers:
issue: $api_search_issue/m + user: $api_search_user/m + repo: $api_search_repo/m + code: $api_search_code/m
EOF
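
A minimal usage sketch, assuming the script above is saved as map_logs.sh and run against an extracted support bundle directory (the script name, bundle path, and output filename here are illustrative):

chmod +x map_logs.sh
./map_logs.sh /path/to/extracted-support-bundle > load-test-profile.txt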