Skip to content

Instantly share code, notes, and snippets.

@prisonerr627
Last active October 13, 2025 10:34
Show Gist options
  • Select an option

  • Save prisonerr627/ef23d6a8db3eb01ab0b78121471aecc6 to your computer and use it in GitHub Desktop.

Select an option

Save prisonerr627/ef23d6a8db3eb01ab0b78121471aecc6 to your computer and use it in GitHub Desktop.
Bug bounty reconnaissance script: probes subdomains for live hosts, collects URLs via crawling and the Wayback Machine, and checks for subdomain takeover.
#!/usr/bin/env bash
# Bug-bounty recon pipeline: probe subdomains for live hosts, crawl and
# collect URLs, then scan for subdomain takeover.
# Requires: subs.txt (one subdomain per line) in the current directory,
# plus httpx, katana, waybackurls, and nuclei on PATH.
#
# Fail fast: abort on command errors, unset variables, and failures
# anywhere in a pipeline.
set -euo pipefail

# The whole pipeline is pointless without the seed list, so bail out early.
[ -f "subs.txt" ] || { echo "Error: subs.txt not found. Exiting." >&2; exit 1; }
# Probe subs.txt with httpx and keep every host answering with any
# standard HTTP status code; live hosts land in live-subs.txt.
# NOTE: a richer JSON variant (-json -status-code -title -tech-detect
# -server -content-length, piped through jq to extract .url) existed but
# was disabled because screenshots were not working.
alive_codes='100,101,200,201,202,203,204,205,206,207,208,226,300,301,302,303,304,305,307,308,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,421,422,423,424,426,428,429,431,451,500,501,502,503,504,505,506,507,508,510,511'
httpx -list subs.txt \
  -retries 2 \
  -rate-limit 150 \
  -timeout 10 \
  -follow-redirects \
  -match-code "$alive_codes" \
  -o live-subs.txt
# Crawl the live hosts with katana (JS-aware, jsluice-assisted) to
# discover URLs. Verbose output lines are whitespace-separated with the
# URL as the last field, so awk strips the prefix columns into
# 2katana-urls.txt.
# (An earlier, more aggressive run — -no-scope, -known-files all,
#  -max-response-size 4194304, -field qurl — is intentionally disabled.)
# Fix: awk reads the file directly instead of a useless `cat | awk`.
katana -list live-subs.txt -depth 5 -concurrency 15 -parallelism 15 \
  -js-crawl -jsluice -crawl-duration 30 -timeout 40 -retry 2 \
  -output katana-urls.txt -v \
  && awk '{print $NF}' katana-urls.txt > 2katana-urls.txt
# Pull historical URLs for each live host from the Wayback Machine.
# waybackurls reads hosts on stdin; redirect the file directly instead of
# a useless `cat | waybackurls`.
# NOTE(review): archived URLs are not probed here — liveness of these is
# only checked later by the combined httpx pass.
waybackurls < live-subs.txt > waybackurls-urls.txt
# Combine the crawl output and the wayback output into one master list,
# then write a de-duplicated copy. sort -u replaces an earlier plain
# `uniq`, which only collapses adjacent duplicates.
cat 2katana-urls.txt waybackurls-urls.txt > all-urls.txt
sort -u -o uniq-all-urls.txt all-urls.txt
# Visit every collected URL and record its status code (-sc) and
# content length (-cl) into httpx-uniq-all-urls.txt.
# Fix: scan the de-duplicated list (uniq-all-urls.txt) rather than the
# raw merged list — the output filename and the later comm(1) comparison
# against uniq-all-urls.txt both expect the uniq list, and this avoids
# re-requesting duplicate URLs.
httpx -list uniq-all-urls.txt -no-color -retries 2 -rate-limit 150 -timeout 30 \
  -match-code 100,101,200,201,202,203,204,205,206,207,208,226,300,301,302,303,304,305,307,308,400,401,402,403,404,405,406,407,408,409,410,411,412,413,414,415,416,417,418,421,422,423,424,426,428,429,431,451,500,501,502,503,504,505,506,507,508,510,511 \
  -sc -cl -o httpx-uniq-all-urls.txt
# Run the nuclei takeover templates against every subdomain to spot
# dangling CNAMEs / orphaned third-party services.
nuclei -t ~/nuclei-templates/takeovers/ -l subs.txt
# Append entries present in only one of the two lists (comm -3 suppresses
# lines common to both). Each side strips the http(s):// scheme, keeps the
# first field, and is re-sorted — comm(1) requires sorted input.
# NOTE(review): comm's second column is tab-indented, so lines unique to
# uniq-all-urls.txt are appended with a leading tab — presumably fine for
# manual review; verify before machine-parsing httpx-uniq-all-urls.txt.
comm -3 <(awk '{sub(/^https?:\/\//,""); print $1}' httpx-uniq-all-urls.txt | sort -u) <(awk '{sub(/^https?:\/\//,""); print $1}' uniq-all-urls.txt | sort -u) >> httpx-uniq-all-urls.txt
# Earlier static-asset filter (extension anchored only by a trailing
# space) kept for reference:
#grep -v -E '\.(png|jpg|jpeg|gif|svg|ico|css) ' httpx-uniq-all-urls.txt
# Drop static-asset URLs (images, fonts, css) from the report; the
# extension must be followed by '?', whitespace, or end-of-line.
# NOTE(review): output goes to stdout, not a file — redirect or pipe it
# when running non-interactively.
grep -v -E '\.(png|jpg|jpeg|gif|svg|ico|ttf|css)(\?|\s|$)' httpx-uniq-all-urls.txt
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment