Last active
July 4, 2025 22:26
-
-
Save NotWadeGrimridge/7ba8fe7aa41f3e5b7501827b4db06525 to your computer and use it in GitHub Desktop.
Tweet from your terminal
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env -S uv --quiet run --script | |
| # /// script | |
| # requires-python = ">=3.13" | |
| # dependencies = [ | |
| # "beautifulsoup4", | |
| # "curl-cffi", | |
| # "pycryptodome", | |
| # "xclienttransaction", | |
| # ] | |
| # /// | |
| import asyncio | |
| import binascii | |
| import hashlib | |
| import json | |
| import os | |
| import sys | |
| import time | |
| from http.cookies import SimpleCookie | |
| from urllib.parse import urlparse | |
| from bs4 import BeautifulSoup | |
| from Crypto.Cipher import AES | |
| from Crypto.Random import get_random_bytes | |
| from curl_cffi.requests import AsyncSession | |
| from x_client_transaction import ClientTransaction | |
| from x_client_transaction.utils import get_ondemand_file_url, handle_x_migration_async | |
def generate_xpff(base_key: str, plaintext: str, guest_id: str) -> str:
    """Encrypt *plaintext* for the x-xp-forwarded-for header.

    The AES-256-GCM key is the SHA-256 digest of base_key + guest_id.
    Returns hex(nonce || ciphertext || tag) with a fresh 12-byte nonce,
    so the output differs on every call even for identical inputs.
    """
    derived_key = hashlib.sha256((base_key + guest_id).encode()).digest()
    iv = get_random_bytes(12)
    aead = AES.new(derived_key, AES.MODE_GCM, nonce=iv)
    body, auth_tag = aead.encrypt_and_digest(plaintext.encode())
    # bytes.hex() is equivalent to binascii.hexlify(...).decode()
    return (iv + body + auth_tag).hex()
def load_env(file_path: str = ".env") -> None:
    """Load KEY=VALUE pairs from *file_path* into os.environ.

    Existing environment variables are never overwritten (setdefault).
    Blank lines, comment lines, and malformed lines without '=' are
    skipped — the original crashed with ValueError on a line lacking
    '=', and missed comments preceded by whitespace.
    Surrounding single or double quotes around the value are removed.
    """
    if not os.path.exists(file_path):
        return
    with open(file_path) as f:
        for raw in f:
            line = raw.strip()
            # Skip blanks, comments, and lines with no '=' separator.
            if not line or line.startswith("#") or "=" not in line:
                continue
            key, value = line.split("=", 1)
            os.environ.setdefault(key.strip(), value.strip().strip("'\""))
async def get_httpbin_headers(session: AsyncSession) -> dict:
    """Fetch this client's outbound request headers as echoed by httpbin.org.

    Raises for non-2xx responses; returns the echoed headers with all
    names lower-cased for case-insensitive lookup.
    """
    response = await session.get("https://httpbin.org/headers")
    response.raise_for_status()
    reported = response.json()["headers"]
    return {name.lower(): value for name, value in reported.items()}
def generate_post_tokens(guest_id, user_agent, ct, post_url):
    """Build the per-request anti-bot tokens for the CreateTweet call.

    guest_id and user_agent feed the encrypted XPFF payload; ct is a
    ClientTransaction used to sign the request path of post_url.
    Returns (encrypted_xpff, transaction_id).
    """
    base_key = "0e6be1f1e21ffc33590b888fd4dc81b19713e570e805d4e5df80a493c9571a05"
    xpff_payload = {
        "navigator_properties": {
            "hasBeenActive": "true",
            "userAgent": user_agent,
            "webdriver": "false",
        },
        # Milliseconds since epoch, matching the web client.
        "created_at": int(time.time() * 1000),
    }
    serialized = json.dumps(xpff_payload, separators=(",", ":"))
    encrypted = generate_xpff(base_key, serialized, guest_id)
    request_path = urlparse(url=post_url).path
    transaction_id = ct.generate_transaction_id(method="POST", path=request_path)
    return encrypted, transaction_id
def prepare_post_headers(httpbin_headers, encrypted_xpff, tid, csrf_token):
    """Assemble the HTTP headers for the CreateTweet request.

    Static X/Twitter headers are merged with browser-fingerprint headers
    copied from httpbin_headers (when present), plus the per-request
    transaction id and CSRF token. Returns a new dict.
    """
    headers = {
        "authorization": "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA",
        "content-type": "application/json",
        "origin": "https://x.com",
        "referer": "https://x.com/compose/post",
        "sec-fetch-dest": "empty",
        "sec-fetch-mode": "cors",
        "sec-fetch-site": "same-origin",
        "x-twitter-active-user": "yes",
        "x-twitter-auth-type": "OAuth2Session",
        "x-twitter-client-language": "en",
        "x-xp-forwarded-for": encrypted_xpff,
    }
    # Browser-identifying headers are passed through verbatim when present.
    passthrough = (
        "user-agent",
        "accept",
        "accept-language",
        "sec-ch-ua",
        "sec-ch-ua-mobile",
        "sec-ch-ua-platform",
    )
    headers.update(
        {name: httpbin_headers[name] for name in passthrough if name in httpbin_headers}
    )
    headers["x-client-transaction-id"] = tid
    headers["x-csrf-token"] = csrf_token
    return headers
def prepare_post_payload(tweet_text, query_id):
    """Build the GraphQL CreateTweet request body.

    tweet_text becomes the post content; query_id must match the
    persisted-query id embedded in the request URL. Key order is kept
    identical to what the x.com web client sends.
    """
    variables = {
        "tweet_text": tweet_text,
        "dark_request": False,
        "media": {"media_entities": [], "possibly_sensitive": False},
        "semantic_annotation_ids": [],
        "disallowed_reply_options": None,
    }
    # Feature flags mirror the current x.com web client.
    features = {
        "premium_content_api_read_enabled": False,
        "communities_web_enable_tweet_community_results_fetch": True,
        "c9s_tweet_anatomy_moderator_badge_enabled": True,
        "responsive_web_grok_analyze_button_fetch_trends_enabled": False,
        "responsive_web_grok_analyze_post_followups_enabled": False,
        "responsive_web_jetfuel_frame": False,
        "responsive_web_grok_share_attachment_enabled": True,
        "responsive_web_edit_tweet_api_enabled": True,
        "graphql_is_translatable_rweb_tweet_is_translatable_enabled": True,
        "view_counts_everywhere_api_enabled": True,
        "longform_notetweets_consumption_enabled": True,
        "responsive_web_twitter_article_tweet_consumption_enabled": True,
        "tweet_awards_web_tipping_enabled": False,
        "responsive_web_grok_show_grok_translated_post": False,
        "responsive_web_grok_analysis_button_from_backend": False,
        "creator_subscriptions_quote_tweet_preview_enabled": False,
        "longform_notetweets_rich_text_read_enabled": True,
        "longform_notetweets_inline_media_enabled": True,
        "payments_enabled": False,
        "profile_label_improvements_pcf_label_in_post_enabled": True,
        "rweb_tipjar_consumption_enabled": True,
        "verified_phone_label_enabled": False,
        "articles_preview_enabled": True,
        "responsive_web_graphql_skip_user_profile_image_extensions_enabled": False,
        "freedom_of_speech_not_reach_fetch_enabled": True,
        "standardized_nudges_misinfo": True,
        "tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": True,
        "responsive_web_grok_image_annotation_enabled": True,
        "responsive_web_graphql_timeline_navigation_enabled": True,
        "responsive_web_enhance_cards_enabled": False,
    }
    return {"variables": variables, "features": features, "queryId": query_id}
async def get_client_transaction(session, x_cookie):
    """Prime *session* with x.com cookies and build a ClientTransaction.

    Fetches the x.com home page (following the guest migration redirect),
    overlays the cookies from *x_cookie* onto the session's own, then
    downloads the "ondemand" script needed to compute transaction ids.
    Returns (ClientTransaction, guest_id); guest_id may be None if the
    home-page fetch did not set one.
    """
    home_page_response = await handle_x_migration_async(session=session)
    # Cookies supplied via X_COOKIE take precedence over session cookies.
    merged = session.cookies.get_dict()
    parsed = SimpleCookie()
    parsed.load(x_cookie)
    merged.update({name: morsel.value for name, morsel in parsed.items()})
    session.cookies.clear()
    for name, value in merged.items():
        session.cookies.set(name, value, domain=".x.com")
    guest_id = session.cookies.get("guest_id")
    ondemand_url = get_ondemand_file_url(response=home_page_response)
    ondemand_res = await session.get(url=ondemand_url)
    ondemand_soup = BeautifulSoup(ondemand_res.content, "html.parser")
    transaction = ClientTransaction(
        home_page_response=home_page_response,
        ondemand_file_response=ondemand_soup,
    )
    return transaction, guest_id
async def main():
    """Entry point: post the command-line arguments as a tweet on X.

    Requires the X_COOKIE environment variable (or a .env file) holding
    at least the auth_token, ct0 and twid cookies. Exits with an error
    message on any failure; on success prints the posted tweet's URL.
    """
    load_env()
    x_cookie_str = os.getenv("X_COOKIE")
    if not x_cookie_str:
        sys.exit("Error: X_COOKIE environment variable not set.")

    jar = SimpleCookie()
    jar.load(x_cookie_str)
    parsed_cookie = {name: morsel.value for name, morsel in jar.items()}
    missing_keys = [k for k in ("auth_token", "ct0", "twid") if k not in parsed_cookie]
    if missing_keys:
        sys.exit(
            f"Error: Missing required cookies in X_COOKIE: {', '.join(missing_keys)}"
        )
    csrf_token = parsed_cookie["ct0"]

    content = " ".join(sys.argv[1:])
    if not content:
        sys.exit("Error: No content provided to post.")

    session = AsyncSession(impersonate="chrome")
    try:
        httpbin_headers = await get_httpbin_headers(session)
        ct, guest_id = await get_client_transaction(
            session=session, x_cookie=x_cookie_str
        )
        if not guest_id:
            sys.exit("Error: Could not extract guest_id from session.")
        query_id = "LBFRMJBLzXkI-zdK3fCj1Q"
        post_url = f"https://x.com/i/api/graphql/{query_id}/CreateTweet"
        encrypted_xpff, tid = generate_post_tokens(
            guest_id, httpbin_headers["user-agent"], ct, post_url
        )
        headers = prepare_post_headers(httpbin_headers, encrypted_xpff, tid, csrf_token)
        payload = prepare_post_payload(content, query_id)
        r = await session.post(url=post_url, headers=headers, json=payload)
    except Exception as e:
        # Top-level boundary: convert any failure into a clean exit.
        # sys.exit raises SystemExit (BaseException), so the earlier
        # sys.exit calls inside the try are NOT swallowed here.
        sys.exit(f"An error occurred: {e}")
    finally:
        await session.close()

    if r.status_code != 200:
        sys.exit(f"Error: status code {r.status_code}. Response:\n{r.text}")
    try:
        resp = r.json()
        result = resp["data"]["create_tweet"]["tweet_results"]["result"]
        tweet_id = result["rest_id"]
        user_result = result["core"]["user_results"]["result"]
        screen_name = user_result["core"]["screen_name"]
        print(f"Posted: https://x.com/{screen_name}/status/{tweet_id}")
    except Exception:
        # Best-effort: the response schema may change; show raw body.
        print("Could not extract tweet URL. Full response:")
        print(r.text)
if __name__ == "__main__":
    # Run the async entry point only when executed as a script.
    asyncio.run(main())
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment