Last active
July 25, 2025 18:04
-
-
Save unixsysdev/8c8b58bd69d563f8a8ce81486627ad01 to your computer and use it in GitHub Desktop.
Guess what! What? Code Proxy - needs refinement
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| """ | |
| Claude OAuth Proxy Server | |
| Translates OpenAI-compatible requests to Claude Code OAuth requests | |
| """ | |
| from flask import Flask, request, jsonify, Response | |
| import requests | |
| import json | |
| import time | |
| import os | |
# Flask application instance backing all proxy routes below.
app = Flask(__name__)
# Configuration
CLAUDE_API_URL = "https://api.anthropic.com/v1/messages?beta=true"
# SECURITY: the auth token must come from the environment only. The previous
# hard-coded fallback token is removed -- a missing token now surfaces as
# auth_configured=False on /health instead of guaranteed 401s upstream.
AUTH_TOKEN = os.getenv("CLAUDE_AUTH_TOKEN", "")
# Claude Code account id sent in request metadata; overridable via env so the
# proxy is not tied to one account (default preserved for compatibility).
USER_ID = os.getenv(
    "CLAUDE_USER_ID",
    "user_f8c0af8347042b0234234234234baf307b7db4602894ba089772db6f6ab_account_cad8bc23423423423409aa60b3a",
)

# Common headers for all requests -- impersonates the Claude Code CLI client
# so the OAuth endpoint accepts the traffic.
COMMON_HEADERS = {
    "host": "api.anthropic.com",
    "connection": "keep-alive",
    "Accept": "application/json",
    "X-Stainless-Retry-Count": "0",
    "X-Stainless-Timeout": "60",
    "X-Stainless-Lang": "js",
    "X-Stainless-Package-Version": "0.55.1",
    "X-Stainless-OS": "MacOS",
    "X-Stainless-Arch": "arm64",
    "X-Stainless-Runtime": "node",
    "X-Stainless-Runtime-Version": "v23.11.0",
    "anthropic-dangerous-direct-browser-access": "true",
    "anthropic-version": "2023-06-01",
    "authorization": f"Bearer {AUTH_TOKEN}",
    "x-app": "cli",
    "User-Agent": "claude-cli/1.0.60 (external, cli)",
    "content-type": "application/json",
    "accept-language": "*",
    "sec-fetch-mode": "cors",
    "accept-encoding": "gzip, deflate"
}
def make_claude_request(session_id, user_message, model="claude-opus-4-20250514",
                        max_tokens=1000, stream=False, timeout=60):
    """Run the three-step Claude Code request sequence.

    Mimics the Claude Code CLI traffic pattern: (1) a one-token "quota"
    probe, (2) a streamed topic-analysis call whose output is discarded,
    then (3) the real completion request.

    Args:
        session_id: Opaque id mixed into the metadata user_id.
        user_message: Prompt text for the main request.
        model: OpenAI or Claude model name; gpt-* names are mapped below.
        max_tokens: Completion budget for the main request.
        stream: If True, the main request streams SSE.
        timeout: Per-request timeout in seconds (new, default 60) so an
            unresponsive upstream cannot hang the proxy worker forever.

    Returns:
        Tuple of (requests.Response, None) on success, or (None, error_str)
        when the quota probe fails.
    """
    beta_flags = "oauth-2025-04-20,fine-grained-tool-streaming-2025-05-14"

    # Step 1: Quota check -- cheap one-token call that validates the token.
    quota_headers = COMMON_HEADERS.copy()
    quota_headers["anthropic-beta"] = beta_flags
    quota_data = {
        "model": "claude-3-5-haiku-20241022",
        "max_tokens": 1,
        "messages": [{"role": "user", "content": "quota"}],
        "metadata": {"user_id": f"{USER_ID}_session_{session_id}"}
    }
    print("[DEBUG] Making quota request...")
    # BUGFIX: requests.post without a timeout can block indefinitely.
    quota_resp = requests.post(CLAUDE_API_URL, headers=quota_headers,
                               json=quota_data, timeout=timeout)
    print(f"[DEBUG] Quota response: {quota_resp.status_code} - {quota_resp.text[:200]}...")
    if quota_resp.status_code != 200:
        return None, f"Quota check failed: {quota_resp.text}"
    time.sleep(0.1)  # Small delay between steps

    # Step 2: Topic analysis (optional, but mimics Claude Code; the result
    # is intentionally discarded).
    topic_headers = COMMON_HEADERS.copy()
    topic_headers["anthropic-beta"] = beta_flags
    topic_headers["x-stainless-helper-method"] = "stream"
    topic_data = {
        "model": "claude-3-5-haiku-20241022",
        "max_tokens": 512,
        "messages": [{"role": "user", "content": user_message}],
        "system": [{
            "type": "text",
            "text": "Analyze if this message indicates a new conversation topic. If it does, extract a 2-3 word title that captures the new topic. Format your response as a JSON object with two fields: 'isNewTopic' (boolean) and 'title' (string, or null if isNewTopic is false). Only include these fields, no other text."
        }],
        "temperature": 0,
        "metadata": {"user_id": f"{USER_ID}_session_{session_id}"},
        "stream": True
    }
    print("[DEBUG] Making topic analysis request...")
    # We don't need to process this response, just make the request
    topic_resp = requests.post(CLAUDE_API_URL, headers=topic_headers,
                               json=topic_data, stream=True, timeout=timeout)
    print(f"[DEBUG] Topic response: {topic_resp.status_code}")
    # BUGFIX: the streamed response was never consumed or closed, leaking a
    # pooled connection on every call.
    topic_resp.close()
    time.sleep(0.1)

    # Step 3: Main request -- the only response the caller actually uses.
    main_headers = COMMON_HEADERS.copy()
    main_headers["anthropic-beta"] = "claude-code-20250219,oauth-2025-04-20,interleaved-thinking-2025-05-14,fine-grained-tool-streaming-2025-05-14"
    if stream:
        main_headers["x-stainless-helper-method"] = "stream"

    # Map OpenAI model aliases onto Claude models.
    if "gpt-4" in model:
        claude_model = "claude-opus-4-20250514"
    elif "gpt-3.5" in model:
        claude_model = "claude-3-5-haiku-20241022"
    else:
        claude_model = model

    main_data = {
        "model": claude_model,
        "messages": [{"role": "user", "content": user_message}],
        "temperature": 1,
        "system": [{
            "type": "text",
            "text": "You are Claude, an AI assistant created by Anthropic to be helpful, harmless, and honest."
        }],
        "metadata": {"user_id": f"{USER_ID}_session_{session_id}"},
        "max_tokens": max_tokens,
        "stream": stream
    }
    print(f"[DEBUG] Making main request with model {claude_model}...")
    # With stream=True the timeout still applies per read between chunks,
    # not to the whole generation, so long streamed replies are unaffected.
    response = requests.post(CLAUDE_API_URL, headers=main_headers,
                             json=main_data, stream=stream, timeout=timeout)
    print(f"[DEBUG] Main response: {response.status_code}")
    if not stream:
        print(f"[DEBUG] Main response body: {response.text[:500]}...")
    return response, None
def _message_text(content):
    """Flatten an OpenAI message `content` field to plain text.

    Supports both the plain-string form and the list-of-parts form
    ([{"type": "text", "text": ...}, ...]); non-text parts are skipped.
    """
    if isinstance(content, list):
        return "".join(
            part.get("text", "")
            for part in content
            if isinstance(part, dict) and part.get("type") == "text"
        )
    return content or ""


@app.route('/v1/chat/completions', methods=['POST'])
def chat_completions():
    """Handle OpenAI-compatible chat completion requests.

    Extracts the most recent user message, forwards it through the
    three-step Claude sequence, and returns either a relayed SSE stream
    (upstream event format, passed through as-is) or a non-streaming
    OpenAI chat.completion JSON object.
    """
    try:
        # BUGFIX: request.json can raise / return None for a missing or
        # malformed JSON body; get_json(silent=True) lets us answer with a
        # clean 400 below instead of a generic 500.
        data = request.get_json(silent=True) or {}

        # Extract OpenAI-style parameters.
        messages = data.get('messages', [])
        model = data.get('model', 'claude-opus-4-20250514')
        max_tokens = data.get('max_tokens', 1000)
        stream = data.get('stream', False)

        # Use the most recent user turn as the prompt.
        user_message = ""
        for msg in reversed(messages):
            if msg.get('role') == 'user':
                user_message = _message_text(msg.get('content', ''))
                break
        if not user_message:
            return jsonify({"error": "No user message found"}), 400

        # Session id from header when provided, otherwise time-based.
        session_id = request.headers.get('X-Session-Id', str(int(time.time())))

        # Make the Claude request.
        response, error = make_claude_request(session_id, user_message, model,
                                              max_tokens, stream)
        if error:
            return jsonify({"error": error}), 500

        if stream:
            # Relay the upstream SSE bytes line-by-line as-is.
            def generate():
                for line in response.iter_lines():
                    if line:
                        yield line + b'\n'
            return Response(generate(), mimetype='text/event-stream')

        # Non-streaming: translate the upstream response to OpenAI shape.
        claude_resp = response.json()
        print(f"[DEBUG] Claude response: {json.dumps(claude_resp, indent=2)}")
        # Pass upstream error payloads through with a 500.
        if "error" in claude_resp:
            return jsonify(claude_resp), 500

        # Extract content -- handle both list-of-blocks and bare values.
        content = ""
        if claude_resp.get("content"):
            if isinstance(claude_resp["content"], list):
                for item in claude_resp["content"]:
                    if item.get("type") == "text":
                        content += item.get("text", "")
            else:
                content = str(claude_resp["content"])

        # Convert to OpenAI chat.completion format.
        usage = claude_resp.get("usage", {})
        prompt_tokens = usage.get("input_tokens", 0)
        completion_tokens = usage.get("output_tokens", 0)
        openai_resp = {
            "id": claude_resp.get("id", ""),
            "object": "chat.completion",
            "created": int(time.time()),
            "model": model,
            "choices": [{
                "index": 0,
                "message": {
                    "role": "assistant",
                    "content": content
                },
                "finish_reason": "stop"
            }],
            "usage": {
                "prompt_tokens": prompt_tokens,
                "completion_tokens": completion_tokens,
                "total_tokens": prompt_tokens + completion_tokens
            }
        }
        return jsonify(openai_resp)
    except Exception as e:
        # Top-level boundary: report the failure rather than a bare 500 page.
        return jsonify({"error": str(e)}), 500
@app.route('/v1/models', methods=['GET'])
def models():
    """List the Claude models this proxy exposes, in OpenAI /v1/models shape."""
    exposed_ids = (
        "claude-opus-4-20250514",
        "claude-3-5-haiku-20241022",
        "claude-sonnet-4-20250514",
    )
    return jsonify({
        "object": "list",
        "data": [
            {"id": model_id, "object": "model", "owned_by": "anthropic"}
            for model_id in exposed_ids
        ],
    })
@app.route('/health', methods=['GET'])
def health():
    """Liveness probe; also reports whether an auth token is configured."""
    payload = {"status": "ok", "auth_configured": bool(AUTH_TOKEN)}
    return jsonify(payload)
if __name__ == '__main__':
    # Development entrypoint: Flask's built-in server with debug/reloader on.
    token_present = bool(AUTH_TOKEN)
    print(f"Claude OAuth Proxy starting on port 6000...")
    print(f"Auth token configured: {token_present}")
    app.run(host='0.0.0.0', port=6000, debug=True)
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment