@legel
Last active October 13, 2025 01:59
Did Cursor crash? Did you lose your Cursor chat history? Need to export your data? This script was generated by Cursor to export your chat history in .md format from its likely location in Cursor's local SQLite database (state.vscdb). The export can be fed back into a new Cursor chat as Context. Happy vibe coding.
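
Before running it, it may be worth confirming where that database lives on your machine. The macOS path below is the one the script uses; the Windows and Linux locations are assumptions based on the usual VS Code-style globalStorage layout and may need adjusting. A minimal sketch:

# Sanity check: find the state.vscdb database and count composerData entries.
# The macOS path matches the export script; Windows/Linux paths are assumptions.
import os
import sqlite3
from pathlib import Path

candidates = [
    Path.home() / "Library/Application Support/Cursor/User/globalStorage/state.vscdb",  # macOS
    Path(os.environ.get("APPDATA", "")) / "Cursor/User/globalStorage/state.vscdb",      # Windows (assumed)
    Path.home() / ".config/Cursor/User/globalStorage/state.vscdb",                      # Linux (assumed)
]

for db in candidates:
    if db.exists():
        conn = sqlite3.connect(db)
        count = conn.execute(
            "SELECT COUNT(*) FROM cursorDiskKV WHERE key LIKE 'composerData:%'"
        ).fetchone()[0]
        conn.close()
        print(f"{db}: {count} composerData entries")

The full export script follows.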
#!/usr/bin/env python3
import sqlite3
import json
from pathlib import Path
from datetime import datetime
import os
import base64


def extract_conversations_from_db(db_path):
    """Extract all conversations from the database with detailed error handling."""
    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        # Get all composer data entries
        cursor.execute("SELECT key, value FROM cursorDiskKV WHERE key LIKE 'composerData:%'")
        rows = cursor.fetchall()
        conversations = []
        for key, value in rows:
            try:
                # Try different methods to decode the value
                try:
                    data = json.loads(value)
                except json.JSONDecodeError:
                    try:
                        # Try decoding as base64
                        decoded = base64.b64decode(value)
                        data = json.loads(decoded)
                    except:
                        print(f"Failed to decode data for key {key}")
                        continue
                # Only include conversations with messages
                if data.get('conversation') and len(data['conversation']) > 0:
                    conversations.append({
                        'id': key.split(':')[1],
                        'data': data
                    })
            except Exception as e:
                print(f"Error processing conversation {key}: {e}")
        conn.close()
        return conversations
    except Exception as e:
        print(f"Error accessing database {db_path}: {e}")
        return []


def format_message(msg):
    """Format a single message with all its components."""
    if not isinstance(msg, dict):
        return "Invalid message format"
    formatted = []
    # Add timestamp and role
    timestamp = datetime.fromtimestamp(
        msg.get('timingInfo', {}).get('clientStartTime', 0) / 1000
    ).strftime("%Y-%m-%d %H:%M:%S")
    role = "User" if msg.get('type') == 1 else "Assistant"
    formatted.append(f"**{role}** ({timestamp}):")
    # Add message text
    text = msg.get('text', '').strip()
    if text:
        formatted.append(text)
    # Add code blocks
    if 'codeBlocks' in msg:
        for block in msg['codeBlocks']:
            lang = block.get('language', '')
            code = block.get('code', '')
            if code:
                formatted.append(f"```{lang}\n{code}\n```")
    # Add file actions
    if 'fileActions' in msg:
        formatted.append("\n**File Actions:**")
        for action in msg['fileActions']:
            formatted.append(f"- {action['type']}: {action.get('path', 'unknown path')}")
            if 'content' in action:
                formatted.append(f"```\n{action['content']}\n```")
    return "\n\n".join(formatted)


def main():
    """Main function to export all chat history."""
    cursor_dir = Path(os.path.expanduser("~")) / "Library/Application Support/Cursor"
    global_db = cursor_dir / "User/globalStorage/state.vscdb"
    print(f"Extracting conversations from {global_db}")
    conversations = extract_conversations_from_db(global_db)
    # Sort conversations by timestamp
    conversations.sort(
        key=lambda x: x['data']['conversation'][0].get('timingInfo', {}).get('clientStartTime', 0)
        if x['data'].get('conversation') else 0
    )
    # Generate markdown
    output = ["# Cursor Chat History\n"]
    for conv in conversations:
        first_msg_time = datetime.fromtimestamp(
            conv['data']['conversation'][0].get('timingInfo', {}).get('clientStartTime', 0) / 1000
        ).strftime("%Y-%m-%d %H:%M:%S")
        output.append(f"## Conversation {conv['id']}\n")
        output.append(f"*Started at: {first_msg_time}*\n")
        for msg in conv['data']['conversation']:
            output.append(format_message(msg))
            output.append("\n---\n")
        output.append("\n")
    # Write to file
    output_dir = Path("chat_history_exports")
    output_dir.mkdir(exist_ok=True)
    output_file = output_dir / f"cursor_chat_history_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md"
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write("\n".join(output))
    print(f"Exported {len(conversations)} conversations to {output_file}")


if __name__ == "__main__":
    main()
@pathikspec

I get the following error, @legel:

➜  ~ python3 cursor_chat.py                                                   
Extracting conversations from /Users/apple/Library/Application Support/Cursor/User/globalStorage/state.vscdb
Error processing conversation composerData:921fb13e-9b45-4ca3-a90e-9e24a031a940: the JSON object must be str, bytes or bytearray, not NoneType
Error processing conversation composerData:fa779ae7-81de-42e9-b265-65b873fca82d: the JSON object must be str, bytes or bytearray, not NoneType
Error processing conversation composerData:4e8afc4e-89d1-48ee-895d-1bb3c1992a38: the JSON object must be str, bytes or bytearray, not NoneType
Error processing conversation composerData:dda7ad35-0a5e-4c95-9416-1990ea191cd5: the JSON object must be str, bytes or bytearray, not NoneType
Error processing conversation composerData:e9592fbb-ccde-4a61-bd1b-60cd02733d47: the JSON object must be str, bytes or bytearray, not NoneType
Exported 0 conversations to chat_history_exports/cursor_chat_history_20250508_124742.md
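
The NoneType errors come from composerData rows whose value column is NULL: json.loads(None) raises a TypeError rather than a JSONDecodeError, so it falls through to the outer except and the conversation is dropped. A minimal sketch of a guard for the loop in extract_conversations_from_db above (the rewrite below takes the same approach, and also reads the bubbleId entries, where newer Cursor builds appear to keep the actual messages):

for key, value in rows:
    # Some rows store a NULL value; skip them instead of letting
    # json.loads(None) raise a TypeError and abort the conversation.
    if value is None:
        continue
    # ...then decode value as before (json.loads, with the base64 fallback)...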

@Xinihiko

Try this, @pathikspec, if you still need it:

#!/usr/bin/env python3
import sqlite3
import json
from pathlib import Path
from datetime import datetime
import os
import base64

def extract_conversations_from_db(db_path):
    """Extract all conversations from the database with detailed error handling."""
    try:
        conn = sqlite3.connect(db_path)
        cursor = conn.cursor()
        
        
        # Get all bubbleId entries which contain the actual chat messages
        cursor.execute("SELECT key, value FROM cursorDiskKV WHERE key LIKE 'bubbleId:%'")
        bubble_rows = cursor.fetchall()
        print(f"Found {len(bubble_rows)} bubbleId entries")
        
        # Group messages by composer ID
        conversations_by_composer = {}
        
        for key, value in bubble_rows:
            if value is None:
                continue
                
            try:
                # Parse the key to extract composer ID and bubble ID
                # Format: bubbleId:composerId:bubbleId
                parts = key.split(':')
                if len(parts) >= 3:
                    composer_id = parts[1]
                    bubble_id = parts[2]
                    
                    # Decode the message data
                    try:
                        data = json.loads(value)
                    except json.JSONDecodeError:
                        try:
                            decoded = base64.b64decode(value)
                            data = json.loads(decoded)
                        except:
                            continue
                    
                    # Only include bubbles that have text content
                    if isinstance(data, dict) and 'text' in data and data['text'].strip():
                        # Initialize composer conversation if not exists
                        if composer_id not in conversations_by_composer:
                            conversations_by_composer[composer_id] = []
                        
                        # Add message to conversation
                        conversations_by_composer[composer_id].append({
                            'id': bubble_id,
                            'data': data,
                            'key': key
                        })
                    
            except Exception as e:
                print(f"Error processing bubble {key}: {e}")
        
        # Convert to conversations list
        conversations = []
        for composer_id, messages in conversations_by_composer.items():
            if messages:  # Only include composers with messages
                conversations.append({
                    'id': composer_id,
                    'messages': messages
                })
        
        print(f"Found {len(conversations)} conversations with messages")
        
        conn.close()
        return conversations
    except Exception as e:
        print(f"Error accessing database {db_path}: {e}")
        return []

def format_message(msg_data):
    """Format a single message with all its components."""
    if not isinstance(msg_data, dict) or 'data' not in msg_data:
        return "Invalid message format"
    
    msg = msg_data['data']
    formatted = []
    
    # Add timestamp if available
    timestamp_str = ""
    if 'timingInfo' in msg and 'clientStartTime' in msg['timingInfo']:
        timestamp = datetime.fromtimestamp(
            msg['timingInfo']['clientStartTime'] / 1000
        ).strftime("%Y-%m-%d %H:%M:%S")
        timestamp_str = f" ({timestamp})"
    
    # Determine message role based on type
    msg_type = msg.get('type', 0)
    if msg_type == 1:  # User message
        role = "User"
    elif msg_type == 2:  # Assistant message
        role = "Assistant"
    else:
        role = "Message"
    
    formatted.append(f"**{role}**{timestamp_str}:")
    
    # Add the main text content
    text = msg.get('text', '').strip()
    if text:
        formatted.append(text)
    
    # Add code blocks if present
    if 'codeBlocks' in msg and msg['codeBlocks']:
        for block in msg['codeBlocks']:
            if isinstance(block, dict):
                lang = block.get('language', '')
                code = block.get('code', '')
                if code:
                    formatted.append(f"```{lang}\n{code}\n```")
    
    # Add tool results if present
    if 'toolResults' in msg and msg['toolResults']:
        formatted.append("\n**Tool Results:**")
        for result in msg['toolResults']:
            if isinstance(result, dict):
                tool_name = result.get('toolName', 'Unknown Tool')
                formatted.append(f"- {tool_name}")
                if 'result' in result:
                    formatted.append(f"  Result: {result['result']}")
    
    return "\n\n".join(formatted) if formatted else "Empty message"

def main():
    """Main function to export all chat history."""
    cursor_dir = Path(os.path.expanduser("~")) / "Library/Application Support/Cursor"
    global_db = cursor_dir / "User/globalStorage/state.vscdb"

    print("Extracting conversations from database...")

    print(f"Extracting conversations from {global_db}")
    conversations = extract_conversations_from_db(global_db)
    
    # Sort conversations by first message timestamp (if available)
    def get_conversation_timestamp(conv):
        if conv['messages']:
            # Try to find a timestamp in the first message
            first_msg = conv['messages'][0]['data']
            if isinstance(first_msg, dict) and 'timingInfo' in first_msg:
                timing = first_msg['timingInfo']
                if 'clientStartTime' in timing:
                    return timing['clientStartTime']
        return 0
    
    conversations.sort(key=get_conversation_timestamp)

    print("Generating markdown...")
    
    # Generate markdown
    output = ["# Cursor Chat History\n"]
    for conv in conversations:
        output.append(f"## Conversation {conv['id']}\n")
        output.append(f"*Messages: {len(conv['messages'])}*\n")
        
        # Sort messages within conversation by timestamp
        def get_message_timestamp(msg):
            if 'data' in msg and 'timingInfo' in msg['data']:
                timing = msg['data']['timingInfo']
                if 'clientStartTime' in timing:
                    return timing['clientStartTime']
            return 0
        
        sorted_messages = sorted(conv['messages'], key=get_message_timestamp)
        
        for msg in sorted_messages:
            output.append(format_message(msg))
            output.append("\n---\n")
        
        output.append("\n")
    
    # Write to file
    output_dir = Path("chat_history_exports")
    output_dir.mkdir(exist_ok=True)
    output_file = output_dir / f"cursor_chat_history_{datetime.now().strftime('%Y%m%d_%H%M%S')}.md"
    
    with open(output_file, 'w', encoding='utf-8') as f:
        f.write("\n".join(output))
    
    print(f"Exported {len(conversations)} conversations to {output_file}")

if __name__ == "__main__":
    main()
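
If it is unclear which of the two scripts matches your install, a quick look at what the database actually holds can settle it. A small diagnostic sketch, assuming the same macOS path used by both scripts:

#!/usr/bin/env python3
# Count the two kinds of chat records discussed in this thread: composerData
# rows with a non-NULL value suit the original script, while bubbleId rows
# suit the rewrite above. Path assumes a default macOS install of Cursor.
import sqlite3
from pathlib import Path

db = Path.home() / "Library/Application Support/Cursor/User/globalStorage/state.vscdb"
conn = sqlite3.connect(db)

composer_total, composer_with_value = conn.execute(
    "SELECT COUNT(*), COUNT(value) FROM cursorDiskKV WHERE key LIKE 'composerData:%'"
).fetchone()
bubble_total = conn.execute(
    "SELECT COUNT(*) FROM cursorDiskKV WHERE key LIKE 'bubbleId:%'"
).fetchone()[0]
conn.close()

print(f"composerData entries: {composer_total} ({composer_with_value} with a value)")
print(f"bubbleId entries: {bubble_total}")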
