Hi! @Ben_Hutchison
Yes, I’ve cleaned up the script into a universal version that should work for anyone. Here it is:
How to use:
- Completely quit Antigravity (Cmd+Q on macOS, not just close the window)
- Save the script below as
fix_antigravity_history.py
- Run in your system terminal:
python3 fix_antigravity_history.py
- Reopen Antigravity — your recovered conversations should appear in Chat History
The script will:
Automatically detect your OS and find the correct database path (macOS/Linux/Windows)
Scan all .pb files in ~/.gemini/antigravity/conversations/
Compare against the trajectorySummaries index in the SQLite database
Show you exactly which conversations are missing
Ask for confirmation before making any changes
Back up your database before modifying anything
Validate the protobuf format after the repair
Important: Make sure Antigravity is completely closed before running the script. If it’s still running, the app will overwrite your changes when it exits.
If anything goes wrong, the script prints a restore command you can use to revert to the backup.
I just tested it on my machine — it found and recovered 4 new missing conversations that had disappeared since my last fix. So this bug is definitely still happening with every workspace-less conversation.
#!/usr/bin/env python3
"""
Antigravity Chat History Index Repair Tool (Universal Version)
Bug: Conversations created without a workspace are not written to the
trajectorySummaries index, making them invisible in the Chat History UI.
The conversation .pb files are fully saved on disk — only the index is missing.
This script automatically detects and repairs the missing index entries.
Related forum post:
https://discuss.ai.google.dev/t/bug-conversations-created-without-a-workspace-silently-disappear-from-chat-history-data-saved-but-index-not-updated/135008
Usage:
1. Completely quit Antigravity (Cmd+Q on macOS)
2. Run in system terminal: python3 fix_antigravity_history.py
3. Reopen Antigravity
"""
import base64
import sqlite3
import os
import re
import shutil
import subprocess
import sys
import time
import datetime
import platform
# ===== Low-level Protobuf Encoding Utilities =====
def encode_varint(value):
    """Encode a non-negative integer as a protobuf varint.

    Varints are base-128 little-endian: each output byte carries 7 payload
    bits, and the high bit is set on every byte except the last.

    Args:
        value: non-negative integer to encode.

    Returns:
        The varint encoding as bytes (b'\\x00' for value 0).

    Raises:
        ValueError: if value is negative — the original loop would exit
            immediately for negatives and emit a single garbage byte.
    """
    if value < 0:
        raise ValueError(f"varint cannot encode negative value: {value}")
    result = []
    while value > 0x7f:
        result.append((value & 0x7f) | 0x80)  # continuation bit set
        value >>= 7
    result.append(value & 0x7f)  # final byte: continuation bit clear
    return bytes(result)
def encode_field_varint(field_number, value):
    """Serialize one varint-typed protobuf field (wire type 0)."""
    # The field key packs the field number above the 3-bit wire type;
    # wire type 0 (varint) contributes nothing to the low bits.
    key = field_number << 3
    return encode_varint(key) + encode_varint(value)
def encode_field_bytes(field_number, data):
    """Serialize one length-delimited protobuf field (wire type 2).

    Accepts bytes, str, or an already-encoded nested message; strings
    are UTF-8 encoded before framing.
    """
    payload = data.encode('utf-8') if isinstance(data, str) else data
    key = (field_number << 3) | 2  # wire type 2 = length-delimited
    return encode_varint(key) + encode_varint(len(payload)) + payload
def encode_timestamp_message(field_number, seconds, nanos=0):
    """Serialize a nested Timestamp-style message as one outer field.

    Inner layout mirrors google.protobuf.Timestamp: field 1 = seconds,
    field 2 = nanos (omitted when zero); the whole inner message is then
    wrapped as a length-delimited field under `field_number`.
    """
    body = encode_field_varint(1, seconds)
    if nanos:
        body += encode_field_varint(2, nanos)
    return encode_field_bytes(field_number, body)
def build_trajectory_inner(title, step_count, created_seconds, created_nanos,
                           session_id, status, last_modified_seconds,
                           last_modified_nanos):
    """
    Serialize the inner protobuf payload for one trajectory entry.

    Field layout (reverse-engineered from entries Antigravity itself wrote):
        1  string    title
        2  varint    step_count
        3  Timestamp created_at
        4  string    session_id
        5  varint    status (1 = completed)
        7  Timestamp last_modified
        10 Timestamp reference time (mirrors created_at)
        15 string    (empty)
        16 varint    auxiliary step count
    """
    parts = [
        encode_field_bytes(1, title),
        encode_field_varint(2, step_count),
        encode_timestamp_message(3, created_seconds, created_nanos),
        encode_field_bytes(4, session_id),
        encode_field_varint(5, status),
        encode_timestamp_message(7, last_modified_seconds, last_modified_nanos),
        # NOTE: field 9 (workspace info) is intentionally skipped,
        # because these conversations have no workspace association.
        encode_timestamp_message(10, created_seconds, created_nanos),
        encode_field_bytes(15, ""),
        encode_field_varint(16, max(1, step_count - 8)),
    ]
    return b''.join(parts)
def build_trajectory_entry(conversation_id, inner_data):
    """
    Wrap the inner payload in the outer trajectory entry record.

    Layout: field 1 = conversation_id,
            field 2 = { field 1 = base64(inner_data) }.
    """
    encoded = base64.b64encode(inner_data).decode('ascii')
    nested = encode_field_bytes(1, encoded)
    return encode_field_bytes(1, conversation_id) + encode_field_bytes(2, nested)
def wrap_as_outer_entry(entry_data):
    """Embed an entry as one element of the top-level repeated field 1."""
    return encode_field_bytes(1, entry_data)
# ===== Path Detection =====
def get_db_path():
    """Return the OS-specific path of Antigravity's state.vscdb database."""
    mac_db = '~/Library/Application Support/Antigravity/User/globalStorage/state.vscdb'
    system = platform.system()
    if system == 'Darwin':  # macOS
        return os.path.expanduser(mac_db)
    if system == 'Linux':
        return os.path.expanduser(
            '~/.config/Antigravity/User/globalStorage/state.vscdb'
        )
    if system == 'Windows':
        appdata = os.environ.get('APPDATA', '')
        return os.path.join(appdata, 'Antigravity', 'User', 'globalStorage', 'state.vscdb')
    # Unrecognized platform: warn and fall back to the macOS location.
    print(f"Warning: Unknown OS: {system}. Trying macOS path...")
    return os.path.expanduser(mac_db)
def get_conversations_dir():
    """Return the directory holding the on-disk conversation .pb files."""
    conversations_home = '~/.gemini/antigravity/conversations'
    return os.path.expanduser(conversations_home)
# ===== Auto-Detection of Missing Conversations =====
def get_file_conversations(conversations_dir):
    """
    Scan conversations_dir and return conversation_id -> file metadata
    for every file named <lowercase-uuid>.pb; anything else is ignored.
    Returns an empty dict when the directory does not exist.
    """
    uuid_re = re.compile(
        r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$'
    )
    found = {}
    if not os.path.isdir(conversations_dir):
        return found
    for name in os.listdir(conversations_dir):
        if not name.endswith('.pb'):
            continue
        conv_id = name[:-3]  # strip the ".pb" extension
        if not uuid_re.match(conv_id):  # skip non-UUID basenames
            continue
        path = os.path.join(conversations_dir, name)
        info = os.stat(path)
        found[conv_id] = {
            'id': conv_id,
            'filepath': path,
            'size': info.st_size,
            'mtime': info.st_mtime,
            # st_birthtime (true creation time) exists only on macOS/BSD;
            # elsewhere fall back to st_ctime.
            'ctime': getattr(info, 'st_birthtime', info.st_ctime),
        }
    return found
def get_indexed_uuids(existing_data):
    """Return the set of lowercase conversation UUIDs found anywhere in the
    raw index bytes (duplicates collapse naturally into the set)."""
    pattern = b'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'
    return {match.decode() for match in re.findall(pattern, existing_data)}
def estimate_step_count(file_size):
    """
    Guess a conversation step count from the .pb file size.

    Rough heuristic: larger files generally mean more conversation turns.
    The tiers below map an upper size bound (exclusive) to a step count;
    anything at or above the last bound gets 30.
    """
    tiers = (
        (10000, 3),
        (50000, 5),
        (200000, 10),
        (500000, 15),
        (1000000, 20),
    )
    for upper_bound, steps in tiers:
        if file_size < upper_bound:
            return steps
    return 30
# ===== Process Check =====
def check_antigravity_running():
    """Best-effort check for a live Antigravity process.

    Uses pgrep on macOS/Linux and tasklist on Windows. Returns False on
    any failure (tool missing, unknown OS) rather than blocking the repair.
    """
    system = platform.system()
    try:
        if system in ('Darwin', 'Linux'):
            proc = subprocess.run(
                ['pgrep', '-f', 'Antigravity'],
                capture_output=True, text=True
            )
            # pgrep exits 0 only when at least one process matched.
            return proc.returncode == 0
        if system == 'Windows':
            proc = subprocess.run(
                ['tasklist', '/FI', 'IMAGENAME eq Antigravity.exe'],
                capture_output=True, text=True
            )
            return 'Antigravity.exe' in proc.stdout
    except Exception:
        # Process tooling unavailable -- assume not running.
        pass
    return False
# ===== Main Logic =====
def main():
    """Drive the full repair workflow.

    Steps: locate paths, diff on-disk .pb files against the SQLite index,
    ask for confirmation, back up the database, build protobuf entries for
    the missing conversations, validate, and write the merged index back.

    Returns:
        True on success (or when nothing needs repair); False on any
        error or user cancellation.
    """
    print("=" * 60)
    print(" Antigravity Chat History Index Repair Tool")
    print(" (Universal Version)")
    print("=" * 60)
    print()
    # 1. Locate paths
    db_path = get_db_path()
    conversations_dir = get_conversations_dir()
    print(f"Database: {db_path}")
    print(f"Conversations: {conversations_dir}")
    print()
    if not os.path.exists(db_path):
        print("ERROR: Antigravity database file not found.")
        print(f" Expected at: {db_path}")
        print(" Make sure Antigravity is installed and has been used at least once.")
        return False
    if not os.path.isdir(conversations_dir):
        print("ERROR: Conversations directory not found.")
        print(f" Expected at: {conversations_dir}")
        return False
    # 2. Check if Antigravity is running
    # (a live app would overwrite our database edits when it exits)
    if check_antigravity_running():
        print("WARNING: Antigravity appears to be running!")
        print(" Please completely quit the Antigravity app first (Cmd+Q on macOS).")
        print(" If you modify the database while Antigravity is running,")
        print(" the changes may be overwritten when the app exits.")
        response = input("\n Continue anyway? (y/N): ").strip().lower()
        if response != 'y':
            print(" Cancelled.")
            return False
        print()
    # 3. Scan conversation files
    file_conversations = get_file_conversations(conversations_dir)
    print(f"Conversation files on disk: {len(file_conversations)}")
    # 4. Read existing trajectory summaries
    conn = sqlite3.connect(db_path)
    cursor = conn.cursor()
    cursor.execute(
        "SELECT value FROM ItemTable WHERE key = 'antigravityUnifiedStateSync.trajectorySummaries'"
    )
    row = cursor.fetchone()
    if not row:
        print("ERROR: trajectorySummaries data not found in database.")
        print(" This might mean you have a different version of Antigravity.")
        conn.close()
        return False
    # The index value is stored as base64-encoded protobuf bytes.
    existing_b64 = row[0]
    existing_data = base64.b64decode(existing_b64)
    existing_uuids = get_indexed_uuids(existing_data)
    print(f"Conversations in index: {len(existing_uuids)}")
    # 5. Find missing conversations (on disk but absent from the index)
    missing = {}
    for conv_id, meta in file_conversations.items():
        if conv_id not in existing_uuids:
            missing[conv_id] = meta
    if not missing:
        print("\nNo missing conversations found! All conversations are properly indexed.")
        conn.close()
        return True
    print(f"\nFound {len(missing)} conversation(s) missing from the index:\n")
    # List oldest-first by modification time so the user can sanity-check.
    for conv_id, meta in sorted(missing.items(), key=lambda x: x[1]['mtime']):
        dt = datetime.datetime.fromtimestamp(meta['mtime'])
        size_kb = meta['size'] / 1024
        print(f" - {conv_id} | {dt.strftime('%Y-%m-%d %H:%M')} | {size_kb:.1f} KB")
    # 6. Ask for confirmation before touching the database
    print(f"\nThis will add {len(missing)} conversation(s) to the history index.")
    response = input(" Proceed? (y/N): ").strip().lower()
    if response != 'y':
        print(" Cancelled.")
        conn.close()
        return False
    # 7. Backup database (copy2 preserves file metadata)
    timestamp = int(time.time())
    backup_path = db_path + f'.backup_{timestamp}'
    shutil.copy2(db_path, backup_path)
    print(f"\nDatabase backed up to: {backup_path}")
    # 8. Build new entries
    new_entries = b''
    added_count = 0
    for conv_id, meta in missing.items():
        # Filesystem timestamps stand in for the real conversation times.
        created_seconds = int(meta.get('ctime', meta['mtime']))
        modified_seconds = int(meta['mtime'])
        step_count = estimate_step_count(meta['size'])
        # Placeholder title; the app updates it once the chat is opened.
        title = f"Recovered Conversation {conv_id[:8]}"
        inner = build_trajectory_inner(
            title=title,
            step_count=step_count,
            created_seconds=created_seconds,
            created_nanos=0,
            session_id=conv_id,
            status=1,
            last_modified_seconds=modified_seconds,
            last_modified_nanos=0,
        )
        entry = build_trajectory_entry(conv_id, inner)
        wrapped = wrap_as_outer_entry(entry)
        new_entries += wrapped
        added_count += 1
        dt = datetime.datetime.fromtimestamp(modified_seconds)
        print(f" + Added: {conv_id[:8]}... | {dt.strftime('%Y-%m-%d %H:%M')}")
    # 9. Merge data (appending repeated-field entries is valid protobuf)
    updated_data = existing_data + new_entries
    print(f"\nIndex size: {len(existing_data)} -> {len(updated_data)} bytes (+{len(new_entries)})")
    # 10. Validate protobuf format (optional, requires protoc)
    try:
        result = subprocess.run(
            ['protoc', '--decode_raw'],
            input=updated_data,
            capture_output=True
        )
        if result.returncode != 0:
            print(f"ERROR: Protobuf format validation failed: {result.stderr.decode()}")
            print(" Aborting. Your database has NOT been modified.")
            conn.close()
            return False
        print("Protobuf format validation passed")
    except FileNotFoundError:
        # protoc not installed; the UUID verification below still runs.
        print("Note: protoc not found, skipping format validation (this is usually fine)")
    # 11. Verify new UUIDs are in the updated data
    updated_uuids = get_indexed_uuids(updated_data)
    all_verified = True
    for conv_id in missing:
        if conv_id not in updated_uuids:
            print(f"ERROR: Verification failed: {conv_id} not found in updated data")
            all_verified = False
    if not all_verified:
        print("ERROR: Data verification failed. Aborting. Your database has NOT been modified.")
        conn.close()
        return False
    # 12. Write to database (re-encode as base64, same key as read above)
    updated_b64 = base64.b64encode(updated_data).decode('ascii')
    cursor.execute(
        "UPDATE ItemTable SET value = ? WHERE key = 'antigravityUnifiedStateSync.trajectorySummaries'",
        (updated_b64,)
    )
    conn.commit()
    conn.close()
    print(f"\nSuccess! Added {added_count} conversation(s) to the history index.")
    print("\nNext steps:")
    print(" 1. Open the Antigravity app")
    print(" 2. Check Chat History - your recovered conversations should now be visible")
    print(" 3. The titles will show as 'Recovered Conversation XXXXXXXX'")
    print(" (they will update to the real title once you open them)")
    print(f"\n If anything goes wrong, restore the backup:")
    print(f" cp '{backup_path}' '{db_path}'")
    return True
if __name__ == '__main__':
    # Run the repair and print a closing status banner either way.
    ok = main()
    print()
    if not ok:
        print("Repair not completed. Check the error messages above.")
    print("=" * 60)
Let me know if it works for you! And if you’re on Linux or Windows, the script should auto-detect the correct paths — but let me know if you run into any issues.