✅ PHASE 1 ACHIEVEMENTS:
- Successfully migrated from Qdrant to self-hosted Supabase
- Fixed mem0 Supabase integration collection naming issues
- Resolved vector dimension mismatches (1536→768 for Ollama)
- All containers connected to localai docker network
- Comprehensive documentation updates completed

✅ TESTING COMPLETED:
- Database storage verification: Data properly stored in PostgreSQL
- Vector operations: 768-dimensional embeddings working correctly
- Memory operations: Add, search, retrieve, delete all functional
- Multi-user support: User isolation verified
- LLM integration: Ollama qwen2.5:7b + nomic-embed-text operational
- Search functionality: Semantic search with relevance scores working

✅ INFRASTRUCTURE READY:
- Supabase PostgreSQL with pgvector: ✅ OPERATIONAL
- Neo4j graph database: ✅ READY (for Phase 2)
- Ollama LLM + embeddings: ✅ WORKING
- mem0 v0.1.115: ✅ FULLY FUNCTIONAL

PHASE 2 READY: Core memory system and API development can begin

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
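For context, a minimal sketch of how the components listed in the commit message would typically be wired together in mem0: the self-hosted Supabase (pgvector) vector store, the Ollama qwen2.5:7b LLM, and nomic-embed-text with 768-dimensional embeddings. The connection string, Ollama base URL, and collection name are placeholders, and the exact config keys should be verified against the mem0 v0.1.115 provider docs.

from mem0 import Memory

# Hedged sketch: all values below are placeholders; verify the provider config
# keys against the mem0 documentation for the installed version.
config = {
    "vector_store": {
        "provider": "supabase",
        "config": {
            "connection_string": "postgresql://supabase_admin:<password>@localhost:5435/postgres",
            "collection_name": "memories",    # placeholder collection name
            "embedding_model_dims": 768,      # matches nomic-embed-text, not the 1536 default
        },
    },
    "llm": {
        "provider": "ollama",
        "config": {"model": "qwen2.5:7b", "ollama_base_url": "http://localhost:11434"},
    },
    "embedder": {
        "provider": "ollama",
        "config": {"model": "nomic-embed-text", "ollama_base_url": "http://localhost:11434"},
    },
}

memory = Memory.from_config(config)
memory.add("Prefers dark roast coffee", user_id="alice")                  # add a memory
results = memory.search("What coffee does alice like?", user_id="alice")  # semantic search with scores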
133 lines · 5.3 KiB · Python
#!/usr/bin/env python3
"""
Inspect current data in Supabase database
"""

import psycopg2
import json


def inspect_supabase_data():
    """Inspect all data currently stored in Supabase"""
    print("=" * 70)
    print("SUPABASE DATABASE INSPECTION")
    print("=" * 70)

    connection_string = "postgresql://supabase_admin:CzkaYmRvc26Y@localhost:5435/postgres"

    try:
        conn = psycopg2.connect(connection_string)
        cursor = conn.cursor()

        # Get all tables in vecs schema
        print("📊 All tables in vecs schema:")
        cursor.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'vecs';")
        tables = cursor.fetchall()
        table_names = [t[0] for t in tables]
        print(f"  Tables: {table_names}")

        for table_name in table_names:
            print(f"\n🔍 Inspecting table: {table_name}")
            try:
                # Get table structure
                cursor.execute("""
                    SELECT column_name, data_type, is_nullable
                    FROM information_schema.columns
                    WHERE table_schema = 'vecs' AND table_name = %s
                    ORDER BY ordinal_position;
                """, (table_name,))
                columns = cursor.fetchall()

                print("  Table structure:")
                for col in columns:
                    print(f"    - {col[0]}: {col[1]} (nullable: {col[2]})")

                # Get record count
                cursor.execute(f'SELECT COUNT(*) FROM vecs."{table_name}";')
                count = cursor.fetchone()[0]
                print(f"  Record count: {count}")

                if count > 0:
                    # Get sample records
                    cursor.execute(f"""
                        SELECT id, metadata,
                               CASE WHEN vec IS NOT NULL THEN 'Vector present' ELSE 'No vector' END as vec_status
                        FROM vecs."{table_name}"
                        LIMIT 5;
                    """)
                    records = cursor.fetchall()

                    print("  Sample records:")
                    for i, record in enumerate(records):
                        record_id = record[0]
                        metadata = record[1] if record[1] else {}
                        vec_status = record[2]

                        print(f"    Record {i+1}:")
                        print(f"      ID: {record_id}")
                        print(f"      Vector: {vec_status}")
                        if isinstance(metadata, dict):
                            print(f"      Metadata keys: {list(metadata.keys())}")
                            if 'user_id' in metadata:
                                print(f"      User ID: {metadata['user_id']}")
                            if 'content' in metadata:
                                content = metadata['content'][:100] + "..." if len(str(metadata['content'])) > 100 else metadata['content']
                                print(f"      Content: {content}")
                            if 'created_at' in metadata:
                                print(f"      Created: {metadata['created_at']}")
                        else:
                            print(f"      Metadata: {metadata}")
                        print()

            except Exception as e:
                print(f"  ❌ Error inspecting {table_name}: {e}")

        # Summary statistics
        print("\n📊 SUMMARY:")
        total_records = 0
        for table_name in table_names:
            try:
                cursor.execute(f'SELECT COUNT(*) FROM vecs."{table_name}";')
                count = cursor.fetchone()[0]
                total_records += count
                print(f"  {table_name}: {count} records")
            except Exception:
                print(f"  {table_name}: Error getting count")

        print(f"  Total records across all tables: {total_records}")

        # Check for different users
        print("\n👥 USER ANALYSIS:")
        for table_name in table_names:
            try:
                cursor.execute(f"""
                    SELECT metadata->>'user_id' as user_id, COUNT(*) as count
                    FROM vecs."{table_name}"
                    WHERE metadata->>'user_id' IS NOT NULL
                    GROUP BY metadata->>'user_id'
                    ORDER BY count DESC;
                """)
                users = cursor.fetchall()

                if users:
                    print(f"  {table_name} users:")
                    for user, count in users:
                        print(f"    - {user}: {count} memories")
                else:
                    print(f"  {table_name}: No user data found")
            except Exception as e:
                print(f"  Error analyzing users in {table_name}: {e}")

        cursor.close()
        conn.close()

        print("\n" + "=" * 70)
        print("🎉 DATABASE INSPECTION COMPLETE")
        print("=" * 70)

    except Exception as e:
        print(f"❌ Inspection failed: {str(e)}")
        import traceback
        traceback.print_exc()


if __name__ == "__main__":
    inspect_supabase_data()
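Since the vectors the script reports as "Vector present" live in a pgvector column, the semantic search with relevance scores mentioned in the commit message can also be reproduced directly against the vecs schema. Below is a hedged sketch: the collection name "memories" is an assumption (substitute whatever table name this script prints), and query_embedding must be a 768-dimensional embedding produced by nomic-embed-text.

import psycopg2

def search_memories(connection_string, query_embedding, user_id, limit=5):
    """Rank records in a vecs collection by cosine similarity to a query embedding."""
    # pgvector accepts a bracketed string literal cast to ::vector
    vec_literal = "[" + ",".join(str(x) for x in query_embedding) + "]"
    conn = psycopg2.connect(connection_string)
    try:
        with conn.cursor() as cursor:
            cursor.execute(
                """
                SELECT id,
                       metadata->>'content' AS content,
                       1 - (vec <=> %s::vector) AS relevance  -- <=> is cosine distance
                FROM vecs."memories"                          -- assumed collection name
                WHERE metadata->>'user_id' = %s
                ORDER BY vec <=> %s::vector
                LIMIT %s;
                """,
                (vec_literal, user_id, vec_literal, limit),
            )
            return cursor.fetchall()
    finally:
        conn.close()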