Major updates: LinkedIn auto-posting, timezone fixes, and Docker improvements

Features:
- Add LinkedIn OAuth integration and auto-posting functionality
- Add scheduler service for automated post publishing
- Add metadata field to generated_posts for LinkedIn URLs
- Add privacy policy page for LinkedIn API compliance
- Add company management features and employee accounts
- Add license key system for company registrations

Fixes:
- Fix timezone issues (use UTC consistently across app)
- Fix datetime serialization errors in database operations
- Fix scheduling timezone conversion (local time to UTC)
- Fix import errors (get_database -> db)

Infrastructure:
- Update Docker setup to use port 8001 (avoid conflicts)
- Add SSL support with nginx-proxy and Let's Encrypt
- Add LinkedIn setup documentation
- Add migration scripts for schema updates

Services:
- Add linkedin_service.py for LinkedIn API integration
- Add scheduler_service.py for background job processing
- Add storage_service.py for Supabase Storage
- Improve email_service.py
- Add encryption utilities for token storage

Co-Authored-By: Claude Sonnet 4.5 <noreply@anthropic.com>
This commit is contained in:
2026-02-11 11:30:20 +01:00
parent b50594dbfa
commit f14515e9cf
94 changed files with 21601 additions and 5111 deletions

View File

@@ -0,0 +1,145 @@
"""Backfill api_usage_logs for all generated posts not yet tracked.
Assumes per post:
- 1x gpt-4o call: ~20,000 tokens (14,000 prompt + 6,000 completion)
- 1x gpt-4o-mini call: ~17,000 tokens (13,000 prompt + 4,000 completion)
Only processes posts older than 20 minutes whose created_at is not already
covered by an existing api_usage_log entry for the same customer.
"""
import asyncio
import sys
import os
from datetime import datetime, timedelta, timezone
from uuid import UUID
# Make the project root importable when this script is run directly
# (scripts/ lives one level below the repo root).
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from src.config import estimate_cost
from src.database.client import db
# ── Estimated token splits per post ──────────────────────────────
# One gpt-4o call plus one gpt-4o-mini call are assumed per generated post
# (see module docstring).  Totals are derived from the parts so they
# cannot drift out of sync.
GPT4O_PROMPT = 14_000   # assumed prompt tokens for the gpt-4o call
GPT4O_COMP = 6_000      # assumed completion tokens for the gpt-4o call
GPT4O_TOTAL = GPT4O_PROMPT + GPT4O_COMP  # 20 000
MINI_PROMPT = 13_000    # assumed prompt tokens for the gpt-4o-mini call
MINI_COMP = 4_000       # assumed completion tokens for the gpt-4o-mini call
MINI_TOTAL = MINI_PROMPT + MINI_COMP  # 17 000
async def main():
cutoff = datetime.now(timezone.utc) - timedelta(minutes=20)
print(f"Cutoff: posts created before {cutoff.isoformat()}")
# ── 1. Load all generated posts ──────────────────────────────
customers = await db.list_customers()
print(f"Found {len(customers)} customers")
all_posts = []
for cust in customers:
posts = await db.get_generated_posts(cust.id)
all_posts.extend(posts)
print(f"Found {len(all_posts)} total generated posts")
# Filter to posts older than 20 min
eligible = [
p for p in all_posts
if p.created_at and p.created_at.replace(tzinfo=timezone.utc) < cutoff
]
print(f"{len(eligible)} posts older than 20 min")
# ── 2. Load existing logs to avoid duplicates ────────────────
try:
existing_logs = await asyncio.to_thread(
lambda: db.client.table("api_usage_logs")
.select("customer_id, created_at")
.eq("operation", "post_creation_backfill")
.execute()
)
already_logged = set()
for log in existing_logs.data:
key = (log.get("customer_id"), log.get("created_at", "")[:19])
already_logged.add(key)
print(f"{len(already_logged)} existing backfill entries found")
except Exception as e:
print(f"Could not read existing logs (table may be new): {e}")
already_logged = set()
# ── 3. Build customer → user_id / company_id map ─────────────
cust_map = {}
for cust in customers:
cust_map[str(cust.id)] = {
"user_id": str(cust.user_id) if cust.user_id else None,
"company_id": str(cust.company_id) if cust.company_id else None,
}
# ── 4. Insert two log rows per post ──────────────────────────
inserted = 0
skipped = 0
for post in eligible:
cid = str(post.customer_id)
ts = post.created_at.isoformat()[:19] if post.created_at else ""
key = (cid, ts)
if key in already_logged:
skipped += 1
continue
ids = cust_map.get(cid, {})
user_id = ids.get("user_id")
company_id = ids.get("company_id")
base = {
"customer_id": cid,
"operation": "post_creation_backfill",
"created_at": post.created_at.isoformat() if post.created_at else None,
}
if user_id:
base["user_id"] = user_id
if company_id:
base["company_id"] = company_id
# Row 1: gpt-4o
gpt4o_cost = estimate_cost("gpt-4o", GPT4O_PROMPT, GPT4O_COMP)
row_4o = {
**base,
"provider": "openai",
"model": "gpt-4o",
"prompt_tokens": GPT4O_PROMPT,
"completion_tokens": GPT4O_COMP,
"total_tokens": GPT4O_TOTAL,
"estimated_cost_usd": round(gpt4o_cost, 6),
}
# Row 2: gpt-4o-mini
mini_cost = estimate_cost("gpt-4o-mini", MINI_PROMPT, MINI_COMP)
row_mini = {
**base,
"provider": "openai",
"model": "gpt-4o-mini",
"prompt_tokens": MINI_PROMPT,
"completion_tokens": MINI_COMP,
"total_tokens": MINI_TOTAL,
"estimated_cost_usd": round(mini_cost, 6),
}
try:
await asyncio.to_thread(
lambda r1=row_4o, r2=row_mini: db.client.table("api_usage_logs")
.insert([r1, r2]).execute()
)
inserted += 2
name = post.topic_title[:40] if post.topic_title else "?"
print(f" + {name} (gpt-4o ${gpt4o_cost:.4f} + mini ${mini_cost:.4f})")
except Exception as e:
print(f" ! Error for post {post.id}: {e}")
print(f"\nDone: {inserted} log rows inserted, {skipped} posts skipped (already backfilled)")
print(f"Estimated totals per post: gpt-4o ${estimate_cost('gpt-4o', GPT4O_PROMPT, GPT4O_COMP):.4f} + mini ${estimate_cost('gpt-4o-mini', MINI_PROMPT, MINI_COMP):.4f}")
if __name__ == "__main__":
asyncio.run(main())

View File

@@ -0,0 +1,21 @@
#!/usr/bin/env python3
"""
Generate a Fernet encryption key for token encryption.
Usage:
python scripts/generate_encryption_key.py
Add the output to your .env file as ENCRYPTION_KEY.
"""
from cryptography.fernet import Fernet
def main():
    """Generate a fresh Fernet key and print it with .env instructions."""
    new_key = Fernet.generate_key().decode()
    divider = "=" * 70
    print(divider)
    print("Generated Encryption Key:")
    print(divider)
    print(new_key)
    print(divider)
    print("\nAdd this to your .env file:")
    print(f"ENCRYPTION_KEY={new_key}")
    print("\n⚠️ Keep this key secure and never commit it to git!")


if __name__ == "__main__":
    main()

171
scripts/setup_linkedin_auth.py Executable file
View File

@@ -0,0 +1,171 @@
#!/usr/bin/env python3
"""
Script to configure LinkedIn (OIDC) authentication for self-hosted Supabase.
This script updates your .env file or docker-compose.yml with the necessary GoTrue environment variables.
"""
import os
import sys
from pathlib import Path
# ==================== CONFIGURATION ====================
# TODO: Fill in your LinkedIn credentials below.
# NOTE: validate_config() rejects the placeholder values, so the script
# will not run until these are replaced with real settings.
LINKEDIN_CLIENT_ID = "your-linkedin-client-id"
LINKEDIN_CLIENT_SECRET = "your-linkedin-client-secret"
# Your Supabase instance URL (where your self-hosted instance is accessible)
SUPABASE_URL = "https://your-supabase-domain.com" # e.g., https://supabase.example.com
# Path to your Supabase docker-compose directory (where .env or docker-compose.yml is located)
DOCKER_COMPOSE_DIR = "/path/to/your/supabase" # e.g., /home/user/supabase
# =======================================================
def validate_config():
    """Validate that all required configuration is set.

    Exits the process with status 1 after listing every variable that
    still holds its placeholder value.
    """
    # (variable name, current value, placeholder prefix that marks "unset")
    placeholder_checks = (
        ("LINKEDIN_CLIENT_ID", LINKEDIN_CLIENT_ID, "your-"),
        ("LINKEDIN_CLIENT_SECRET", LINKEDIN_CLIENT_SECRET, "your-"),
        ("SUPABASE_URL", SUPABASE_URL, "https://your-"),
        ("DOCKER_COMPOSE_DIR", DOCKER_COMPOSE_DIR, "/path/to/"),
    )
    errors = [
        name
        for name, value, prefix in placeholder_checks
        if value.startswith(prefix)
    ]
    if not errors:
        return
    print("❌ Error: Please update the following configuration variables in this script:")
    for var in errors:
        print(f" - {var}")
    sys.exit(1)
def find_env_file():
    """Find the .env file in the docker-compose directory.

    Returns the first existing candidate path, or None when no .env file
    could be located.  Exits with status 1 if the directory itself is
    missing.
    """
    compose_dir = Path(DOCKER_COMPOSE_DIR)
    if not compose_dir.exists():
        print(f"❌ Error: Directory not found: {DOCKER_COMPOSE_DIR}")
        sys.exit(1)
    # Probe the common locations in order of preference.
    for candidate in (
        compose_dir / ".env",
        compose_dir / "docker" / ".env",
        compose_dir / ".env.local",
    ):
        if candidate.exists():
            return candidate
    return None
def update_env_file(env_file):
    """Append the GoTrue LinkedIn OAuth variables to *env_file*.

    A backup copy of the original file is written next to it before any
    modification.  Returns True when the configuration was appended,
    False when *env_file* is falsy or already contains a LinkedIn
    section (in which case nothing is changed).
    """
    # LinkedIn OAuth configuration for GoTrue
    linkedin_config = f"""
# LinkedIn OAuth Configuration (added by setup script)
GOTRUE_EXTERNAL_LINKEDIN_ENABLED=true
GOTRUE_EXTERNAL_LINKEDIN_CLIENT_ID={LINKEDIN_CLIENT_ID}
GOTRUE_EXTERNAL_LINKEDIN_SECRET={LINKEDIN_CLIENT_SECRET}
GOTRUE_EXTERNAL_LINKEDIN_REDIRECT_URI={SUPABASE_URL}/auth/v1/callback
"""
    if not env_file:
        return False
    # Read existing content
    with open(env_file, 'r') as f:
        content = f.read()
    # Check if LinkedIn config already exists
    if "GOTRUE_EXTERNAL_LINKEDIN_ENABLED" in content:
        print(f"⚠️ LinkedIn OAuth configuration already exists in {env_file}")
        print(" Please update it manually or remove the existing lines first.")
        return False
    # Backup original file.
    # BUGFIX: with_suffix('.env.backup') mangles the name — '.env' has no
    # suffix, so it became '.env.env.backup', and '.env.local' collapsed to
    # the same backup name.  Appending '.backup' to the full filename keeps
    # every candidate distinct ('.env.backup', '.env.local.backup', ...).
    backup_file = env_file.with_name(env_file.name + '.backup')
    with open(backup_file, 'w') as f:
        f.write(content)
    print(f"📋 Backup created: {backup_file}")
    # Append LinkedIn config
    with open(env_file, 'a') as f:
        f.write(linkedin_config)
    print(f"✅ LinkedIn OAuth configuration added to {env_file}")
    return True
def create_env_snippet():
    """Write a snippet file with the GoTrue variables when no .env exists.

    The file is placed in DOCKER_COMPOSE_DIR so the operator can copy the
    lines into their own configuration.  Always returns True.
    """
    snippet_file = Path(DOCKER_COMPOSE_DIR) / "linkedin_oauth_snippet.env"
    snippet_text = (
        "# LinkedIn OAuth Configuration for GoTrue\n"
        "# Add these lines to your .env file or docker-compose.yml environment section\n"
        "GOTRUE_EXTERNAL_LINKEDIN_ENABLED=true\n"
        f"GOTRUE_EXTERNAL_LINKEDIN_CLIENT_ID={LINKEDIN_CLIENT_ID}\n"
        f"GOTRUE_EXTERNAL_LINKEDIN_SECRET={LINKEDIN_CLIENT_SECRET}\n"
        f"GOTRUE_EXTERNAL_LINKEDIN_REDIRECT_URI={SUPABASE_URL}/auth/v1/callback\n"
    )
    snippet_file.write_text(snippet_text)
    print(f"📄 Created configuration snippet: {snippet_file}")
    print(" Copy these variables to your .env file or docker-compose.yml")
    return True
def print_next_steps():
    """Print instructions for completing the setup."""
    banner = "=" * 60
    # Collect everything first, then emit in one pass.
    lines = [
        "\n" + banner,
        "✅ Configuration complete!",
        banner,
        "\nNext steps:\n",
        "1. Restart your Supabase services:",
        f" cd {DOCKER_COMPOSE_DIR}",
        " docker-compose down",
        " docker-compose up -d",
        "",
        "2. Add redirect URL in LinkedIn Developer Portal:",
        f" {SUPABASE_URL}/auth/v1/callback",
        "",
        "3. Test the authentication in your application",
        "",
        "4. Check GoTrue logs for any errors:",
        " docker-compose logs -f auth",
        "",
    ]
    for line in lines:
        print(line)
def main():
    """Drive the setup: validate config, then patch or snippet the .env."""
    print("🔧 Configuring LinkedIn OAuth for self-hosted Supabase")
    print("=" * 60)
    validate_config()
    env_file = find_env_file()
    # Guard clause: no .env found → emit a snippet file instead.
    if env_file is None:
        print("⚠️ No .env file found in the docker-compose directory")
        create_env_snippet()
        print_next_steps()
        return
    print(f"📁 Found .env file: {env_file}")
    if update_env_file(env_file):
        print_next_steps()
    else:
        print("\n⚠️ Please update your configuration manually.")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,131 @@
#!/usr/bin/env python3
"""
Test script to verify LinkedIn auto-posting setup.
Usage:
python scripts/test_linkedin_setup.py
"""
import asyncio
import sys
from pathlib import Path
# Make the project root importable when this script is run directly
# (scripts/ is one level below the repo root).
sys.path.insert(0, str(Path(__file__).parent.parent))
from src.config import settings
from src.database.client import db
async def main():
    """Run setup checks.

    Verifies, in order: the four required environment variables, a token
    encryption round-trip, the existence of the linkedin_accounts table,
    and that the LinkedIn service module imports cleanly.  Returns the
    process exit code: 0 when every check passed, 1 otherwise.
    """
    print("=" * 70)
    print("LinkedIn Auto-Posting Setup Verification")
    print("=" * 70)
    print()
    checks_passed = 0
    checks_total = 0
    # Check 1: Environment variables
    print("📋 Checking environment variables...")
    checks_total += 4
    if settings.linkedin_client_id:
        print(" ✅ LINKEDIN_CLIENT_ID is set")
        checks_passed += 1
    else:
        print(" ❌ LINKEDIN_CLIENT_ID is missing")
    if settings.linkedin_client_secret:
        print(" ✅ LINKEDIN_CLIENT_SECRET is set")
        checks_passed += 1
    else:
        print(" ❌ LINKEDIN_CLIENT_SECRET is missing")
    if settings.linkedin_redirect_uri:
        print(" ✅ LINKEDIN_REDIRECT_URI is set")
        checks_passed += 1
        print(f" URI: {settings.linkedin_redirect_uri}")
    else:
        print(" ❌ LINKEDIN_REDIRECT_URI is missing")
    if settings.encryption_key:
        print(" ✅ ENCRYPTION_KEY is set")
        checks_passed += 1
    else:
        print(" ❌ ENCRYPTION_KEY is missing")
    print()
    # Check 2: Encryption round-trip with a throwaway token.
    print("🔐 Testing encryption...")
    checks_total += 1
    try:
        from src.utils.encryption import encrypt_token, decrypt_token
        test_token = "test_access_token_12345"
        encrypted = encrypt_token(test_token)
        decrypted = decrypt_token(encrypted)
        if decrypted == test_token:
            print(" ✅ Encryption/decryption working")
            checks_passed += 1
        else:
            print(" ❌ Encryption/decryption mismatch")
    except Exception as e:
        print(f" ❌ Encryption error: {e}")
    print()
    # Check 3: Database table
    print("💾 Checking database schema...")
    checks_total += 1
    try:
        # A limit(0) query fails fast if the table does not exist; the
        # response itself is irrelevant, so it is not bound to a name.
        await asyncio.to_thread(
            lambda: db.client.table("linkedin_accounts").select("id").limit(0).execute()
        )
        print(" ✅ linkedin_accounts table exists")
        checks_passed += 1
    except Exception as e:
        print(f" ❌ linkedin_accounts table not found: {e}")
        print(" Run: psql $DATABASE_URL -f config/migrate_add_linkedin_accounts.sql")
    print()
    # Check 4: LinkedIn service
    print("🔧 Checking LinkedIn service...")
    checks_total += 1
    try:
        # The import itself is the check; the imported name is otherwise unused.
        from src.services.linkedin_service import linkedin_service  # noqa: F401
        print(" ✅ LinkedIn service initialized")
        checks_passed += 1
    except Exception as e:
        print(f" ❌ LinkedIn service error: {e}")
    print()
    # Summary
    print("=" * 70)
    print(f"Summary: {checks_passed}/{checks_total} checks passed")
    print("=" * 70)
    if checks_passed == checks_total:
        print("✅ All checks passed! LinkedIn auto-posting is ready.")
        print("\nNext steps:")
        print("1. Restart your application")
        print("2. Log in as an employee")
        print("3. Go to Settings and connect your LinkedIn account")
        print("4. Schedule a test post")
        return 0
    else:
        print("❌ Some checks failed. Please fix the issues above.")
        print("\nSetup guide: See LINKEDIN_SETUP.md for detailed instructions")
        return 1


if __name__ == "__main__":
    exit_code = asyncio.run(main())
    sys.exit(exit_code)