Add HTTPS image proxy and auto-fix for mixed content
@@ -98,6 +98,40 @@ class StorageService:
        await asyncio.to_thread(_delete)
        logger.info(f"Deleted image: {file_path}")

    @staticmethod
    def get_proxy_url(image_url: str, app_base_url: str = "") -> str:
        """
        Convert a Supabase storage URL to a proxied HTTPS URL.

        This solves mixed content issues when Supabase storage uses HTTP
        but the app uses HTTPS.

        Args:
            image_url: The original Supabase storage URL
            app_base_url: Base URL of the app (e.g., https://linkedin.onyva.dev)

        Returns:
            Proxied URL through the app (e.g., /proxy/image/post-images/...)
        """
        if not image_url:
            return ""

        # Extract bucket and path from Supabase URL
        # Format: http://supabase.../storage/v1/object/public/{bucket}/{path}
        parts = image_url.split("/storage/v1/object/public/")
        if len(parts) != 2:
            return image_url  # Return original if format doesn't match

        bucket_and_path = parts[1]  # e.g., "post-images/user-id/file.jpg"
        parts2 = bucket_and_path.split("/", 1)
        if len(parts2) != 2:
            return image_url

        bucket, path = parts2

        # Return proxy URL
        return f"{app_base_url}/proxy/image/{bucket}/{path}"


# Global singleton
storage = StorageService()
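
A minimal usage sketch for get_proxy_url (not part of the diff above; the import path and example URL are assumptions, and the expected results follow the docstring):

    # Sketch only: the module path for the `storage` singleton is assumed.
    from app.services.storage import storage

    src = "http://supabase.example.com/storage/v1/object/public/post-images/user-1/photo.jpg"

    # Relative proxy URL, served by the app itself over HTTPS:
    storage.get_proxy_url(src)
    # -> "/proxy/image/post-images/user-1/photo.jpg"

    # Absolute URL when a base is supplied:
    storage.get_proxy_url(src, app_base_url="https://linkedin.onyva.dev")
    # -> "https://linkedin.onyva.dev/proxy/image/post-images/user-1/photo.jpg"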

@@ -252,6 +252,45 @@
        })();
    </script>

    <!-- HTTPS Image Proxy for Supabase Storage -->
    <script>
    (function() {
        function proxySupabaseImages() {
            // Find all images with Supabase storage URLs
            document.querySelectorAll('img[src*="/storage/v1/object/public/"]').forEach(img => {
                const originalSrc = img.src;

                // Only proxy HTTP URLs (HTTPS is fine)
                if (originalSrc.startsWith('http://')) {
                    // Extract bucket and path from URL
                    // Format: http://.../storage/v1/object/public/{bucket}/{path}
                    const match = originalSrc.match(/\/storage\/v1\/object\/public\/([^\/]+)\/(.+)/);

                    if (match) {
                        const bucket = match[1];
                        const path = match[2];
                        const proxyUrl = `/proxy/image/${bucket}/${path}`;

                        img.src = proxyUrl;
                        console.log('Proxied image:', originalSrc, '->', proxyUrl);
                    }
                }
            });
        }

        // Run on page load
        if (document.readyState === 'loading') {
            document.addEventListener('DOMContentLoaded', proxySupabaseImages);
        } else {
            proxySupabaseImages();
        }

        // Also run after dynamic content loads (e.g., AJAX)
        const observer = new MutationObserver(proxySupabaseImages);
        observer.observe(document.body, { childList: true, subtree: true });
    })();
    </script>

    {% block scripts %}{% endblock %}
</body>
</html>
@@ -109,6 +109,34 @@
        </div>
    </main>

    <!-- HTTPS Image Proxy for Supabase Storage -->
    <script>
    (function() {
        function proxySupabaseImages() {
            document.querySelectorAll('img[src*="/storage/v1/object/public/"]').forEach(img => {
                const originalSrc = img.src;
                if (originalSrc.startsWith('http://')) {
                    const match = originalSrc.match(/\/storage\/v1\/object\/public\/([^\/]+)\/(.+)/);
                    if (match) {
                        const bucket = match[1];
                        const path = match[2];
                        img.src = `/proxy/image/${bucket}/${path}`;
                    }
                }
            });
        }

        if (document.readyState === 'loading') {
            document.addEventListener('DOMContentLoaded', proxySupabaseImages);
        } else {
            proxySupabaseImages();
        }

        const observer = new MutationObserver(proxySupabaseImages);
        observer.observe(document.body, { childList: true, subtree: true });
    })();
    </script>

    {% block scripts %}{% endblock %}
</body>
</html>

@@ -3529,3 +3529,44 @@ async def delete_post_image(request: Request, post_id: str):
    except Exception as e:
        logger.exception(f"Failed to delete post image: {e}")
        raise HTTPException(status_code=500, detail=str(e))


# ==================== IMAGE PROXY FOR HTTPS ====================

@user_router.get("/proxy/image/{bucket}/{path:path}")
async def proxy_supabase_image(bucket: str, path: str):
    """
    Proxy Supabase storage images via HTTPS to avoid mixed content warnings.

    This allows HTTPS pages to load images from HTTP Supabase storage.
    """
    import httpx
    from fastapi.responses import Response

    try:
        # Build the Supabase storage URL (force plain HTTP for the storage host)
        storage_url = settings.supabase_url.replace("https://", "http://")
        image_url = f"{storage_url}/storage/v1/object/public/{bucket}/{path}"

        # Fetch the image from Supabase
        async with httpx.AsyncClient(timeout=30.0) as client:
            response = await client.get(image_url)

            if response.status_code != 200:
                raise HTTPException(status_code=404, detail="Image not found")

            # Return the image with proper headers
            return Response(
                content=response.content,
                media_type=response.headers.get("content-type", "image/jpeg"),
                headers={
                    "Cache-Control": "public, max-age=31536000",
                    "Access-Control-Allow-Origin": "*"
                }
            )

    except httpx.TimeoutException:
        raise HTTPException(status_code=504, detail="Image fetch timeout")
    except Exception as e:
        logger.error(f"Failed to proxy image {bucket}/{path}: {e}")
        raise HTTPException(status_code=500, detail="Failed to load image")
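
A quick sanity check for the new endpoint, sketched with FastAPI's TestClient (not part of the diff above; it assumes user_router is mounted without a prefix, that the application object is importable as app.main:app, and uses a hypothetical stored object key):

    # Sketch only: app import path and the object key are assumptions.
    from fastapi.testclient import TestClient
    from app.main import app

    client = TestClient(app)

    resp = client.get("/proxy/image/post-images/user-1/photo.jpg")
    assert resp.status_code in (200, 404)  # 404 if the object is missing in storage
    if resp.status_code == 200:
        assert resp.headers["cache-control"] == "public, max-age=31536000"
        assert resp.headers["content-type"].startswith("image/")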